diff --git a/build.gradle b/build.gradle index 0d77f8fd59ef0..a75d093664fe6 100644 --- a/build.gradle +++ b/build.gradle @@ -435,6 +435,9 @@ allprojects { if (isEclipse) { // set this so generated dirs will be relative to eclipse build project.buildDir = eclipseBuild + // Work around https://docs.gradle.org/current/userguide/java_gradle_plugin.html confusing Eclipse by the metadata + // it adds to the classpath + project.file("$buildDir/pluginUnderTestMetadata").mkdirs() } eclipse.classpath.file.whenMerged { classpath -> // give each source folder a unique corresponding output folder diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle index 4e31de08829cc..3d100daf7d65f 100644 --- a/buildSrc/build.gradle +++ b/buildSrc/build.gradle @@ -128,6 +128,10 @@ if (project == rootProject) { } mavenCentral() } + test { + include "**/*Tests.class" + exclude "**/*IT.class" + } } /***************************************************************************** @@ -152,6 +156,18 @@ if (project != rootProject) { jarHell.enabled = false thirdPartyAudit.enabled = false + // tests can't be run with randomized test runner + // it's fine as we run them as part of :buildSrc + test.enabled = false + task integTest(type: Test) { + exclude "**/*Tests.class" + include "**/*IT.class" + testClassesDirs = sourceSets.test.output.classesDirs + classpath = sourceSets.test.runtimeClasspath + inputs.dir(file("src/testKit")) + } + check.dependsOn(integTest) + // TODO: re-enable once randomizedtesting gradle code is published and removed from here licenseHeaders.enabled = false diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy index 04fcbe0776b1a..89e10c50ff782 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy @@ -691,6 +691,7 @@ class BuildPlugin implements Plugin { systemProperty 'tests.task', path systemProperty 'tests.security.manager', 'true' systemProperty 'jna.nosys', 'true' + systemProperty 'es.scripting.exception_for_missing_value', 'true' // TODO: remove setting logging level via system property systemProperty 'tests.logger.level', 'WARN' for (Map.Entry property : System.properties.entrySet()) { diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy index 455d30f95db32..de3c0dfc3285f 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy @@ -526,7 +526,11 @@ class VagrantTestPlugin implements Plugin { project.gradle.removeListener(batsPackagingReproListener) } if (project.extensions.esvagrant.boxes.contains(box)) { - packagingTest.dependsOn(batsPackagingTest) + // these tests are temporarily disabled for suse boxes while we debug an issue + // https://github.com/elastic/elasticsearch/issues/30295 + if (box.equals("opensuse-42") == false && box.equals("sles-12") == false) { + packagingTest.dependsOn(batsPackagingTest) + } } } @@ -546,9 +550,15 @@ class VagrantTestPlugin implements Plugin { javaPackagingTest.command = 'ssh' javaPackagingTest.args = ['--command', 'sudo bash "$PACKAGING_TESTS/run-tests.sh"'] } else { + // powershell sessions run over winrm always run as administrator, whether --elevated is passed or not. 
however + // remote sessions have some restrictions on what they can do, such as impersonating another user (or the same user + // without administrator elevation), which we need to do for these tests. passing --elevated runs the session + // as a scheduled job locally on the vm as a true administrator to get around this limitation + // + // https://github.com/hashicorp/vagrant/blob/9c299a2a357fcf87f356bb9d56e18a037a53d138/plugins/communicators/winrm/communicator.rb#L195-L225 + // https://devops-collective-inc.gitbooks.io/secrets-of-powershell-remoting/content/manuscript/accessing-remote-computers.html javaPackagingTest.command = 'winrm' - // winrm commands run as administrator - javaPackagingTest.args = ['--command', 'powershell -File "$Env:PACKAGING_TESTS/run-tests.ps1"'] + javaPackagingTest.args = ['--elevated', '--command', 'powershell -File "$Env:PACKAGING_TESTS/run-tests.ps1"'] } TaskExecutionAdapter javaPackagingReproListener = createReproListener(project, javaPackagingTest.path) @@ -559,7 +569,11 @@ class VagrantTestPlugin implements Plugin { project.gradle.removeListener(javaPackagingReproListener) } if (project.extensions.esvagrant.boxes.contains(box)) { - packagingTest.dependsOn(javaPackagingTest) + // these tests are temporarily disabled for suse boxes while we debug an issue + // https://github.com/elastic/elasticsearch/issues/30295 + if (box.equals("opensuse-42") == false && box.equals("sles-12") == false) { + packagingTest.dependsOn(javaPackagingTest) + } } /* diff --git a/client/rest-high-level/build.gradle b/client/rest-high-level/build.gradle index 222de9608aeb9..451452759f507 100644 --- a/client/rest-high-level/build.gradle +++ b/client/rest-high-level/build.gradle @@ -1,5 +1,3 @@ -import org.elasticsearch.gradle.precommit.PrecommitTasks - /* * Licensed to Elasticsearch under one or more contributor * license agreements. See the NOTICE file distributed with @@ -18,29 +16,86 @@ import org.elasticsearch.gradle.precommit.PrecommitTasks * specific language governing permissions and limitations * under the License. */ + +import org.elasticsearch.gradle.precommit.PrecommitTasks +import org.gradle.api.XmlProvider +import org.gradle.api.publish.maven.MavenPublication + +buildscript { + repositories { + maven { + url 'https://plugins.gradle.org/m2/' + } + } + dependencies { + classpath 'com.github.jengelman.gradle.plugins:shadow:2.0.4' + } +} + apply plugin: 'elasticsearch.build' apply plugin: 'elasticsearch.rest-test' apply plugin: 'nebula.maven-base-publish' apply plugin: 'nebula.maven-scm' +apply plugin: 'com.github.johnrengelman.shadow' group = 'org.elasticsearch.client' archivesBaseName = 'elasticsearch-rest-high-level-client' publishing { - publications { - nebula { - artifactId = archivesBaseName + publications { + nebula(MavenPublication) { + artifact shadowJar + artifactId = archivesBaseName + /* + * Configure the pom to include the "shadow" as compile dependencies + * because that is how we're using them but remove all other dependencies + * because they've been shaded into the jar. 
+ */ + pom.withXml { XmlProvider xml -> + Node root = xml.asNode() + root.remove(root.dependencies) + Node dependenciesNode = root.appendNode('dependencies') + project.configurations.shadow.allDependencies.each { + if (false == it instanceof SelfResolvingDependency) { + Node dependencyNode = dependenciesNode.appendNode('dependency') + dependencyNode.appendNode('groupId', it.group) + dependencyNode.appendNode('artifactId', it.name) + dependencyNode.appendNode('version', it.version) + dependencyNode.appendNode('scope', 'compile') + } } + } } + } +} + +/* + * We need somewhere to configure dependencies that we don't wish to shade + * into the high level REST client. The shadow plugin creates a "shadow" + * configuration which is *almost* exactly that. It is never bundled into + * the shaded jar but is used for main source compilation. Unfortunately, + * by default it is not used for *test* source compilation and isn't used + * in tests at all. This change makes it available for test compilation. + * A change below makes it available for testing. + */ +sourceSets { + test { + compileClasspath += configurations.shadow + } } dependencies { - compile "org.elasticsearch:elasticsearch:${version}" - compile "org.elasticsearch.client:elasticsearch-rest-client:${version}" - compile "org.elasticsearch.plugin:parent-join-client:${version}" - compile "org.elasticsearch.plugin:aggs-matrix-stats-client:${version}" - compile "org.elasticsearch.plugin:rank-eval-client:${version}" - compile "org.elasticsearch.plugin:lang-mustache-client:${version}" + /* + * Everything in the "shadow" configuration is *not* copied into the + * shadowJar. + */ + shadow "org.elasticsearch:elasticsearch:${version}" + shadow "org.elasticsearch.client:elasticsearch-rest-client:${version}" + shadow "org.elasticsearch.plugin:parent-join-client:${version}" + shadow "org.elasticsearch.plugin:aggs-matrix-stats-client:${version}" + shadow "org.elasticsearch.plugin:rank-eval-client:${version}" + shadow "org.elasticsearch.plugin:lang-mustache-client:${version}" + compile project(':x-pack:protocol') testCompile "org.elasticsearch.client:test:${version}" testCompile "org.elasticsearch.test:framework:${version}" @@ -63,3 +118,48 @@ forbiddenApisMain { signaturesURLs += [PrecommitTasks.getResource('/forbidden/http-signatures.txt')] signaturesURLs += [file('src/main/resources/forbidden/rest-high-level-signatures.txt').toURI().toURL()] } + +shadowJar { + classifier = null + mergeServiceFiles() +} + +// We don't need normal jar, we use shadow jar instead +jar.enabled = false +assemble.dependsOn shadowJar + +javadoc { + /* + * Bundle all of the javadoc from all of the shaded projects into this one + * so we don't *have* to publish javadoc for all of the "client" jars. + */ + configurations.compile.dependencies.all { Dependency dep -> + Project p = dependencyToProject(dep) + if (p != null) { + evaluationDependsOn(p.path) + source += p.sourceSets.main.allJava + } + } +} + +/* + * Use the jar for testing so we have tests of the bundled jar. + * Use the "shadow" configuration for testing because we need things + * in it. 
+ */ +test { + classpath -= compileJava.outputs.files + classpath -= configurations.compile + classpath -= configurations.runtime + classpath += configurations.shadow + classpath += shadowJar.outputs.files + dependsOn shadowJar +} +integTestRunner { + classpath -= compileJava.outputs.files + classpath -= configurations.compile + classpath -= configurations.runtime + classpath += configurations.shadow + classpath += shadowJar.outputs.files + dependsOn shadowJar +} diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java index efc48d9057550..126a9c7d4b4ec 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java @@ -43,6 +43,7 @@ import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest; import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest; import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest; +import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.get.GetAliasesRequest; import org.elasticsearch.action.admin.indices.analyze.AnalyzeRequest; @@ -104,6 +105,7 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.rankeval.RankEvalRequest; +import org.elasticsearch.protocol.xpack.XPackInfoRequest; import org.elasticsearch.rest.action.search.RestSearchAction; import org.elasticsearch.script.mustache.MultiSearchTemplateRequest; import org.elasticsearch.script.mustache.SearchTemplateRequest; @@ -115,8 +117,10 @@ import java.net.URI; import java.net.URISyntaxException; import java.nio.charset.Charset; +import java.util.EnumSet; import java.util.Locale; import java.util.StringJoiner; +import java.util.stream.Collectors; final class RequestConverters { static final XContentType REQUEST_BODY_CONTENT_TYPE = XContentType.JSON; @@ -960,6 +964,20 @@ static Request getSnapshots(GetSnapshotsRequest getSnapshotsRequest) { return request; } + static Request snapshotsStatus(SnapshotsStatusRequest snapshotsStatusRequest) { + String endpoint = new EndpointBuilder().addPathPartAsIs("_snapshot") + .addPathPart(snapshotsStatusRequest.repository()) + .addCommaSeparatedPathParts(snapshotsStatusRequest.snapshots()) + .addPathPartAsIs("_status") + .build(); + Request request = new Request(HttpGet.METHOD_NAME, endpoint); + + Params parameters = new Params(request); + parameters.withMasterTimeout(snapshotsStatusRequest.masterNodeTimeout()); + parameters.withIgnoreUnavailable(snapshotsStatusRequest.ignoreUnavailable()); + return request; + } + static Request deleteSnapshot(DeleteSnapshotRequest deleteSnapshotRequest) { String endpoint = new EndpointBuilder().addPathPartAsIs("_snapshot") .addPathPart(deleteSnapshotRequest.repository()) @@ -1065,6 +1083,19 @@ static Request deleteScript(DeleteStoredScriptRequest deleteStoredScriptRequest) return request; } + static Request xPackInfo(XPackInfoRequest infoRequest) { + Request request = new Request(HttpGet.METHOD_NAME, "/_xpack"); + if (false == infoRequest.isVerbose()) { + request.addParameter("human", "false"); + } + if (false == 
infoRequest.getCategories().equals(EnumSet.allOf(XPackInfoRequest.Category.class))) { + request.addParameter("categories", infoRequest.getCategories().stream() + .map(c -> c.toString().toLowerCase(Locale.ROOT)) + .collect(Collectors.joining(","))); + } + return request; + } + private static HttpEntity createEntity(ToXContent toXContent, XContentType xContentType) throws IOException { BytesRef source = XContentHelper.toXContent(toXContent, xContentType, false).toBytesRef(); return new ByteArrayEntity(source.bytes, source.offset, source.length, createContentType(xContentType)); @@ -1246,7 +1277,7 @@ Params withWaitForActiveShards(ActiveShardCount activeShardCount, ActiveShardCou } Params withIndicesOptions(IndicesOptions indicesOptions) { - putParam("ignore_unavailable", Boolean.toString(indicesOptions.ignoreUnavailable())); + withIgnoreUnavailable(indicesOptions.ignoreUnavailable()); putParam("allow_no_indices", Boolean.toString(indicesOptions.allowNoIndices())); String expandWildcards; if (indicesOptions.expandWildcardsOpen() == false && indicesOptions.expandWildcardsClosed() == false) { @@ -1265,6 +1296,12 @@ Params withIndicesOptions(IndicesOptions indicesOptions) { return this; } + Params withIgnoreUnavailable(boolean ignoreUnavailable) { + // Always explicitly place the ignore_unavailable value. + putParam("ignore_unavailable", Boolean.toString(ignoreUnavailable)); + return this; + } + Params withHuman(boolean human) { if (human) { putParam("human", Boolean.toString(human)); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java index 48277d67e6d15..df674ea898ed1 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java @@ -202,6 +202,7 @@ public class RestHighLevelClient implements Closeable { private final IngestClient ingestClient = new IngestClient(this); private final SnapshotClient snapshotClient = new SnapshotClient(this); private final TasksClient tasksClient = new TasksClient(this); + private final XPackClient xPackClient = new XPackClient(this); /** * Creates a {@link RestHighLevelClient} given the low level {@link RestClientBuilder} that allows to build the @@ -292,6 +293,19 @@ public final TasksClient tasks() { return tasksClient; } + /** + * A wrapper for the {@link RestHighLevelClient} that provides methods for + * accessing the Elastic Licensed X-Pack APIs that are shipped with the + * default distribution of Elasticsearch. All of these APIs will 404 if run + * against the OSS distribution of Elasticsearch. + *
<p>
+ * See the + * X-Pack APIs on elastic.co for more information. + */ + public final XPackClient xpack() { + return xPackClient; + } + /** * Executes a bulk request using the Bulk API. * See Bulk API on elastic.co @@ -668,7 +682,7 @@ public final RankEvalResponse rankEval(RankEvalRequest rankEvalRequest, RequestO emptySet()); } - + /** * Executes a request using the Multi Search Template API. * @@ -678,9 +692,9 @@ public final RankEvalResponse rankEval(RankEvalRequest rankEvalRequest, RequestO public final MultiSearchTemplateResponse multiSearchTemplate(MultiSearchTemplateRequest multiSearchTemplateRequest, RequestOptions options) throws IOException { return performRequestAndParseEntity(multiSearchTemplateRequest, RequestConverters::multiSearchTemplate, - options, MultiSearchTemplateResponse::fromXContext, emptySet()); - } - + options, MultiSearchTemplateResponse::fromXContext, emptySet()); + } + /** * Asynchronously executes a request using the Multi Search Template API * @@ -692,7 +706,7 @@ public final void multiSearchTemplateAsync(MultiSearchTemplateRequest multiSearc ActionListener listener) { performRequestAsyncAndParseEntity(multiSearchTemplateRequest, RequestConverters::multiSearchTemplate, options, MultiSearchTemplateResponse::fromXContext, listener, emptySet()); - } + } /** * Asynchronously executes a request using the Ranking Evaluation API. diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotClient.java index fa147a338de0a..bc0bbe95488f4 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotClient.java @@ -30,6 +30,8 @@ import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryResponse; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; +import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusRequest; +import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusResponse; import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest; import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotResponse; import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest; @@ -221,6 +223,35 @@ public void getAsync(GetSnapshotsRequest getSnapshotsRequest, RequestOptions opt GetSnapshotsResponse::fromXContent, listener, emptySet()); } + /** + * Gets the status of requested snapshots. + * See Snapshot and Restore + * API on elastic.co + * @param snapshotsStatusRequest the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return the response + * @throws IOException in case there is a problem sending the request or parsing back the response + */ + public SnapshotsStatusResponse status(SnapshotsStatusRequest snapshotsStatusRequest, RequestOptions options) + throws IOException { + return restHighLevelClient.performRequestAndParseEntity(snapshotsStatusRequest, RequestConverters::snapshotsStatus, options, + SnapshotsStatusResponse::fromXContent, emptySet()); + } + + /** + * Asynchronously gets the status of requested snapshots. 
+ * See Snapshot and Restore + * API on elastic.co + * @param snapshotsStatusRequest the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener the listener to be notified upon request completion + */ + public void statusAsync(SnapshotsStatusRequest snapshotsStatusRequest, RequestOptions options, + ActionListener listener) { + restHighLevelClient.performRequestAsyncAndParseEntity(snapshotsStatusRequest, RequestConverters::snapshotsStatus, options, + SnapshotsStatusResponse::fromXContent, listener, emptySet()); + } + /** * Deletes a snapshot. * See Snapshot and Restore diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/XPackClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/XPackClient.java new file mode 100644 index 0000000000000..5942bfa35a477 --- /dev/null +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/XPackClient.java @@ -0,0 +1,73 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.client; + +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.protocol.xpack.XPackInfoRequest; +import org.elasticsearch.protocol.xpack.XPackInfoResponse; + +import java.io.IOException; + +import static java.util.Collections.emptySet; + +/** + * A wrapper for the {@link RestHighLevelClient} that provides methods for + * accessing the Elastic Licensed X-Pack APIs that are shipped with the + * default distribution of Elasticsearch. All of these APIs will 404 if run + * against the OSS distribution of Elasticsearch. + *
<p>
+ * See the + * X-Pack APIs on elastic.co for more information. + */ +public final class XPackClient { + private final RestHighLevelClient restHighLevelClient; + + XPackClient(RestHighLevelClient restHighLevelClient) { + this.restHighLevelClient = restHighLevelClient; + } + + /** + * Fetch information about X-Pack from the cluster. + * See + * the docs for more. + * @param request the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return the response + * @throws IOException in case there is a problem sending the request or parsing back the response + */ + public XPackInfoResponse info(XPackInfoRequest request, RequestOptions options) throws IOException { + return restHighLevelClient.performRequestAndParseEntity(request, RequestConverters::xPackInfo, options, + XPackInfoResponse::fromXContent, emptySet()); + } + + /** + * Asynchronously fetch information about X-Pack from the cluster. + * See + * the docs for more. + * @param request the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener the listener to be notified upon request completion + */ + public void infoAsync(XPackInfoRequest request, RequestOptions options, + ActionListener listener) { + restHighLevelClient.performRequestAsyncAndParseEntity(request, RequestConverters::xPackInfo, options, + XPackInfoResponse::fromXContent, listener, emptySet()); + } +} diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java index 39070a07b31d6..88cf445d436fe 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java @@ -612,7 +612,7 @@ public void testOpenExistingIndex() throws IOException { createIndex(index, Settings.EMPTY); closeIndex(index); ResponseException exception = expectThrows(ResponseException.class, - () -> client().performRequest(HttpGet.METHOD_NAME, index + "/_search")); + () -> client().performRequest(new Request(HttpGet.METHOD_NAME, index + "/_search"))); assertThat(exception.getResponse().getStatusLine().getStatusCode(), equalTo(RestStatus.BAD_REQUEST.getStatus())); assertThat(exception.getMessage().contains(index), equalTo(true)); @@ -621,7 +621,7 @@ public void testOpenExistingIndex() throws IOException { highLevelClient().indices()::openAsync); assertTrue(openIndexResponse.isAcknowledged()); - Response response = client().performRequest(HttpGet.METHOD_NAME, index + "/_search"); + Response response = client().performRequest(new Request(HttpGet.METHOD_NAME, index + "/_search")); assertThat(response.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus())); } @@ -650,7 +650,7 @@ public void testOpenNonExistentIndex() throws IOException { public void testCloseExistingIndex() throws IOException { String index = "index"; createIndex(index, Settings.EMPTY); - Response response = client().performRequest(HttpGet.METHOD_NAME, index + "/_search"); + Response response = client().performRequest(new Request(HttpGet.METHOD_NAME, index + "/_search")); assertThat(response.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus())); CloseIndexRequest closeIndexRequest = new CloseIndexRequest(index); @@ -659,7 +659,7 @@ public void testCloseExistingIndex() throws IOException { 
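// ---------------------------------------------------------------------------
// [Editor's sketch, not part of the change set] Every test edit in this file
// follows one pattern: the deprecated performRequest(String method, String
// endpoint, ...) overloads of the low level client are replaced with an
// explicit org.elasticsearch.client.Request object. A minimal, self-contained
// illustration of the new style; the endpoint and filter mirror the filtered
// alias set up later in this diff, while the master_timeout parameter is an
// invented optional extra for the example:
import org.apache.http.client.methods.HttpPut;

import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

import java.io.IOException;

final class RequestMigrationSketch {
    static Response putFilteredAlias(RestClient restClient) throws IOException {
        // Build the request once, then attach query parameters and a JSON
        // body explicitly instead of passing loose positional arguments.
        Request request = new Request(HttpPut.METHOD_NAME, "/index4/_alias/alias4");
        request.addParameter("master_timeout", "30s");
        request.setJsonEntity("{\"filter\": {\"term\": {\"field2\": \"value1\"}}}");
        return restClient.performRequest(request);
    }
}
// ---------------------------------------------------------------------------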
assertTrue(closeIndexResponse.isAcknowledged()); ResponseException exception = expectThrows(ResponseException.class, - () -> client().performRequest(HttpGet.METHOD_NAME, index + "/_search")); + () -> client().performRequest(new Request(HttpGet.METHOD_NAME, index + "/_search"))); assertThat(exception.getResponse().getStatusLine().getStatusCode(), equalTo(RestStatus.BAD_REQUEST.getStatus())); assertThat(exception.getMessage().contains(index), equalTo(true)); } @@ -817,7 +817,7 @@ public void testExistsAlias() throws IOException { assertFalse(execute(getAliasesRequest, highLevelClient().indices()::existsAlias, highLevelClient().indices()::existsAliasAsync)); createIndex("index", Settings.EMPTY); - client().performRequest(HttpPut.METHOD_NAME, "/index/_alias/alias"); + client().performRequest(new Request(HttpPut.METHOD_NAME, "/index/_alias/alias")); assertTrue(execute(getAliasesRequest, highLevelClient().indices()::existsAlias, highLevelClient().indices()::existsAliasAsync)); GetAliasesRequest getAliasesRequest2 = new GetAliasesRequest(); @@ -936,10 +936,10 @@ public void testRollover() throws IOException { public void testGetAlias() throws IOException { { createIndex("index1", Settings.EMPTY); - client().performRequest(HttpPut.METHOD_NAME, "/index1/_alias/alias1"); + client().performRequest(new Request(HttpPut.METHOD_NAME, "/index1/_alias/alias1")); createIndex("index2", Settings.EMPTY); - client().performRequest(HttpPut.METHOD_NAME, "/index2/_alias/alias2"); + client().performRequest(new Request(HttpPut.METHOD_NAME, "/index2/_alias/alias2")); createIndex("index3", Settings.EMPTY); } @@ -1075,7 +1075,7 @@ public void testGetAliasesNonExistentIndexOrAlias() throws IOException { assertThat(getAliasesResponse.getError(), equalTo("alias [" + alias + "] missing")); } createIndex(index, Settings.EMPTY); - client().performRequest(HttpPut.METHOD_NAME, index + "/_alias/" + alias); + client().performRequest(new Request(HttpPut.METHOD_NAME, index + "/_alias/" + alias)); { GetAliasesRequest getAliasesRequest = new GetAliasesRequest().indices(index, "non_existent_index"); GetAliasesResponse getAliasesResponse = execute(getAliasesRequest, highLevelClient().indices()::getAlias, diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/PingAndInfoIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/PingAndInfoIT.java index 057ea49f9a969..b45f52f9e441c 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/PingAndInfoIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/PingAndInfoIT.java @@ -21,8 +21,13 @@ import org.apache.http.client.methods.HttpGet; import org.elasticsearch.action.main.MainResponse; +import org.elasticsearch.protocol.xpack.XPackInfoRequest; +import org.elasticsearch.protocol.xpack.XPackInfoResponse; +import org.elasticsearch.protocol.xpack.XPackInfoResponse.FeatureSetsInfo.FeatureSet; +import org.elasticsearch.protocol.xpack.license.LicenseStatus; import java.io.IOException; +import java.util.EnumSet; import java.util.Map; public class PingAndInfoIT extends ESRestHighLevelClientTestCase { @@ -31,16 +36,16 @@ public void testPing() throws IOException { assertTrue(highLevelClient().ping(RequestOptions.DEFAULT)); } - @SuppressWarnings("unchecked") public void testInfo() throws IOException { MainResponse info = highLevelClient().info(RequestOptions.DEFAULT); // compare with what the low level client outputs - Map infoAsMap = entityAsMap(adminClient().performRequest(HttpGet.METHOD_NAME, "/")); + Map infoAsMap = 
entityAsMap(adminClient().performRequest(new Request(HttpGet.METHOD_NAME, "/"))); assertEquals(infoAsMap.get("cluster_name"), info.getClusterName().value()); assertEquals(infoAsMap.get("cluster_uuid"), info.getClusterUuid()); // only check node name existence, might be a different one from what was hit by low level client in multi-node cluster assertNotNull(info.getNodeName()); + @SuppressWarnings("unchecked") Map versionMap = (Map) infoAsMap.get("version"); assertEquals(versionMap.get("build_flavor"), info.getBuild().flavor().displayName()); assertEquals(versionMap.get("build_type"), info.getBuild().type().displayName()); @@ -51,4 +56,49 @@ public void testInfo() throws IOException { assertEquals(versionMap.get("lucene_version"), info.getVersion().luceneVersion.toString()); } + public void testXPackInfo() throws IOException { + XPackInfoRequest request = new XPackInfoRequest(); + request.setCategories(EnumSet.allOf(XPackInfoRequest.Category.class)); + request.setVerbose(true); + XPackInfoResponse info = highLevelClient().xpack().info(request, RequestOptions.DEFAULT); + + MainResponse mainResponse = highLevelClient().info(RequestOptions.DEFAULT); + + assertEquals(mainResponse.getBuild().shortHash(), info.getBuildInfo().getHash()); + + assertEquals("basic", info.getLicenseInfo().getType()); + assertEquals("basic", info.getLicenseInfo().getMode()); + assertEquals(LicenseStatus.ACTIVE, info.getLicenseInfo().getStatus()); + + FeatureSet graph = info.getFeatureSetsInfo().getFeatureSets().get("graph"); + assertNotNull(graph.description()); + assertFalse(graph.available()); + assertTrue(graph.enabled()); + assertNull(graph.nativeCodeInfo()); + FeatureSet monitoring = info.getFeatureSetsInfo().getFeatureSets().get("monitoring"); + assertNotNull(monitoring.description()); + assertTrue(monitoring.available()); + assertTrue(monitoring.enabled()); + assertNull(monitoring.nativeCodeInfo()); + FeatureSet ml = info.getFeatureSetsInfo().getFeatureSets().get("ml"); + assertNotNull(ml.description()); + assertFalse(ml.available()); + assertTrue(ml.enabled()); + assertEquals(mainResponse.getVersion().toString(), + ml.nativeCodeInfo().get("version").toString().replace("-SNAPSHOT", "")); + } + + public void testXPackInfoEmptyRequest() throws IOException { + XPackInfoResponse info = highLevelClient().xpack().info(new XPackInfoRequest(), RequestOptions.DEFAULT); + + /* + * The default in the transport client is non-verbose and returning + * no categories which is the opposite of the default when you use + * the API over REST. We don't want to break the transport client + * even though it doesn't feel like a good default. 
+ */ + assertNull(info.getBuildInfo()); + assertNull(info.getLicenseInfo()); + assertNull(info.getFeatureSetsInfo()); + } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RankEvalIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RankEvalIT.java index a7a452484e023..d61fccb937193 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RankEvalIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RankEvalIT.java @@ -19,8 +19,6 @@ package org.elasticsearch.client; -import org.apache.http.entity.ContentType; -import org.apache.http.entity.StringEntity; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.index.query.MatchAllQueryBuilder; @@ -37,7 +35,6 @@ import java.io.IOException; import java.util.ArrayList; -import java.util.Collections; import java.util.List; import java.util.Map; import java.util.stream.Collectors; @@ -49,19 +46,17 @@ public class RankEvalIT extends ESRestHighLevelClientTestCase { @Before public void indexDocuments() throws IOException { - StringEntity doc = new StringEntity("{\"text\":\"berlin\"}", ContentType.APPLICATION_JSON); - client().performRequest("PUT", "/index/doc/1", Collections.emptyMap(), doc); - doc = new StringEntity("{\"text\":\"amsterdam\"}", ContentType.APPLICATION_JSON); - client().performRequest("PUT", "/index/doc/2", Collections.emptyMap(), doc); - client().performRequest("PUT", "/index/doc/3", Collections.emptyMap(), doc); - client().performRequest("PUT", "/index/doc/4", Collections.emptyMap(), doc); - client().performRequest("PUT", "/index/doc/5", Collections.emptyMap(), doc); - client().performRequest("PUT", "/index/doc/6", Collections.emptyMap(), doc); - client().performRequest("POST", "/index/_refresh"); - - // add another index to test basic multi index support - client().performRequest("PUT", "/index2/doc/7", Collections.emptyMap(), doc); - client().performRequest("POST", "/index2/_refresh"); + Request berlin = new Request("PUT", "/index/doc/berlin"); + berlin.setJsonEntity("{\"text\":\"berlin\"}"); + client().performRequest(berlin); + for (int i = 0; i < 6; i++) { + // add another index to test basic multi index support + String index = i == 0 ? 
"index2" : "index"; + Request amsterdam = new Request("PUT", "/" + index + "/doc/amsterdam" + i); + amsterdam.setJsonEntity("{\"text\":\"amsterdam\"}"); + client().performRequest(amsterdam); + } + client().performRequest(new Request("POST", "/_refresh")); } /** @@ -71,10 +66,10 @@ public void indexDocuments() throws IOException { public void testRankEvalRequest() throws IOException { SearchSourceBuilder testQuery = new SearchSourceBuilder(); testQuery.query(new MatchAllQueryBuilder()); - List amsterdamRatedDocs = createRelevant("index" , "2", "3", "4", "5"); - amsterdamRatedDocs.addAll(createRelevant("index2", "7")); + List amsterdamRatedDocs = createRelevant("index" , "amsterdam1", "amsterdam2", "amsterdam3", "amsterdam4"); + amsterdamRatedDocs.addAll(createRelevant("index2", "amsterdam0")); RatedRequest amsterdamRequest = new RatedRequest("amsterdam_query", amsterdamRatedDocs, testQuery); - RatedRequest berlinRequest = new RatedRequest("berlin_query", createRelevant("index", "1"), testQuery); + RatedRequest berlinRequest = new RatedRequest("berlin_query", createRelevant("index", "berlin"), testQuery); List specifications = new ArrayList<>(); specifications.add(amsterdamRequest); specifications.add(berlinRequest); @@ -94,7 +89,7 @@ public void testRankEvalRequest() throws IOException { assertEquals(7, hitsAndRatings.size()); for (RatedSearchHit hit : hitsAndRatings) { String id = hit.getSearchHit().getId(); - if (id.equals("1") || id.equals("6")) { + if (id.equals("berlin") || id.equals("amsterdam5")) { assertFalse(hit.getRating().isPresent()); } else { assertEquals(1, hit.getRating().get().intValue()); @@ -106,7 +101,7 @@ public void testRankEvalRequest() throws IOException { assertEquals(7, hitsAndRatings.size()); for (RatedSearchHit hit : hitsAndRatings) { String id = hit.getSearchHit().getId(); - if (id.equals("1")) { + if (id.equals("berlin")) { assertEquals(1, hit.getRating().get().intValue()); } else { assertFalse(hit.getRating().isPresent()); @@ -114,7 +109,7 @@ public void testRankEvalRequest() throws IOException { } // now try this when test2 is closed - client().performRequest("POST", "index2/_close", Collections.emptyMap()); + client().performRequest(new Request("POST", "index2/_close")); rankEvalRequest.indicesOptions(IndicesOptions.fromParameters(null, "true", null, SearchRequest.DEFAULT_INDICES_OPTIONS)); response = execute(rankEvalRequest, highLevelClient()::rankEval, highLevelClient()::rankEvalAsync); } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java index f3a7641f9bfb9..fb4e3b22712f5 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RequestConvertersTests.java @@ -43,6 +43,7 @@ import org.elasticsearch.action.admin.cluster.snapshots.get.GetSnapshotsRequest; import org.elasticsearch.action.admin.cluster.storedscripts.DeleteStoredScriptRequest; import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest; +import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusRequest; import org.elasticsearch.action.admin.indices.alias.Alias; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest; import org.elasticsearch.action.admin.indices.alias.IndicesAliasesRequest.AliasActions; @@ -123,6 +124,7 @@ import org.elasticsearch.index.rankeval.RankEvalSpec; import 
org.elasticsearch.index.rankeval.RatedRequest; import org.elasticsearch.index.rankeval.RestRankEvalAction; +import org.elasticsearch.protocol.xpack.XPackInfoRequest; import org.elasticsearch.repositories.fs.FsRepository; import org.elasticsearch.rest.action.search.RestSearchAction; import org.elasticsearch.script.ScriptType; @@ -150,6 +152,7 @@ import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; +import java.util.EnumSet; import java.util.HashMap; import java.util.HashSet; import java.util.List; @@ -173,6 +176,7 @@ import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.Matchers.hasEntry; import static org.hamcrest.Matchers.hasKey; +import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.nullValue; @@ -2169,6 +2173,29 @@ public void testGetAllSnapshots() { assertNull(request.getEntity()); } + public void testSnapshotsStatus() { + Map expectedParams = new HashMap<>(); + String repository = randomIndicesNames(1, 1)[0]; + String[] snapshots = randomIndicesNames(1, 5); + StringBuilder snapshotNames = new StringBuilder(snapshots[0]); + for (int idx = 1; idx < snapshots.length; idx++) { + snapshotNames.append(",").append(snapshots[idx]); + } + boolean ignoreUnavailable = randomBoolean(); + String endpoint = "/_snapshot/" + repository + "/" + snapshotNames.toString() + "/_status"; + + SnapshotsStatusRequest snapshotsStatusRequest = new SnapshotsStatusRequest(repository, snapshots); + setRandomMasterTimeout(snapshotsStatusRequest, expectedParams); + snapshotsStatusRequest.ignoreUnavailable(ignoreUnavailable); + expectedParams.put("ignore_unavailable", Boolean.toString(ignoreUnavailable)); + + Request request = RequestConverters.snapshotsStatus(snapshotsStatusRequest); + assertThat(request.getEndpoint(), equalTo(endpoint)); + assertThat(request.getMethod(), equalTo(HttpGet.METHOD_NAME)); + assertThat(request.getParameters(), equalTo(expectedParams)); + assertThat(request.getEntity(), is(nullValue())); + } + public void testDeleteSnapshot() { Map expectedParams = new HashMap<>(); String repository = randomIndicesNames(1, 1)[0]; @@ -2465,6 +2492,37 @@ public void testEnforceSameContentType() { + "previous requests have content-type [" + xContentType + "]", exception.getMessage()); } + public void testXPackInfo() { + XPackInfoRequest infoRequest = new XPackInfoRequest(); + Map expectedParams = new HashMap<>(); + infoRequest.setVerbose(randomBoolean()); + if (false == infoRequest.isVerbose()) { + expectedParams.put("human", "false"); + } + int option = between(0, 2); + switch (option) { + case 0: + infoRequest.setCategories(EnumSet.allOf(XPackInfoRequest.Category.class)); + break; + case 1: + infoRequest.setCategories(EnumSet.of(XPackInfoRequest.Category.FEATURES)); + expectedParams.put("categories", "features"); + break; + case 2: + infoRequest.setCategories(EnumSet.of(XPackInfoRequest.Category.FEATURES, XPackInfoRequest.Category.BUILD)); + expectedParams.put("categories", "build,features"); + break; + default: + throw new IllegalArgumentException("invalid option [" + option + "]"); + } + + Request request = RequestConverters.xPackInfo(infoRequest); + assertEquals(HttpGet.METHOD_NAME, request.getMethod()); + assertEquals("/_xpack", request.getEndpoint()); + assertNull(request.getEntity()); + assertEquals(expectedParams, request.getParameters()); + } + /** * Randomize the {@link FetchSourceContext} request parameters. 
*/ diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java index 18a43ffa8d404..ce9091a91ff8b 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java @@ -19,12 +19,8 @@ package org.elasticsearch.client; -import org.apache.http.HttpEntity; import org.apache.http.client.methods.HttpPost; import org.apache.http.client.methods.HttpPut; -import org.apache.http.entity.ContentType; -import org.apache.http.entity.StringEntity; -import org.apache.http.nio.entity.NStringEntity; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.action.explain.ExplainRequest; @@ -101,85 +97,106 @@ public class SearchIT extends ESRestHighLevelClientTestCase { @Before public void indexDocuments() throws IOException { - StringEntity doc1 = new StringEntity("{\"type\":\"type1\", \"num\":10, \"num2\":50}", ContentType.APPLICATION_JSON); - client().performRequest(HttpPut.METHOD_NAME, "/index/type/1", Collections.emptyMap(), doc1); - StringEntity doc2 = new StringEntity("{\"type\":\"type1\", \"num\":20, \"num2\":40}", ContentType.APPLICATION_JSON); - client().performRequest(HttpPut.METHOD_NAME, "/index/type/2", Collections.emptyMap(), doc2); - StringEntity doc3 = new StringEntity("{\"type\":\"type1\", \"num\":50, \"num2\":35}", ContentType.APPLICATION_JSON); - client().performRequest(HttpPut.METHOD_NAME, "/index/type/3", Collections.emptyMap(), doc3); - StringEntity doc4 = new StringEntity("{\"type\":\"type2\", \"num\":100, \"num2\":10}", ContentType.APPLICATION_JSON); - client().performRequest(HttpPut.METHOD_NAME, "/index/type/4", Collections.emptyMap(), doc4); - StringEntity doc5 = new StringEntity("{\"type\":\"type2\", \"num\":100, \"num2\":10}", ContentType.APPLICATION_JSON); - client().performRequest(HttpPut.METHOD_NAME, "/index/type/5", Collections.emptyMap(), doc5); - client().performRequest(HttpPost.METHOD_NAME, "/index/_refresh"); - - - StringEntity doc = new StringEntity("{\"field\":\"value1\", \"rating\": 7}", ContentType.APPLICATION_JSON); - client().performRequest(HttpPut.METHOD_NAME, "/index1/doc/1", Collections.emptyMap(), doc); - doc = new StringEntity("{\"field\":\"value2\"}", ContentType.APPLICATION_JSON); - client().performRequest(HttpPut.METHOD_NAME, "/index1/doc/2", Collections.emptyMap(), doc); - - StringEntity mappings = new StringEntity( - "{" + - " \"mappings\": {" + - " \"doc\": {" + - " \"properties\": {" + - " \"rating\": {" + - " \"type\": \"keyword\"" + - " }" + - " }" + - " }" + - " }" + - "}}", - ContentType.APPLICATION_JSON); - client().performRequest("PUT", "/index2", Collections.emptyMap(), mappings); - doc = new StringEntity("{\"field\":\"value1\", \"rating\": \"good\"}", ContentType.APPLICATION_JSON); - client().performRequest(HttpPut.METHOD_NAME, "/index2/doc/3", Collections.emptyMap(), doc); - doc = new StringEntity("{\"field\":\"value2\"}", ContentType.APPLICATION_JSON); - client().performRequest(HttpPut.METHOD_NAME, "/index2/doc/4", Collections.emptyMap(), doc); - - doc = new StringEntity("{\"field\":\"value1\"}", ContentType.APPLICATION_JSON); - client().performRequest(HttpPut.METHOD_NAME, "/index3/doc/5", Collections.emptyMap(), doc); - doc = new StringEntity("{\"field\":\"value2\"}", ContentType.APPLICATION_JSON); - client().performRequest(HttpPut.METHOD_NAME, "/index3/doc/6", 
Collections.emptyMap(), doc); - - mappings = new StringEntity( - "{" + + { + Request doc1 = new Request(HttpPut.METHOD_NAME, "/index/type/1"); + doc1.setJsonEntity("{\"type\":\"type1\", \"num\":10, \"num2\":50}"); + client().performRequest(doc1); + Request doc2 = new Request(HttpPut.METHOD_NAME, "/index/type/2"); + doc2.setJsonEntity("{\"type\":\"type1\", \"num\":20, \"num2\":40}"); + client().performRequest(doc2); + Request doc3 = new Request(HttpPut.METHOD_NAME, "/index/type/3"); + doc3.setJsonEntity("{\"type\":\"type1\", \"num\":50, \"num2\":35}"); + client().performRequest(doc3); + Request doc4 = new Request(HttpPut.METHOD_NAME, "/index/type/4"); + doc4.setJsonEntity("{\"type\":\"type2\", \"num\":100, \"num2\":10}"); + client().performRequest(doc4); + Request doc5 = new Request(HttpPut.METHOD_NAME, "/index/type/5"); + doc5.setJsonEntity("{\"type\":\"type2\", \"num\":100, \"num2\":10}"); + client().performRequest(doc5); + } + + { + Request doc1 = new Request(HttpPut.METHOD_NAME, "/index1/doc/1"); + doc1.setJsonEntity("{\"field\":\"value1\", \"rating\": 7}"); + client().performRequest(doc1); + Request doc2 = new Request(HttpPut.METHOD_NAME, "/index1/doc/2"); + doc2.setJsonEntity("{\"field\":\"value2\"}"); + client().performRequest(doc2); + } + + { + Request create = new Request("PUT", "/index2"); + create.setJsonEntity( + "{" + " \"mappings\": {" + " \"doc\": {" + " \"properties\": {" + - " \"field1\": {" + - " \"type\": \"keyword\"," + - " \"store\": true" + - " }," + - " \"field2\": {" + - " \"type\": \"keyword\"," + - " \"store\": true" + + " \"rating\": {" + + " \"type\": \"keyword\"" + " }" + " }" + " }" + " }" + - "}}", - ContentType.APPLICATION_JSON); - client().performRequest(HttpPut.METHOD_NAME, "/index4", Collections.emptyMap(), mappings); - doc = new StringEntity("{\"field1\":\"value1\", \"field2\":\"value2\"}", ContentType.APPLICATION_JSON); - client().performRequest(HttpPut.METHOD_NAME, "/index4/doc/1", Collections.emptyMap(), doc); - StringEntity aliasFilter = new StringEntity( - "{" + - " \"actions\" : [" + - " {" + - " \"add\" : {" + - " \"index\" : \"index4\"," + - " \"alias\" : \"alias4\"," + - " \"filter\" : { \"term\" : { \"field2\" : \"value1\" } }" + - " }" + - " }" + - " ]" + - "}", - ContentType.APPLICATION_JSON); - client().performRequest(HttpPost.METHOD_NAME, "/_aliases", Collections.emptyMap(), aliasFilter); + "}"); + client().performRequest(create); + Request doc3 = new Request(HttpPut.METHOD_NAME, "/index2/doc/3"); + doc3.setJsonEntity("{\"field\":\"value1\", \"rating\": \"good\"}"); + client().performRequest(doc3); + Request doc4 = new Request(HttpPut.METHOD_NAME, "/index2/doc/4"); + doc4.setJsonEntity("{\"field\":\"value2\"}"); + client().performRequest(doc4); + } + + { + Request doc5 = new Request(HttpPut.METHOD_NAME, "/index3/doc/5"); + doc5.setJsonEntity("{\"field\":\"value1\"}"); + client().performRequest(doc5); + Request doc6 = new Request(HttpPut.METHOD_NAME, "/index3/doc/6"); + doc6.setJsonEntity("{\"field\":\"value2\"}"); + client().performRequest(doc6); + } + + { + Request create = new Request(HttpPut.METHOD_NAME, "/index4"); + create.setJsonEntity( + "{" + + " \"mappings\": {" + + " \"doc\": {" + + " \"properties\": {" + + " \"field1\": {" + + " \"type\": \"keyword\"," + + " \"store\": true" + + " }," + + " \"field2\": {" + + " \"type\": \"keyword\"," + + " \"store\": true" + + " }" + + " }" + + " }" + + " }" + + "}"); + client().performRequest(create); + Request doc1 = new Request(HttpPut.METHOD_NAME, "/index4/doc/1"); + 
doc1.setJsonEntity("{\"field1\":\"value1\", \"field2\":\"value2\"}"); + client().performRequest(doc1); + + Request createFilteredAlias = new Request(HttpPost.METHOD_NAME, "/_aliases"); + createFilteredAlias.setJsonEntity( + "{" + + " \"actions\" : [" + + " {" + + " \"add\" : {" + + " \"index\" : \"index4\"," + + " \"alias\" : \"alias4\"," + + " \"filter\" : { \"term\" : { \"field2\" : \"value1\" } }" + + " }" + + " }" + + " ]" + + "}"); + client().performRequest(createFilteredAlias); + } - client().performRequest(HttpPost.METHOD_NAME, "/index1,index2,index3,index4/_refresh"); + client().performRequest(new Request(HttpPost.METHOD_NAME, "/_refresh")); } public void testSearchNoQuery() throws IOException { @@ -377,7 +394,9 @@ public void testSearchWithMatrixStats() throws IOException { public void testSearchWithParentJoin() throws IOException { final String indexName = "child_example"; - StringEntity parentMapping = new StringEntity("{\n" + + Request createIndex = new Request(HttpPut.METHOD_NAME, "/" + indexName); + createIndex.setJsonEntity( + "{\n" + " \"mappings\": {\n" + " \"qa\" : {\n" + " \"properties\" : {\n" + @@ -388,9 +407,11 @@ public void testSearchWithParentJoin() throws IOException { " }\n" + " }\n" + " }" + - "}", ContentType.APPLICATION_JSON); - client().performRequest(HttpPut.METHOD_NAME, "/" + indexName, Collections.emptyMap(), parentMapping); - StringEntity questionDoc = new StringEntity("{\n" + + "}"); + client().performRequest(createIndex); + Request questionDoc = new Request(HttpPut.METHOD_NAME, "/" + indexName + "/qa/1"); + questionDoc.setJsonEntity( + "{\n" + " \"body\": \"
<p>
I have Windows 2003 server and i bought a new Windows 2008 server...\",\n" + " \"title\": \"Whats the best way to file transfer my site from server to a newer one?\",\n" + " \"tags\": [\n" + @@ -399,9 +420,12 @@ public void testSearchWithParentJoin() throws IOException { " \"file-transfer\"\n" + " ],\n" + " \"qa_join_field\" : \"question\"\n" + - "}", ContentType.APPLICATION_JSON); - client().performRequest(HttpPut.METHOD_NAME, "/" + indexName + "/qa/1", Collections.emptyMap(), questionDoc); - StringEntity answerDoc1 = new StringEntity("{\n" + + "}"); + client().performRequest(questionDoc); + Request answerDoc1 = new Request(HttpPut.METHOD_NAME, "/" + indexName + "/qa/2"); + answerDoc1.addParameter("routing", "1"); + answerDoc1.setJsonEntity( + "{\n" + " \"owner\": {\n" + " \"location\": \"Norfolk, United Kingdom\",\n" + " \"display_name\": \"Sam\",\n" + @@ -413,9 +437,12 @@ public void testSearchWithParentJoin() throws IOException { " \"parent\" : \"1\"\n" + " },\n" + " \"creation_date\": \"2009-05-04T13:45:37.030\"\n" + - "}", ContentType.APPLICATION_JSON); - client().performRequest(HttpPut.METHOD_NAME, "/" + indexName + "/qa/2", Collections.singletonMap("routing", "1"), answerDoc1); - StringEntity answerDoc2 = new StringEntity("{\n" + + "}"); + client().performRequest(answerDoc1); + Request answerDoc2 = new Request(HttpPut.METHOD_NAME, "/" + indexName + "/qa/3"); + answerDoc2.addParameter("routing", "1"); + answerDoc2.setJsonEntity( + "{\n" + " \"owner\": {\n" + " \"location\": \"Norfolk, United Kingdom\",\n" + " \"display_name\": \"Troll\",\n" + @@ -427,9 +454,9 @@ public void testSearchWithParentJoin() throws IOException { " \"parent\" : \"1\"\n" + " },\n" + " \"creation_date\": \"2009-05-05T13:45:37.030\"\n" + - "}", ContentType.APPLICATION_JSON); - client().performRequest(HttpPut.METHOD_NAME, "/" + indexName + "/qa/3", Collections.singletonMap("routing", "1"), answerDoc2); - client().performRequest(HttpPost.METHOD_NAME, "/_refresh"); + "}"); + client().performRequest(answerDoc2); + client().performRequest(new Request(HttpPost.METHOD_NAME, "/_refresh")); TermsAggregationBuilder leafTermAgg = new TermsAggregationBuilder("top-names", ValueType.STRING) .field("owner.display_name.keyword").size(10); @@ -506,9 +533,10 @@ public void testSearchWithSuggest() throws IOException { } public void testSearchWithWeirdScriptFields() throws Exception { - HttpEntity entity = new NStringEntity("{ \"field\":\"value\"}", ContentType.APPLICATION_JSON); - client().performRequest("PUT", "test/type/1", Collections.emptyMap(), entity); - client().performRequest("POST", "/test/_refresh"); + Request doc = new Request("PUT", "test/type/1"); + doc.setJsonEntity("{\"field\":\"value\"}"); + client().performRequest(doc); + client().performRequest(new Request("POST", "/test/_refresh")); { SearchRequest searchRequest = new SearchRequest("test").source(SearchSourceBuilder.searchSource() @@ -547,13 +575,13 @@ public void testSearchWithWeirdScriptFields() throws Exception { } public void testSearchScroll() throws Exception { - for (int i = 0; i < 100; i++) { XContentBuilder builder = jsonBuilder().startObject().field("field", i).endObject(); - HttpEntity entity = new NStringEntity(Strings.toString(builder), ContentType.APPLICATION_JSON); - client().performRequest(HttpPut.METHOD_NAME, "test/type1/" + Integer.toString(i), Collections.emptyMap(), entity); + Request doc = new Request(HttpPut.METHOD_NAME, "/test/type1/" + Integer.toString(i)); + doc.setJsonEntity(Strings.toString(builder)); + client().performRequest(doc); } 
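// ---------------------------------------------------------------------------
// [Editor's sketch, not part of the change set] For context on what
// testSearchScroll drives after the indexing loop above: a hedged outline of
// the scroll round trip with the 6.x high level client. The page size and the
// two minute keep-alive come from this test; the method name searchScroll is
// the 6.x spelling (later renamed to scroll), and "client" is assumed to be a
// connected RestHighLevelClient:
import org.elasticsearch.action.search.ClearScrollRequest;
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.action.search.SearchResponse;
import org.elasticsearch.action.search.SearchScrollRequest;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.search.builder.SearchSourceBuilder;

import java.io.IOException;

final class ScrollSketch {
    static void drainScroll(RestHighLevelClient client) throws IOException {
        SearchRequest search = new SearchRequest("test")
                .scroll(TimeValue.timeValueMinutes(2))
                .source(new SearchSourceBuilder().size(35));
        SearchResponse response = client.search(search, RequestOptions.DEFAULT);
        while (response.getHits().getHits().length > 0) {
            // Each continuation request carries the scroll id of the previous page.
            SearchScrollRequest scroll = new SearchScrollRequest(response.getScrollId());
            scroll.scroll(TimeValue.timeValueMinutes(2));
            response = client.searchScroll(scroll, RequestOptions.DEFAULT);
        }
        // Release the server side scroll context once the pages are consumed.
        ClearScrollRequest clear = new ClearScrollRequest();
        clear.addScrollId(response.getScrollId());
        client.clearScroll(clear, RequestOptions.DEFAULT);
    }
}
// ---------------------------------------------------------------------------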
- client().performRequest(HttpPost.METHOD_NAME, "/test/_refresh"); + client().performRequest(new Request(HttpPost.METHOD_NAME, "/test/_refresh")); SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder().size(35).sort("field", SortOrder.ASC); SearchRequest searchRequest = new SearchRequest("test").scroll(TimeValue.timeValueMinutes(2)).source(searchSourceBuilder); @@ -878,11 +906,11 @@ public void testRenderSearchTemplate() throws IOException { assertToXContentEquivalent(expectedSource, actualSource, XContentType.JSON); } - - + + public void testMultiSearchTemplate() throws Exception { MultiSearchTemplateRequest multiSearchTemplateRequest = new MultiSearchTemplateRequest(); - + SearchTemplateRequest goodRequest = new SearchTemplateRequest(); goodRequest.setRequest(new SearchRequest("index")); goodRequest.setScriptType(ScriptType.INLINE); @@ -900,8 +928,8 @@ public void testMultiSearchTemplate() throws Exception { goodRequest.setExplain(true); goodRequest.setProfile(true); multiSearchTemplateRequest.add(goodRequest); - - + + SearchTemplateRequest badRequest = new SearchTemplateRequest(); badRequest.setRequest(new SearchRequest("index")); badRequest.setScriptType(ScriptType.INLINE); @@ -910,17 +938,17 @@ public void testMultiSearchTemplate() throws Exception { scriptParams.put("number", 10); badRequest.setScriptParams(scriptParams); - multiSearchTemplateRequest.add(badRequest); - + multiSearchTemplateRequest.add(badRequest); + MultiSearchTemplateResponse multiSearchTemplateResponse = - execute(multiSearchTemplateRequest, highLevelClient()::multiSearchTemplate, + execute(multiSearchTemplateRequest, highLevelClient()::multiSearchTemplate, highLevelClient()::multiSearchTemplateAsync); - + Item[] responses = multiSearchTemplateResponse.getResponses(); - + assertEquals(2, responses.length); - - + + assertNull(responses[0].getResponse().getSource()); SearchResponse goodResponse =responses[0].getResponse().getResponse(); assertNotNull(goodResponse); @@ -930,18 +958,18 @@ public void testMultiSearchTemplate() throws Exception { assertThat(goodResponse.getHits().getMaxScore(), greaterThan(0f)); SearchHit hit = goodResponse.getHits().getHits()[0]; assertNotNull(hit.getExplanation()); - assertFalse(goodResponse.getProfileResults().isEmpty()); - - + assertFalse(goodResponse.getProfileResults().isEmpty()); + + assertNull(responses[0].getResponse().getSource()); assertThat(responses[1].isFailure(), Matchers.is(true)); - assertNotNull(responses[1].getFailureMessage()); + assertNotNull(responses[1].getFailureMessage()); assertThat(responses[1].getFailureMessage(), containsString("json_parse_exception")); } - + public void testMultiSearchTemplateAllBad() throws Exception { MultiSearchTemplateRequest multiSearchTemplateRequest = new MultiSearchTemplateRequest(); - + SearchTemplateRequest badRequest1 = new SearchTemplateRequest(); badRequest1.setRequest(new SearchRequest("index")); badRequest1.setScriptType(ScriptType.INLINE); @@ -957,8 +985,8 @@ public void testMultiSearchTemplateAllBad() throws Exception { scriptParams.put("number", "BAD NUMBER"); badRequest1.setScriptParams(scriptParams); multiSearchTemplateRequest.add(badRequest1); - - + + SearchTemplateRequest badRequest2 = new SearchTemplateRequest(); badRequest2.setRequest(new SearchRequest("index")); badRequest2.setScriptType(ScriptType.INLINE); @@ -967,13 +995,13 @@ public void testMultiSearchTemplateAllBad() throws Exception { scriptParams.put("number", "BAD NUMBER"); badRequest2.setScriptParams(scriptParams); - 
multiSearchTemplateRequest.add(badRequest2); - - // The whole HTTP request should fail if no nested search requests are valid + multiSearchTemplateRequest.add(badRequest2); + + // The whole HTTP request should fail if no nested search requests are valid ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class, - () -> execute(multiSearchTemplateRequest, highLevelClient()::multiSearchTemplate, + () -> execute(multiSearchTemplateRequest, highLevelClient()::multiSearchTemplate, highLevelClient()::multiSearchTemplateAsync)); - + assertEquals(RestStatus.BAD_REQUEST, exception.status()); assertThat(exception.getMessage(), containsString("no requests added")); } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java index 7ec2ee80f04ac..45f9b5bbb0b0a 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java @@ -28,6 +28,9 @@ import org.elasticsearch.action.admin.cluster.repositories.put.PutRepositoryResponse; import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryRequest; import org.elasticsearch.action.admin.cluster.repositories.verify.VerifyRepositoryResponse; +import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusRequest; +import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusResponse; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequest; import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse; import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest; @@ -43,6 +46,7 @@ import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.is; public class SnapshotIT extends ESRestHighLevelClientTestCase { @@ -173,6 +177,34 @@ public void testGetSnapshots() throws IOException { contains("test_snapshot1", "test_snapshot2")); } + public void testSnapshotsStatus() throws IOException { + String testRepository = "test"; + String testSnapshot = "snapshot"; + String testIndex = "test_index"; + + PutRepositoryResponse putRepositoryResponse = createTestRepository(testRepository, FsRepository.TYPE, "{\"location\": \".\"}"); + assertTrue(putRepositoryResponse.isAcknowledged()); + + createIndex(testIndex, Settings.EMPTY); + + CreateSnapshotRequest createSnapshotRequest = new CreateSnapshotRequest(testRepository, testSnapshot); + createSnapshotRequest.indices(testIndex); + createSnapshotRequest.waitForCompletion(true); + CreateSnapshotResponse createSnapshotResponse = createTestSnapshot(createSnapshotRequest); + // check that the request went ok without parsing JSON here. When using the high level client, check acknowledgement instead. 
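// ---------------------------------------------------------------------------
// [Editor's sketch, not part of the change set] The snapshot status API wired
// up in this diff, end to end: RequestConverters#snapshotsStatus builds
// GET /_snapshot/{repository}/{snapshot1,snapshot2,...}/_status and the new
// SnapshotClient#status method parses the response. The repository and
// snapshot names below mirror this test; the ignoreUnavailable flag maps onto
// the ignore_unavailable query parameter via Params#withIgnoreUnavailable:
import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusRequest;
import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusResponse;
import org.elasticsearch.client.RequestOptions;
import org.elasticsearch.client.RestHighLevelClient;

import java.io.IOException;

final class SnapshotStatusSketch {
    static SnapshotsStatusResponse fetchStatus(RestHighLevelClient client) throws IOException {
        SnapshotsStatusRequest request = new SnapshotsStatusRequest();
        request.repository("test");
        request.snapshots(new String[]{"snapshot"});
        request.ignoreUnavailable(true); // becomes ignore_unavailable=true on the URL
        return client.snapshot().status(request, RequestOptions.DEFAULT);
    }
}
// ---------------------------------------------------------------------------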
+ assertEquals(RestStatus.OK, createSnapshotResponse.status()); + + SnapshotsStatusRequest request = new SnapshotsStatusRequest(); + request.repository(testRepository); + request.snapshots(new String[]{testSnapshot}); + SnapshotsStatusResponse response = execute(request, highLevelClient().snapshot()::status, + highLevelClient().snapshot()::statusAsync); + assertThat(response.getSnapshots().size(), equalTo(1)); + assertThat(response.getSnapshots().get(0).getSnapshot().getRepository(), equalTo(testRepository)); + assertThat(response.getSnapshots().get(0).getSnapshot().getSnapshotId().getName(), equalTo(testSnapshot)); + assertThat(response.getSnapshots().get(0).getIndices().containsKey(testIndex), is(true)); + } + public void testDeleteSnapshot() throws IOException { String repository = "test_repository"; String snapshot = "test_snapshot"; diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java index b8a6b7d2d8ad2..9dad115643cbf 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java @@ -19,8 +19,6 @@ package org.elasticsearch.client.documentation; -import org.apache.http.entity.ContentType; -import org.apache.http.nio.entity.NStringEntity; import org.elasticsearch.ElasticsearchException; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.DocWriteRequest; @@ -66,7 +64,6 @@ import org.elasticsearch.script.ScriptType; import org.elasticsearch.search.fetch.subphase.FetchSourceContext; -import java.util.Collections; import java.util.Date; import java.util.HashMap; import java.util.Map; @@ -756,7 +753,9 @@ public void onFailure(Exception e) { public void testGet() throws Exception { RestHighLevelClient client = highLevelClient(); { - String mappings = "{\n" + + Request createIndex = new Request("PUT", "/posts"); + createIndex.setJsonEntity( + "{\n" + " \"mappings\" : {\n" + " \"doc\" : {\n" + " \"properties\" : {\n" + @@ -767,10 +766,8 @@ public void testGet() throws Exception { " }\n" + " }\n" + " }\n" + - "}"; - - NStringEntity entity = new NStringEntity(mappings, ContentType.APPLICATION_JSON); - Response response = client().performRequest("PUT", "/posts", Collections.emptyMap(), entity); + "}"); + Response response = client().performRequest(createIndex); assertEquals(200, response.getStatusLine().getStatusCode()); IndexRequest indexRequest = new IndexRequest("posts", "doc", "1") @@ -1071,21 +1068,21 @@ public void testMultiGet() throws Exception { RestHighLevelClient client = highLevelClient(); { - String mappings = "{\n" + - " \"mappings\" : {\n" + - " \"type\" : {\n" + - " \"properties\" : {\n" + - " \"foo\" : {\n" + - " \"type\": \"text\",\n" + - " \"store\": true\n" + - " }\n" + - " }\n" + - " }\n" + - " }\n" + - "}"; - - NStringEntity entity = new NStringEntity(mappings, ContentType.APPLICATION_JSON); - Response response = client().performRequest("PUT", "/index", Collections.emptyMap(), entity); + Request createIndex = new Request("PUT", "/index"); + createIndex.setJsonEntity( + "{\n" + + " \"mappings\" : {\n" + + " \"type\" : {\n" + + " \"properties\" : {\n" + + " \"foo\" : {\n" + + " \"type\": \"text\",\n" + + " \"store\": true\n" + + " }\n" + + " }\n" + + " }\n" + + " }\n" + + "}"); + Response response = 
client().performRequest(createIndex); assertEquals(200, response.getStatusLine().getStatusCode()); } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MiscellaneousDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MiscellaneousDocumentationIT.java index 2186bd8ebfd30..75c14097c4581 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MiscellaneousDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MiscellaneousDocumentationIT.java @@ -22,14 +22,24 @@ import org.apache.http.HttpHost; import org.elasticsearch.Build; import org.elasticsearch.Version; +import org.elasticsearch.action.ActionListener; +import org.elasticsearch.action.LatchedActionListener; import org.elasticsearch.action.main.MainResponse; import org.elasticsearch.client.ESRestHighLevelClientTestCase; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.RestClient; import org.elasticsearch.client.RestHighLevelClient; import org.elasticsearch.cluster.ClusterName; +import org.elasticsearch.protocol.xpack.XPackInfoRequest; +import org.elasticsearch.protocol.xpack.XPackInfoResponse; +import org.elasticsearch.protocol.xpack.XPackInfoResponse.BuildInfo; +import org.elasticsearch.protocol.xpack.XPackInfoResponse.FeatureSetsInfo; +import org.elasticsearch.protocol.xpack.XPackInfoResponse.LicenseInfo; import java.io.IOException; +import java.util.EnumSet; +import java.util.concurrent.CountDownLatch; +import java.util.concurrent.TimeUnit; /** * Documentation for miscellaneous APIs in the high level java client. @@ -66,6 +76,59 @@ public void testPing() throws IOException { assertTrue(response); } + public void testXPackInfo() throws Exception { + RestHighLevelClient client = highLevelClient(); + { + //tag::x-pack-info-execute + XPackInfoRequest request = new XPackInfoRequest(); + request.setVerbose(true); // <1> + request.setCategories(EnumSet.of( // <2> + XPackInfoRequest.Category.BUILD, + XPackInfoRequest.Category.LICENSE, + XPackInfoRequest.Category.FEATURES)); + XPackInfoResponse response = client.xpack().info(request, RequestOptions.DEFAULT); + //end::x-pack-info-execute + + //tag::x-pack-info-response + BuildInfo build = response.getBuildInfo(); // <1> + LicenseInfo license = response.getLicenseInfo(); // <2> + assertEquals(XPackInfoResponse.BASIC_SELF_GENERATED_LICENSE_EXPIRATION_MILLIS, + license.getExpiryDate()); // <3> + FeatureSetsInfo features = response.getFeatureSetsInfo(); // <4> + //end::x-pack-info-response + + assertNotNull(response.getBuildInfo()); + assertNotNull(response.getLicenseInfo()); + assertNotNull(response.getFeatureSetsInfo()); + } + { + XPackInfoRequest request = new XPackInfoRequest(); + // tag::x-pack-info-execute-listener + ActionListener<XPackInfoResponse> listener = new ActionListener<XPackInfoResponse>() { + @Override + public void onResponse(XPackInfoResponse indexResponse) { + // <1> + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + // end::x-pack-info-execute-listener + + // Replace the empty listener by a blocking listener in test + final CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, latch); + + // tag::x-pack-info-execute-async + client.xpack().infoAsync(request, RequestOptions.DEFAULT, listener); // <1> + // end::x-pack-info-execute-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } + } + public void testInitializationFromClientBuilder() throws
IOException { //tag::rest-high-level-client-init RestHighLevelClient client = new RestHighLevelClient( diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java index 48d01963e236d..403ebc7d774a4 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java @@ -37,11 +37,16 @@ import org.elasticsearch.action.support.IndicesOptions; import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotRequest; import org.elasticsearch.action.admin.cluster.snapshots.delete.DeleteSnapshotResponse; +import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotStats; +import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotStatus; +import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusRequest; +import org.elasticsearch.action.admin.cluster.snapshots.status.SnapshotsStatusResponse; import org.elasticsearch.client.ESRestHighLevelClientTestCase; import org.elasticsearch.client.Request; import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; import org.elasticsearch.client.RestHighLevelClient; +import org.elasticsearch.cluster.SnapshotsInProgress; import org.elasticsearch.cluster.metadata.RepositoryMetaData; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; @@ -84,8 +89,8 @@ public class SnapshotClientDocumentationIT extends ESRestHighLevelClientTestCase { private static final String repositoryName = "test_repository"; - private static final String snapshotName = "test_snapshot"; + private static final String indexName = "test_index"; public void testSnapshotCreateRepository() throws IOException { RestHighLevelClient client = highLevelClient(); @@ -466,6 +471,7 @@ public void testSnapshotGetSnapshots() throws IOException { RestHighLevelClient client = highLevelClient(); createTestRepositories(); + createTestIndex(); createTestSnapshots(); // tag::get-snapshots-request @@ -543,10 +549,84 @@ public void onFailure(Exception e) { } } + public void testSnapshotSnapshotsStatus() throws IOException { + RestHighLevelClient client = highLevelClient(); + createTestRepositories(); + createTestIndex(); + createTestSnapshots(); + + // tag::snapshots-status-request + SnapshotsStatusRequest request = new SnapshotsStatusRequest(); + // end::snapshots-status-request + + // tag::snapshots-status-request-repository + request.repository(repositoryName); // <1> + // end::snapshots-status-request-repository + // tag::snapshots-status-request-snapshots + String [] snapshots = new String[] {snapshotName}; + request.snapshots(snapshots); // <1> + // end::snapshots-status-request-snapshots + // tag::snapshots-status-request-ignoreUnavailable + request.ignoreUnavailable(true); // <1> + // end::snapshots-status-request-ignoreUnavailable + // tag::snapshots-status-request-masterTimeout + request.masterNodeTimeout(TimeValue.timeValueMinutes(1)); // <1> + request.masterNodeTimeout("1m"); // <2> + // end::snapshots-status-request-masterTimeout + + // tag::snapshots-status-execute + SnapshotsStatusResponse response = client.snapshot().status(request, RequestOptions.DEFAULT); + // end::snapshots-status-execute + + // 
tag::snapshots-status-response + List<SnapshotStatus> snapshotStatusesResponse = response.getSnapshots(); + SnapshotStatus snapshotStatus = snapshotStatusesResponse.get(0); // <1> + SnapshotsInProgress.State snapshotState = snapshotStatus.getState(); // <2> + SnapshotStats shardStats = snapshotStatus.getIndices().get(indexName).getShards().get(0).getStats(); // <3> + // end::snapshots-status-response + assertThat(snapshotStatusesResponse.size(), equalTo(1)); + assertThat(snapshotStatusesResponse.get(0).getSnapshot().getRepository(), equalTo(SnapshotClientDocumentationIT.repositoryName)); + assertThat(snapshotStatusesResponse.get(0).getSnapshot().getSnapshotId().getName(), equalTo(snapshotName)); + assertThat(snapshotState.completed(), equalTo(true)); + } + + public void testSnapshotSnapshotsStatusAsync() throws InterruptedException { + RestHighLevelClient client = highLevelClient(); + { + SnapshotsStatusRequest request = new SnapshotsStatusRequest(); + + // tag::snapshots-status-execute-listener + ActionListener<SnapshotsStatusResponse> listener = + new ActionListener<SnapshotsStatusResponse>() { + @Override + public void onResponse(SnapshotsStatusResponse snapshotsStatusResponse) { + // <1> + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + // end::snapshots-status-execute-listener + + // Replace the empty listener with a blocking listener in test + final CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, latch); + + // tag::snapshots-status-execute-async + client.snapshot().statusAsync(request, RequestOptions.DEFAULT, listener); // <1> + // end::snapshots-status-execute-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } + } + public void testSnapshotDeleteSnapshot() throws IOException { RestHighLevelClient client = highLevelClient(); createTestRepositories(); + createTestIndex(); createTestSnapshots(); // tag::delete-snapshot-request @@ -608,9 +688,14 @@ private void createTestRepositories() throws IOException { assertTrue(highLevelClient().snapshot().createRepository(request, RequestOptions.DEFAULT).isAcknowledged()); } + private void createTestIndex() throws IOException { + createIndex(indexName, Settings.EMPTY); + } + private void createTestSnapshots() throws IOException { Request createSnapshot = new Request("put", String.format(Locale.ROOT, "_snapshot/%s/%s", repositoryName, snapshotName)); createSnapshot.addParameter("wait_for_completion", "true"); + createSnapshot.setJsonEntity("{\"indices\":\"" + indexName + "\"}"); Response response = highLevelClient().getLowLevelClient().performRequest(createSnapshot); // check that the request went ok without parsing JSON here. When using the high level client, check acknowledgement instead. assertEquals(200, response.getStatusLine().getStatusCode()); diff --git a/client/rest/src/test/java/org/elasticsearch/client/RestClientBuilderIntegTests.java b/client/rest/src/test/java/org/elasticsearch/client/RestClientBuilderIntegTests.java index 199b7542e62a2..30359ea90f666 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/RestClientBuilderIntegTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientBuilderIntegTests.java @@ -72,11 +72,14 @@ public static void stopHttpServers() throws IOException { } public void testBuilderUsesDefaultSSLContext() throws Exception { + assumeFalse("Due to a bug inside the JDK, this test currently can't run with Java 11. 
" + + "See: https://github.com/elastic/elasticsearch/issues/31940", + System.getProperty("java.version").contains("11")); final SSLContext defaultSSLContext = SSLContext.getDefault(); try { try (RestClient client = buildRestClient()) { try { - client.performRequest("GET", "/"); + client.performRequest(new Request("GET", "/")); fail("connection should have been rejected due to SSL handshake"); } catch (Exception e) { assertThat(e.getMessage(), containsString("General SSLEngine problem")); @@ -85,7 +88,7 @@ public void testBuilderUsesDefaultSSLContext() throws Exception { SSLContext.setDefault(getSslContext()); try (RestClient client = buildRestClient()) { - Response response = client.performRequest("GET", "/"); + Response response = client.performRequest(new Request("GET", "/")); assertEquals(200, response.getStatusLine().getStatusCode()); } } finally { diff --git a/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostIntegTests.java b/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostIntegTests.java index 114d34c73da89..6b5bb3c98ee5b 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostIntegTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientSingleHostIntegTests.java @@ -256,35 +256,51 @@ public void testGetWithBody() throws IOException { public void testEncodeParams() throws IOException { { - Response response = restClient.performRequest("PUT", "/200", Collections.singletonMap("routing", "this/is/the/routing")); + Request request = new Request("PUT", "/200"); + request.addParameter("routing", "this/is/the/routing"); + Response response = restClient.performRequest(request); assertEquals(pathPrefix + "/200?routing=this%2Fis%2Fthe%2Frouting", response.getRequestLine().getUri()); } { - Response response = restClient.performRequest("PUT", "/200", Collections.singletonMap("routing", "this|is|the|routing")); + Request request = new Request("PUT", "/200"); + request.addParameter("routing", "this|is|the|routing"); + Response response = restClient.performRequest(request); assertEquals(pathPrefix + "/200?routing=this%7Cis%7Cthe%7Crouting", response.getRequestLine().getUri()); } { - Response response = restClient.performRequest("PUT", "/200", Collections.singletonMap("routing", "routing#1")); + Request request = new Request("PUT", "/200"); + request.addParameter("routing", "routing#1"); + Response response = restClient.performRequest(request); assertEquals(pathPrefix + "/200?routing=routing%231", response.getRequestLine().getUri()); } { - Response response = restClient.performRequest("PUT", "/200", Collections.singletonMap("routing", "中文")); + Request request = new Request("PUT", "/200"); + request.addParameter("routing", "中文"); + Response response = restClient.performRequest(request); assertEquals(pathPrefix + "/200?routing=%E4%B8%AD%E6%96%87", response.getRequestLine().getUri()); } { - Response response = restClient.performRequest("PUT", "/200", Collections.singletonMap("routing", "foo bar")); + Request request = new Request("PUT", "/200"); + request.addParameter("routing", "foo bar"); + Response response = restClient.performRequest(request); assertEquals(pathPrefix + "/200?routing=foo+bar", response.getRequestLine().getUri()); } { - Response response = restClient.performRequest("PUT", "/200", Collections.singletonMap("routing", "foo+bar")); + Request request = new Request("PUT", "/200"); + request.addParameter("routing", "foo+bar"); + Response response = restClient.performRequest(request); 
assertEquals(pathPrefix + "/200?routing=foo%2Bbar", response.getRequestLine().getUri()); } { - Response response = restClient.performRequest("PUT", "/200", Collections.singletonMap("routing", "foo/bar")); + Request request = new Request("PUT", "/200"); + request.addParameter("routing", "foo/bar"); + Response response = restClient.performRequest(request); assertEquals(pathPrefix + "/200?routing=foo%2Fbar", response.getRequestLine().getUri()); } { - Response response = restClient.performRequest("PUT", "/200", Collections.singletonMap("routing", "foo^bar")); + Request request = new Request("PUT", "/200"); + request.addParameter("routing", "foo^bar"); + Response response = restClient.performRequest(request); assertEquals(pathPrefix + "/200?routing=foo%5Ebar", response.getRequestLine().getUri()); } } @@ -341,14 +357,14 @@ public void testAuthCredentialsAreNotClearedOnAuthChallenge() throws IOException public void testUrlWithoutLeadingSlash() throws Exception { if (pathPrefix.length() == 0) { try { - restClient.performRequest("GET", "200"); + restClient.performRequest(new Request("GET", "200")); fail("request should have failed"); } catch (ResponseException e) { assertEquals(404, e.getResponse().getStatusLine().getStatusCode()); } } else { { - Response response = restClient.performRequest("GET", "200"); + Response response = restClient.performRequest(new Request("GET", "200")); //a trailing slash gets automatically added if a pathPrefix is configured assertEquals(200, response.getStatusLine().getStatusCode()); } @@ -357,7 +373,7 @@ public void testUrlWithoutLeadingSlash() throws Exception { try (RestClient restClient = RestClient.builder( new HttpHost(httpServer.getAddress().getHostString(), httpServer.getAddress().getPort())) .setPathPrefix(pathPrefix.substring(1)).build()) { - Response response = restClient.performRequest("GET", "200"); + Response response = restClient.performRequest(new Request("GET", "200")); //a trailing slash gets automatically added if a pathPrefix is configured assertEquals(200, response.getStatusLine().getStatusCode()); } diff --git a/client/rest/src/test/java/org/elasticsearch/client/documentation/RestClientDocumentation.java b/client/rest/src/test/java/org/elasticsearch/client/documentation/RestClientDocumentation.java index d347353a1fb55..ce2e0907560cd 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/documentation/RestClientDocumentation.java +++ b/client/rest/src/test/java/org/elasticsearch/client/documentation/RestClientDocumentation.java @@ -267,7 +267,7 @@ public void onFailure(Exception exception) { } { //tag::rest-client-response2 - Response response = restClient.performRequest("GET", "/"); + Response response = restClient.performRequest(new Request("GET", "/")); RequestLine requestLine = response.getRequestLine(); // <1> HttpHost host = response.getHost(); // <2> int statusCode = response.getStatusLine().getStatusCode(); // <3> diff --git a/dev-tools/es_release_notes.pl b/dev-tools/es_release_notes.pl index 93a4ba74f643b..4ea7e124598ec 100755 --- a/dev-tools/es_release_notes.pl +++ b/dev-tools/es_release_notes.pl @@ -46,6 +46,12 @@ 'other' => 'NOT CLASSIFIED', ); +my %Area_Overrides = ( + ':ml' => 'Machine Learning', + ':beats' => 'Beats Plugin', + ':Docs' => 'Docs Infrastructure' +); + use JSON(); use Encode qw(encode_utf8); @@ -175,8 +181,14 @@ sub fetch_issues { # uncomment for including/excluding PRs already issued in other versions # next if grep {$_->{name}=~/^v2/} @{$issue->{labels}}; my %labels = map { $_->{name} => 1 } @{ 
$issue->{labels} }; - my ($header) = map { m{:[^/]+/(.+)} && $1 } - grep {/^:/} sort keys %labels; + my @area_labels = grep {/^:/} sort keys %labels; + my ($header) = map { m{:[^/]+/(.+)} && $1 } @area_labels; + if (scalar @area_labels > 1) { + $header = "MULTIPLE AREA LABELS"; + } + if (scalar @area_labels == 1 && exists $Area_Overrides{$area_labels[0]}) { + $header = $Area_Overrides{$area_labels[0]}; + } $header ||= 'NOT CLASSIFIED'; for (@Groups) { if ( $labels{$_} ) { diff --git a/docs/java-rest/high-level/getting-started.asciidoc b/docs/java-rest/high-level/getting-started.asciidoc index 3e9b9fa7ea08f..af03a6a86bbd5 100644 --- a/docs/java-rest/high-level/getting-started.asciidoc +++ b/docs/java-rest/high-level/getting-started.asciidoc @@ -130,12 +130,11 @@ include-tagged::{doc-tests}/MiscellaneousDocumentationIT.java[rest-high-level-cl -------------------------------------------------- The high-level client will internally create the low-level client used to -perform requests based on the provided builder, and manage its lifecycle. - -The high-level client instance needs to be closed when no longer needed so that -all the resources used by it get properly released, as well as the underlying -http client instance and its threads. This can be done through the `close` -method, which will close the internal `RestClient` instance. +perform requests based on the provided builder. That low-level client +maintains a pool of connections and starts some threads so you should +close the high-level client when you are well and truly done with +it, and it will in turn close the internal low-level client to free those +resources. This can be done through the `close` method: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- diff --git a/docs/java-rest/high-level/miscellaneous/x-pack-info.asciidoc b/docs/java-rest/high-level/miscellaneous/x-pack-info.asciidoc new file mode 100644 index 0000000000000..f877ed720db69 --- /dev/null +++ b/docs/java-rest/high-level/miscellaneous/x-pack-info.asciidoc @@ -0,0 +1,64 @@ +[[java-rest-high-x-pack-info]] +=== X-Pack Info API + +[[java-rest-high-x-pack-info-execution]] +==== Execution + +General information about the installed {xpack} features can be retrieved +using the `xpack().info()` method: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MiscellaneousDocumentationIT.java[x-pack-info-execute] +-------------------------------------------------- +<1> Enable verbose mode. The default is `false`, but `true` will return +more information. +<2> Set the categories of information to retrieve. The default is to +return no information, which is useful for checking if {xpack} is installed +but not much else. + +[[java-rest-high-x-pack-info-response]] +==== Response + +The returned `XPackInfoResponse` can contain `BuildInfo`, `LicenseInfo`, +and `FeatureSetsInfo` depending on the categories requested. + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MiscellaneousDocumentationIT.java[x-pack-info-response] +-------------------------------------------------- +<1> `BuildInfo` contains the commit hash from which Elasticsearch was +built and the timestamp at which the x-pack module was created. +<2> `LicenseInfo` contains the type of license that the cluster is using +and its expiration date. +<3> Basic licenses do not expire and will return this constant. 
+<4> `FeatureSetsInfo` contains a `Map` from the name of a feature to +information about that feature, such as whether or not it is available under +the current license. + +[[java-rest-high-x-pack-info-async]] +==== Asynchronous Execution + +This request can be executed asynchronously: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MiscellaneousDocumentationIT.java[x-pack-info-execute-async] +-------------------------------------------------- +<1> The `XPackInfoRequest` to execute and the `ActionListener` to use when +the execution completes + +The asynchronous method does not block and returns immediately. Once it is +completed the `ActionListener` is called back using the `onResponse` method +if the execution successfully completed or using the `onFailure` method if +it failed. + +A typical listener for `XPackInfoResponse` looks like: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MiscellaneousDocumentationIT.java[x-pack-info-execute-listener] +-------------------------------------------------- +<1> Called when the execution is successfully completed. The response is +provided as an argument +<2> Called in case of failure. The raised exception is provided as an argument diff --git a/docs/java-rest/high-level/snapshot/snapshots_status.asciidoc b/docs/java-rest/high-level/snapshot/snapshots_status.asciidoc new file mode 100644 index 0000000000000..8f91d774f4e19 --- /dev/null +++ b/docs/java-rest/high-level/snapshot/snapshots_status.asciidoc @@ -0,0 +1,97 @@ +[[java-rest-high-snapshot-snapshots-status]] +=== Snapshots Status API + +The Snapshots Status API allows you to retrieve detailed information about snapshots in progress. + +[[java-rest-high-snapshot-snapshots-status-request]] +==== Snapshots Status Request + +A `SnapshotsStatusRequest`: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[snapshots-status-request] +-------------------------------------------------- + +==== Required Arguments +The following arguments must be provided: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[snapshots-status-request-repository] +-------------------------------------------------- +<1> Sets the repository to check for snapshot statuses + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[snapshots-status-request-snapshots] +-------------------------------------------------- +<1> The list of snapshot names to check the status of + +==== Optional Arguments +The following arguments can optionally be provided: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[snapshots-status-request-ignoreUnavailable] +-------------------------------------------------- +<1> The command fails if some of the requested snapshots are unavailable. With the `ignore_unavailable` flag +set to `true`, the request instead returns the status of all snapshots that are currently available. 
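Taken together, the arguments above can be assembled into a short synchronous sketch. This is illustrative only and not part of the change itself: the repository and snapshot names are made up, `client` is assumed to be an initialized `RestHighLevelClient`, and imports are omitted in the style of the snippets above.

["source","java"]
--------------------------------------------------
// illustrative names, not taken from this change
SnapshotsStatusRequest request = new SnapshotsStatusRequest();
request.repository("test_repository");            // required
request.snapshots(new String[]{"test_snapshot"}); // required
request.ignoreUnavailable(true);                  // optional
SnapshotsStatusResponse response =
    client.snapshot().status(request, RequestOptions.DEFAULT);
--------------------------------------------------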
+ +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[snapshots-status-request-masterTimeout] +-------------------------------------------------- +<1> Timeout to connect to the master node as a `TimeValue` +<2> Timeout to connect to the master node as a `String` + +[[java-rest-high-snapshot-snapshots-status-sync]] +==== Synchronous Execution + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[snapshots-status-execute] +-------------------------------------------------- + +[[java-rest-high-snapshot-snapshots-status-async]] +==== Asynchronous Execution + +The asynchronous execution of retrieving snapshot statuses requires both the +`SnapshotsStatusRequest` instance and an `ActionListener` instance to be +passed to the asynchronous method: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[snapshots-status-execute-async] +-------------------------------------------------- +<1> The `SnapshotsStatusRequest` to execute and the `ActionListener` +to use when the execution completes + +The asynchronous method does not block and returns immediately. Once it is +completed the `ActionListener` is called back using the `onResponse` method +if the execution successfully completed or using the `onFailure` method if +it failed. + +A typical listener for `SnapshotsStatusResponse` looks like: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[snapshots-status-execute-listener] +-------------------------------------------------- +<1> Called when the execution is successfully completed. The response is +provided as an argument +<2> Called in case of a failure. 
The raised exception is provided as an argument + +[[java-rest-high-snapshot-snapshots-status-response]] +==== Snapshots Status Response + +The returned `SnapshotsStatusResponse` allows you to retrieve information about the +executed operation as follows: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[snapshots-status-response] +-------------------------------------------------- +<1> The response contains a list of snapshot statuses +<2> Each status contains information about the snapshot +<3> Example of reading snapshot statistics about a specific index and shard diff --git a/docs/java-rest/high-level/supported-apis.asciidoc b/docs/java-rest/high-level/supported-apis.asciidoc index 71420eb087f85..cf38040e865db 100644 --- a/docs/java-rest/high-level/supported-apis.asciidoc +++ b/docs/java-rest/high-level/supported-apis.asciidoc @@ -53,9 +53,11 @@ The Java High Level REST Client supports the following Miscellaneous APIs: * <> * <> +* <<java-rest-high-x-pack-info>> include::miscellaneous/main.asciidoc[] include::miscellaneous/ping.asciidoc[] +include::miscellaneous/x-pack-info.asciidoc[] == Indices APIs @@ -152,6 +154,7 @@ The Java High Level REST Client supports the following Snapshot APIs: * <> * <> * <> +* <<java-rest-high-snapshot-snapshots-status>> * <> include::snapshot/get_repository.asciidoc[] @@ -160,6 +163,7 @@ include::snapshot/delete_repository.asciidoc[] include::snapshot/verify_repository.asciidoc[] include::snapshot/create_snapshot.asciidoc[] include::snapshot/get_snapshots.asciidoc[] +include::snapshot/snapshots_status.asciidoc[] include::snapshot/delete_snapshot.asciidoc[] == Tasks APIs @@ -181,4 +185,3 @@ The Java High Level REST Client supports the following Scripts APIs: include::script/get_script.asciidoc[] include::script/delete_script.asciidoc[] - diff --git a/docs/painless/painless-getting-started.asciidoc b/docs/painless/painless-getting-started.asciidoc index 2cf91666ba48d..887769e49abbe 100644 --- a/docs/painless/painless-getting-started.asciidoc +++ b/docs/painless/painless-getting-started.asciidoc @@ -119,6 +119,30 @@ GET hockey/_search ---------------------------------------------------------------- // CONSOLE + +[float] +===== Missing values + +If you request the value from a field `field` that isn’t in +the document, `doc['field'].value` for this document returns: + +- `0` if a `field` has a numeric datatype (long, double etc.) +- `false` if a `field` has a boolean datatype +- epoch date if a `field` has a date datatype +- `null` if a `field` has a string datatype +- `null` if a `field` has a geo datatype +- `""` if a `field` has a binary datatype + +IMPORTANT: Starting in 7.0, `doc['field'].value` throws an exception if +the field is missing in a document. To enable this behavior now, +set a {ref}/jvm-options.html[`jvm.option`] +`-Des.scripting.exception_for_missing_value=true` on a node. If you do not enable +this behavior, a deprecation warning is logged on startup. + +To check if a document is missing a value, you can call +`doc['field'].size() == 0`. 
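Using that check, a script can fall back to an explicit default instead of relying on the per-type defaults above. A minimal sketch using the high-level client, assuming an index `hockey` with a date field `born` and an initialized `RestHighLevelClient` named `client` (all illustrative; imports omitted in the style of the snippets in this change):

["source","java"]
--------------------------------------------------
// guard the access instead of relying on the type-dependent default
SearchSourceBuilder source = new SearchSourceBuilder()
    .scriptField("birth_year", new Script(ScriptType.INLINE, "painless",
        "doc['born'].size() == 0 ? null : doc['born'].value.getYear()",
        Collections.emptyMap()));
SearchResponse response = client.search(
    new SearchRequest("hockey").source(source), RequestOptions.DEFAULT);
--------------------------------------------------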
+ + [float] ==== Updating Fields with Painless diff --git a/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc b/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc index c2d1614ad6e56..efbd8ef7389bb 100644 --- a/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc +++ b/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc @@ -33,7 +33,7 @@ Available expressions for interval: `year` (`1y`), `quarter` (`1q`), `month` (`1 Time values can also be specified via abbreviations supported by <> parsing. Note that fractional time values are not supported, but you can address this by shifting to another time unit (e.g., `1.5h` could instead be specified as `90m`). Also note that time intervals larger than -than days do not support arbitrary values but can only be one unit large (e.g. `1y` is valid, `2y` is not). +days do not support arbitrary values but can only be one unit large (e.g. `1y` is valid, `2y` is not). [source,js] -------------------------------------------------- diff --git a/docs/reference/analysis/tokenfilters/synonym-graph-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/synonym-graph-tokenfilter.asciidoc index a8f2108b57a0c..8be5647e10f27 100644 --- a/docs/reference/analysis/tokenfilters/synonym-graph-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/synonym-graph-tokenfilter.asciidoc @@ -50,7 +50,49 @@ PUT /test_index The above configures a `search_synonyms` filter, with a path of `analysis/synonym.txt` (relative to the `config` location). The `search_synonyms` analyzer is then configured with the filter. -Additional settings are: `expand` (defaults to `true`). + +Additional settings are: + +* `expand` (defaults to `true`). +* `lenient` (defaults to `false`). If `true`, ignores exceptions while parsing the synonym configuration. It is important +to note that only those synonym rules which cannot get parsed are ignored. For instance, consider the following request: + +[source,js] +-------------------------------------------------- +PUT /test_index +{ + "settings": { + "index" : { + "analysis" : { + "analyzer" : { + "synonym" : { + "tokenizer" : "standard", + "filter" : ["my_stop", "synonym_graph"] + } + }, + "filter" : { + "my_stop": { + "type" : "stop", + "stopwords": ["bar"] + }, + "synonym_graph" : { + "type" : "synonym_graph", + "lenient": true, + "synonyms" : ["foo, bar => baz"] + } + } + } + } + } +} +-------------------------------------------------- +// CONSOLE +With the above request, the word `bar` gets skipped but a mapping `foo => baz` is still added. However, if the mapping +being added was "foo, baz => bar", nothing would get added to the synonym list. This is because the target word for the +mapping is itself eliminated because it was a stop word. Similarly, if the mapping was "bar, foo, baz" and `expand` was +set to `false`, no mapping would get added, as when `expand=false` the target mapping is the first word. However, if +`expand=true` then the mappings added would be equivalent to `foo, baz => foo, baz`, i.e., all mappings other than the +stop word. 
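The same index can also be created from Java using the low-level client conventions seen elsewhere in this change. This is a sketch, assuming `client()` returns an initialized low-level `RestClient` as in the test classes above:

["source","java"]
--------------------------------------------------
// create the index with a lenient synonym_graph filter; the stop filter
// removes "bar", so that side of the rule is skipped while the
// "foo => baz" mapping is still added instead of failing the request
Request createIndex = new Request("PUT", "/test_index");
createIndex.setJsonEntity(
    "{\"settings\": {\"index\": {\"analysis\": {"
        + "\"filter\": {"
        + "  \"my_stop\": {\"type\": \"stop\", \"stopwords\": [\"bar\"]},"
        + "  \"synonym_graph\": {\"type\": \"synonym_graph\","
        + "    \"lenient\": true, \"synonyms\": [\"foo, bar => baz\"]}},"
        + "\"analyzer\": {\"synonym\": {\"tokenizer\": \"standard\","
        + "  \"filter\": [\"my_stop\", \"synonym_graph\"]}}}}}}");
Response response = client().performRequest(createIndex);
--------------------------------------------------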
[float] ==== `tokenizer` and `ignore_case` are deprecated diff --git a/docs/reference/analysis/tokenfilters/synonym-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/synonym-tokenfilter.asciidoc index 68d3f444b2d82..5a6a84493ab60 100644 --- a/docs/reference/analysis/tokenfilters/synonym-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/synonym-tokenfilter.asciidoc @@ -33,12 +33,55 @@ PUT /test_index The above configures a `synonym` filter, with a path of `analysis/synonym.txt` (relative to the `config` location). The -`synonym` analyzer is then configured with the filter. Additional -settings is: `expand` (defaults to `true`). +`synonym` analyzer is then configured with the filter. This filter tokenizes synonyms with whatever tokenizer and token filters appear before it in the chain. +Additional settings are: + +* `expand` (defaults to `true`). +* `lenient` (defaults to `false`). If `true`, ignores exceptions while parsing the synonym configuration. It is important +to note that only those synonym rules which cannot get parsed are ignored. For instance, consider the following request: + +[source,js] +-------------------------------------------------- +PUT /test_index +{ + "settings": { + "index" : { + "analysis" : { + "analyzer" : { + "synonym" : { + "tokenizer" : "standard", + "filter" : ["my_stop", "synonym"] + } + }, + "filter" : { + "my_stop": { + "type" : "stop", + "stopwords": ["bar"] + }, + "synonym" : { + "type" : "synonym", + "lenient": true, + "synonyms" : ["foo, bar => baz"] + } + } + } + } + } +} +-------------------------------------------------- +// CONSOLE +With the above request, the word `bar` gets skipped but a mapping `foo => baz` is still added. However, if the mapping +being added was "foo, baz => bar", nothing would get added to the synonym list. This is because the target word for the +mapping is itself eliminated because it was a stop word. Similarly, if the mapping was "bar, foo, baz" and `expand` was +set to `false`, no mapping would get added, as when `expand=false` the target mapping is the first word. However, if +`expand=true` then the mappings added would be equivalent to `foo, baz => foo, baz`, i.e., all mappings other than the +stop word. + + [float] ==== `tokenizer` and `ignore_case` are deprecated diff --git a/docs/reference/getting-started.asciidoc b/docs/reference/getting-started.asciidoc index 39006d1ab53f8..a29a743fed82f 100755 --- a/docs/reference/getting-started.asciidoc +++ b/docs/reference/getting-started.asciidoc @@ -104,6 +104,11 @@ With that out of the way, let's get started with the fun part... == Installation +You can skip installation completely by using our hosted +Elasticsearch Service on https://www.elastic.co/cloud[Elastic Cloud], which is +available on AWS and GCP. You can +https://www.elastic.co/cloud/elasticsearch-service/signup[try out the hosted service] for free. + Elasticsearch requires at least Java 8. Specifically as of this writing, it is recommended that you use the Oracle JDK version {jdk}. Java installation varies from platform to platform so we won't go into those details here. Oracle's recommended installation documentation can be found on http://docs.oracle.com/javase/8/docs/technotes/guides/install/install_overview.html[Oracle's website]. 
Suffice to say, before you install Elasticsearch, please check your Java version first by running (and then install/upgrade accordingly if needed): [source,sh] diff --git a/docs/reference/ingest/ingest-node.asciidoc b/docs/reference/ingest/ingest-node.asciidoc index c8c459312a5da..3991b738e1323 100644 --- a/docs/reference/ingest/ingest-node.asciidoc +++ b/docs/reference/ingest/ingest-node.asciidoc @@ -1767,8 +1767,9 @@ Removes existing fields. If one field doesn't exist, an exception will be thrown .Remove Options [options="header"] |====== -| Name | Required | Default | Description -| `field` | yes | - | Fields to be removed +| Name | Required | Default | Description +| `field` | yes | - | Fields to be removed +| `ignore_missing` | no | `false` | If `true` and `field` does not exist or is `null`, the processor quietly exits without modifying the document |====== Here is an example to remove a single field: diff --git a/docs/reference/migration/migrate_7_0/search.asciidoc b/docs/reference/migration/migrate_7_0/search.asciidoc index 123ad201cbbaf..11f4650912723 100644 --- a/docs/reference/migration/migrate_7_0/search.asciidoc +++ b/docs/reference/migration/migrate_7_0/search.asciidoc @@ -85,6 +85,13 @@ for a particular index with the index setting `index.max_regex_length`. Search requests with extra content after the main object will no longer be accepted by the `_search` endpoint. A parsing exception will be thrown instead. +==== Context Completion Suggester + +The ability to query and index context enabled suggestions without context, +deprecated in 6.x, has been removed. Context enabled suggestion queries +without contexts have to visit every suggestion, which degrades the search performance +considerably. + ==== Semantics changed for `max_concurrent_shard_requests` `max_concurrent_shard_requests` used to limit the total number of concurrent shard diff --git a/docs/reference/setup/install.asciidoc b/docs/reference/setup/install.asciidoc index 783cb804e7af1..7675e5ad14659 100644 --- a/docs/reference/setup/install.asciidoc +++ b/docs/reference/setup/install.asciidoc @@ -1,6 +1,11 @@ [[install-elasticsearch]] == Installing Elasticsearch +Elasticsearch can be run on your own hardware or using our hosted +Elasticsearch Service on https://www.elastic.co/cloud[Elastic Cloud], which is +available on AWS and GCP. You can +https://www.elastic.co/cloud/elasticsearch-service/signup[try out the hosted service] for free. + Elasticsearch is provided in the following package formats: [horizontal] @@ -38,7 +43,7 @@ Elasticsearch on Windows. MSIs may be downloaded from the Elasticsearch website. `docker`:: Images are available for running Elasticsearch as Docker containers. They may be -downloaded from the Elastic Docker Registry. +downloaded from the Elastic Docker Registry. + {ref}/docker.html[Install {es} with Docker] diff --git a/docs/reference/setup/install/docker.asciidoc b/docs/reference/setup/install/docker.asciidoc index b18f7c57a1668..523217b921a43 100644 --- a/docs/reference/setup/install/docker.asciidoc +++ b/docs/reference/setup/install/docker.asciidoc @@ -8,8 +8,6 @@ A list of all published Docker images and tags can be found in https://www.docker.elastic.co[www.docker.elastic.co]. The source code can be found on https://github.com/elastic/elasticsearch-docker/tree/{branch}[GitHub]. -==== Image types - These images are free to use under the Elastic license. They contain open source and free commercial features and access to paid commercial features. 
{xpack-ref}/license-management.html[Start a 30-day trial] to try out all of the @@ -17,9 +15,6 @@ paid commercial features. See the https://www.elastic.co/subscriptions[Subscriptions] page for information about Elastic license levels. -Alternatively, you can download `-oss` images, which contain only features that -are available under the Apache 2.0 license. - ==== Pulling the image Obtaining {es} for Docker is as simple as issuing a +docker pull+ command @@ -34,14 +29,17 @@ endif::[] ifeval::["{release-state}"!="unreleased"] -Docker images can be retrieved with the following commands: +For example, the Docker image can be retrieved with the following command: ["source","sh",subs="attributes"] -------------------------------------------- docker pull {docker-repo}:{version} -docker pull {docker-repo}-oss:{version} -------------------------------------------- +Alternatively, you can download other Docker images that contain only features +that are available under the Apache 2.0 license from +https://www.docker.elastic.co[www.docker.elastic.co]. + endif::[] [[docker-cli-run]] diff --git a/docs/reference/setup/install/windows.asciidoc b/docs/reference/setup/install/windows.asciidoc index 1535e5415e4a4..f5e248598ca9f 100644 --- a/docs/reference/setup/install/windows.asciidoc +++ b/docs/reference/setup/install/windows.asciidoc @@ -34,10 +34,6 @@ ifeval::["{release-state}"!="unreleased"] Download the `.msi` package for Elasticsearch v{version} from https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-{version}.msi -Alternatively, you can download the following package, which contains only -features that are available under the Apache 2.0 license: -https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-oss-{version}.msi - endif::[] [[install-msi-gui]] diff --git a/libs/nio/src/main/java/org/elasticsearch/nio/BytesChannelContext.java b/libs/nio/src/main/java/org/elasticsearch/nio/BytesChannelContext.java index a82d381951b76..9984f1a18e5f9 100644 --- a/libs/nio/src/main/java/org/elasticsearch/nio/BytesChannelContext.java +++ b/libs/nio/src/main/java/org/elasticsearch/nio/BytesChannelContext.java @@ -21,12 +21,19 @@ import java.io.IOException; import java.util.function.Consumer; +import java.util.function.Predicate; public class BytesChannelContext extends SocketChannelContext { public BytesChannelContext(NioSocketChannel channel, NioSelector selector, Consumer exceptionHandler, ReadWriteHandler handler, InboundChannelBuffer channelBuffer) { - super(channel, selector, exceptionHandler, handler, channelBuffer); + this(channel, selector, exceptionHandler, handler, channelBuffer, ALWAYS_ALLOW_CHANNEL); + } + + public BytesChannelContext(NioSocketChannel channel, NioSelector selector, Consumer exceptionHandler, + ReadWriteHandler handler, InboundChannelBuffer channelBuffer, + Predicate allowChannelPredicate) { + super(channel, selector, exceptionHandler, handler, channelBuffer, allowChannelPredicate); } @Override @@ -77,7 +84,7 @@ public void closeChannel() { @Override public boolean selectorShouldClose() { - return isPeerClosed() || hasIOException() || isClosing.get(); + return closeNow() || isClosing.get(); } /** diff --git a/libs/nio/src/main/java/org/elasticsearch/nio/ChannelContext.java b/libs/nio/src/main/java/org/elasticsearch/nio/ChannelContext.java index a7cb21d95f537..b26636cb1581e 100644 --- a/libs/nio/src/main/java/org/elasticsearch/nio/ChannelContext.java +++ b/libs/nio/src/main/java/org/elasticsearch/nio/ChannelContext.java @@ -47,6 +47,11 @@ public abstract class 
ChannelContext { + public static final Predicate ALWAYS_ALLOW_CHANNEL = (c) -> true; + protected final NioSocketChannel channel; protected final InboundChannelBuffer channelBuffer; protected final AtomicBoolean isClosing = new AtomicBoolean(false); private final ReadWriteHandler readWriteHandler; + private final Predicate allowChannelPredicate; private final NioSelector selector; private final CompletableContext connectContext = new CompletableContext<>(); private final LinkedList pendingFlushes = new LinkedList<>(); - private boolean ioException; - private boolean peerClosed; + private boolean closeNow; private Exception connectException; protected SocketChannelContext(NioSocketChannel channel, NioSelector selector, Consumer exceptionHandler, - ReadWriteHandler readWriteHandler, InboundChannelBuffer channelBuffer) { + ReadWriteHandler readWriteHandler, InboundChannelBuffer channelBuffer, + Predicate allowChannelPredicate) { super(channel.getRawChannel(), exceptionHandler); this.selector = selector; this.channel = channel; this.readWriteHandler = readWriteHandler; this.channelBuffer = channelBuffer; + this.allowChannelPredicate = allowChannelPredicate; } @Override @@ -161,6 +166,14 @@ protected FlushOperation getPendingFlush() { return pendingFlushes.peekFirst(); } + @Override + protected void register() throws IOException { + super.register(); + if (allowChannelPredicate.test(channel) == false) { + closeNow = true; + } + } + @Override public void closeFromSelector() throws IOException { getSelector().assertOnSelectorThread(); @@ -217,24 +230,20 @@ public boolean readyForFlush() { */ public abstract boolean selectorShouldClose(); - protected boolean hasIOException() { - return ioException; - } - - protected boolean isPeerClosed() { - return peerClosed; + protected boolean closeNow() { + return closeNow; } protected int readFromChannel(ByteBuffer buffer) throws IOException { try { int bytesRead = rawChannel.read(buffer); if (bytesRead < 0) { - peerClosed = true; + closeNow = true; bytesRead = 0; } return bytesRead; } catch (IOException e) { - ioException = true; + closeNow = true; throw e; } } @@ -243,12 +252,12 @@ protected int readFromChannel(ByteBuffer[] buffers) throws IOException { try { int bytesRead = (int) rawChannel.read(buffers); if (bytesRead < 0) { - peerClosed = true; + closeNow = true; bytesRead = 0; } return bytesRead; } catch (IOException e) { - ioException = true; + closeNow = true; throw e; } } @@ -257,7 +266,7 @@ protected int flushToChannel(ByteBuffer buffer) throws IOException { try { return rawChannel.write(buffer); } catch (IOException e) { - ioException = true; + closeNow = true; throw e; } } @@ -266,7 +275,7 @@ protected int flushToChannel(ByteBuffer[] buffers) throws IOException { try { return (int) rawChannel.write(buffers); } catch (IOException e) { - ioException = true; + closeNow = true; throw e; } } diff --git a/libs/nio/src/test/java/org/elasticsearch/nio/SocketChannelContextTests.java b/libs/nio/src/test/java/org/elasticsearch/nio/SocketChannelContextTests.java index dee50724f34c9..bc9a7c33f0f77 100644 --- a/libs/nio/src/test/java/org/elasticsearch/nio/SocketChannelContextTests.java +++ b/libs/nio/src/test/java/org/elasticsearch/nio/SocketChannelContextTests.java @@ -33,6 +33,7 @@ import java.util.concurrent.atomic.AtomicReference; import java.util.function.BiConsumer; import java.util.function.Consumer; +import java.util.function.Predicate; import java.util.function.Supplier; import static org.mockito.Matchers.any; @@ -77,7 +78,7 @@ public void 
testIOExceptionSetIfEncountered() throws IOException { when(rawChannel.write(any(ByteBuffer.class))).thenThrow(new IOException()); when(rawChannel.read(any(ByteBuffer[].class), anyInt(), anyInt())).thenThrow(new IOException()); when(rawChannel.read(any(ByteBuffer.class))).thenThrow(new IOException()); - assertFalse(context.hasIOException()); + assertFalse(context.closeNow()); expectThrows(IOException.class, () -> { if (randomBoolean()) { context.read(); @@ -85,15 +86,31 @@ public void testIOExceptionSetIfEncountered() throws IOException { context.flushChannel(); } }); - assertTrue(context.hasIOException()); + assertTrue(context.closeNow()); } public void testSignalWhenPeerClosed() throws IOException { when(rawChannel.read(any(ByteBuffer[].class), anyInt(), anyInt())).thenReturn(-1L); when(rawChannel.read(any(ByteBuffer.class))).thenReturn(-1); - assertFalse(context.isPeerClosed()); + assertFalse(context.closeNow()); context.read(); - assertTrue(context.isPeerClosed()); + assertTrue(context.closeNow()); + } + + public void testValidateInRegisterCanSucceed() throws IOException { + InboundChannelBuffer channelBuffer = InboundChannelBuffer.allocatingInstance(); + context = new TestSocketChannelContext(channel, selector, exceptionHandler, readWriteHandler, channelBuffer, (c) -> true); + assertFalse(context.closeNow()); + context.register(); + assertFalse(context.closeNow()); + } + + public void testValidateInRegisterCanFail() throws IOException { + InboundChannelBuffer channelBuffer = InboundChannelBuffer.allocatingInstance(); + context = new TestSocketChannelContext(channel, selector, exceptionHandler, readWriteHandler, channelBuffer, (c) -> false); + assertFalse(context.closeNow()); + context.register(); + assertTrue(context.closeNow()); } public void testConnectSucceeds() throws IOException { @@ -277,7 +294,13 @@ private static class TestSocketChannelContext extends SocketChannelContext { private TestSocketChannelContext(NioSocketChannel channel, NioSelector selector, Consumer exceptionHandler, ReadWriteHandler readWriteHandler, InboundChannelBuffer channelBuffer) { - super(channel, selector, exceptionHandler, readWriteHandler, channelBuffer); + this(channel, selector, exceptionHandler, readWriteHandler, channelBuffer, ALWAYS_ALLOW_CHANNEL); + } + + private TestSocketChannelContext(NioSocketChannel channel, NioSelector selector, Consumer exceptionHandler, + ReadWriteHandler readWriteHandler, InboundChannelBuffer channelBuffer, + Predicate allowChannelPredicate) { + super(channel, selector, exceptionHandler, readWriteHandler, channelBuffer, allowChannelPredicate); } @Override @@ -309,6 +332,11 @@ public boolean selectorShouldClose() { public void closeChannel() { isClosing.set(true); } + + @Override + void doSelectorRegister() { + // We do not want to call the actual register with selector method as it will throw a NPE + } } private static byte[] createMessage(int length) { diff --git a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/ConstructingObjectParser.java b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/ConstructingObjectParser.java index d61bd8a5dbbdb..e880781fad781 100644 --- a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/ConstructingObjectParser.java +++ b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/ConstructingObjectParser.java @@ -294,6 +294,7 @@ public void declareNamedObjects(BiConsumer> consumer, NamedOb } } + @Override public String getName() { return objectParser.getName(); } diff --git 
a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/ObjectParser.java b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/ObjectParser.java index dfcc4271b922e..d0cc929b56d24 100644 --- a/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/ObjectParser.java +++ b/libs/x-content/src/main/java/org/elasticsearch/common/xcontent/ObjectParser.java @@ -411,6 +411,7 @@ public enum ValueType { INT_ARRAY(START_ARRAY, VALUE_NUMBER, VALUE_STRING), BOOLEAN_ARRAY(START_ARRAY, VALUE_BOOLEAN), OBJECT(START_OBJECT), + OBJECT_OR_NULL(START_OBJECT, VALUE_NULL), OBJECT_ARRAY(START_OBJECT, START_ARRAY), OBJECT_OR_BOOLEAN(START_OBJECT, VALUE_BOOLEAN), OBJECT_OR_STRING(START_OBJECT, VALUE_STRING), diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DateIndexNameProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DateIndexNameProcessor.java index b44eaa3bfa3ca..0d6253c88f9fa 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DateIndexNameProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DateIndexNameProcessor.java @@ -32,6 +32,8 @@ import org.elasticsearch.ingest.ConfigurationUtils; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.Processor; +import org.elasticsearch.script.ScriptService; +import org.elasticsearch.script.TemplateScript; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; import org.joda.time.format.DateTimeFormat; @@ -42,21 +44,22 @@ public final class DateIndexNameProcessor extends AbstractProcessor { public static final String TYPE = "date_index_name"; private final String field; - private final String indexNamePrefix; - private final String dateRounding; - private final String indexNameFormat; + private final TemplateScript.Factory indexNamePrefixTemplate; + private final TemplateScript.Factory dateRoundingTemplate; + private final TemplateScript.Factory indexNameFormatTemplate; private final DateTimeZone timezone; private final List> dateFormats; DateIndexNameProcessor(String tag, String field, List> dateFormats, DateTimeZone timezone, - String indexNamePrefix, String dateRounding, String indexNameFormat) { + TemplateScript.Factory indexNamePrefixTemplate, TemplateScript.Factory dateRoundingTemplate, + TemplateScript.Factory indexNameFormatTemplate) { super(tag); this.field = field; this.timezone = timezone; this.dateFormats = dateFormats; - this.indexNamePrefix = indexNamePrefix; - this.dateRounding = dateRounding; - this.indexNameFormat = indexNameFormat; + this.indexNamePrefixTemplate = indexNamePrefixTemplate; + this.dateRoundingTemplate = dateRoundingTemplate; + this.indexNameFormatTemplate = indexNameFormatTemplate; } @Override @@ -83,6 +86,9 @@ public void execute(IngestDocument ingestDocument) throws Exception { if (dateTime == null) { throw new IllegalArgumentException("unable to parse date [" + date + "]", lastException); } + String indexNamePrefix = ingestDocument.renderTemplate(indexNamePrefixTemplate); + String indexNameFormat = ingestDocument.renderTemplate(indexNameFormatTemplate); + String dateRounding = ingestDocument.renderTemplate(dateRoundingTemplate); DateTimeFormatter formatter = DateTimeFormat.forPattern(indexNameFormat); StringBuilder builder = new StringBuilder() @@ -106,16 +112,16 @@ String getField() { return field; } - String getIndexNamePrefix() { - return indexNamePrefix; + TemplateScript.Factory getIndexNamePrefixTemplate() { + return 
indexNamePrefixTemplate; } - String getDateRounding() { - return dateRounding; + TemplateScript.Factory getDateRoundingTemplate() { + return dateRoundingTemplate; } - String getIndexNameFormat() { - return indexNameFormat; + TemplateScript.Factory getIndexNameFormatTemplate() { + return indexNameFormatTemplate; } DateTimeZone getTimezone() { @@ -128,6 +134,12 @@ List> getDateFormats() { public static final class Factory implements Processor.Factory { + private final ScriptService scriptService; + + public Factory(ScriptService scriptService) { + this.scriptService = scriptService; + } + @Override public DateIndexNameProcessor create(Map registry, String tag, Map config) throws Exception { @@ -154,9 +166,16 @@ public DateIndexNameProcessor create(Map registry, St String field = ConfigurationUtils.readStringProperty(TYPE, tag, config, "field"); String indexNamePrefix = ConfigurationUtils.readStringProperty(TYPE, tag, config, "index_name_prefix", ""); + TemplateScript.Factory indexNamePrefixTemplate = + ConfigurationUtils.compileTemplate(TYPE, tag, "index_name_prefix", indexNamePrefix, scriptService); String dateRounding = ConfigurationUtils.readStringProperty(TYPE, tag, config, "date_rounding"); + TemplateScript.Factory dateRoundingTemplate = + ConfigurationUtils.compileTemplate(TYPE, tag, "date_rounding", dateRounding, scriptService); String indexNameFormat = ConfigurationUtils.readStringProperty(TYPE, tag, config, "index_name_format", "yyyy-MM-dd"); - return new DateIndexNameProcessor(tag, field, dateFormats, timezone, indexNamePrefix, dateRounding, indexNameFormat); + TemplateScript.Factory indexNameFormatTemplate = + ConfigurationUtils.compileTemplate(TYPE, tag, "index_name_format", indexNameFormat, scriptService); + return new DateIndexNameProcessor(tag, field, dateFormats, timezone, indexNamePrefixTemplate, + dateRoundingTemplate, indexNameFormatTemplate); } } diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/IngestCommonPlugin.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/IngestCommonPlugin.java index bc900d325104a..bc475a2a00539 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/IngestCommonPlugin.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/IngestCommonPlugin.java @@ -62,7 +62,7 @@ public Map getProcessors(Processor.Parameters paramet processors.put(DateProcessor.TYPE, new DateProcessor.Factory(parameters.scriptService)); processors.put(SetProcessor.TYPE, new SetProcessor.Factory(parameters.scriptService)); processors.put(AppendProcessor.TYPE, new AppendProcessor.Factory(parameters.scriptService)); - processors.put(RenameProcessor.TYPE, new RenameProcessor.Factory()); + processors.put(RenameProcessor.TYPE, new RenameProcessor.Factory(parameters.scriptService)); processors.put(RemoveProcessor.TYPE, new RemoveProcessor.Factory(parameters.scriptService)); processors.put(SplitProcessor.TYPE, new SplitProcessor.Factory()); processors.put(JoinProcessor.TYPE, new JoinProcessor.Factory()); @@ -73,7 +73,7 @@ public Map getProcessors(Processor.Parameters paramet processors.put(GsubProcessor.TYPE, new GsubProcessor.Factory()); processors.put(FailProcessor.TYPE, new FailProcessor.Factory(parameters.scriptService)); processors.put(ForEachProcessor.TYPE, new ForEachProcessor.Factory()); - processors.put(DateIndexNameProcessor.TYPE, new DateIndexNameProcessor.Factory()); + processors.put(DateIndexNameProcessor.TYPE, new 
DateIndexNameProcessor.Factory(parameters.scriptService)); processors.put(SortProcessor.TYPE, new SortProcessor.Factory()); processors.put(GrokProcessor.TYPE, new GrokProcessor.Factory(GROK_PATTERNS, createGrokThreadWatchdog(parameters))); processors.put(ScriptProcessor.TYPE, new ScriptProcessor.Factory(parameters.scriptService)); @@ -97,12 +97,12 @@ public List getRestHandlers(Settings settings, RestController restC Supplier nodesInCluster) { return Arrays.asList(new GrokProcessorGetAction.RestAction(settings, restController)); } - + @Override public List> getSettings() { return Arrays.asList(WATCHDOG_INTERVAL, WATCHDOG_MAX_EXECUTION_TIME); } - + private static ThreadWatchdog createGrokThreadWatchdog(Processor.Parameters parameters) { long intervalMillis = WATCHDOG_INTERVAL.get(parameters.env.settings()).getMillis(); long maxExecutionTimeMillis = WATCHDOG_MAX_EXECUTION_TIME.get(parameters.env.settings()).getMillis(); diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/RemoveProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/RemoveProcessor.java index 3425bb8abe236..2b9eaa9a13d18 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/RemoveProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/RemoveProcessor.java @@ -39,10 +39,12 @@ public final class RemoveProcessor extends AbstractProcessor { public static final String TYPE = "remove"; private final List fields; + private final boolean ignoreMissing; - RemoveProcessor(String tag, List fields) { + RemoveProcessor(String tag, List fields, boolean ignoreMissing) { super(tag); this.fields = new ArrayList<>(fields); + this.ignoreMissing = ignoreMissing; } public List getFields() { @@ -51,7 +53,16 @@ public List getFields() { @Override public void execute(IngestDocument document) { - fields.forEach(document::removeField); + if (ignoreMissing) { + fields.forEach(field -> { + String path = document.renderTemplate(field); + if (document.hasField(path)) { + document.removeField(path); + } + }); + } else { + fields.forEach(document::removeField); + } } @Override @@ -83,7 +94,8 @@ public RemoveProcessor create(Map registry, String pr final List compiledTemplates = fields.stream() .map(f -> ConfigurationUtils.compileTemplate(TYPE, processorTag, "field", f, scriptService)) .collect(Collectors.toList()); - return new RemoveProcessor(processorTag, compiledTemplates); + boolean ignoreMissing = ConfigurationUtils.readBooleanProperty(TYPE, processorTag, config, "ignore_missing", false); + return new RemoveProcessor(processorTag, compiledTemplates, ignoreMissing); } } } diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/RenameProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/RenameProcessor.java index 90a6389d82e46..a35a164ddd3f1 100644 --- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/RenameProcessor.java +++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/RenameProcessor.java @@ -23,6 +23,8 @@ import org.elasticsearch.ingest.ConfigurationUtils; import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.Processor; +import org.elasticsearch.script.ScriptService; +import org.elasticsearch.script.TemplateScript; import java.util.Map; @@ -33,22 +35,22 @@ public final class RenameProcessor extends AbstractProcessor { public static final String TYPE = "rename"; - private final String field; - private final 
String targetField; + private final TemplateScript.Factory field; + private final TemplateScript.Factory targetField; private final boolean ignoreMissing; - RenameProcessor(String tag, String field, String targetField, boolean ignoreMissing) { + RenameProcessor(String tag, TemplateScript.Factory field, TemplateScript.Factory targetField, boolean ignoreMissing) { super(tag); this.field = field; this.targetField = targetField; this.ignoreMissing = ignoreMissing; } - String getField() { + TemplateScript.Factory getField() { return field; } - String getTargetField() { + TemplateScript.Factory getTargetField() { return targetField; } @@ -58,28 +60,30 @@ boolean isIgnoreMissing() { @Override public void execute(IngestDocument document) { - if (document.hasField(field, true) == false) { + String path = document.renderTemplate(field); + if (document.hasField(path, true) == false) { if (ignoreMissing) { return; } else { - throw new IllegalArgumentException("field [" + field + "] doesn't exist"); + throw new IllegalArgumentException("field [" + path + "] doesn't exist"); } } // We fail here if the target field points to an array slot that is out of range. // If we didn't do this then we would fail if we set the value in the target_field // and then on failure processors would not see that value we tried to rename as we already // removed it. - if (document.hasField(targetField, true)) { - throw new IllegalArgumentException("field [" + targetField + "] already exists"); + String target = document.renderTemplate(targetField); + if (document.hasField(target, true)) { + throw new IllegalArgumentException("field [" + target + "] already exists"); } - Object value = document.getFieldValue(field, Object.class); - document.removeField(field); + Object value = document.getFieldValue(path, Object.class); + document.removeField(path); try { - document.setFieldValue(targetField, value); + document.setFieldValue(target, value); } catch (Exception e) { // setting the value back to the original field shouldn't fail, as we just fetched it from that field: - document.setFieldValue(field, value); + document.setFieldValue(path, value); throw e; } } @@ -90,13 +94,24 @@ public String getType() { } public static final class Factory implements Processor.Factory { + + private final ScriptService scriptService; + + public Factory(ScriptService scriptService) { + this.scriptService = scriptService; + } + @Override public RenameProcessor create(Map registry, String processorTag, Map config) throws Exception { String field = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "field"); + TemplateScript.Factory fieldTemplate = ConfigurationUtils.compileTemplate(TYPE, processorTag, + "field", field, scriptService); String targetField = ConfigurationUtils.readStringProperty(TYPE, processorTag, config, "target_field"); + TemplateScript.Factory targetFieldTemplate = ConfigurationUtils.compileTemplate(TYPE, processorTag, + "target_field", targetField, scriptService); boolean ignoreMissing = ConfigurationUtils.readBooleanProperty(TYPE, processorTag, config, "ignore_missing", false); - return new RenameProcessor(processorTag, field, targetField, ignoreMissing); + return new RenameProcessor(processorTag, fieldTemplate, targetFieldTemplate, ignoreMissing); } } }
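Compiling field and target_field through ConfigurationUtils.compileTemplate means a single rename processor can now resolve its paths per document. A rough usage sketch under the same package, with hypothetical mustache template values (the create signature mirrors the factory above; a real ScriptService expands the templates at execute time, whereas the test mock simply echoes the raw string):

package org.elasticsearch.ingest.common;

import java.util.HashMap;
import java.util.Map;

import org.elasticsearch.ingest.Processor;
import org.elasticsearch.script.ScriptService;

class TemplatedRenameSketch {
    // Sketch: both paths may be mustache templates resolved against each document.
    static Processor templatedRename(ScriptService scriptService) throws Exception {
        Map<String, Object> config = new HashMap<>();
        config.put("field", "{{source_field}}");                 // hypothetical template
        config.put("target_field", "{{source_field}}_renamed");  // hypothetical template
        return new RenameProcessor.Factory(scriptService).create(null, "rename-sketch", config);
    }
}

diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateIndexNameFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateIndexNameFactoryTests.java index 3b9e2121c9511..2735cf55776b0 100644 ---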
a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateIndexNameFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateIndexNameFactoryTests.java @@ -20,18 +20,20 @@ package org.elasticsearch.ingest.common; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.ingest.TestTemplateService; import org.elasticsearch.test.ESTestCase; import org.hamcrest.Matchers; import org.joda.time.DateTimeZone; import java.util.Arrays; +import java.util.Collections; import java.util.HashMap; import java.util.Map; public class DateIndexNameFactoryTests extends ESTestCase { public void testDefaults() throws Exception { - DateIndexNameProcessor.Factory factory = new DateIndexNameProcessor.Factory(); + DateIndexNameProcessor.Factory factory = new DateIndexNameProcessor.Factory(TestTemplateService.instance()); Map config = new HashMap<>(); config.put("field", "_field"); config.put("date_rounding", "y"); @@ -39,14 +41,14 @@ public void testDefaults() throws Exception { DateIndexNameProcessor processor = factory.create(null, null, config); assertThat(processor.getDateFormats().size(), Matchers.equalTo(1)); assertThat(processor.getField(), Matchers.equalTo("_field")); - assertThat(processor.getIndexNamePrefix(), Matchers.equalTo("")); - assertThat(processor.getDateRounding(), Matchers.equalTo("y")); - assertThat(processor.getIndexNameFormat(), Matchers.equalTo("yyyy-MM-dd")); + assertThat(processor.getIndexNamePrefixTemplate().newInstance(Collections.emptyMap()).execute(), Matchers.equalTo("")); + assertThat(processor.getDateRoundingTemplate().newInstance(Collections.emptyMap()).execute(), Matchers.equalTo("y")); + assertThat(processor.getIndexNameFormatTemplate().newInstance(Collections.emptyMap()).execute(), Matchers.equalTo("yyyy-MM-dd")); assertThat(processor.getTimezone(), Matchers.equalTo(DateTimeZone.UTC)); } public void testSpecifyOptionalSettings() throws Exception { - DateIndexNameProcessor.Factory factory = new DateIndexNameProcessor.Factory(); + DateIndexNameProcessor.Factory factory = new DateIndexNameProcessor.Factory(TestTemplateService.instance()); Map config = new HashMap<>(); config.put("field", "_field"); config.put("index_name_prefix", "_prefix"); @@ -63,7 +65,7 @@ public void testSpecifyOptionalSettings() throws Exception { config.put("index_name_format", "yyyyMMdd"); processor = factory.create(null, null, config); - assertThat(processor.getIndexNameFormat(), Matchers.equalTo("yyyyMMdd")); + assertThat(processor.getIndexNameFormatTemplate().newInstance(Collections.emptyMap()).execute(), Matchers.equalTo("yyyyMMdd")); config = new HashMap<>(); config.put("field", "_field"); @@ -80,11 +82,11 @@ public void testSpecifyOptionalSettings() throws Exception { config.put("date_rounding", "y"); processor = factory.create(null, null, config); - assertThat(processor.getIndexNamePrefix(), Matchers.equalTo("_prefix")); + assertThat(processor.getIndexNamePrefixTemplate().newInstance(Collections.emptyMap()).execute(), Matchers.equalTo("_prefix")); } public void testRequiredFields() throws Exception { - DateIndexNameProcessor.Factory factory = new DateIndexNameProcessor.Factory(); + DateIndexNameProcessor.Factory factory = new DateIndexNameProcessor.Factory(TestTemplateService.instance()); Map config = new HashMap<>(); config.put("date_rounding", "y"); ElasticsearchParseException e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, null, config)); @@ -95,5 +97,4 @@ public void testRequiredFields() 
throws Exception { e = expectThrows(ElasticsearchParseException.class, () -> factory.create(null, null, config)); assertThat(e.getMessage(), Matchers.equalTo("[date_rounding] required property is missing")); } - } diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateIndexNameProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateIndexNameProcessorTests.java index eba37dc742169..c97da116e3489 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateIndexNameProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateIndexNameProcessorTests.java @@ -19,11 +19,14 @@ package org.elasticsearch.ingest.common; import org.elasticsearch.ingest.IngestDocument; +import org.elasticsearch.ingest.TestTemplateService; import org.elasticsearch.test.ESTestCase; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; +import org.joda.time.format.DateTimeFormat; import java.util.Collections; +import java.util.List; import java.util.Locale; import java.util.function.Function; @@ -33,11 +36,8 @@ public class DateIndexNameProcessorTests extends ESTestCase { public void testJodaPattern() throws Exception { Function function = DateFormat.Joda.getFunction("yyyy-MM-dd'T'HH:mm:ss.SSSZ", DateTimeZone.UTC, Locale.ROOT); - DateIndexNameProcessor processor = new DateIndexNameProcessor( - "_tag", "_field", Collections.singletonList(function), DateTimeZone.UTC, - "events-", "y", "yyyyMMdd" - ); - + DateIndexNameProcessor processor = createProcessor("_field", Collections.singletonList(function), + DateTimeZone.UTC, "events-", "y", "yyyyMMdd"); IngestDocument document = new IngestDocument("_index", "_type", "_id", null, null, null, Collections.singletonMap("_field", "2016-04-25T12:24:20.101Z")); processor.execute(document); @@ -46,7 +46,7 @@ public void testJodaPattern() throws Exception { public void testTAI64N()throws Exception { Function function = DateFormat.Tai64n.getFunction(null, DateTimeZone.UTC, null); - DateIndexNameProcessor dateProcessor = new DateIndexNameProcessor("_tag", "_field", Collections.singletonList(function), + DateIndexNameProcessor dateProcessor = createProcessor("_field", Collections.singletonList(function), DateTimeZone.UTC, "events-", "m", "yyyyMMdd"); IngestDocument document = new IngestDocument("_index", "_type", "_id", null, null, null, Collections.singletonMap("_field", (randomBoolean() ? 
"@" : "") + "4000000050d506482dbdf024")); @@ -56,7 +56,7 @@ public void testTAI64N()throws Exception { public void testUnixMs()throws Exception { Function function = DateFormat.UnixMs.getFunction(null, DateTimeZone.UTC, null); - DateIndexNameProcessor dateProcessor = new DateIndexNameProcessor("_tag", "_field", Collections.singletonList(function), + DateIndexNameProcessor dateProcessor = createProcessor("_field", Collections.singletonList(function), DateTimeZone.UTC, "events-", "m", "yyyyMMdd"); IngestDocument document = new IngestDocument("_index", "_type", "_id", null, null, null, Collections.singletonMap("_field", "1000500")); @@ -71,7 +71,7 @@ public void testUnixMs()throws Exception { public void testUnix()throws Exception { Function function = DateFormat.Unix.getFunction(null, DateTimeZone.UTC, null); - DateIndexNameProcessor dateProcessor = new DateIndexNameProcessor("_tag", "_field", Collections.singletonList(function), + DateIndexNameProcessor dateProcessor = createProcessor("_field", Collections.singletonList(function), DateTimeZone.UTC, "events-", "m", "yyyyMMdd"); IngestDocument document = new IngestDocument("_index", "_type", "_id", null, null, null, Collections.singletonMap("_field", "1000.5")); @@ -79,4 +79,33 @@ public void testUnix()throws Exception { assertThat(document.getSourceAndMetadata().get("_index"), equalTo("")); } + public void testTemplatedFields() throws Exception { + String indexNamePrefix = randomAlphaOfLength(10); + String dateRounding = randomFrom("y", "M", "w", "d", "h", "m", "s"); + String indexNameFormat = randomFrom("yyyy-MM-dd'T'HH:mm:ss.SSSZ", "yyyyMMdd", "MM/dd/yyyy"); + String date = Integer.toString(randomInt()); + Function dateTimeFunction = DateFormat.Unix.getFunction(null, DateTimeZone.UTC, null); + + DateIndexNameProcessor dateProcessor = createProcessor("_field", + Collections.singletonList(dateTimeFunction), DateTimeZone.UTC, indexNamePrefix, + dateRounding, indexNameFormat); + + IngestDocument document = new IngestDocument("_index", "_type", "_id", null, null, null, + Collections.singletonMap("_field", date)); + dateProcessor.execute(document); + + assertThat(document.getSourceAndMetadata().get("_index"), + equalTo("<"+indexNamePrefix+"{"+DateTimeFormat.forPattern(indexNameFormat) + .print(dateTimeFunction.apply(date))+"||/"+dateRounding+"{"+indexNameFormat+"|UTC}}>")); + } + + private DateIndexNameProcessor createProcessor(String field, List> dateFormats, + DateTimeZone timezone, String indexNamePrefix, String dateRounding, + String indexNameFormat) { + return new DateIndexNameProcessor(randomAlphaOfLength(10), field, dateFormats, timezone, + new TestTemplateService.MockTemplateScript.Factory(indexNamePrefix), + new TestTemplateService.MockTemplateScript.Factory(dateRounding), + new TestTemplateService.MockTemplateScript.Factory(indexNameFormat) + ); + } } diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateProcessorTests.java index 8fba759aa16f9..43a5f9245b185 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateProcessorTests.java @@ -24,9 +24,10 @@ import org.elasticsearch.ingest.TestTemplateService; import org.elasticsearch.script.TemplateScript; import org.elasticsearch.test.ESTestCase; -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; +import java.time.ZoneId; 
+import java.time.ZoneOffset; +import java.time.ZonedDateTime; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; @@ -36,19 +37,21 @@ import static org.hamcrest.CoreMatchers.containsString; import static org.hamcrest.CoreMatchers.equalTo; -import static org.joda.time.DateTimeZone.UTC; public class DateProcessorTests extends ESTestCase { + private TemplateScript.Factory templatize(Locale locale) { return new TestTemplateService.MockTemplateScript.Factory(locale.getLanguage()); } - private TemplateScript.Factory templatize(DateTimeZone timezone) { - return new TestTemplateService.MockTemplateScript.Factory(timezone.getID()); + private TemplateScript.Factory templatize(ZoneId timezone) { + // prevent writing "UTC" as string, as joda time does not parse it + String id = timezone.equals(ZoneOffset.UTC) ? "UTC" : timezone.getId(); + return new TestTemplateService.MockTemplateScript.Factory(id); } public void testJodaPattern() { DateProcessor dateProcessor = new DateProcessor(randomAlphaOfLength(10), - templatize(DateTimeZone.forID("Europe/Amsterdam")), templatize(Locale.ENGLISH), + templatize(ZoneId.of("Europe/Amsterdam")), templatize(Locale.ENGLISH), "date_as_string", Collections.singletonList("yyyy dd MM hh:mm:ss"), "date_as_date"); Map document = new HashMap<>(); document.put("date_as_string", "2010 12 06 11:05:15"); @@ -63,7 +66,7 @@ public void testJodaPatternMultipleFormats() { matchFormats.add("dd/MM/yyyy"); matchFormats.add("dd-MM-yyyy"); DateProcessor dateProcessor = new DateProcessor(randomAlphaOfLength(10), - templatize(DateTimeZone.forID("Europe/Amsterdam")), templatize(Locale.ENGLISH), + templatize(ZoneId.of("Europe/Amsterdam")), templatize(Locale.ENGLISH), "date_as_string", matchFormats, "date_as_date"); Map document = new HashMap<>(); @@ -98,7 +101,7 @@ public void testJodaPatternMultipleFormats() { public void testInvalidJodaPattern() { try { DateProcessor processor = new DateProcessor(randomAlphaOfLength(10), - templatize(UTC), templatize(randomLocale(random())), + templatize(ZoneOffset.UTC), templatize(randomLocale(random())), "date_as_string", Collections.singletonList("invalid pattern"), "date_as_date"); Map document = new HashMap<>(); document.put("date_as_string", "2010"); @@ -112,7 +115,7 @@ public void testInvalidJodaPattern() { public void testJodaPatternLocale() { DateProcessor dateProcessor = new DateProcessor(randomAlphaOfLength(10), - templatize(DateTimeZone.forID("Europe/Amsterdam")), templatize(Locale.ITALIAN), + templatize(ZoneId.of("Europe/Amsterdam")), templatize(Locale.ITALIAN), "date_as_string", Collections.singletonList("yyyy dd MMM"), "date_as_date"); Map document = new HashMap<>(); document.put("date_as_string", "2010 12 giugno"); @@ -123,18 +126,18 @@ public void testJodaPatternLocale() { public void testJodaPatternDefaultYear() { DateProcessor dateProcessor = new DateProcessor(randomAlphaOfLength(10), - templatize(DateTimeZone.forID("Europe/Amsterdam")), templatize(Locale.ENGLISH), + templatize(ZoneId.of("Europe/Amsterdam")), templatize(Locale.ENGLISH), "date_as_string", Collections.singletonList("dd/MM"), "date_as_date"); Map document = new HashMap<>(); document.put("date_as_string", "12/06"); IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); dateProcessor.execute(ingestDocument); assertThat(ingestDocument.getFieldValue("date_as_date", String.class), - equalTo(DateTime.now().getYear() + "-06-12T00:00:00.000+02:00")); + equalTo(ZonedDateTime.now().getYear() + 
"-06-12T00:00:00.000+02:00")); } public void testTAI64N() { - DateProcessor dateProcessor = new DateProcessor(randomAlphaOfLength(10), templatize(DateTimeZone.forOffsetHours(2)), + DateProcessor dateProcessor = new DateProcessor(randomAlphaOfLength(10), templatize(ZoneOffset.ofHours(2)), templatize(randomLocale(random())), "date_as_string", Collections.singletonList("TAI64N"), "date_as_date"); Map document = new HashMap<>(); @@ -146,8 +149,8 @@ public void testTAI64N() { } public void testUnixMs() { - DateProcessor dateProcessor = new DateProcessor(randomAlphaOfLength(10), templatize(UTC), templatize(randomLocale(random())), - "date_as_string", Collections.singletonList("UNIX_MS"), "date_as_date"); + DateProcessor dateProcessor = new DateProcessor(randomAlphaOfLength(10), templatize(ZoneOffset.UTC), + templatize(randomLocale(random())), "date_as_string", Collections.singletonList("UNIX_MS"), "date_as_date"); Map document = new HashMap<>(); document.put("date_as_string", "1000500"); IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); @@ -162,7 +165,7 @@ public void testUnixMs() { } public void testUnix() { - DateProcessor dateProcessor = new DateProcessor(randomAlphaOfLength(10), templatize(UTC), + DateProcessor dateProcessor = new DateProcessor(randomAlphaOfLength(10), templatize(ZoneOffset.UTC), templatize(randomLocale(random())), "date_as_string", Collections.singletonList("UNIX"), "date_as_date"); Map document = new HashMap<>(); @@ -186,7 +189,7 @@ public void testInvalidTimezone() { public void testInvalidLocale() { DateProcessor processor = new DateProcessor(randomAlphaOfLength(10), - templatize(UTC), new TestTemplateService.MockTemplateScript.Factory("invalid_locale"), + templatize(ZoneOffset.UTC), new TestTemplateService.MockTemplateScript.Factory("invalid_locale"), "date_as_string", Collections.singletonList("yyyy"), "date_as_date"); Map document = new HashMap<>(); document.put("date_as_string", "2010"); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DotExpanderProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DotExpanderProcessorTests.java index 199c475a8f0bf..fde7f0c9b8a02 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DotExpanderProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DotExpanderProcessorTests.java @@ -21,6 +21,7 @@ import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.Processor; +import org.elasticsearch.ingest.TestTemplateService; import org.elasticsearch.test.ESTestCase; import java.util.Collections; @@ -86,7 +87,8 @@ public void testEscapeFields_valueField() throws Exception { // so because foo is no branch field but a value field the `foo.bar` field can't be expanded // into [foo].[bar], so foo should be renamed first into `[foo].[bar]: IngestDocument document = new IngestDocument(source, Collections.emptyMap()); - Processor processor = new RenameProcessor("_tag", "foo", "foo.bar", false); + Processor processor = new RenameProcessor("_tag", new TestTemplateService.MockTemplateScript.Factory("foo"), + new TestTemplateService.MockTemplateScript.Factory("foo.bar"), false); processor.execute(document); processor = new DotExpanderProcessor("_tag", null, "foo.bar"); processor.execute(document); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GrokProcessorTests.java 
b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GrokProcessorTests.java index 0eba79523aca2..68654923ae926 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GrokProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/GrokProcessorTests.java @@ -129,7 +129,7 @@ public void testMissingField() { public void testMissingFieldWithIgnoreMissing() throws Exception { String fieldName = "foo.bar"; IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); - IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); + IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); GrokProcessor processor = new GrokProcessor(randomAlphaOfLength(10), Collections.singletonMap("ONE", "1"), Collections.singletonList("%{ONE:one}"), fieldName, false, true, ThreadWatchdog.noop()); processor.execute(ingestDocument); diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JsonProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JsonProcessorTests.java index 245285259b47a..2867ed1d24053 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JsonProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/JsonProcessorTests.java @@ -33,7 +33,6 @@ import java.util.List; import java.util.Map; -import static org.elasticsearch.ingest.IngestDocumentMatcher.assertIngestDocument; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -55,7 +54,7 @@ public void testExecute() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); jsonProcessor.execute(ingestDocument); Map jsonified = ingestDocument.getFieldValue(randomTargetField, Map.class); - assertIngestDocument(ingestDocument.getFieldValue(randomTargetField, Object.class), jsonified); + assertEquals(ingestDocument.getFieldValue(randomTargetField, Object.class), jsonified); } public void testInvalidValue() { @@ -161,13 +160,10 @@ public void testAddToRoot() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); jsonProcessor.execute(ingestDocument); - Map expected = new HashMap<>(); - expected.put("a", 1); - expected.put("b", 2); - expected.put("c", "see"); - IngestDocument expectedIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), expected); - - assertIngestDocument(ingestDocument, expectedIngestDocument); + Map sourceAndMetadata = ingestDocument.getSourceAndMetadata(); + assertEquals(1, sourceAndMetadata.get("a")); + assertEquals(2, sourceAndMetadata.get("b")); + assertEquals("see", sourceAndMetadata.get("c")); } public void testAddBoolToRoot() { diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RemoveProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RemoveProcessorTests.java index 641bac8393977..b420b2fe3b00b 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RemoveProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RemoveProcessorTests.java @@ -27,6 +27,7 @@ import java.util.Collections; import java.util.HashMap; +import java.util.Map; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; @@ -37,7 
+38,7 @@ public void testRemoveFields() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); String field = RandomDocumentPicks.randomExistingFieldName(random(), ingestDocument); Processor processor = new RemoveProcessor(randomAlphaOfLength(10), - Collections.singletonList(new TestTemplateService.MockTemplateScript.Factory(field))); + Collections.singletonList(new TestTemplateService.MockTemplateScript.Factory(field)), false); processor.execute(ingestDocument); assertThat(ingestDocument.hasField(field), equalTo(false)); } @@ -45,8 +46,10 @@ public void testRemoveFields() throws Exception { public void testRemoveNonExistingField() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); String fieldName = RandomDocumentPicks.randomFieldName(random()); - Processor processor = new RemoveProcessor(randomAlphaOfLength(10), - Collections.singletonList(new TestTemplateService.MockTemplateScript.Factory(fieldName))); + Map config = new HashMap<>(); + config.put("field", fieldName); + String processorTag = randomAlphaOfLength(10); + Processor processor = new RemoveProcessor.Factory(TestTemplateService.instance()).create(null, processorTag, config); try { processor.execute(ingestDocument); fail("remove field should have failed"); @@ -54,4 +57,15 @@ public void testRemoveNonExistingField() throws Exception { assertThat(e.getMessage(), containsString("not present as part of path [" + fieldName + "]")); } } + + public void testIgnoreMissing() throws Exception { + IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); + String fieldName = RandomDocumentPicks.randomFieldName(random()); + Map config = new HashMap<>(); + config.put("field", fieldName); + config.put("ignore_missing", true); + String processorTag = randomAlphaOfLength(10); + Processor processor = new RemoveProcessor.Factory(TestTemplateService.instance()).create(null, processorTag, config); + processor.execute(ingestDocument); + } } diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorFactoryTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorFactoryTests.java index 1eaac36a64cca..9dd799ff33971 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorFactoryTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorFactoryTests.java @@ -20,8 +20,11 @@ package org.elasticsearch.ingest.common; import org.elasticsearch.ElasticsearchParseException; +import org.elasticsearch.ingest.TestTemplateService; import org.elasticsearch.test.ESTestCase; +import org.junit.Before; +import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -29,21 +32,26 @@ public class RenameProcessorFactoryTests extends ESTestCase { + private RenameProcessor.Factory factory; + + @Before + public void init() { + factory = new RenameProcessor.Factory(TestTemplateService.instance()); + } + public void testCreate() throws Exception { - RenameProcessor.Factory factory = new RenameProcessor.Factory(); Map config = new HashMap<>(); config.put("field", "old_field"); config.put("target_field", "new_field"); String processorTag = randomAlphaOfLength(10); RenameProcessor renameProcessor = factory.create(null, processorTag, config); assertThat(renameProcessor.getTag(), equalTo(processorTag)); - assertThat(renameProcessor.getField(), 
equalTo("old_field")); - assertThat(renameProcessor.getTargetField(), equalTo("new_field")); + assertThat(renameProcessor.getField().newInstance(Collections.emptyMap()).execute(), equalTo("old_field")); + assertThat(renameProcessor.getTargetField().newInstance(Collections.emptyMap()).execute(), equalTo("new_field")); assertThat(renameProcessor.isIgnoreMissing(), equalTo(false)); } public void testCreateWithIgnoreMissing() throws Exception { - RenameProcessor.Factory factory = new RenameProcessor.Factory(); Map config = new HashMap<>(); config.put("field", "old_field"); config.put("target_field", "new_field"); @@ -51,13 +59,12 @@ public void testCreateWithIgnoreMissing() throws Exception { String processorTag = randomAlphaOfLength(10); RenameProcessor renameProcessor = factory.create(null, processorTag, config); assertThat(renameProcessor.getTag(), equalTo(processorTag)); - assertThat(renameProcessor.getField(), equalTo("old_field")); - assertThat(renameProcessor.getTargetField(), equalTo("new_field")); + assertThat(renameProcessor.getField().newInstance(Collections.emptyMap()).execute(), equalTo("old_field")); + assertThat(renameProcessor.getTargetField().newInstance(Collections.emptyMap()).execute(), equalTo("new_field")); assertThat(renameProcessor.isIgnoreMissing(), equalTo(true)); } public void testCreateNoFieldPresent() throws Exception { - RenameProcessor.Factory factory = new RenameProcessor.Factory(); Map config = new HashMap<>(); config.put("target_field", "new_field"); try { @@ -69,7 +76,6 @@ public void testCreateNoFieldPresent() throws Exception { } public void testCreateNoToPresent() throws Exception { - RenameProcessor.Factory factory = new RenameProcessor.Factory(); Map config = new HashMap<>(); config.put("field", "old_field"); try { diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorTests.java index bf35918ad6e24..5b3d88879ba13 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/RenameProcessorTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.ingest.IngestDocument; import org.elasticsearch.ingest.Processor; import org.elasticsearch.ingest.RandomDocumentPicks; +import org.elasticsearch.ingest.TestTemplateService; import org.elasticsearch.test.ESTestCase; import java.util.ArrayList; @@ -45,7 +46,7 @@ public void testRename() throws Exception { do { newFieldName = RandomDocumentPicks.randomFieldName(random()); } while (RandomDocumentPicks.canAddField(newFieldName, ingestDocument) == false || newFieldName.equals(fieldName)); - Processor processor = new RenameProcessor(randomAlphaOfLength(10), fieldName, newFieldName, false); + Processor processor = createRenameProcessor(fieldName, newFieldName, false); processor.execute(ingestDocument); assertThat(ingestDocument.getFieldValue(newFieldName, Object.class), equalTo(fieldValue)); } @@ -63,7 +64,7 @@ public void testRenameArrayElement() throws Exception { document.put("one", one); IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document); - Processor processor = new RenameProcessor(randomAlphaOfLength(10), "list.0", "item", false); + Processor processor = createRenameProcessor("list.0", "item", false); processor.execute(ingestDocument); Object actualObject = ingestDocument.getSourceAndMetadata().get("list"); 
assertThat(actualObject, instanceOf(List.class)); @@ -76,7 +77,7 @@ public void testRenameArrayElement() throws Exception { assertThat(actualObject, instanceOf(String.class)); assertThat(actualObject, equalTo("item1")); - processor = new RenameProcessor(randomAlphaOfLength(10), "list.0", "list.3", false); + processor = createRenameProcessor("list.0", "list.3", false); try { processor.execute(ingestDocument); fail("processor execute should have failed"); @@ -91,7 +92,7 @@ public void testRenameArrayElement() throws Exception { public void testRenameNonExistingField() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); String fieldName = RandomDocumentPicks.randomFieldName(random()); - Processor processor = new RenameProcessor(randomAlphaOfLength(10), fieldName, + Processor processor = createRenameProcessor(fieldName, RandomDocumentPicks.randomFieldName(random()), false); try { processor.execute(ingestDocument); @@ -105,7 +106,7 @@ public void testRenameNonExistingFieldWithIgnoreMissing() throws Exception { IngestDocument originalIngestDocument = RandomDocumentPicks.randomIngestDocument(random(), new HashMap<>()); IngestDocument ingestDocument = new IngestDocument(originalIngestDocument); String fieldName = RandomDocumentPicks.randomFieldName(random()); - Processor processor = new RenameProcessor(randomAlphaOfLength(10), fieldName, + Processor processor = createRenameProcessor(fieldName, RandomDocumentPicks.randomFieldName(random()), true); processor.execute(ingestDocument); assertIngestDocument(originalIngestDocument, ingestDocument); @@ -114,7 +115,7 @@ public void testRenameNonExistingFieldWithIgnoreMissing() throws Exception { public void testRenameNewFieldAlreadyExists() throws Exception { IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random()); String fieldName = RandomDocumentPicks.randomExistingFieldName(random(), ingestDocument); - Processor processor = new RenameProcessor(randomAlphaOfLength(10), RandomDocumentPicks.randomExistingFieldName( + Processor processor = createRenameProcessor(RandomDocumentPicks.randomExistingFieldName( random(), ingestDocument), fieldName, false); try { processor.execute(ingestDocument); @@ -129,7 +130,7 @@ public void testRenameExistingFieldNullValue() throws Exception { String fieldName = RandomDocumentPicks.randomFieldName(random()); ingestDocument.setFieldValue(fieldName, null); String newFieldName = randomValueOtherThanMany(ingestDocument::hasField, () -> RandomDocumentPicks.randomFieldName(random())); - Processor processor = new RenameProcessor(randomAlphaOfLength(10), fieldName, newFieldName, false); + Processor processor = createRenameProcessor(fieldName, newFieldName, false); processor.execute(ingestDocument); assertThat(ingestDocument.hasField(fieldName), equalTo(false)); assertThat(ingestDocument.hasField(newFieldName), equalTo(true)); @@ -149,7 +150,7 @@ public Object put(String key, Object value) { source.put("list", Collections.singletonList("item")); IngestDocument ingestDocument = new IngestDocument(source, Collections.emptyMap()); - Processor processor = new RenameProcessor(randomAlphaOfLength(10), "list", "new_field", false); + Processor processor = createRenameProcessor("list", "new_field", false); try { processor.execute(ingestDocument); fail("processor execute should have failed"); @@ -173,7 +174,7 @@ public Object remove(Object key) { source.put("list", Collections.singletonList("item")); IngestDocument ingestDocument = new 
IngestDocument(source, Collections.emptyMap()); - Processor processor = new RenameProcessor(randomAlphaOfLength(10), "list", "new_field", false); + Processor processor = createRenameProcessor("list", "new_field", false); try { processor.execute(ingestDocument); fail("processor execute should have failed"); @@ -188,12 +189,12 @@ public void testRenameLeafIntoBranch() throws Exception { Map source = new HashMap<>(); source.put("foo", "bar"); IngestDocument ingestDocument = new IngestDocument(source, Collections.emptyMap()); - Processor processor1 = new RenameProcessor(randomAlphaOfLength(10), "foo", "foo.bar", false); + Processor processor1 = createRenameProcessor("foo", "foo.bar", false); processor1.execute(ingestDocument); assertThat(ingestDocument.getFieldValue("foo", Map.class), equalTo(Collections.singletonMap("bar", "bar"))); assertThat(ingestDocument.getFieldValue("foo.bar", String.class), equalTo("bar")); - Processor processor2 = new RenameProcessor(randomAlphaOfLength(10), "foo.bar", "foo.bar.baz", false); + Processor processor2 = createRenameProcessor( "foo.bar", "foo.bar.baz", false); processor2.execute(ingestDocument); assertThat(ingestDocument.getFieldValue("foo", Map.class), equalTo(Collections.singletonMap("bar", Collections.singletonMap("baz", "bar")))); @@ -201,9 +202,13 @@ public void testRenameLeafIntoBranch() throws Exception { assertThat(ingestDocument.getFieldValue("foo.bar.baz", String.class), equalTo("bar")); // for fun lets try to restore it (which don't allow today) - Processor processor3 = new RenameProcessor(randomAlphaOfLength(10), "foo.bar.baz", "foo", false); + Processor processor3 = createRenameProcessor("foo.bar.baz", "foo", false); Exception e = expectThrows(IllegalArgumentException.class, () -> processor3.execute(ingestDocument)); assertThat(e.getMessage(), equalTo("field [foo] already exists")); } + private RenameProcessor createRenameProcessor(String field, String targetField, boolean ignoreMissing) { + return new RenameProcessor(randomAlphaOfLength(10), new TestTemplateService.MockTemplateScript.Factory(field), + new TestTemplateService.MockTemplateScript.Factory(targetField), ignoreMissing); + } } diff --git a/modules/lang-painless/build.gradle b/modules/lang-painless/build.gradle index d287d7ee02378..fb1ea441a9dd5 100644 --- a/modules/lang-painless/build.gradle +++ b/modules/lang-painless/build.gradle @@ -17,7 +17,7 @@ * under the License. */ -import org.apache.tools.ant.types.Path + esplugin { description 'An easy, safe and fast scripting language for Elasticsearch' diff --git a/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/Whitelist.java b/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/Whitelist.java index e715eb0090c7f..55b64b0420df1 100644 --- a/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/Whitelist.java +++ b/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/Whitelist.java @@ -24,13 +24,14 @@ import java.util.Objects; /** - * Whitelist contains data structures designed to be used to generate a white-list of Java classes, + * Whitelist contains data structures designed to be used to generate a whitelist of Java classes, * constructors, methods, and fields that can be used within a Painless script at both compile-time * and run-time. * - * A white-list consists of several pieces with {@link Struct}s as the top level. 
Each {@link Struct} - * will contain zero-to-many {@link Constructor}s, {@link Method}s, and {@link Field}s which are what - * will be available with a Painless script. See each individual white-list object for more detail. + * A whitelist consists of several pieces with {@link WhitelistClass}s as the top level. Each + * {@link WhitelistClass} will contain zero-to-many {@link WhitelistConstructor}s, {@link WhitelistMethod}s, and + * {@link WhitelistField}s which are what will be available with a Painless script. See each individual + * whitelist object for more detail. */ public final class Whitelist { @@ -54,166 +55,14 @@ public final class Whitelist { public static final List BASE_WHITELISTS = Collections.singletonList(WhitelistLoader.loadFromResourceFiles(Whitelist.class, BASE_WHITELIST_FILES)); - /** - * Struct represents the equivalent of a Java class in Painless complete with super classes, - * constructors, methods, and fields. In Painless a class is known as a struct primarily to avoid - * naming conflicts internally. There must be a one-to-one mapping of struct names to Java classes. - * Though, since multiple white-lists may be combined into a single white-list for a specific - * {@link org.elasticsearch.script.ScriptContext}, as long as multiple structs representing the same - * Java class have the same Painless type name and have legal constructor/method overloading they - * can be merged together. - * - * Structs in Painless allow for arity overloading for constructors and methods. Arity overloading - * means that multiple constructors are allowed for a single struct as long as they have a different - * number of parameter types, and multiples methods with the same name are allowed for a single struct - * as long as they have the same return type and a different number of parameter types. - * - * Structs will automatically extend other white-listed structs if the Java class they represent is a - * subclass of other structs including Java interfaces. - */ - public static final class Struct { - - /** Information about where this struct was white-listed from. Can be used for error messages. */ - public final String origin; - - /** The Java class name this struct represents. */ - public final String javaClassName; - - /** - * Allow the Java class name to only be specified as the fully-qualified name. - */ - public final boolean onlyFQNJavaClassName; - - /** The {@link List} of white-listed ({@link Constructor}s) available to this struct. */ - public final List whitelistConstructors; - - /** The {@link List} of white-listed ({@link Method}s) available to this struct. */ - public final List whitelistMethods; - - /** The {@link List} of white-listed ({@link Field}s) available to this struct. */ - public final List whitelistFields; - - /** Standard constructor. All values must be not {@code null}. 
*/ - public Struct(String origin, String javaClassName, boolean onlyFQNJavaClassName, - List whitelistConstructors, List whitelistMethods, List whitelistFields) { - this.origin = Objects.requireNonNull(origin); - this.javaClassName = Objects.requireNonNull(javaClassName); - this.onlyFQNJavaClassName = onlyFQNJavaClassName; - - this.whitelistConstructors = Collections.unmodifiableList(Objects.requireNonNull(whitelistConstructors)); - this.whitelistMethods = Collections.unmodifiableList(Objects.requireNonNull(whitelistMethods)); - this.whitelistFields = Collections.unmodifiableList(Objects.requireNonNull(whitelistFields)); - } - } - - /** - * Constructor represents the equivalent of a Java constructor available as a white-listed struct - * constructor within Painless. Constructors for Painless structs may be accessed exactly as - * constructors for Java classes are using the 'new' keyword. Painless structs may have multiple - * constructors as long as they comply with arity overloading described for {@link Struct}. - */ - public static final class Constructor { - - /** Information about where this constructor was white-listed from. Can be used for error messages. */ - public final String origin; - - /** - * A {@link List} of {@link String}s that are the Painless type names for the parameters of the - * constructor which can be used to look up the Java constructor through reflection. - */ - public final List painlessParameterTypeNames; - - /** Standard constructor. All values must be not {@code null}. */ - public Constructor(String origin, List painlessParameterTypeNames) { - this.origin = Objects.requireNonNull(origin); - this.painlessParameterTypeNames = Collections.unmodifiableList(Objects.requireNonNull(painlessParameterTypeNames)); - } - } - - /** - * Method represents the equivalent of a Java method available as a white-listed struct method - * within Painless. Methods for Painless structs may be accessed exactly as methods for Java classes - * are using the '.' operator on an existing struct variable/field. Painless structs may have multiple - * methods with the same name as long as they comply with arity overloading described for {@link Method}. - * - * Structs may also have additional methods that are not part of the Java class the struct represents - - * these are known as augmented methods. An augmented method can be added to a struct as a part of any - * Java class as long as the method is static and the first parameter of the method is the Java class - * represented by the struct. Note that the augmented method's parent Java class does not need to be - * white-listed. - */ - public static class Method { - - /** Information about where this method was white-listed from. Can be used for error messages. */ - public final String origin; - - /** - * The Java class name for the owner of an augmented method. If the method is not augmented - * this should be {@code null}. - */ - public final String javaAugmentedClassName; - - /** The Java method name used to look up the Java method through reflection. */ - public final String javaMethodName; - - /** - * The Painless type name for the return type of the method which can be used to look up the Java - * method through reflection. - */ - public final String painlessReturnTypeName; - - /** - * A {@link List} of {@link String}s that are the Painless type names for the parameters of the - * method which can be used to look up the Java method through reflection. 
- */ - public final List painlessParameterTypeNames; - - /** - * Standard constructor. All values must be not {@code null} with the exception of jAugmentedClass; - * jAugmentedClass will be {@code null} unless the method is augmented as described in the class documentation. - */ - public Method(String origin, String javaAugmentedClassName, String javaMethodName, - String painlessReturnTypeName, List painlessParameterTypeNames) { - this.origin = Objects.requireNonNull(origin); - this.javaAugmentedClassName = javaAugmentedClassName; - this.javaMethodName = javaMethodName; - this.painlessReturnTypeName = Objects.requireNonNull(painlessReturnTypeName); - this.painlessParameterTypeNames = Collections.unmodifiableList(Objects.requireNonNull(painlessParameterTypeNames)); - } - } - - /** - * Field represents the equivalent of a Java field available as a white-listed struct field - * within Painless. Fields for Painless structs may be accessed exactly as fields for Java classes - * are using the '.' operator on an existing struct variable/field. - */ - public static class Field { - - /** Information about where this method was white-listed from. Can be used for error messages. */ - public final String origin; - - /** The Java field name used to look up the Java field through reflection. */ - public final String javaFieldName; - - /** The Painless type name for the field which can be used to look up the Java field through reflection. */ - public final String painlessFieldTypeName; - - /** Standard constructor. All values must be not {@code null}. */ - public Field(String origin, String javaFieldName, String painlessFieldTypeName) { - this.origin = Objects.requireNonNull(origin); - this.javaFieldName = Objects.requireNonNull(javaFieldName); - this.painlessFieldTypeName = Objects.requireNonNull(painlessFieldTypeName); - } - } - - /** The {@link ClassLoader} used to look up the white-listed Java classes, constructors, methods, and fields. */ + /** The {@link ClassLoader} used to look up the whitelisted Java classes, constructors, methods, and fields. */ public final ClassLoader javaClassLoader; - /** The {@link List} of all the white-listed Painless structs. */ - public final List whitelistStructs; + /** The {@link List} of all the whitelisted Painless classes. */ + public final List whitelistStructs; /** Standard constructor. All values must be not {@code null}. */ - public Whitelist(ClassLoader javaClassLoader, List whitelistStructs) { + public Whitelist(ClassLoader javaClassLoader, List whitelistStructs) { this.javaClassLoader = Objects.requireNonNull(javaClassLoader); this.whitelistStructs = Collections.unmodifiableList(Objects.requireNonNull(whitelistStructs)); } diff --git a/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistClass.java b/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistClass.java new file mode 100644 index 0000000000000..12aa5f5bdd634 --- /dev/null +++ b/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistClass.java @@ -0,0 +1,76 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.painless.spi; + +import java.util.Collections; +import java.util.List; +import java.util.Objects; + +/** + * Class represents the equivalent of a Java class in Painless complete with super classes, + * constructors, methods, and fields. There must be a one-to-one mapping of class names to Java + * classes. Though, since multiple whitelists may be combined into a single whitelist for a + * specific context, as long as multiple classes representing the same Java class have the same + * class name and have legal constructor/method overloading they can be merged together. + * + * Classes in Painless allow for arity overloading for constructors and methods. Arity overloading + * means that multiple constructors are allowed for a single class as long as they have a different + * number of parameters, and multiple methods with the same name are allowed for a single class + * as long as they have the same return type and a different number of parameters. + * + * Classes will automatically extend other whitelisted classes if the Java class they represent is a + * subclass of other classes including Java interfaces. + */ +public final class WhitelistClass { + + /** Information about where this class was white-listed from. Can be used for error messages. */ + public final String origin; + + /** The Java class name this class represents. */ + public final String javaClassName; + + /** + * Allow the Java class name to only be specified as the fully-qualified name. + */ + public final boolean onlyFQNJavaClassName; + + /** The {@link List} of whitelisted ({@link WhitelistConstructor}s) available to this class. */ + public final List whitelistConstructors; + + /** The {@link List} of whitelisted ({@link WhitelistMethod}s) available to this class. */ + public final List whitelistMethods; + + /** The {@link List} of whitelisted ({@link WhitelistField}s) available to this class. */ + public final List whitelistFields; + + /** Standard constructor. All values must be not {@code null}. 
*/ + public WhitelistClass(String origin, String javaClassName, boolean onlyFQNJavaClassName, + List&lt;WhitelistConstructor&gt; whitelistConstructors, + List&lt;WhitelistMethod&gt; whitelistMethods, + List&lt;WhitelistField&gt; whitelistFields) { + this.origin = Objects.requireNonNull(origin); + this.javaClassName = Objects.requireNonNull(javaClassName); + this.onlyFQNJavaClassName = onlyFQNJavaClassName; + + this.whitelistConstructors = Collections.unmodifiableList(Objects.requireNonNull(whitelistConstructors)); + this.whitelistMethods = Collections.unmodifiableList(Objects.requireNonNull(whitelistMethods)); + this.whitelistFields = Collections.unmodifiableList(Objects.requireNonNull(whitelistFields)); + } +}
diff --git a/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistConstructor.java b/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistConstructor.java new file mode 100644 index 0000000000000..0e70552760208 --- /dev/null +++ b/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistConstructor.java @@ -0,0 +1,48 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.painless.spi; + +import java.util.Collections; +import java.util.List; +import java.util.Objects; + +/** + * Constructor represents the equivalent of a Java constructor available as a whitelisted class + * constructor within Painless. Constructors for Painless classes may be accessed exactly as + * constructors for Java classes are using the 'new' keyword. Painless classes may have multiple + * constructors as long as they comply with arity overloading described for {@link WhitelistClass}. + */ +public final class WhitelistConstructor { + + /** Information about where this constructor was whitelisted from. Can be used for error messages. */ + public final String origin; + + /** + * A {@link List} of {@link String}s that are the Painless type names for the parameters of the + * constructor which can be used to look up the Java constructor through reflection. + */ + public final List&lt;String&gt; painlessParameterTypeNames; + + /** Standard constructor. All values must be not {@code null}.
*/ + public WhitelistConstructor(String origin, List&lt;String&gt; painlessParameterTypeNames) { + this.origin = Objects.requireNonNull(origin); + this.painlessParameterTypeNames = Collections.unmodifiableList(Objects.requireNonNull(painlessParameterTypeNames)); + } +}
diff --git a/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistField.java b/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistField.java new file mode 100644 index 0000000000000..116aea98fcf89 --- /dev/null +++ b/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistField.java @@ -0,0 +1,46 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.painless.spi; + +import java.util.Objects; + +/** + * Field represents the equivalent of a Java field available as a whitelisted class field + * within Painless. Fields for Painless classes may be accessed exactly as fields for Java classes + * are using the '.' operator on an existing class variable/field. + */ +public class WhitelistField { + + /** Information about where this field was whitelisted from. Can be used for error messages. */ + public final String origin; + + /** The Java field name used to look up the Java field through reflection. */ + public final String javaFieldName; + + /** The Painless type name for the field which can be used to look up the Java field through reflection. */ + public final String painlessFieldTypeName; + + /** Standard constructor. All values must be not {@code null}. */ + public WhitelistField(String origin, String javaFieldName, String painlessFieldTypeName) { + this.origin = Objects.requireNonNull(origin); + this.javaFieldName = Objects.requireNonNull(javaFieldName); + this.painlessFieldTypeName = Objects.requireNonNull(painlessFieldTypeName); + } +}
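Taken together, the new SPI types form a small immutable data model: a Whitelist holds a ClassLoader plus a list of WhitelistClass entries, and each WhitelistClass carries its own WhitelistConstructor, WhitelistMethod, and WhitelistField members. Below is a minimal sketch of composing one programmatically, using only the constructors introduced in this diff (WhitelistMethod's constructor appears further down); the java.lang.Math entry and the "example" origin string are illustrative assumptions, not taken from any shipped whitelist.

    import java.util.Arrays;
    import java.util.Collections;

    import org.elasticsearch.painless.spi.Whitelist;
    import org.elasticsearch.painless.spi.WhitelistClass;
    import org.elasticsearch.painless.spi.WhitelistMethod;

    // whitelist Math.max(int, int); a null javaAugmentedClassName marks the method as not augmented
    WhitelistMethod max = new WhitelistMethod("example", null, "max", "int", Arrays.asList("int", "int"));

    // no constructors or fields for this entry; onlyFQNJavaClassName=false also registers the short name "Math"
    WhitelistClass math = new WhitelistClass("example", "java.lang.Math", false,
            Collections.emptyList(), Collections.singletonList(max), Collections.emptyList());

    Whitelist whitelist = new Whitelist(Whitelist.class.getClassLoader(), Collections.singletonList(math));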
diff --git a/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistLoader.java b/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistLoader.java index 8817bfa274c60..b104d03f1ea82 100644 --- a/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistLoader.java +++ b/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistLoader.java @@ -39,25 +39,25 @@ public final class WhitelistLoader { * {@link String}s with a single {@link Class} to be used to load the resources where each {@link String} * is the path of a single text file. The {@link Class}'s {@link ClassLoader} will be used to lookup the Java * reflection objects for each individual {@link Class}, {@link Constructor}, {@link Method}, and {@link Field} - * specified as part of the white-list in the text file. + * specified as part of the whitelist in the text file. * - * A single pass is made through each file to collect all the information about each struct, constructor, method, - * and field. Most validation will be done at a later point after all white-lists have been gathered and their + * A single pass is made through each file to collect all the information about each class, constructor, method, - * and field. Most validation will be done at a later point after all whitelists have been gathered and their * merging takes place. * * A painless type name is one of the following: * &lt;ul&gt; - * &lt;li&gt; def - The Painless dynamic type which is automatically included without a need to be - * white-listed. &lt;/li&gt; - * &lt;li&gt; fully-qualified Java type name - Any white-listed Java class will have the equivalent name as + * &lt;li&gt; def - The Painless dynamic type which is automatically included without a need to be + * whitelisted. &lt;/li&gt; + * &lt;li&gt; fully-qualified Java type name - Any whitelisted Java class will have the equivalent name as * a Painless type name with the exception that any dollar symbols used as part of inner classes will * be replaced with dot symbols. &lt;/li&gt; * &lt;li&gt; short Java type name - The text after the final dot symbol of any specified Java class. A - * short type Java name may be excluded by using the 'only_fqn' token during Painless struct parsing + * short type Java name may be excluded by using the 'only_fqn' token during Painless class parsing * as described later. &lt;/li&gt; * &lt;/ul&gt; * - * The following can be parsed from each white-list text file: + * The following can be parsed from each whitelist text file: * &lt;ul&gt; * &lt;li&gt; Blank lines will be ignored by the parser. &lt;/li&gt; * &lt;li&gt; Comments may be created starting with a pound '#' symbol and end with a newline. These will @@ -71,19 +71,19 @@ public final class WhitelistLoader { * &lt;ul&gt; * &lt;li&gt; A constructor may be specified starting with an opening parenthesis, followed by a * comma-delimited list of Painless type names corresponding to the type/class names for - * the equivalent Java parameter types (these must be white-listed as well), a closing + * the equivalent Java parameter types (these must be whitelisted as well), a closing * parenthesis, and a newline. &lt;/li&gt; * &lt;li&gt; A method may be specified starting with a Painless type name for the return type, * followed by the Java name of the method (which will also be the Painless name for the * method), an opening parenthesis, a comma-delimited list of Painless type names * corresponding to the type/class names for the equivalent Java parameter types - * (these must be white-listed as well), a closing parenthesis, and a newline. &lt;/li&gt; + * (these must be whitelisted as well), a closing parenthesis, and a newline. &lt;/li&gt; * &lt;li&gt; An augmented method may be specified starting with a Painless type name for the return * type, followed by the fully qualified Java name of the class the augmented method is - * part of (this class does not need to be white-listed), the Java name of the method + * part of (this class does not need to be whitelisted), the Java name of the method * (which will also be the Painless name for the method), an opening parenthesis, a * comma-delimited list of Painless type names corresponding to the type/class names - * for the equivalent Java parameter types (these must be white-listed as well), a closing + * for the equivalent Java parameter types (these must be whitelisted as well), a closing * parenthesis, and a newline. &lt;/li&gt; * &lt;li&gt; A field may be specified starting with a Painless type name for the equivalent Java type * of the field, followed by the Java name of the field (which will also be the Painless name @@ -99,7 +99,7 @@ public final class WhitelistLoader { * fully-qualified Java class name. Method argument types, method return types, and field types * must be specified with Painless type names (def, fully-qualified, or short) as described earlier. * - * The following example is used to create a single white-list text file: + * The following example is used to create a single whitelist text file: * * {@code * # primitive types @@ -132,10 +132,10 @@ * } */
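The body of the {@code ...} example sits in the lines elided between the hunks above and is left elided here. Purely as an illustration of the grammar just described (the classes and members below are hypothetical choices, not the elided original), a whitelist text file could read:

    # a primitive type; only_fqn means no short name is registered
    class int only_fqn {
    }

    # a complex type with a constructor, a method, an augmented method, and a field
    class java.lang.Integer {
      (int)
      int compareTo(Integer)
      int org.example.IntegerAugments asPercent(int)
      int MAX_VALUE
    }

The asPercent line assumes a hypothetical augmentation provider: a public static method whose first parameter is the class being augmented, as the WhitelistMethod documentation later in this diff requires.

    package org.example;

    public final class IntegerAugments {
        // the Integer receiver arrives as the first argument of the static method
        public static int asPercent(Integer receiver, int total) {
            return receiver * 100 / total;
        }
    }

A file along these lines would then be loaded through the method that follows, for example WhitelistLoader.loadFromResourceFiles(MyPlugin.class, "org.example.whitelist.txt"); the plugin class and file name here are placeholders.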
public static Whitelist loadFromResourceFiles(Class&lt;?&gt; resource, String... filepaths) { - List&lt;Whitelist.Struct&gt; whitelistStructs = new ArrayList&lt;&gt;(); + List&lt;WhitelistClass&gt; whitelistStructs = new ArrayList&lt;&gt;(); - // Execute a single pass through the white-list text files. This will gather all the - // constructors, methods, augmented methods, and fields for each white-listed struct. + // Execute a single pass through the whitelist text files. This will gather all the + // constructors, methods, augmented methods, and fields for each whitelisted class. for (String filepath : filepaths) { String line; int number = -1; @@ -146,9 +146,9 @@ public static Whitelist loadFromResourceFiles(Class resource, String... filep String whitelistStructOrigin = null; String javaClassName = null; boolean onlyFQNJavaClassName = false; - List&lt;Whitelist.Constructor&gt; whitelistConstructors = null; - List&lt;Whitelist.Method&gt; whitelistMethods = null; - List&lt;Whitelist.Field&gt; whitelistFields = null; + List&lt;WhitelistConstructor&gt; whitelistConstructors = null; + List&lt;WhitelistMethod&gt; whitelistMethods = null; + List&lt;WhitelistField&gt; whitelistFields = null; while ((line = reader.readLine()) != null) { number = reader.getLineNumber(); @@ -159,13 +159,13 @@ public static Whitelist loadFromResourceFiles(Class resource, String... filep continue; } - // Handle a new struct by resetting all the variables necessary to construct a new Whitelist.Struct for the white-list. + // Handle a new class by resetting all the variables necessary to construct a new WhitelistClass for the whitelist. // Expects the following format: 'class' ID 'only_fqn'? '{' '\n' if (line.startsWith("class ")) { // Ensure the final token of the line is '{'. if (line.endsWith("{") == false) { throw new IllegalArgumentException( - "invalid struct definition: failed to parse class opening bracket [" + line + "]"); + "invalid class definition: failed to parse class opening bracket [" + line + "]"); } // Parse the Java class name. @@ -175,29 +175,29 @@ public static Whitelist loadFromResourceFiles(Class resource, String... filep if (tokens.length == 2 &amp;&amp; "only_fqn".equals(tokens[1])) { onlyFQNJavaClassName = true; } else if (tokens.length != 1) { - throw new IllegalArgumentException("invalid struct definition: failed to parse class name [" + line + "]"); + throw new IllegalArgumentException("invalid class definition: failed to parse class name [" + line + "]"); } whitelistStructOrigin = "[" + filepath + "]:[" + number + "]"; javaClassName = tokens[0]; - // Reset all the constructors, methods, and fields to support a new struct. + // Reset all the constructors, methods, and fields to support a new class. whitelistConstructors = new ArrayList&lt;&gt;(); whitelistMethods = new ArrayList&lt;&gt;(); whitelistFields = new ArrayList&lt;&gt;(); - // Handle the end of a struct, by creating a new Whitelist.Struct with all the previously gathered - // constructors, methods, augmented methods, and fields, and adding it to the list of white-listed structs. + // Handle the end of a class, by creating a new WhitelistClass with all the previously gathered + // constructors, methods, augmented methods, and fields, and adding it to the list of whitelisted classes. // Expects the following format: '}' '\n' } else if (line.equals("}")) { if (javaClassName == null) { - throw new IllegalArgumentException("invalid struct definition: extraneous closing bracket"); + throw new IllegalArgumentException("invalid class definition: extraneous closing bracket"); } - whitelistStructs.add(new Whitelist.Struct(whitelistStructOrigin, javaClassName, onlyFQNJavaClassName, + whitelistStructs.add(new WhitelistClass(whitelistStructOrigin, javaClassName, onlyFQNJavaClassName, whitelistConstructors, whitelistMethods, whitelistFields)); - // Set all the variables to null to ensure a new struct definition is found before other parsable values. + // Set all the variables to null to ensure a new class definition is found before other parsable values. whitelistStructOrigin = null; javaClassName = null; onlyFQNJavaClassName = false; @@ -210,7 +210,7 @@ public static Whitelist loadFromResourceFiles(Class resource, String... filep // Mark the origin of this parsable object. String origin = "[" + filepath + "]:[" + number + "]"; - // Ensure we have a defined struct before adding any constructors, methods, augmented methods, or fields. + // Ensure we have a defined class before adding any constructors, methods, augmented methods, or fields. if (javaClassName == null) { throw new IllegalArgumentException("invalid object definition: expected a class name [" + line + "]"); } @@ -232,7 +232,7 @@ public static Whitelist loadFromResourceFiles(Class resource, String... filep tokens = new String[0]; } - whitelistConstructors.add(new Whitelist.Constructor(origin, Arrays.asList(tokens))); + whitelistConstructors.add(new WhitelistConstructor(origin, Arrays.asList(tokens))); // Handle the case for a method or augmented method definition. // Expects the following format: ID ID? ID '(' ( ID ( ',' ID )* )? ')' '\n' @@ -271,7 +271,7 @@ public static Whitelist loadFromResourceFiles(Class resource, String... filep tokens = new String[0]; } - whitelistMethods.add(new Whitelist.Method(origin, javaAugmentedClassName, javaMethodName, + whitelistMethods.add(new WhitelistMethod(origin, javaAugmentedClassName, javaMethodName, painlessReturnTypeName, Arrays.asList(tokens))); // Handle the case for a field definition. @@ -285,14 +285,14 @@ public static Whitelist loadFromResourceFiles(Class resource, String... filep throw new IllegalArgumentException("invalid field definition: unexpected format [" + line + "]"); } - whitelistFields.add(new Whitelist.Field(origin, tokens[1], tokens[0])); + whitelistFields.add(new WhitelistField(origin, tokens[1], tokens[0])); } } } - // Ensure all structs end with a '}' token before the end of the file. + // Ensure all classes end with a '}' token before the end of the file.
if (javaClassName != null) { - throw new IllegalArgumentException("invalid struct definition: expected closing bracket"); + throw new IllegalArgumentException("invalid class definition: expected closing bracket"); } } catch (Exception exception) { throw new RuntimeException("error in [" + filepath + "] at line [" + number + "]", exception);
diff --git a/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistMethod.java b/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistMethod.java new file mode 100644 index 0000000000000..df86619055b08 --- /dev/null +++ b/modules/lang-painless/spi/src/main/java/org/elasticsearch/painless/spi/WhitelistMethod.java @@ -0,0 +1,76 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.painless.spi; + +import java.util.Collections; +import java.util.List; +import java.util.Objects; + +/** + * Method represents the equivalent of a Java method available as a whitelisted class method + * within Painless. Methods for Painless classes may be accessed exactly as methods for Java classes + * are using the '.' operator on an existing class variable/field. Painless classes may have multiple + * methods with the same name as long as they comply with arity overloading described for {@link WhitelistClass}. + * + * Classes may also have additional methods that are not part of the Java class the class represents - + * these are known as augmented methods. An augmented method can be added to a class as a part of any + * Java class as long as the method is static and the first parameter of the method is the Java class + * represented by the class. Note that the augmented method's parent Java class does not need to be + * whitelisted. + */ +public class WhitelistMethod { + + /** Information about where this method was whitelisted from. Can be used for error messages. */ + public final String origin; + + /** + * The Java class name for the owner of an augmented method. If the method is not augmented + * this should be {@code null}. + */ + public final String javaAugmentedClassName; + + /** The Java method name used to look up the Java method through reflection. */ + public final String javaMethodName; + + /** + * The Painless type name for the return type of the method which can be used to look up the Java + * method through reflection. + */ + public final String painlessReturnTypeName; + + /** + * A {@link List} of {@link String}s that are the Painless type names for the parameters of the + * method which can be used to look up the Java method through reflection. + */ + public final List&lt;String&gt; painlessParameterTypeNames; + + /** + * Standard constructor.
All values must be not {@code null} with the exception of javaAugmentedClassName; + * javaAugmentedClassName will be {@code null} unless the method is augmented as described in the class documentation. + */ + public WhitelistMethod(String origin, String javaAugmentedClassName, String javaMethodName, + String painlessReturnTypeName, List&lt;String&gt; painlessParameterTypeNames) { + this.origin = Objects.requireNonNull(origin); + this.javaAugmentedClassName = javaAugmentedClassName; + this.javaMethodName = javaMethodName; + this.painlessReturnTypeName = Objects.requireNonNull(painlessReturnTypeName); + this.painlessParameterTypeNames = Collections.unmodifiableList(Objects.requireNonNull(painlessParameterTypeNames)); + } +}
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java index abba62de39c19..69ef57faad63e 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java @@ -19,8 +19,9 @@ package org.elasticsearch.painless; -import org.elasticsearch.painless.Definition.Cast; -import org.elasticsearch.painless.Definition.def; +import org.elasticsearch.painless.lookup.PainlessLookup; +import org.elasticsearch.painless.lookup.PainlessCast; +import org.elasticsearch.painless.lookup.PainlessLookup.def; import java.util.Objects; @@ -30,7 +31,7 @@ */ public final class AnalyzerCaster { - public static Cast getLegalCast(Location location, Class&lt;?&gt; actual, Class&lt;?&gt; expected, boolean explicit, boolean internal) { + public static PainlessCast getLegalCast(Location location, Class&lt;?&gt; actual, Class&lt;?&gt; expected, boolean explicit, boolean internal) { Objects.requireNonNull(actual); Objects.requireNonNull(expected); @@ -40,421 +41,421 @@ public static Cast getLegalCast(Location location, Class actual, Class exp if (actual == def.class) { if (expected == boolean.class) { - return Cast.unboxTo(def.class, Boolean.class, explicit, boolean.class); + return PainlessCast.unboxTo(def.class, Boolean.class, explicit, boolean.class); } else if (expected == byte.class) { - return Cast.unboxTo(def.class, Byte.class, explicit, byte.class); + return PainlessCast.unboxTo(def.class, Byte.class, explicit, byte.class); } else if (expected == short.class) { - return Cast.unboxTo(def.class, Short.class, explicit, short.class); + return PainlessCast.unboxTo(def.class, Short.class, explicit, short.class); } else if (expected == char.class) { - return Cast.unboxTo(def.class, Character.class, explicit, char.class); + return PainlessCast.unboxTo(def.class, Character.class, explicit, char.class); } else if (expected == int.class) { - return Cast.unboxTo(def.class, Integer.class, explicit, int.class); + return PainlessCast.unboxTo(def.class, Integer.class, explicit, int.class); } else if (expected == long.class) { - return Cast.unboxTo(def.class, Long.class, explicit, long.class); + return PainlessCast.unboxTo(def.class, Long.class, explicit, long.class); } else if (expected == float.class) { - return Cast.unboxTo(def.class, Float.class, explicit, float.class); + return PainlessCast.unboxTo(def.class, Float.class, explicit, float.class); } else if (expected == double.class) { - return Cast.unboxTo(def.class, Double.class, explicit, double.class); + return PainlessCast.unboxTo(def.class, Double.class, explicit, double.class); } } else if (actual == Object.class) { if (expected == byte.class &amp;&amp; explicit &amp;&amp; internal) { - return
Cast.unboxTo(Object.class, Byte.class, true, byte.class); + return PainlessCast.unboxTo(Object.class, Byte.class, true, byte.class); } else if (expected == short.class && explicit && internal) { - return Cast.unboxTo(Object.class, Short.class, true, short.class); + return PainlessCast.unboxTo(Object.class, Short.class, true, short.class); } else if (expected == char.class && explicit && internal) { - return Cast.unboxTo(Object.class, Character.class, true, char.class); + return PainlessCast.unboxTo(Object.class, Character.class, true, char.class); } else if (expected == int.class && explicit && internal) { - return Cast.unboxTo(Object.class, Integer.class, true, int.class); + return PainlessCast.unboxTo(Object.class, Integer.class, true, int.class); } else if (expected == long.class && explicit && internal) { - return Cast.unboxTo(Object.class, Long.class, true, long.class); + return PainlessCast.unboxTo(Object.class, Long.class, true, long.class); } else if (expected == float.class && explicit && internal) { - return Cast.unboxTo(Object.class, Float.class, true, float.class); + return PainlessCast.unboxTo(Object.class, Float.class, true, float.class); } else if (expected == double.class && explicit && internal) { - return Cast.unboxTo(Object.class, Double.class, true, double.class); + return PainlessCast.unboxTo(Object.class, Double.class, true, double.class); } } else if (actual == Number.class) { if (expected == byte.class && explicit && internal) { - return Cast.unboxTo(Number.class, Byte.class, true, byte.class); + return PainlessCast.unboxTo(Number.class, Byte.class, true, byte.class); } else if (expected == short.class && explicit && internal) { - return Cast.unboxTo(Number.class, Short.class, true, short.class); + return PainlessCast.unboxTo(Number.class, Short.class, true, short.class); } else if (expected == char.class && explicit && internal) { - return Cast.unboxTo(Number.class, Character.class, true, char.class); + return PainlessCast.unboxTo(Number.class, Character.class, true, char.class); } else if (expected == int.class && explicit && internal) { - return Cast.unboxTo(Number.class, Integer.class, true, int.class); + return PainlessCast.unboxTo(Number.class, Integer.class, true, int.class); } else if (expected == long.class && explicit && internal) { - return Cast.unboxTo(Number.class, Long.class, true, long.class); + return PainlessCast.unboxTo(Number.class, Long.class, true, long.class); } else if (expected == float.class && explicit && internal) { - return Cast.unboxTo(Number.class, Float.class, true, float.class); + return PainlessCast.unboxTo(Number.class, Float.class, true, float.class); } else if (expected == double.class && explicit && internal) { - return Cast.unboxTo(Number.class, Double.class, true, double.class); + return PainlessCast.unboxTo(Number.class, Double.class, true, double.class); } } else if (actual == String.class) { if (expected == char.class && explicit) { - return Cast.standard(String.class, char.class, true); + return PainlessCast.standard(String.class, char.class, true); } } else if (actual == boolean.class) { if (expected == def.class) { - return Cast.boxFrom(Boolean.class, def.class, explicit, boolean.class); + return PainlessCast.boxFrom(Boolean.class, def.class, explicit, boolean.class); } else if (expected == Object.class && internal) { - return Cast.boxFrom(Boolean.class, Object.class, explicit, boolean.class); + return PainlessCast.boxFrom(Boolean.class, Object.class, explicit, boolean.class); } else if (expected == Boolean.class && 
internal) { - return Cast.boxTo(boolean.class, boolean.class, explicit, boolean.class); + return PainlessCast.boxTo(boolean.class, boolean.class, explicit, boolean.class); } } else if (actual == byte.class) { if (expected == def.class) { - return Cast.boxFrom(Byte.class, def.class, explicit, byte.class); + return PainlessCast.boxFrom(Byte.class, def.class, explicit, byte.class); } else if (expected == Object.class && internal) { - return Cast.boxFrom(Byte.class, Object.class, explicit, byte.class); + return PainlessCast.boxFrom(Byte.class, Object.class, explicit, byte.class); } else if (expected == Number.class && internal) { - return Cast.boxFrom(Byte.class, Number.class, explicit, byte.class); + return PainlessCast.boxFrom(Byte.class, Number.class, explicit, byte.class); } else if (expected == short.class) { - return Cast.standard(byte.class, short.class, explicit); + return PainlessCast.standard(byte.class, short.class, explicit); } else if (expected == char.class && explicit) { - return Cast.standard(byte.class, char.class, true); + return PainlessCast.standard(byte.class, char.class, true); } else if (expected == int.class) { - return Cast.standard(byte.class, int.class, explicit); + return PainlessCast.standard(byte.class, int.class, explicit); } else if (expected == long.class) { - return Cast.standard(byte.class, long.class, explicit); + return PainlessCast.standard(byte.class, long.class, explicit); } else if (expected == float.class) { - return Cast.standard(byte.class, float.class, explicit); + return PainlessCast.standard(byte.class, float.class, explicit); } else if (expected == double.class) { - return Cast.standard(byte.class, double.class, explicit); + return PainlessCast.standard(byte.class, double.class, explicit); } else if (expected == Byte.class && internal) { - return Cast.boxTo(byte.class, byte.class, explicit, byte.class); + return PainlessCast.boxTo(byte.class, byte.class, explicit, byte.class); } else if (expected == Short.class && internal) { - return Cast.boxTo(byte.class, short.class, explicit, short.class); + return PainlessCast.boxTo(byte.class, short.class, explicit, short.class); } else if (expected == Character.class && explicit && internal) { - return Cast.boxTo(byte.class, char.class, true, char.class); + return PainlessCast.boxTo(byte.class, char.class, true, char.class); } else if (expected == Integer.class && internal) { - return Cast.boxTo(byte.class, int.class, explicit, int.class); + return PainlessCast.boxTo(byte.class, int.class, explicit, int.class); } else if (expected == Long.class && internal) { - return Cast.boxTo(byte.class, long.class, explicit, long.class); + return PainlessCast.boxTo(byte.class, long.class, explicit, long.class); } else if (expected == Float.class && internal) { - return Cast.boxTo(byte.class, float.class, explicit, float.class); + return PainlessCast.boxTo(byte.class, float.class, explicit, float.class); } else if (expected == Double.class && internal) { - return Cast.boxTo(byte.class, double.class, explicit, double.class); + return PainlessCast.boxTo(byte.class, double.class, explicit, double.class); } } else if (actual == short.class) { if (expected == def.class) { - return Cast.boxFrom(Short.class, def.class, explicit, short.class); + return PainlessCast.boxFrom(Short.class, def.class, explicit, short.class); } else if (expected == Object.class && internal) { - return Cast.boxFrom(Short.class, Object.class, explicit, short.class); + return PainlessCast.boxFrom(Short.class, Object.class, explicit, short.class); } else 
if (expected == Number.class && internal) { - return Cast.boxFrom(Short.class, Number.class, explicit, short.class); + return PainlessCast.boxFrom(Short.class, Number.class, explicit, short.class); } else if (expected == byte.class && explicit) { - return Cast.standard(short.class, byte.class, true); + return PainlessCast.standard(short.class, byte.class, true); } else if (expected == char.class && explicit) { - return Cast.standard(short.class, char.class, true); + return PainlessCast.standard(short.class, char.class, true); } else if (expected == int.class) { - return Cast.standard(short.class, int.class, explicit); + return PainlessCast.standard(short.class, int.class, explicit); } else if (expected == long.class) { - return Cast.standard(short.class, long.class, explicit); + return PainlessCast.standard(short.class, long.class, explicit); } else if (expected == float.class) { - return Cast.standard(short.class, float.class, explicit); + return PainlessCast.standard(short.class, float.class, explicit); } else if (expected == double.class) { - return Cast.standard(short.class, double.class, explicit); + return PainlessCast.standard(short.class, double.class, explicit); } else if (expected == Byte.class && explicit && internal) { - return Cast.boxTo(short.class, byte.class, true, byte.class); + return PainlessCast.boxTo(short.class, byte.class, true, byte.class); } else if (expected == Short.class && internal) { - return Cast.boxTo(short.class, short.class, explicit, short.class); + return PainlessCast.boxTo(short.class, short.class, explicit, short.class); } else if (expected == Character.class && explicit && internal) { - return Cast.boxTo(short.class, char.class, true, char.class); + return PainlessCast.boxTo(short.class, char.class, true, char.class); } else if (expected == Integer.class && internal) { - return Cast.boxTo(short.class, int.class, explicit, int.class); + return PainlessCast.boxTo(short.class, int.class, explicit, int.class); } else if (expected == Long.class && internal) { - return Cast.boxTo(short.class, long.class, explicit, long.class); + return PainlessCast.boxTo(short.class, long.class, explicit, long.class); } else if (expected == Float.class && internal) { - return Cast.boxTo(short.class, float.class, explicit, float.class); + return PainlessCast.boxTo(short.class, float.class, explicit, float.class); } else if (expected == Double.class && internal) { - return Cast.boxTo(short.class, double.class, explicit, double.class); + return PainlessCast.boxTo(short.class, double.class, explicit, double.class); } } else if (actual == char.class) { if (expected == def.class) { - return Cast.boxFrom(Character.class, def.class, explicit, char.class); + return PainlessCast.boxFrom(Character.class, def.class, explicit, char.class); } else if (expected == Object.class && internal) { - return Cast.boxFrom(Character.class, Object.class, explicit, char.class); + return PainlessCast.boxFrom(Character.class, Object.class, explicit, char.class); } else if (expected == Number.class && internal) { - return Cast.boxFrom(Character.class, Number.class, explicit, char.class); + return PainlessCast.boxFrom(Character.class, Number.class, explicit, char.class); } else if (expected == String.class) { - return Cast.standard(char.class, String.class, explicit); + return PainlessCast.standard(char.class, String.class, explicit); } else if (expected == byte.class && explicit) { - return Cast.standard(char.class, byte.class, true); + return PainlessCast.standard(char.class, byte.class, true); } else 
if (expected == short.class && explicit) { - return Cast.standard(char.class, short.class, true); + return PainlessCast.standard(char.class, short.class, true); } else if (expected == int.class) { - return Cast.standard(char.class, int.class, explicit); + return PainlessCast.standard(char.class, int.class, explicit); } else if (expected == long.class) { - return Cast.standard(char.class, long.class, explicit); + return PainlessCast.standard(char.class, long.class, explicit); } else if (expected == float.class) { - return Cast.standard(char.class, float.class, explicit); + return PainlessCast.standard(char.class, float.class, explicit); } else if (expected == double.class) { - return Cast.standard(char.class, double.class, explicit); + return PainlessCast.standard(char.class, double.class, explicit); } else if (expected == Byte.class && explicit && internal) { - return Cast.boxTo(char.class, byte.class, true, byte.class); + return PainlessCast.boxTo(char.class, byte.class, true, byte.class); } else if (expected == Short.class && internal) { - return Cast.boxTo(char.class, short.class, explicit, short.class); + return PainlessCast.boxTo(char.class, short.class, explicit, short.class); } else if (expected == Character.class && internal) { - return Cast.boxTo(char.class, char.class, true, char.class); + return PainlessCast.boxTo(char.class, char.class, true, char.class); } else if (expected == Integer.class && internal) { - return Cast.boxTo(char.class, int.class, explicit, int.class); + return PainlessCast.boxTo(char.class, int.class, explicit, int.class); } else if (expected == Long.class && internal) { - return Cast.boxTo(char.class, long.class, explicit, long.class); + return PainlessCast.boxTo(char.class, long.class, explicit, long.class); } else if (expected == Float.class && internal) { - return Cast.boxTo(char.class, float.class, explicit, float.class); + return PainlessCast.boxTo(char.class, float.class, explicit, float.class); } else if (expected == Double.class && internal) { - return Cast.boxTo(char.class, double.class, explicit, double.class); + return PainlessCast.boxTo(char.class, double.class, explicit, double.class); } } else if (actual == int.class) { if (expected == def.class) { - return Cast.boxFrom(Integer.class, def.class, explicit, int.class); + return PainlessCast.boxFrom(Integer.class, def.class, explicit, int.class); } else if (expected == Object.class && internal) { - return Cast.boxFrom(Integer.class, Object.class, explicit, int.class); + return PainlessCast.boxFrom(Integer.class, Object.class, explicit, int.class); } else if (expected == Number.class && internal) { - return Cast.boxFrom(Integer.class, Number.class, explicit, int.class); + return PainlessCast.boxFrom(Integer.class, Number.class, explicit, int.class); } else if (expected == byte.class && explicit) { - return Cast.standard(int.class, byte.class, true); + return PainlessCast.standard(int.class, byte.class, true); } else if (expected == char.class && explicit) { - return Cast.standard(int.class, char.class, true); + return PainlessCast.standard(int.class, char.class, true); } else if (expected == short.class && explicit) { - return Cast.standard(int.class, short.class, true); + return PainlessCast.standard(int.class, short.class, true); } else if (expected == long.class) { - return Cast.standard(int.class, long.class, explicit); + return PainlessCast.standard(int.class, long.class, explicit); } else if (expected == float.class) { - return Cast.standard(int.class, float.class, explicit); + return 
PainlessCast.standard(int.class, float.class, explicit); } else if (expected == double.class) { - return Cast.standard(int.class, double.class, explicit); + return PainlessCast.standard(int.class, double.class, explicit); } else if (expected == Byte.class && explicit && internal) { - return Cast.boxTo(int.class, byte.class, true, byte.class); + return PainlessCast.boxTo(int.class, byte.class, true, byte.class); } else if (expected == Short.class && explicit && internal) { - return Cast.boxTo(int.class, short.class, true, short.class); + return PainlessCast.boxTo(int.class, short.class, true, short.class); } else if (expected == Character.class && explicit && internal) { - return Cast.boxTo(int.class, char.class, true, char.class); + return PainlessCast.boxTo(int.class, char.class, true, char.class); } else if (expected == Integer.class && internal) { - return Cast.boxTo(int.class, int.class, explicit, int.class); + return PainlessCast.boxTo(int.class, int.class, explicit, int.class); } else if (expected == Long.class && internal) { - return Cast.boxTo(int.class, long.class, explicit, long.class); + return PainlessCast.boxTo(int.class, long.class, explicit, long.class); } else if (expected == Float.class && internal) { - return Cast.boxTo(int.class, float.class, explicit, float.class); + return PainlessCast.boxTo(int.class, float.class, explicit, float.class); } else if (expected == Double.class && internal) { - return Cast.boxTo(int.class, double.class, explicit, double.class); + return PainlessCast.boxTo(int.class, double.class, explicit, double.class); } } else if (actual == long.class) { if (expected == def.class) { - return Cast.boxFrom(Long.class, def.class, explicit, long.class); + return PainlessCast.boxFrom(Long.class, def.class, explicit, long.class); } else if (expected == Object.class && internal) { - return Cast.boxFrom(Long.class, Object.class, explicit, long.class); + return PainlessCast.boxFrom(Long.class, Object.class, explicit, long.class); } else if (expected == Number.class && internal) { - return Cast.boxFrom(Long.class, Number.class, explicit, long.class); + return PainlessCast.boxFrom(Long.class, Number.class, explicit, long.class); } else if (expected == byte.class && explicit) { - return Cast.standard(long.class, byte.class, true); + return PainlessCast.standard(long.class, byte.class, true); } else if (expected == char.class && explicit) { - return Cast.standard(long.class, char.class, true); + return PainlessCast.standard(long.class, char.class, true); } else if (expected == short.class && explicit) { - return Cast.standard(long.class, short.class, true); + return PainlessCast.standard(long.class, short.class, true); } else if (expected == int.class && explicit) { - return Cast.standard(long.class, int.class, true); + return PainlessCast.standard(long.class, int.class, true); } else if (expected == float.class) { - return Cast.standard(long.class, float.class, explicit); + return PainlessCast.standard(long.class, float.class, explicit); } else if (expected == double.class) { - return Cast.standard(long.class, double.class, explicit); + return PainlessCast.standard(long.class, double.class, explicit); } else if (expected == Byte.class && explicit && internal) { - return Cast.boxTo(long.class, byte.class, true, byte.class); + return PainlessCast.boxTo(long.class, byte.class, true, byte.class); } else if (expected == Short.class && explicit && internal) { - return Cast.boxTo(long.class, short.class, true, short.class); + return PainlessCast.boxTo(long.class, 
short.class, true, short.class); } else if (expected == Character.class && explicit && internal) { - return Cast.boxTo(long.class, char.class, true, char.class); + return PainlessCast.boxTo(long.class, char.class, true, char.class); } else if (expected == Integer.class && explicit && internal) { - return Cast.boxTo(long.class, int.class, true, int.class); + return PainlessCast.boxTo(long.class, int.class, true, int.class); } else if (expected == Long.class && internal) { - return Cast.boxTo(long.class, long.class, explicit, long.class); + return PainlessCast.boxTo(long.class, long.class, explicit, long.class); } else if (expected == Float.class && internal) { - return Cast.boxTo(long.class, float.class, explicit, float.class); + return PainlessCast.boxTo(long.class, float.class, explicit, float.class); } else if (expected == Double.class && internal) { - return Cast.boxTo(long.class, double.class, explicit, double.class); + return PainlessCast.boxTo(long.class, double.class, explicit, double.class); } } else if (actual == float.class) { if (expected == def.class) { - return Cast.boxFrom(Float.class, def.class, explicit, float.class); + return PainlessCast.boxFrom(Float.class, def.class, explicit, float.class); } else if (expected == Object.class && internal) { - return Cast.boxFrom(Float.class, Object.class, explicit, float.class); + return PainlessCast.boxFrom(Float.class, Object.class, explicit, float.class); } else if (expected == Number.class && internal) { - return Cast.boxFrom(Float.class, Number.class, explicit, float.class); + return PainlessCast.boxFrom(Float.class, Number.class, explicit, float.class); } else if (expected == byte.class && explicit) { - return Cast.standard(float.class, byte.class, true); + return PainlessCast.standard(float.class, byte.class, true); } else if (expected == char.class && explicit) { - return Cast.standard(float.class, char.class, true); + return PainlessCast.standard(float.class, char.class, true); } else if (expected == short.class && explicit) { - return Cast.standard(float.class, short.class, true); + return PainlessCast.standard(float.class, short.class, true); } else if (expected == int.class && explicit) { - return Cast.standard(float.class, int.class, true); + return PainlessCast.standard(float.class, int.class, true); } else if (expected == long.class && explicit) { - return Cast.standard(float.class, long.class, true); + return PainlessCast.standard(float.class, long.class, true); } else if (expected == double.class) { - return Cast.standard(float.class, double.class, explicit); + return PainlessCast.standard(float.class, double.class, explicit); } else if (expected == Byte.class && explicit && internal) { - return Cast.boxTo(float.class, byte.class, true, byte.class); + return PainlessCast.boxTo(float.class, byte.class, true, byte.class); } else if (expected == Short.class && explicit && internal) { - return Cast.boxTo(float.class, short.class, true, short.class); + return PainlessCast.boxTo(float.class, short.class, true, short.class); } else if (expected == Character.class && explicit && internal) { - return Cast.boxTo(float.class, char.class, true, char.class); + return PainlessCast.boxTo(float.class, char.class, true, char.class); } else if (expected == Integer.class && explicit && internal) { - return Cast.boxTo(float.class, int.class, true, int.class); + return PainlessCast.boxTo(float.class, int.class, true, int.class); } else if (expected == Long.class && explicit && internal) { - return Cast.boxTo(float.class, long.class, true, 
long.class); + return PainlessCast.boxTo(float.class, long.class, true, long.class); } else if (expected == Float.class && internal) { - return Cast.boxTo(float.class, float.class, explicit, float.class); + return PainlessCast.boxTo(float.class, float.class, explicit, float.class); } else if (expected == Double.class && internal) { - return Cast.boxTo(float.class, double.class, explicit, double.class); + return PainlessCast.boxTo(float.class, double.class, explicit, double.class); } } else if (actual == double.class) { if (expected == def.class) { - return Cast.boxFrom(Double.class, def.class, explicit, double.class); + return PainlessCast.boxFrom(Double.class, def.class, explicit, double.class); } else if (expected == Object.class && internal) { - return Cast.boxFrom(Double.class, Object.class, explicit, double.class); + return PainlessCast.boxFrom(Double.class, Object.class, explicit, double.class); } else if (expected == Number.class && internal) { - return Cast.boxFrom(Double.class, Number.class, explicit, double.class); + return PainlessCast.boxFrom(Double.class, Number.class, explicit, double.class); } else if (expected == byte.class && explicit) { - return Cast.standard(double.class, byte.class, true); + return PainlessCast.standard(double.class, byte.class, true); } else if (expected == char.class && explicit) { - return Cast.standard(double.class, char.class, true); + return PainlessCast.standard(double.class, char.class, true); } else if (expected == short.class && explicit) { - return Cast.standard(double.class, short.class, true); + return PainlessCast.standard(double.class, short.class, true); } else if (expected == int.class && explicit) { - return Cast.standard(double.class, int.class, true); + return PainlessCast.standard(double.class, int.class, true); } else if (expected == long.class && explicit) { - return Cast.standard(double.class, long.class, true); + return PainlessCast.standard(double.class, long.class, true); } else if (expected == float.class && explicit) { - return Cast.standard(double.class, float.class, true); + return PainlessCast.standard(double.class, float.class, true); } else if (expected == Byte.class && explicit && internal) { - return Cast.boxTo(double.class, byte.class, true, byte.class); + return PainlessCast.boxTo(double.class, byte.class, true, byte.class); } else if (expected == Short.class && explicit && internal) { - return Cast.boxTo(double.class, short.class, true, short.class); + return PainlessCast.boxTo(double.class, short.class, true, short.class); } else if (expected == Character.class && explicit && internal) { - return Cast.boxTo(double.class, char.class, true, char.class); + return PainlessCast.boxTo(double.class, char.class, true, char.class); } else if (expected == Integer.class && explicit && internal) { - return Cast.boxTo(double.class, int.class, true, int.class); + return PainlessCast.boxTo(double.class, int.class, true, int.class); } else if (expected == Long.class && explicit && internal) { - return Cast.boxTo(double.class, long.class, true, long.class); + return PainlessCast.boxTo(double.class, long.class, true, long.class); } else if (expected == Float.class && explicit && internal) { - return Cast.boxTo(double.class, float.class, true, float.class); + return PainlessCast.boxTo(double.class, float.class, true, float.class); } else if (expected == Double.class && internal) { - return Cast.boxTo(double.class, double.class, explicit, double.class); + return PainlessCast.boxTo(double.class, double.class, explicit, double.class); 
} } else if (actual == Boolean.class) { if (expected == boolean.class && internal) { - return Cast.unboxFrom(boolean.class, boolean.class, explicit, boolean.class); + return PainlessCast.unboxFrom(boolean.class, boolean.class, explicit, boolean.class); } } else if (actual == Byte.class) { if (expected == byte.class && internal) { - return Cast.unboxFrom(byte.class, byte.class, explicit, byte.class); + return PainlessCast.unboxFrom(byte.class, byte.class, explicit, byte.class); } else if (expected == short.class && internal) { - return Cast.unboxFrom(byte.class, short.class, explicit, byte.class); + return PainlessCast.unboxFrom(byte.class, short.class, explicit, byte.class); } else if (expected == char.class && explicit && internal) { - return Cast.unboxFrom(byte.class, char.class, true, byte.class); + return PainlessCast.unboxFrom(byte.class, char.class, true, byte.class); } else if (expected == int.class && internal) { - return Cast.unboxFrom(byte.class, int.class, explicit, byte.class); + return PainlessCast.unboxFrom(byte.class, int.class, explicit, byte.class); } else if (expected == long.class && internal) { - return Cast.unboxFrom(byte.class, long.class, explicit, byte.class); + return PainlessCast.unboxFrom(byte.class, long.class, explicit, byte.class); } else if (expected == float.class && internal) { - return Cast.unboxFrom(byte.class, float.class, explicit, byte.class); + return PainlessCast.unboxFrom(byte.class, float.class, explicit, byte.class); } else if (expected == double.class && internal) { - return Cast.unboxFrom(byte.class, double.class, explicit, byte.class); + return PainlessCast.unboxFrom(byte.class, double.class, explicit, byte.class); } } else if (actual == Short.class) { if (expected == byte.class && explicit && internal) { - return Cast.unboxFrom(short.class, byte.class, true, short.class); + return PainlessCast.unboxFrom(short.class, byte.class, true, short.class); } else if (expected == short.class && internal) { - return Cast.unboxFrom(short.class, short.class, explicit, short.class); + return PainlessCast.unboxFrom(short.class, short.class, explicit, short.class); } else if (expected == char.class && explicit && internal) { - return Cast.unboxFrom(short.class, char.class, true, short.class); + return PainlessCast.unboxFrom(short.class, char.class, true, short.class); } else if (expected == int.class && internal) { - return Cast.unboxFrom(short.class, int.class, explicit, short.class); + return PainlessCast.unboxFrom(short.class, int.class, explicit, short.class); } else if (expected == long.class && internal) { - return Cast.unboxFrom(short.class, long.class, explicit, short.class); + return PainlessCast.unboxFrom(short.class, long.class, explicit, short.class); } else if (expected == float.class && internal) { - return Cast.unboxFrom(short.class, float.class, explicit, short.class); + return PainlessCast.unboxFrom(short.class, float.class, explicit, short.class); } else if (expected == double.class && internal) { - return Cast.unboxFrom(short.class, double.class, explicit, short.class); + return PainlessCast.unboxFrom(short.class, double.class, explicit, short.class); } } else if (actual == Character.class) { if (expected == byte.class && explicit && internal) { - return Cast.unboxFrom(char.class, byte.class, true, char.class); + return PainlessCast.unboxFrom(char.class, byte.class, true, char.class); } else if (expected == short.class && explicit && internal) { - return Cast.unboxFrom(char.class, short.class, true, char.class); + return 
PainlessCast.unboxFrom(char.class, short.class, true, char.class); } else if (expected == char.class && internal) { - return Cast.unboxFrom(char.class, char.class, explicit, char.class); + return PainlessCast.unboxFrom(char.class, char.class, explicit, char.class); } else if (expected == int.class && internal) { - return Cast.unboxFrom(char.class, int.class, explicit, char.class); + return PainlessCast.unboxFrom(char.class, int.class, explicit, char.class); } else if (expected == long.class && internal) { - return Cast.unboxFrom(char.class, long.class, explicit, char.class); + return PainlessCast.unboxFrom(char.class, long.class, explicit, char.class); } else if (expected == float.class && internal) { - return Cast.unboxFrom(char.class, float.class, explicit, char.class); + return PainlessCast.unboxFrom(char.class, float.class, explicit, char.class); } else if (expected == double.class && internal) { - return Cast.unboxFrom(char.class, double.class, explicit, char.class); + return PainlessCast.unboxFrom(char.class, double.class, explicit, char.class); } } else if (actual == Integer.class) { if (expected == byte.class && explicit && internal) { - return Cast.unboxFrom(int.class, byte.class, true, int.class); + return PainlessCast.unboxFrom(int.class, byte.class, true, int.class); } else if (expected == short.class && explicit && internal) { - return Cast.unboxFrom(int.class, short.class, true, int.class); + return PainlessCast.unboxFrom(int.class, short.class, true, int.class); } else if (expected == char.class && explicit && internal) { - return Cast.unboxFrom(int.class, char.class, true, int.class); + return PainlessCast.unboxFrom(int.class, char.class, true, int.class); } else if (expected == int.class && internal) { - return Cast.unboxFrom(int.class, int.class, explicit, int.class); + return PainlessCast.unboxFrom(int.class, int.class, explicit, int.class); } else if (expected == long.class && internal) { - return Cast.unboxFrom(int.class, long.class, explicit, int.class); + return PainlessCast.unboxFrom(int.class, long.class, explicit, int.class); } else if (expected == float.class && internal) { - return Cast.unboxFrom(int.class, float.class, explicit, int.class); + return PainlessCast.unboxFrom(int.class, float.class, explicit, int.class); } else if (expected == double.class && internal) { - return Cast.unboxFrom(int.class, double.class, explicit, int.class); + return PainlessCast.unboxFrom(int.class, double.class, explicit, int.class); } } else if (actual == Long.class) { if (expected == byte.class && explicit && internal) { - return Cast.unboxFrom(long.class, byte.class, true, long.class); + return PainlessCast.unboxFrom(long.class, byte.class, true, long.class); } else if (expected == short.class && explicit && internal) { - return Cast.unboxFrom(long.class, short.class, true, long.class); + return PainlessCast.unboxFrom(long.class, short.class, true, long.class); } else if (expected == char.class && explicit && internal) { - return Cast.unboxFrom(long.class, char.class, true, long.class); + return PainlessCast.unboxFrom(long.class, char.class, true, long.class); } else if (expected == int.class && explicit && internal) { - return Cast.unboxFrom(long.class, int.class, true, long.class); + return PainlessCast.unboxFrom(long.class, int.class, true, long.class); } else if (expected == long.class && internal) { - return Cast.unboxFrom(long.class, long.class, explicit, long.class); + return PainlessCast.unboxFrom(long.class, long.class, explicit, long.class); } else if (expected == 
float.class && internal) { - return Cast.unboxFrom(long.class, float.class, explicit, long.class); + return PainlessCast.unboxFrom(long.class, float.class, explicit, long.class); } else if (expected == double.class && internal) { - return Cast.unboxFrom(long.class, double.class, explicit, long.class); + return PainlessCast.unboxFrom(long.class, double.class, explicit, long.class); } } else if (actual == Float.class) { if (expected == byte.class && explicit && internal) { - return Cast.unboxFrom(float.class, byte.class, true, float.class); + return PainlessCast.unboxFrom(float.class, byte.class, true, float.class); } else if (expected == short.class && explicit && internal) { - return Cast.unboxFrom(float.class, short.class, true, float.class); + return PainlessCast.unboxFrom(float.class, short.class, true, float.class); } else if (expected == char.class && explicit && internal) { - return Cast.unboxFrom(float.class, char.class, true, float.class); + return PainlessCast.unboxFrom(float.class, char.class, true, float.class); } else if (expected == int.class && explicit && internal) { - return Cast.unboxFrom(float.class, int.class, true, float.class); + return PainlessCast.unboxFrom(float.class, int.class, true, float.class); } else if (expected == long.class && explicit && internal) { - return Cast.unboxFrom(float.class, long.class, true, float.class); + return PainlessCast.unboxFrom(float.class, long.class, true, float.class); } else if (expected == float.class && internal) { - return Cast.unboxFrom(float.class, float.class, explicit, float.class); + return PainlessCast.unboxFrom(float.class, float.class, explicit, float.class); } else if (expected == double.class && internal) { - return Cast.unboxFrom(float.class, double.class, explicit, float.class); + return PainlessCast.unboxFrom(float.class, double.class, explicit, float.class); } } else if (actual == Double.class) { if (expected == byte.class && explicit && internal) { - return Cast.unboxFrom(double.class, byte.class, true, double.class); + return PainlessCast.unboxFrom(double.class, byte.class, true, double.class); } else if (expected == short.class && explicit && internal) { - return Cast.unboxFrom(double.class, short.class, true, double.class); + return PainlessCast.unboxFrom(double.class, short.class, true, double.class); } else if (expected == char.class && explicit && internal) { - return Cast.unboxFrom(double.class, char.class, true, double.class); + return PainlessCast.unboxFrom(double.class, char.class, true, double.class); } else if (expected == int.class && explicit && internal) { - return Cast.unboxFrom(double.class, int.class, true, double.class); + return PainlessCast.unboxFrom(double.class, int.class, true, double.class); } else if (expected == long.class && explicit && internal) { - return Cast.unboxFrom(double.class, long.class, true, double.class); + return PainlessCast.unboxFrom(double.class, long.class, true, double.class); } else if (expected == float.class && explicit && internal) { - return Cast.unboxFrom(double.class, float.class, true, double.class); + return PainlessCast.unboxFrom(double.class, float.class, true, double.class); } else if (expected == double.class && internal) { - return Cast.unboxFrom(double.class, double.class, explicit, double.class); + return PainlessCast.unboxFrom(double.class, double.class, explicit, double.class); } } @@ -462,14 +463,14 @@ public static Cast getLegalCast(Location location, Class actual, Class exp (actual != void.class && expected == def.class) || 
expected.isAssignableFrom(actual) || (actual.isAssignableFrom(expected) && explicit)) { - return Cast.standard(actual, expected, explicit); + return PainlessCast.standard(actual, expected, explicit); } else { throw location.createError(new ClassCastException( - "Cannot cast from [" + Definition.ClassToName(actual) + "] to [" + Definition.ClassToName(expected) + "].")); + "Cannot cast from [" + PainlessLookup.ClassToName(actual) + "] to [" + PainlessLookup.ClassToName(expected) + "].")); } } - public static Object constCast(Location location, Object constant, Cast cast) { + public static Object constCast(Location location, Object constant, PainlessCast cast) { Class fsort = cast.from; Class tsort = cast.to; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java index 8102016828c30..03345fcfff35a 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Compiler.java @@ -21,6 +21,7 @@ import org.elasticsearch.bootstrap.BootstrapInfo; import org.elasticsearch.painless.antlr.Walker; +import org.elasticsearch.painless.lookup.PainlessLookup; import org.elasticsearch.painless.node.SSource; import org.elasticsearch.painless.spi.Whitelist; import org.objectweb.asm.util.Printer; @@ -70,26 +71,26 @@ final class Compiler { */ static final class Loader extends SecureClassLoader { private final AtomicInteger lambdaCounter = new AtomicInteger(0); - private final Definition definition; + private final PainlessLookup painlessLookup; /** * @param parent The parent ClassLoader. */ - Loader(ClassLoader parent, Definition definition) { + Loader(ClassLoader parent, PainlessLookup painlessLookup) { super(parent); - this.definition = definition; + this.painlessLookup = painlessLookup; } /** * Will check to see if the {@link Class} has already been loaded when - * the {@link Definition} was initially created. Allows for {@link Whitelist}ed + * the {@link PainlessLookup} was initially created. Allows for {@link Whitelist}ed * classes to be loaded from other modules/plugins without a direct relationship * to the module's/plugin's {@link ClassLoader}. */ @Override public Class findClass(String name) throws ClassNotFoundException { - Class found = definition.getClassFromBinaryName(name); + Class found = painlessLookup.getClassFromBinaryName(name); return found != null ? found : super.findClass(name); } @@ -135,10 +136,10 @@ int newLambdaIdentifier() { /** * Return a new {@link Loader} for a script using the - * {@link Compiler}'s specified {@link Definition}. + * {@link Compiler}'s specified {@link PainlessLookup}. */ public Loader createLoader(ClassLoader parent) { - return new Loader(parent, definition); + return new Loader(parent, painlessLookup); } /** @@ -149,16 +150,16 @@ public Loader createLoader(ClassLoader parent) { /** * The whitelist the script will use. */ - private final Definition definition; + private final PainlessLookup painlessLookup; /** * Standard constructor. * @param base The class/interface the script is guaranteed to derive/implement. - * @param definition The whitelist the script will use. + * @param painlessLookup The whitelist the script will use. 
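A minimal sketch of the class-loading strategy the Loader above implements, with hypothetical names (LookupFirstLoader, classesByBinaryName) standing in for the real types: resolve against the classes captured when the whitelist was built, and fall back to ordinary parent-first loading only when the lookup has no entry.

    import java.security.SecureClassLoader;
    import java.util.Map;

    // Hypothetical stand-in for the PainlessLookup consultation in Compiler.Loader.
    final class LookupFirstLoader extends SecureClassLoader {
        private final Map<String, Class<?>> classesByBinaryName; // plays the role of PainlessLookup

        LookupFirstLoader(ClassLoader parent, Map<String, Class<?>> classesByBinaryName) {
            super(parent);
            this.classesByBinaryName = classesByBinaryName;
        }

        @Override
        protected Class<?> findClass(String name) throws ClassNotFoundException {
            // whitelisted classes win, so plugins can expose classes without
            // a direct ClassLoader relationship; everything else delegates
            Class<?> found = classesByBinaryName.get(name);
            return found != null ? found : super.findClass(name);
        }
    }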
*/ - Compiler(Class base, Definition definition) { + Compiler(Class base, PainlessLookup painlessLookup) { this.base = base; - this.definition = definition; + this.painlessLookup = painlessLookup; } /** @@ -176,10 +177,10 @@ Constructor compile(Loader loader, MainMethodReserved reserved, String name, " plugin if a script longer than this length is a requirement."); } - ScriptClassInfo scriptClassInfo = new ScriptClassInfo(definition, base); - SSource root = Walker.buildPainlessTree(scriptClassInfo, reserved, name, source, settings, definition, + ScriptClassInfo scriptClassInfo = new ScriptClassInfo(painlessLookup, base); + SSource root = Walker.buildPainlessTree(scriptClassInfo, reserved, name, source, settings, painlessLookup, null); - root.analyze(definition); + root.analyze(painlessLookup); root.write(); try { @@ -187,7 +188,7 @@ Constructor compile(Loader loader, MainMethodReserved reserved, String name, clazz.getField("$NAME").set(null, name); clazz.getField("$SOURCE").set(null, source); clazz.getField("$STATEMENTS").set(null, root.getStatements()); - clazz.getField("$DEFINITION").set(null, definition); + clazz.getField("$DEFINITION").set(null, painlessLookup); return clazz.getConstructors()[0]; } catch (Exception exception) { // Catch everything to let the user know this is something caused internally. @@ -208,10 +209,10 @@ byte[] compile(String name, String source, CompilerSettings settings, Printer de " plugin if a script longer than this length is a requirement."); } - ScriptClassInfo scriptClassInfo = new ScriptClassInfo(definition, base); - SSource root = Walker.buildPainlessTree(scriptClassInfo, new MainMethodReserved(), name, source, settings, definition, + ScriptClassInfo scriptClassInfo = new ScriptClassInfo(painlessLookup, base); + SSource root = Walker.buildPainlessTree(scriptClassInfo, new MainMethodReserved(), name, source, settings, painlessLookup, debugStream); - root.analyze(definition); + root.analyze(painlessLookup); root.write(); return root.getBytes(); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java index 8694ff7903859..f006b57f46bad 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java @@ -19,13 +19,14 @@ package org.elasticsearch.painless; -import org.elasticsearch.painless.Definition.Method; -import org.elasticsearch.painless.Definition.Struct; +import org.elasticsearch.painless.lookup.PainlessLookup; +import org.elasticsearch.painless.lookup.PainlessMethod; +import org.elasticsearch.painless.lookup.PainlessClass; +import org.elasticsearch.painless.lookup.PainlessMethodKey; import java.lang.invoke.CallSite; import java.lang.invoke.MethodHandle; import java.lang.invoke.MethodHandles; -import java.lang.invoke.MethodHandles.Lookup; import java.lang.invoke.MethodType; import java.util.BitSet; import java.util.Collections; @@ -60,14 +61,15 @@ public final class Def { */ @SuppressWarnings("unused") // getArrayLength() methods are are actually used, javac just does not know :) private static final class ArrayLengthHelper { - private static final Lookup PRIV_LOOKUP = MethodHandles.lookup(); + private static final MethodHandles.Lookup PRIVATE_METHOD_HANDLES_LOOKUP = MethodHandles.lookup(); private static final Map,MethodHandle> ARRAY_TYPE_MH_MAPPING = Collections.unmodifiableMap( Stream.of(boolean[].class, byte[].class, short[].class, int[].class, 
long[].class, char[].class, float[].class, double[].class, Object[].class) .collect(Collectors.toMap(Function.identity(), type -> { try { - return PRIV_LOOKUP.findStatic(PRIV_LOOKUP.lookupClass(), "getArrayLength", MethodType.methodType(int.class, type)); + return PRIVATE_METHOD_HANDLES_LOOKUP.findStatic( + PRIVATE_METHOD_HANDLES_LOOKUP.lookupClass(), "getArrayLength", MethodType.methodType(int.class, type)); } catch (ReflectiveOperationException e) { throw new AssertionError(e); } @@ -116,17 +118,17 @@ private ArrayLengthHelper() {} static final MethodHandle JAVA9_ARRAY_LENGTH_MH_FACTORY; static { - final Lookup lookup = MethodHandles.publicLookup(); + final MethodHandles.Lookup methodHandlesLookup = MethodHandles.publicLookup(); try { - MAP_GET = lookup.findVirtual(Map.class , "get", MethodType.methodType(Object.class, Object.class)); - MAP_PUT = lookup.findVirtual(Map.class , "put", MethodType.methodType(Object.class, Object.class, Object.class)); - LIST_GET = lookup.findVirtual(List.class, "get", MethodType.methodType(Object.class, int.class)); - LIST_SET = lookup.findVirtual(List.class, "set", MethodType.methodType(Object.class, int.class, Object.class)); - ITERATOR = lookup.findVirtual(Iterable.class, "iterator", MethodType.methodType(Iterator.class)); - MAP_INDEX_NORMALIZE = lookup.findStatic(Def.class, "mapIndexNormalize", + MAP_GET = methodHandlesLookup.findVirtual(Map.class , "get", MethodType.methodType(Object.class, Object.class)); + MAP_PUT = methodHandlesLookup.findVirtual(Map.class , "put", MethodType.methodType(Object.class, Object.class, Object.class)); + LIST_GET = methodHandlesLookup.findVirtual(List.class, "get", MethodType.methodType(Object.class, int.class)); + LIST_SET = methodHandlesLookup.findVirtual(List.class, "set", MethodType.methodType(Object.class, int.class, Object.class)); + ITERATOR = methodHandlesLookup.findVirtual(Iterable.class, "iterator", MethodType.methodType(Iterator.class)); + MAP_INDEX_NORMALIZE = methodHandlesLookup.findStatic(Def.class, "mapIndexNormalize", MethodType.methodType(Object.class, Map.class, Object.class)); - LIST_INDEX_NORMALIZE = lookup.findStatic(Def.class, "listIndexNormalize", + LIST_INDEX_NORMALIZE = methodHandlesLookup.findStatic(Def.class, "listIndexNormalize", MethodType.methodType(int.class, List.class, int.class)); } catch (final ReflectiveOperationException roe) { throw new AssertionError(roe); @@ -136,7 +138,7 @@ private ArrayLengthHelper() {} // https://bugs.openjdk.java.net/browse/JDK-8156915 MethodHandle arrayLengthMHFactory; try { - arrayLengthMHFactory = lookup.findStatic(MethodHandles.class, "arrayLength", + arrayLengthMHFactory = methodHandlesLookup.findStatic(MethodHandles.class, "arrayLength", MethodType.methodType(MethodHandle.class, Class.class)); } catch (final ReflectiveOperationException roe) { arrayLengthMHFactory = null; @@ -174,31 +176,31 @@ static MethodHandle arrayLengthGetter(Class arrayType) { * until it finds a matching whitelisted method. If one is not found, it throws an exception. * Otherwise it returns the matching method. *
<p>
      - * @params definition the whitelist + * @param painlessLookup the whitelist * @param receiverClass Class of the object to invoke the method on. * @param name Name of the method. * @param arity arity of method * @return matching method to invoke. never returns null. * @throws IllegalArgumentException if no matching whitelisted method was found. */ - static Method lookupMethodInternal(Definition definition, Class receiverClass, String name, int arity) { - Definition.MethodKey key = new Definition.MethodKey(name, arity); + static PainlessMethod lookupMethodInternal(PainlessLookup painlessLookup, Class receiverClass, String name, int arity) { + PainlessMethodKey key = new PainlessMethodKey(name, arity); // check whitelist for matching method for (Class clazz = receiverClass; clazz != null; clazz = clazz.getSuperclass()) { - Struct struct = definition.getPainlessStructFromJavaClass(clazz); + PainlessClass struct = painlessLookup.getPainlessStructFromJavaClass(clazz); if (struct != null) { - Method method = struct.methods.get(key); + PainlessMethod method = struct.methods.get(key); if (method != null) { return method; } } for (Class iface : clazz.getInterfaces()) { - struct = definition.getPainlessStructFromJavaClass(iface); + struct = painlessLookup.getPainlessStructFromJavaClass(iface); if (struct != null) { - Method method = struct.methods.get(key); + PainlessMethod method = struct.methods.get(key); if (method != null) { return method; } @@ -220,8 +222,8 @@ static Method lookupMethodInternal(Definition definition, Class receiverClass * until it finds a matching whitelisted method. If one is not found, it throws an exception. * Otherwise it returns a handle to the matching method. *
<p>
      - * @param definition the whitelist - * @param lookup caller's lookup + * @param painlessLookup the whitelist + * @param methodHandlesLookup caller's lookup * @param callSiteType callsite's type * @param receiverClass Class of the object to invoke the method on. * @param name Name of the method. @@ -230,13 +232,13 @@ static Method lookupMethodInternal(Definition definition, Class receiverClass * @throws IllegalArgumentException if no matching whitelisted method was found. * @throws Throwable if a method reference cannot be converted to an functional interface */ - static MethodHandle lookupMethod(Definition definition, Lookup lookup, MethodType callSiteType, - Class receiverClass, String name, Object args[]) throws Throwable { + static MethodHandle lookupMethod(PainlessLookup painlessLookup, MethodHandles.Lookup methodHandlesLookup, MethodType callSiteType, + Class receiverClass, String name, Object args[]) throws Throwable { String recipeString = (String) args[0]; int numArguments = callSiteType.parameterCount(); // simple case: no lambdas if (recipeString.isEmpty()) { - return lookupMethodInternal(definition, receiverClass, name, numArguments - 1).handle; + return lookupMethodInternal(painlessLookup, receiverClass, name, numArguments - 1).handle; } // convert recipe string to a bitset for convenience (the code below should be refactored...) @@ -259,7 +261,7 @@ static MethodHandle lookupMethod(Definition definition, Lookup lookup, MethodTyp // lookup the method with the proper arity, then we know everything (e.g. interface types of parameters). // based on these we can finally link any remaining lambdas that were deferred. - Method method = lookupMethodInternal(definition, receiverClass, name, arity); + PainlessMethod method = lookupMethodInternal(painlessLookup, receiverClass, name, arity); MethodHandle handle = method.handle; int replaced = 0; @@ -283,8 +285,8 @@ static MethodHandle lookupMethod(Definition definition, Lookup lookup, MethodTyp if (signature.charAt(0) == 'S') { // the implementation is strongly typed, now that we know the interface type, // we have everything. - filter = lookupReferenceInternal(definition, - lookup, + filter = lookupReferenceInternal(painlessLookup, + methodHandlesLookup, interfaceType, type, call, @@ -294,13 +296,13 @@ static MethodHandle lookupMethod(Definition definition, Lookup lookup, MethodTyp // this is dynamically based on the receiver type (and cached separately, underneath // this cache). It won't blow up since we never nest here (just references) MethodType nestedType = MethodType.methodType(interfaceType, captures); - CallSite nested = DefBootstrap.bootstrap(definition, - lookup, + CallSite nested = DefBootstrap.bootstrap(painlessLookup, + methodHandlesLookup, call, nestedType, 0, DefBootstrap.REFERENCE, - Definition.ClassToName(interfaceType)); + PainlessLookup.ClassToName(interfaceType)); filter = nested.dynamicInvoker(); } else { throw new AssertionError(); @@ -322,37 +324,37 @@ static MethodHandle lookupMethod(Definition definition, Lookup lookup, MethodTyp * This is just like LambdaMetaFactory, only with a dynamic type. The interface type is known, * so we simply need to lookup the matching implementation method based on receiver type. 
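The receiver walk that lookupMethodInternal performs above generalizes to any per-class registry. A condensed, self-contained sketch, assuming a plain map in place of PainlessLookup and a string key in place of the name/arity method key:

    import java.util.Map;

    // Hypothetical sketch of the walk in lookupMethodInternal: consult the whitelist
    // entry for the receiver class, then each declared interface, then repeat up the
    // superclass chain; anything not found along the way is not whitelisted.
    final class ReceiverWalk {
        static <T> T find(Map<Class<?>, Map<String, T>> whitelist, Class<?> receiver, String key) {
            for (Class<?> clazz = receiver; clazz != null; clazz = clazz.getSuperclass()) {
                T found = lookupIn(whitelist.get(clazz), key);
                if (found != null) {
                    return found;
                }
                for (Class<?> iface : clazz.getInterfaces()) {
                    found = lookupIn(whitelist.get(iface), key);
                    if (found != null) {
                        return found;
                    }
                }
            }
            throw new IllegalArgumentException("no whitelisted entry [" + key + "] for [" + receiver.getName() + "]");
        }

        private static <T> T lookupIn(Map<String, T> struct, String key) {
            return struct == null ? null : struct.get(key);
        }
    }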
*/ - static MethodHandle lookupReference(Definition definition, Lookup lookup, String interfaceClass, - Class receiverClass, String name) throws Throwable { - Class interfaceType = definition.getJavaClassFromPainlessType(interfaceClass); - Method interfaceMethod = definition.getPainlessStructFromJavaClass(interfaceType).functionalMethod; + static MethodHandle lookupReference(PainlessLookup painlessLookup, MethodHandles.Lookup methodHandlesLookup, String interfaceClass, + Class receiverClass, String name) throws Throwable { + Class interfaceType = painlessLookup.getJavaClassFromPainlessType(interfaceClass); + PainlessMethod interfaceMethod = painlessLookup.getPainlessStructFromJavaClass(interfaceType).functionalMethod; if (interfaceMethod == null) { throw new IllegalArgumentException("Class [" + interfaceClass + "] is not a functional interface"); } int arity = interfaceMethod.arguments.size(); - Method implMethod = lookupMethodInternal(definition, receiverClass, name, arity); - return lookupReferenceInternal(definition, lookup, interfaceType, implMethod.owner.name, + PainlessMethod implMethod = lookupMethodInternal(painlessLookup, receiverClass, name, arity); + return lookupReferenceInternal(painlessLookup, methodHandlesLookup, interfaceType, implMethod.owner.name, implMethod.name, receiverClass); } /** Returns a method handle to an implementation of clazz, given method reference signature. */ - private static MethodHandle lookupReferenceInternal(Definition definition, Lookup lookup, - Class clazz, String type, String call, Class... captures) + private static MethodHandle lookupReferenceInternal(PainlessLookup painlessLookup, MethodHandles.Lookup methodHandlesLookup, + Class clazz, String type, String call, Class... captures) throws Throwable { final FunctionRef ref; if ("this".equals(type)) { // user written method - Method interfaceMethod = definition.getPainlessStructFromJavaClass(clazz).functionalMethod; + PainlessMethod interfaceMethod = painlessLookup.getPainlessStructFromJavaClass(clazz).functionalMethod; if (interfaceMethod == null) { throw new IllegalArgumentException("Cannot convert function reference [" + type + "::" + call + "] " + - "to [" + Definition.ClassToName(clazz) + "], not a functional interface"); + "to [" + PainlessLookup.ClassToName(clazz) + "], not a functional interface"); } int arity = interfaceMethod.arguments.size() + captures.length; final MethodHandle handle; try { - MethodHandle accessor = lookup.findStaticGetter(lookup.lookupClass(), - getUserFunctionHandleFieldName(call, arity), - MethodHandle.class); + MethodHandle accessor = methodHandlesLookup.findStaticGetter(methodHandlesLookup.lookupClass(), + getUserFunctionHandleFieldName(call, arity), + MethodHandle.class); handle = (MethodHandle)accessor.invokeExact(); } catch (NoSuchFieldException | IllegalAccessException e) { // is it a synthetic method? If we generated the method ourselves, be more helpful. 
It can only fail @@ -366,10 +368,10 @@ private static MethodHandle lookupReferenceInternal(Definition definition, Looku ref = new FunctionRef(clazz, interfaceMethod, call, handle.type(), captures.length); } else { // whitelist lookup - ref = new FunctionRef(definition, clazz, type, call, captures.length); + ref = new FunctionRef(painlessLookup, clazz, type, call, captures.length); } final CallSite callSite = LambdaBootstrap.lambdaBootstrap( - lookup, + methodHandlesLookup, ref.interfaceMethodName, ref.factoryMethodType, ref.interfaceMethodType, @@ -407,16 +409,16 @@ public static String getUserFunctionHandleFieldName(String name, int arity) { * until it finds a matching whitelisted getter. If one is not found, it throws an exception. * Otherwise it returns a handle to the matching getter. *
<p>
      - * @param definition the whitelist + * @param painlessLookup the whitelist * @param receiverClass Class of the object to retrieve the field from. * @param name Name of the field. * @return pointer to matching field. never returns null. * @throws IllegalArgumentException if no matching whitelisted field was found. */ - static MethodHandle lookupGetter(Definition definition, Class receiverClass, String name) { + static MethodHandle lookupGetter(PainlessLookup painlessLookup, Class receiverClass, String name) { // first try whitelist for (Class clazz = receiverClass; clazz != null; clazz = clazz.getSuperclass()) { - Struct struct = definition.getPainlessStructFromJavaClass(clazz); + PainlessClass struct = painlessLookup.getPainlessStructFromJavaClass(clazz); if (struct != null) { MethodHandle handle = struct.getters.get(name); @@ -426,7 +428,7 @@ static MethodHandle lookupGetter(Definition definition, Class receiverClass, } for (final Class iface : clazz.getInterfaces()) { - struct = definition.getPainlessStructFromJavaClass(iface); + struct = painlessLookup.getPainlessStructFromJavaClass(iface); if (struct != null) { MethodHandle handle = struct.getters.get(name); @@ -478,16 +480,16 @@ static MethodHandle lookupGetter(Definition definition, Class receiverClass, * until it finds a matching whitelisted setter. If one is not found, it throws an exception. * Otherwise it returns a handle to the matching setter. *
<p>
      - * @param definition the whitelist + * @param painlessLookup the whitelist * @param receiverClass Class of the object to retrieve the field from. * @param name Name of the field. * @return pointer to matching field. never returns null. * @throws IllegalArgumentException if no matching whitelisted field was found. */ - static MethodHandle lookupSetter(Definition definition, Class receiverClass, String name) { + static MethodHandle lookupSetter(PainlessLookup painlessLookup, Class receiverClass, String name) { // first try whitelist for (Class clazz = receiverClass; clazz != null; clazz = clazz.getSuperclass()) { - Struct struct = definition.getPainlessStructFromJavaClass(clazz); + PainlessClass struct = painlessLookup.getPainlessStructFromJavaClass(clazz); if (struct != null) { MethodHandle handle = struct.setters.get(name); @@ -497,7 +499,7 @@ static MethodHandle lookupSetter(Definition definition, Class receiverClass, } for (final Class iface : clazz.getInterfaces()) { - struct = definition.getPainlessStructFromJavaClass(iface); + struct = painlessLookup.getPainlessStructFromJavaClass(iface); if (struct != null) { MethodHandle handle = struct.setters.get(name); @@ -592,14 +594,15 @@ static MethodHandle lookupArrayLoad(Class receiverClass) { */ @SuppressWarnings("unused") // iterator() methods are are actually used, javac just does not know :) private static final class ArrayIteratorHelper { - private static final Lookup PRIV_LOOKUP = MethodHandles.lookup(); + private static final MethodHandles.Lookup PRIVATE_METHOD_HANDLES_LOOKUP = MethodHandles.lookup(); private static final Map,MethodHandle> ARRAY_TYPE_MH_MAPPING = Collections.unmodifiableMap( Stream.of(boolean[].class, byte[].class, short[].class, int[].class, long[].class, char[].class, float[].class, double[].class, Object[].class) .collect(Collectors.toMap(Function.identity(), type -> { try { - return PRIV_LOOKUP.findStatic(PRIV_LOOKUP.lookupClass(), "iterator", MethodType.methodType(Iterator.class, type)); + return PRIVATE_METHOD_HANDLES_LOOKUP.findStatic( + PRIVATE_METHOD_HANDLES_LOOKUP.lookupClass(), "iterator", MethodType.methodType(Iterator.class, type)); } catch (ReflectiveOperationException e) { throw new AssertionError(e); } @@ -860,14 +863,14 @@ public static int listIndexNormalize(final List value, int index) { */ @SuppressWarnings("unused") // normalizeIndex() methods are are actually used, javac just does not know :) private static final class ArrayIndexNormalizeHelper { - private static final Lookup PRIV_LOOKUP = MethodHandles.lookup(); + private static final MethodHandles.Lookup PRIVATE_METHOD_HANDLES_LOOKUP = MethodHandles.lookup(); private static final Map,MethodHandle> ARRAY_TYPE_MH_MAPPING = Collections.unmodifiableMap( Stream.of(boolean[].class, byte[].class, short[].class, int[].class, long[].class, char[].class, float[].class, double[].class, Object[].class) .collect(Collectors.toMap(Function.identity(), type -> { try { - return PRIV_LOOKUP.findStatic(PRIV_LOOKUP.lookupClass(), "normalizeIndex", + return PRIVATE_METHOD_HANDLES_LOOKUP.findStatic(PRIVATE_METHOD_HANDLES_LOOKUP.lookupClass(), "normalizeIndex", MethodType.methodType(int.class, type, int.class)); } catch (ReflectiveOperationException e) { throw new AssertionError(e); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/DefBootstrap.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/DefBootstrap.java index 9c7c7f631b68d..2fadaf30964a6 100644 --- 
a/modules/lang-painless/src/main/java/org/elasticsearch/painless/DefBootstrap.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/DefBootstrap.java @@ -20,11 +20,11 @@ package org.elasticsearch.painless; import org.elasticsearch.common.SuppressForbidden; +import org.elasticsearch.painless.lookup.PainlessLookup; import java.lang.invoke.CallSite; import java.lang.invoke.MethodHandle; import java.lang.invoke.MethodHandles; -import java.lang.invoke.MethodHandles.Lookup; import java.lang.invoke.MethodType; import java.lang.invoke.MutableCallSite; import java.lang.invoke.WrongMethodTypeException; @@ -104,20 +104,21 @@ static final class PIC extends MutableCallSite { /** maximum number of types before we go megamorphic */ static final int MAX_DEPTH = 5; - private final Definition definition; - private final Lookup lookup; + private final PainlessLookup painlessLookup; + private final MethodHandles.Lookup methodHandlesLookup; private final String name; private final int flavor; private final Object[] args; int depth; // pkg-protected for testing - PIC(Definition definition, Lookup lookup, String name, MethodType type, int initialDepth, int flavor, Object[] args) { + PIC(PainlessLookup painlessLookup, MethodHandles.Lookup methodHandlesLookup, + String name, MethodType type, int initialDepth, int flavor, Object[] args) { super(type); if (type.parameterType(0) != Object.class) { throw new BootstrapMethodError("The receiver type (1st arg) of invokedynamic descriptor must be Object."); } - this.definition = definition; - this.lookup = lookup; + this.painlessLookup = painlessLookup; + this.methodHandlesLookup = methodHandlesLookup; this.name = name; this.flavor = flavor; this.args = args; @@ -144,11 +145,11 @@ static boolean checkClass(Class clazz, Object receiver) { private MethodHandle lookup(int flavor, String name, Class receiver) throws Throwable { switch(flavor) { case METHOD_CALL: - return Def.lookupMethod(definition, lookup, type(), receiver, name, args); + return Def.lookupMethod(painlessLookup, methodHandlesLookup, type(), receiver, name, args); case LOAD: - return Def.lookupGetter(definition, receiver, name); + return Def.lookupGetter(painlessLookup, receiver, name); case STORE: - return Def.lookupSetter(definition, receiver, name); + return Def.lookupSetter(painlessLookup, receiver, name); case ARRAY_LOAD: return Def.lookupArrayLoad(receiver); case ARRAY_STORE: @@ -156,7 +157,7 @@ private MethodHandle lookup(int flavor, String name, Class receiver) throws T case ITERATOR: return Def.lookupIterator(receiver); case REFERENCE: - return Def.lookupReference(definition, lookup, (String) args[0], receiver, name); + return Def.lookupReference(painlessLookup, methodHandlesLookup, (String) args[0], receiver, name); case INDEX_NORMALIZE: return Def.lookupIndexNormalize(receiver); default: throw new AssertionError(); @@ -216,17 +217,17 @@ Object fallback(final Object[] callArgs) throws Throwable { private static final MethodHandle FALLBACK; private static final MethodHandle MEGAMORPHIC_LOOKUP; static { - final Lookup lookup = MethodHandles.lookup(); - final Lookup publicLookup = MethodHandles.publicLookup(); + final MethodHandles.Lookup methodHandlesLookup = MethodHandles.lookup(); + final MethodHandles.Lookup publicMethodHandlesLookup = MethodHandles.publicLookup(); try { - CHECK_CLASS = lookup.findStatic(lookup.lookupClass(), "checkClass", - MethodType.methodType(boolean.class, Class.class, Object.class)); - FALLBACK = lookup.findVirtual(lookup.lookupClass(), "fallback", + 
CHECK_CLASS = methodHandlesLookup.findStatic(methodHandlesLookup.lookupClass(), "checkClass", + MethodType.methodType(boolean.class, Class.class, Object.class)); + FALLBACK = methodHandlesLookup.findVirtual(methodHandlesLookup.lookupClass(), "fallback", MethodType.methodType(Object.class, Object[].class)); - MethodHandle mh = publicLookup.findVirtual(ClassValue.class, "get", + MethodHandle mh = publicMethodHandlesLookup.findVirtual(ClassValue.class, "get", MethodType.methodType(Object.class, Class.class)); mh = MethodHandles.filterArguments(mh, 1, - publicLookup.findVirtual(Object.class, "getClass", MethodType.methodType(Class.class))); + publicMethodHandlesLookup.findVirtual(Object.class, "getClass", MethodType.methodType(Class.class))); MEGAMORPHIC_LOOKUP = mh.asType(mh.type().changeReturnType(MethodHandle.class)); } catch (ReflectiveOperationException e) { throw new AssertionError(e); @@ -402,16 +403,16 @@ static boolean checkBoth(Class left, Class right, Object leftObject, Objec private static final MethodHandle CHECK_BOTH; private static final MethodHandle FALLBACK; static { - final Lookup lookup = MethodHandles.lookup(); + final MethodHandles.Lookup methodHandlesLookup = MethodHandles.lookup(); try { - CHECK_LHS = lookup.findStatic(lookup.lookupClass(), "checkLHS", - MethodType.methodType(boolean.class, Class.class, Object.class)); - CHECK_RHS = lookup.findStatic(lookup.lookupClass(), "checkRHS", - MethodType.methodType(boolean.class, Class.class, Class.class, Object.class, Object.class)); - CHECK_BOTH = lookup.findStatic(lookup.lookupClass(), "checkBoth", - MethodType.methodType(boolean.class, Class.class, Class.class, Object.class, Object.class)); - FALLBACK = lookup.findVirtual(lookup.lookupClass(), "fallback", - MethodType.methodType(Object.class, Object[].class)); + CHECK_LHS = methodHandlesLookup.findStatic(methodHandlesLookup.lookupClass(), "checkLHS", + MethodType.methodType(boolean.class, Class.class, Object.class)); + CHECK_RHS = methodHandlesLookup.findStatic(methodHandlesLookup.lookupClass(), "checkRHS", + MethodType.methodType(boolean.class, Class.class, Class.class, Object.class, Object.class)); + CHECK_BOTH = methodHandlesLookup.findStatic(methodHandlesLookup.lookupClass(), "checkBoth", + MethodType.methodType(boolean.class, Class.class, Class.class, Object.class, Object.class)); + FALLBACK = methodHandlesLookup.findVirtual(methodHandlesLookup.lookupClass(), "fallback", + MethodType.methodType(Object.class, Object[].class)); } catch (ReflectiveOperationException e) { throw new AssertionError(e); } @@ -427,12 +428,12 @@ static boolean checkBoth(Class left, Class right, Object leftObject, Objec *
<ul>
<li>{@code flavor}: type of dynamic call it is (and which part of whitelist to look at). *
<li>{@code args}: flavor-specific args. *
    - * And we take the {@link Definition} used to compile the script for whitelist checking. + * And we take the {@link PainlessLookup} used to compile the script for whitelist checking. *
<p>
    * see https://docs.oracle.com/javase/specs/jvms/se7/html/jvms-6.html#jvms-6.5.invokedynamic */ - public static CallSite bootstrap(Definition definition, Lookup lookup, String name, MethodType type, int initialDepth, int flavor, - Object... args) { + public static CallSite bootstrap(PainlessLookup painlessLookup, MethodHandles.Lookup methodHandlesLookup, String name, + MethodType type, int initialDepth, int flavor, Object... args) { // validate arguments switch(flavor) { // "function-call" like things get a polymorphic cache @@ -451,7 +452,7 @@ public static CallSite bootstrap(Definition definition, Lookup lookup, String na if (args.length != numLambdas + 1) { throw new BootstrapMethodError("Illegal number of parameters: expected " + numLambdas + " references"); } - return new PIC(definition, lookup, name, type, initialDepth, flavor, args); + return new PIC(painlessLookup, methodHandlesLookup, name, type, initialDepth, flavor, args); case LOAD: case STORE: case ARRAY_LOAD: @@ -461,7 +462,7 @@ public static CallSite bootstrap(Definition definition, Lookup lookup, String na if (args.length > 0) { throw new BootstrapMethodError("Illegal static bootstrap parameters for flavor: " + flavor); } - return new PIC(definition, lookup, name, type, initialDepth, flavor, args); + return new PIC(painlessLookup, methodHandlesLookup, name, type, initialDepth, flavor, args); case REFERENCE: if (args.length != 1) { throw new BootstrapMethodError("Invalid number of parameters for reference call"); @@ -469,7 +470,7 @@ public static CallSite bootstrap(Definition definition, Lookup lookup, String na if (args[0] instanceof String == false) { throw new BootstrapMethodError("Illegal parameter for reference call: " + args[0]); } - return new PIC(definition, lookup, name, type, initialDepth, flavor, args); + return new PIC(painlessLookup, methodHandlesLookup, name, type, initialDepth, flavor, args); // operators get monomorphic cache, with a generic impl for a fallback case UNARY_OPERATOR: diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/DefMath.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/DefMath.java index f903c0571b2bd..01d627e3962a2 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/DefMath.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/DefMath.java @@ -21,7 +21,6 @@ import java.lang.invoke.MethodHandle; import java.lang.invoke.MethodHandles; -import java.lang.invoke.MethodHandles.Lookup; import java.lang.invoke.MethodType; import java.util.Collections; import java.util.HashMap; @@ -1070,7 +1069,7 @@ private static Class promote(Class a, Class b) { } } - private static final Lookup PRIV_LOOKUP = MethodHandles.lookup(); + private static final MethodHandles.Lookup PRIVATE_METHOD_HANDLES_LOOKUP = MethodHandles.lookup(); private static final Map,Map> TYPE_OP_MAPPING = Collections.unmodifiableMap( Stream.of(boolean.class, int.class, long.class, float.class, double.class, Object.class) @@ -1081,26 +1080,26 @@ private static Class promote(Class a, Class b) { MethodType binary = MethodType.methodType(type, type, type); MethodType comparison = MethodType.methodType(boolean.class, type, type); MethodType shift = MethodType.methodType(type, type, long.class); - Class clazz = PRIV_LOOKUP.lookupClass(); - map.put("not", PRIV_LOOKUP.findStatic(clazz, "not", unary)); - map.put("neg", PRIV_LOOKUP.findStatic(clazz, "neg", unary)); - map.put("plus", PRIV_LOOKUP.findStatic(clazz, "plus", unary)); - map.put("mul", 
PRIV_LOOKUP.findStatic(clazz, "mul", binary)); - map.put("div", PRIV_LOOKUP.findStatic(clazz, "div", binary)); - map.put("rem", PRIV_LOOKUP.findStatic(clazz, "rem", binary)); - map.put("add", PRIV_LOOKUP.findStatic(clazz, "add", binary)); - map.put("sub", PRIV_LOOKUP.findStatic(clazz, "sub", binary)); - map.put("and", PRIV_LOOKUP.findStatic(clazz, "and", binary)); - map.put("or", PRIV_LOOKUP.findStatic(clazz, "or", binary)); - map.put("xor", PRIV_LOOKUP.findStatic(clazz, "xor", binary)); - map.put("eq", PRIV_LOOKUP.findStatic(clazz, "eq", comparison)); - map.put("lt", PRIV_LOOKUP.findStatic(clazz, "lt", comparison)); - map.put("lte", PRIV_LOOKUP.findStatic(clazz, "lte", comparison)); - map.put("gt", PRIV_LOOKUP.findStatic(clazz, "gt", comparison)); - map.put("gte", PRIV_LOOKUP.findStatic(clazz, "gte", comparison)); - map.put("lsh", PRIV_LOOKUP.findStatic(clazz, "lsh", shift)); - map.put("rsh", PRIV_LOOKUP.findStatic(clazz, "rsh", shift)); - map.put("ush", PRIV_LOOKUP.findStatic(clazz, "ush", shift)); + Class clazz = PRIVATE_METHOD_HANDLES_LOOKUP.lookupClass(); + map.put("not", PRIVATE_METHOD_HANDLES_LOOKUP.findStatic(clazz, "not", unary)); + map.put("neg", PRIVATE_METHOD_HANDLES_LOOKUP.findStatic(clazz, "neg", unary)); + map.put("plus", PRIVATE_METHOD_HANDLES_LOOKUP.findStatic(clazz, "plus", unary)); + map.put("mul", PRIVATE_METHOD_HANDLES_LOOKUP.findStatic(clazz, "mul", binary)); + map.put("div", PRIVATE_METHOD_HANDLES_LOOKUP.findStatic(clazz, "div", binary)); + map.put("rem", PRIVATE_METHOD_HANDLES_LOOKUP.findStatic(clazz, "rem", binary)); + map.put("add", PRIVATE_METHOD_HANDLES_LOOKUP.findStatic(clazz, "add", binary)); + map.put("sub", PRIVATE_METHOD_HANDLES_LOOKUP.findStatic(clazz, "sub", binary)); + map.put("and", PRIVATE_METHOD_HANDLES_LOOKUP.findStatic(clazz, "and", binary)); + map.put("or", PRIVATE_METHOD_HANDLES_LOOKUP.findStatic(clazz, "or", binary)); + map.put("xor", PRIVATE_METHOD_HANDLES_LOOKUP.findStatic(clazz, "xor", binary)); + map.put("eq", PRIVATE_METHOD_HANDLES_LOOKUP.findStatic(clazz, "eq", comparison)); + map.put("lt", PRIVATE_METHOD_HANDLES_LOOKUP.findStatic(clazz, "lt", comparison)); + map.put("lte", PRIVATE_METHOD_HANDLES_LOOKUP.findStatic(clazz, "lte", comparison)); + map.put("gt", PRIVATE_METHOD_HANDLES_LOOKUP.findStatic(clazz, "gt", comparison)); + map.put("gte", PRIVATE_METHOD_HANDLES_LOOKUP.findStatic(clazz, "gte", comparison)); + map.put("lsh", PRIVATE_METHOD_HANDLES_LOOKUP.findStatic(clazz, "lsh", shift)); + map.put("rsh", PRIVATE_METHOD_HANDLES_LOOKUP.findStatic(clazz, "rsh", shift)); + map.put("ush", PRIVATE_METHOD_HANDLES_LOOKUP.findStatic(clazz, "ush", shift)); return map; } catch (ReflectiveOperationException e) { throw new AssertionError(e); @@ -1188,14 +1187,14 @@ static Number getNumber(Object o) { private static final MethodHandle DYNAMIC_CAST; private static final MethodHandle DYNAMIC_RECEIVER_CAST; static { - final Lookup lookup = MethodHandles.lookup(); + final MethodHandles.Lookup methodHandlesLookup = MethodHandles.lookup(); try { - DYNAMIC_CAST = lookup.findStatic(lookup.lookupClass(), - "dynamicCast", - MethodType.methodType(Object.class, Class.class, Object.class)); - DYNAMIC_RECEIVER_CAST = lookup.findStatic(lookup.lookupClass(), - "dynamicReceiverCast", - MethodType.methodType(Object.class, Object.class, Object.class)); + DYNAMIC_CAST = methodHandlesLookup.findStatic(methodHandlesLookup.lookupClass(), + "dynamicCast", + MethodType.methodType(Object.class, Class.class, Object.class)); + DYNAMIC_RECEIVER_CAST = 
methodHandlesLookup.findStatic(methodHandlesLookup.lookupClass(), + "dynamicReceiverCast", + MethodType.methodType(Object.class, Object.class, Object.class)); } catch (ReflectiveOperationException e) { throw new AssertionError(e); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/FunctionRef.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/FunctionRef.java index 1b438965538ce..6ab7292c7f65b 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/FunctionRef.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/FunctionRef.java @@ -19,7 +19,10 @@ package org.elasticsearch.painless; -import org.elasticsearch.painless.Definition.Method; +import org.elasticsearch.painless.lookup.PainlessClass; +import org.elasticsearch.painless.lookup.PainlessLookup; +import org.elasticsearch.painless.lookup.PainlessMethod; +import org.elasticsearch.painless.lookup.PainlessMethodKey; import org.objectweb.asm.Type; import java.lang.invoke.MethodType; @@ -55,9 +58,9 @@ public class FunctionRef { public final MethodType delegateMethodType; /** interface method */ - public final Method interfaceMethod; + public final PainlessMethod interfaceMethod; /** delegate method */ - public final Method delegateMethod; + public final PainlessMethod delegateMethod; /** factory method type descriptor */ public final String factoryDescriptor; @@ -71,15 +74,15 @@ public class FunctionRef { /** * Creates a new FunctionRef, which will resolve {@code type::call} from the whitelist. - * @param definition the whitelist against which this script is being compiled + * @param painlessLookup the whitelist against which this script is being compiled * @param expected functional interface type to implement. * @param type the left hand side of a method reference expression * @param call the right hand side of a method reference expression * @param numCaptures number of captured arguments */ - public FunctionRef(Definition definition, Class expected, String type, String call, int numCaptures) { - this(expected, definition.getPainlessStructFromJavaClass(expected).functionalMethod, - lookup(definition, expected, type, call, numCaptures > 0), numCaptures); + public FunctionRef(PainlessLookup painlessLookup, Class expected, String type, String call, int numCaptures) { + this(expected, painlessLookup.getPainlessStructFromJavaClass(expected).functionalMethod, + lookup(painlessLookup, expected, type, call, numCaptures > 0), numCaptures); } /** @@ -89,7 +92,7 @@ public FunctionRef(Definition definition, Class expected, String type, String * @param delegateMethod implementation method * @param numCaptures number of captured arguments */ - public FunctionRef(Class expected, Method interfaceMethod, Method delegateMethod, int numCaptures) { + public FunctionRef(Class expected, PainlessMethod interfaceMethod, PainlessMethod delegateMethod, int numCaptures) { MethodType delegateMethodType = delegateMethod.getMethodType(); interfaceMethodName = interfaceMethod.name; @@ -135,7 +138,7 @@ public FunctionRef(Class expected, Method interfaceMethod, Method delegateMet * It is for runtime use only. 
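The factory method type computed in the constructors above is just the delegate type with its trailing, non-captured parameters dropped. A runnable illustration of that MethodType arithmetic, using assumed example types (Function as the interface, one captured String):

    import java.lang.invoke.MethodType;
    import java.util.function.Function;

    public final class FactoryTypeExample {
        public static void main(String[] args) {
            // hypothetical delegate backing a Function<Integer, Integer>,
            // with one captured String in the leading position
            MethodType delegateMethodType = MethodType.methodType(Integer.class, String.class, Integer.class);
            int numCaptures = 1;
            // drop everything after the captures; the factory takes only the
            // captured arguments and returns the functional interface
            MethodType factoryMethodType = MethodType.methodType(Function.class,
                    delegateMethodType.dropParameterTypes(numCaptures, delegateMethodType.parameterCount()));
            System.out.println(factoryMethodType); // prints (String)Function
        }
    }

With zero captures the drop removes every parameter and the factory is a no-arg constructor for the interface instance, which is why the same expression covers both cases.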
*/ public FunctionRef(Class expected, - Method interfaceMethod, String delegateMethodName, MethodType delegateMethodType, int numCaptures) { + PainlessMethod interfaceMethod, String delegateMethodName, MethodType delegateMethodType, int numCaptures) { interfaceMethodName = interfaceMethod.name; factoryMethodType = MethodType.methodType(expected, delegateMethodType.dropParameterTypes(numCaptures, delegateMethodType.parameterCount())); @@ -158,25 +161,25 @@ public FunctionRef(Class expected, /** * Looks up {@code type::call} from the whitelist, and returns a matching method. */ - private static Definition.Method lookup(Definition definition, Class expected, - String type, String call, boolean receiverCaptured) { + private static PainlessMethod lookup(PainlessLookup painlessLookup, Class expected, + String type, String call, boolean receiverCaptured) { // check its really a functional interface // for e.g. Comparable - Method method = definition.getPainlessStructFromJavaClass(expected).functionalMethod; + PainlessMethod method = painlessLookup.getPainlessStructFromJavaClass(expected).functionalMethod; if (method == null) { throw new IllegalArgumentException("Cannot convert function reference [" + type + "::" + call + "] " + - "to [" + Definition.ClassToName(expected) + "], not a functional interface"); + "to [" + PainlessLookup.ClassToName(expected) + "], not a functional interface"); } // lookup requested method - Definition.Struct struct = definition.getPainlessStructFromJavaClass(definition.getJavaClassFromPainlessType(type)); - final Definition.Method impl; + PainlessClass struct = painlessLookup.getPainlessStructFromJavaClass(painlessLookup.getJavaClassFromPainlessType(type)); + final PainlessMethod impl; // ctor ref if ("new".equals(call)) { - impl = struct.constructors.get(new Definition.MethodKey("", method.arguments.size())); + impl = struct.constructors.get(new PainlessMethodKey("", method.arguments.size())); } else { // look for a static impl first - Definition.Method staticImpl = struct.staticMethods.get(new Definition.MethodKey(call, method.arguments.size())); + PainlessMethod staticImpl = struct.staticMethods.get(new PainlessMethodKey(call, method.arguments.size())); if (staticImpl == null) { // otherwise a virtual impl final int arity; @@ -187,7 +190,7 @@ private static Definition.Method lookup(Definition definition, Class expected // receiver passed arity = method.arguments.size() - 1; } - impl = struct.methods.get(new Definition.MethodKey(call, arity)); + impl = struct.methods.get(new PainlessMethodKey(call, arity)); } else { impl = staticImpl; } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Locals.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Locals.java index 2498396c891d1..bab1b7942d657 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Locals.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Locals.java @@ -19,8 +19,9 @@ package org.elasticsearch.painless; -import org.elasticsearch.painless.Definition.Method; -import org.elasticsearch.painless.Definition.MethodKey; +import org.elasticsearch.painless.lookup.PainlessLookup; +import org.elasticsearch.painless.lookup.PainlessMethod; +import org.elasticsearch.painless.lookup.PainlessMethodKey; import org.elasticsearch.painless.ScriptClassInfo.MethodArgument; import java.util.Arrays; @@ -59,7 +60,7 @@ public static Locals newLocalScope(Locals currentScope) { */ public static Locals newLambdaScope(Locals programScope, Class 
returnType, List parameters, int captureCount, int maxLoopCounter) { - Locals locals = new Locals(programScope, programScope.definition, returnType, KEYWORDS); + Locals locals = new Locals(programScope, programScope.painlessLookup, returnType, KEYWORDS); for (int i = 0; i < parameters.size(); i++) { Parameter parameter = parameters.get(i); // TODO: allow non-captures to be r/w: @@ -78,7 +79,7 @@ public static Locals newLambdaScope(Locals programScope, Class returnType, Li /** Creates a new function scope inside the current scope */ public static Locals newFunctionScope(Locals programScope, Class returnType, List parameters, int maxLoopCounter) { - Locals locals = new Locals(programScope, programScope.definition, returnType, KEYWORDS); + Locals locals = new Locals(programScope, programScope.painlessLookup, returnType, KEYWORDS); for (Parameter parameter : parameters) { locals.addVariable(parameter.location, parameter.clazz, parameter.name, false); } @@ -92,7 +93,7 @@ public static Locals newFunctionScope(Locals programScope, Class returnType, /** Creates a new main method scope */ public static Locals newMainMethodScope(ScriptClassInfo scriptClassInfo, Locals programScope, int maxLoopCounter) { Locals locals = new Locals( - programScope, programScope.definition, scriptClassInfo.getExecuteMethodReturnType(), KEYWORDS); + programScope, programScope.painlessLookup, scriptClassInfo.getExecuteMethodReturnType(), KEYWORDS); // This reference. Internal use only. locals.defineVariable(null, Object.class, THIS, true); @@ -109,9 +110,9 @@ public static Locals newMainMethodScope(ScriptClassInfo scriptClassInfo, Locals } /** Creates a new program scope: the list of methods. It is the parent for all methods */ - public static Locals newProgramScope(Definition definition, Collection methods) { - Locals locals = new Locals(null, definition, null, null); - for (Method method : methods) { + public static Locals newProgramScope(PainlessLookup painlessLookup, Collection methods) { + Locals locals = new Locals(null, painlessLookup, null, null); + for (PainlessMethod method : methods) { locals.addMethod(method); } return locals; @@ -142,8 +143,8 @@ public Variable getVariable(Location location, String name) { } /** Looks up a method. Returns null if the method does not exist. */ - public Method getMethod(MethodKey key) { - Method method = lookupMethod(key); + public PainlessMethod getMethod(PainlessMethodKey key) { + PainlessMethod method = lookupMethod(key); if (method != null) { return method; } @@ -179,14 +180,14 @@ public Locals getProgramScope() { } /** Whitelist against which this script is being compiled. */ - public Definition getDefinition() { - return definition; + public PainlessLookup getPainlessLookup() { + return painlessLookup; } ///// private impl /** Whitelist against which this script is being compiled. 
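Resolution through these Locals scopes is a straightforward parent chain. A sketch under that assumption, with hypothetical names; the real class additionally tracks return types, reserved keywords, and name/arity method keys:

    import java.util.HashMap;
    import java.util.Map;

    // Hypothetical sketch of the parent-chained resolution Locals implements: each
    // scope holds its own names and defers to its parent, so lambda, function, and
    // main-method scopes all bottom out in the program scope that owns the whitelist.
    final class Scope {
        private final Scope parent; // null for the program scope
        private final Map<String, Integer> slotsByName = new HashMap<>();

        Scope(Scope parent) { this.parent = parent; }

        void define(String name, int slot) { slotsByName.put(name, slot); }

        Integer resolve(String name) {
            Integer slot = slotsByName.get(name);
            if (slot != null) {
                return slot;
            }
            return parent == null ? null : parent.resolve(name);
        }
    }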
*/ - private final Definition definition; + private final PainlessLookup painlessLookup; // parent scope private final Locals parent; // return type of this scope @@ -198,21 +199,21 @@ public Definition getDefinition() { // variable name -> variable private Map variables; // method name+arity -> methods - private Map methods; + private Map methods; /** * Create a new Locals */ private Locals(Locals parent) { - this(parent, parent.definition, parent.returnType, parent.keywords); + this(parent, parent.painlessLookup, parent.returnType, parent.keywords); } /** * Create a new Locals with specified return type */ - private Locals(Locals parent, Definition definition, Class returnType, Set keywords) { + private Locals(Locals parent, PainlessLookup painlessLookup, Class returnType, Set keywords) { this.parent = parent; - this.definition = definition; + this.painlessLookup = painlessLookup; this.returnType = returnType; this.keywords = keywords; if (parent == null) { @@ -236,7 +237,7 @@ private Variable lookupVariable(Location location, String name) { } /** Looks up a method at this scope only. Returns null if the method does not exist. */ - private Method lookupMethod(MethodKey key) { + private PainlessMethod lookupMethod(PainlessMethodKey key) { if (methods == null) { return null; } @@ -255,11 +256,11 @@ private Variable defineVariable(Location location, Class type, String name, b return variable; } - private void addMethod(Method method) { + private void addMethod(PainlessMethod method) { if (methods == null) { methods = new HashMap<>(); } - methods.put(new MethodKey(method.name, method.arguments.size()), method); + methods.put(new PainlessMethodKey(method.name, method.arguments.size()), method); // TODO: check result } @@ -291,7 +292,7 @@ public int getSlot() { @Override public String toString() { StringBuilder b = new StringBuilder(); - b.append("Variable[type=").append(Definition.ClassToName(clazz)); + b.append("Variable[type=").append(PainlessLookup.ClassToName(clazz)); b.append(",name=").append(name); b.append(",slot=").append(slot); if (readonly) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/MethodWriter.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/MethodWriter.java index 5167f7d1434de..e0a780d418843 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/MethodWriter.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/MethodWriter.java @@ -19,8 +19,8 @@ package org.elasticsearch.painless; -import org.elasticsearch.painless.Definition.Cast; -import org.elasticsearch.painless.Definition.def; +import org.elasticsearch.painless.lookup.PainlessCast; +import org.elasticsearch.painless.lookup.PainlessLookup.def; import org.objectweb.asm.ClassVisitor; import org.objectweb.asm.Label; import org.objectweb.asm.Opcodes; @@ -130,7 +130,7 @@ public void writeLoopCounter(int slot, int count, Location location) { mark(end); } - public void writeCast(Cast cast) { + public void writeCast(PainlessCast cast) { if (cast != null) { if (cast.from == char.class && cast.to == String.class) { invokeStatic(UTILITY_TYPE, CHAR_TO_STRING); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExplainError.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExplainError.java index 7ae93eba22632..1236c4977e802 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExplainError.java +++ 
b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExplainError.java @@ -20,6 +20,8 @@ package org.elasticsearch.painless; import org.elasticsearch.painless.api.Debug; +import org.elasticsearch.painless.lookup.PainlessClass; +import org.elasticsearch.painless.lookup.PainlessLookup; import org.elasticsearch.script.ScriptException; import java.util.List; @@ -46,7 +48,7 @@ Object getObjectToExplain() { /** * Headers to be added to the {@link ScriptException} for structured rendering. */ - public Map> getHeaders(Definition definition) { + public Map> getHeaders(PainlessLookup painlessLookup) { Map> headers = new TreeMap<>(); String toString = "null"; String javaClassName = null; @@ -54,7 +56,7 @@ public Map> getHeaders(Definition definition) { if (objectToExplain != null) { toString = objectToExplain.toString(); javaClassName = objectToExplain.getClass().getName(); - Definition.Struct struct = definition.getPainlessStructFromJavaClass(objectToExplain.getClass()); + PainlessClass struct = painlessLookup.getPainlessStructFromJavaClass(objectToExplain.getClass()); if (struct != null) { painlessClassName = struct.name; } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScriptEngine.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScriptEngine.java index 339e58c763c78..ae1944c9bd3a9 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScriptEngine.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScriptEngine.java @@ -24,6 +24,7 @@ import org.elasticsearch.common.component.AbstractComponent; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.painless.Compiler.Loader; +import org.elasticsearch.painless.lookup.PainlessLookup; import org.elasticsearch.painless.spi.Whitelist; import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.ScriptContext; @@ -101,9 +102,9 @@ public PainlessScriptEngine(Settings settings, Map, List, List> entry : contexts.entrySet()) { ScriptContext context = entry.getKey(); if (context.instanceClazz.equals(SearchScript.class) || context.instanceClazz.equals(ExecutableScript.class)) { - contextsToCompilers.put(context, new Compiler(GenericElasticsearchScript.class, new Definition(entry.getValue()))); + contextsToCompilers.put(context, new Compiler(GenericElasticsearchScript.class, new PainlessLookup(entry.getValue()))); } else { - contextsToCompilers.put(context, new Compiler(context.instanceClazz, new Definition(entry.getValue()))); + contextsToCompilers.put(context, new Compiler(context.instanceClazz, new PainlessLookup(entry.getValue()))); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ScriptClassInfo.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ScriptClassInfo.java index 0ec806282db2f..df43dba7b3476 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ScriptClassInfo.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ScriptClassInfo.java @@ -19,6 +19,8 @@ package org.elasticsearch.painless; +import org.elasticsearch.painless.lookup.PainlessLookup; + import java.lang.invoke.MethodType; import java.lang.reflect.Field; import java.lang.reflect.Modifier; @@ -42,7 +44,7 @@ public class ScriptClassInfo { private final List getMethods; private final List> getReturns; - public ScriptClassInfo(Definition definition, Class baseClass) { + public ScriptClassInfo(PainlessLookup 
painlessLookup, Class baseClass) { this.baseClass = baseClass; // Find the main method and the uses$argName methods @@ -68,8 +70,9 @@ public ScriptClassInfo(Definition definition, Class baseClass) { } if (m.getName().startsWith("get") && m.getName().equals("getClass") == false && Modifier.isStatic(m.getModifiers()) == false) { getReturns.add( - definitionTypeForClass(definition, m.getReturnType(), componentType -> "[" + m.getName() + "] has unknown return type [" - + componentType.getName() + "]. Painless can only support getters with return types that are whitelisted.")); + definitionTypeForClass(painlessLookup, m.getReturnType(), componentType -> "[" + m.getName() + "] has unknown return " + + "type [" + componentType.getName() + "]. Painless can only support getters with return types that are " + + "whitelisted.")); getMethods.add(new org.objectweb.asm.commons.Method(m.getName(), MethodType.methodType(m.getReturnType()).toMethodDescriptorString())); @@ -78,7 +81,7 @@ public ScriptClassInfo(Definition definition, Class baseClass) { } MethodType methodType = MethodType.methodType(executeMethod.getReturnType(), executeMethod.getParameterTypes()); this.executeMethod = new org.objectweb.asm.commons.Method(executeMethod.getName(), methodType.toMethodDescriptorString()); - executeMethodReturnType = definitionTypeForClass(definition, executeMethod.getReturnType(), + executeMethodReturnType = definitionTypeForClass(painlessLookup, executeMethod.getReturnType(), componentType -> "Painless can only implement execute methods returning a whitelisted type but [" + baseClass.getName() + "#execute] returns [" + componentType.getName() + "] which isn't whitelisted."); @@ -91,7 +94,7 @@ public ScriptClassInfo(Definition definition, Class baseClass) { + baseClass.getName() + "#execute] takes [1] argument."); } for (int arg = 0; arg < types.length; arg++) { - arguments.add(methodArgument(definition, types[arg], argumentNamesConstant[arg])); + arguments.add(methodArgument(painlessLookup, types[arg], argumentNamesConstant[arg])); } this.executeArguments = unmodifiableList(arguments); this.needsMethods = unmodifiableList(needsMethods); @@ -171,22 +174,22 @@ public String getName() { } } - private MethodArgument methodArgument(Definition definition, Class clazz, String argName) { - Class defClass = definitionTypeForClass(definition, clazz, componentType -> "[" + argName + "] is of unknown type [" + private MethodArgument methodArgument(PainlessLookup painlessLookup, Class clazz, String argName) { + Class defClass = definitionTypeForClass(painlessLookup, clazz, componentType -> "[" + argName + "] is of unknown type [" + componentType.getName() + ". 
Painless interfaces can only accept arguments that are of whitelisted types."); return new MethodArgument(defClass, argName); } - private static Class definitionTypeForClass(Definition definition, Class type, - Function, String> unknownErrorMessageSource) { - type = Definition.ObjectClassTodefClass(type); + private static Class definitionTypeForClass(PainlessLookup painlessLookup, Class type, + Function, String> unknownErrorMessageSource) { + type = PainlessLookup.ObjectClassTodefClass(type); Class componentType = type; while (componentType.isArray()) { componentType = componentType.getComponentType(); } - if (definition.getPainlessStructFromJavaClass(componentType) == null) { + if (painlessLookup.getPainlessStructFromJavaClass(componentType) == null) { throw new IllegalArgumentException(unknownErrorMessageSource.apply(componentType)); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/WriterConstants.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/WriterConstants.java index 18d7d94492e67..db3aeff0483f6 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/WriterConstants.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/WriterConstants.java @@ -20,6 +20,7 @@ package org.elasticsearch.painless; import org.elasticsearch.painless.api.Augmentation; +import org.elasticsearch.painless.lookup.PainlessLookup; import org.elasticsearch.script.ScriptException; import org.objectweb.asm.Handle; import org.objectweb.asm.Opcodes; @@ -74,12 +75,12 @@ public final class WriterConstants { public static final Type STACK_OVERFLOW_ERROR_TYPE = Type.getType(StackOverflowError.class); public static final Type EXCEPTION_TYPE = Type.getType(Exception.class); public static final Type PAINLESS_EXPLAIN_ERROR_TYPE = Type.getType(PainlessExplainError.class); - public static final Method PAINLESS_EXPLAIN_ERROR_GET_HEADERS_METHOD = getAsmMethod(Map.class, "getHeaders", Definition.class); + public static final Method PAINLESS_EXPLAIN_ERROR_GET_HEADERS_METHOD = getAsmMethod(Map.class, "getHeaders", PainlessLookup.class); public static final Type OBJECT_TYPE = Type.getType(Object.class); public static final Type BITSET_TYPE = Type.getType(BitSet.class); - public static final Type DEFINITION_TYPE = Type.getType(Definition.class); + public static final Type DEFINITION_TYPE = Type.getType(PainlessLookup.class); public static final Type COLLECTIONS_TYPE = Type.getType(Collections.class); public static final Method EMPTY_MAP_METHOD = getAsmMethod(Map.class, "emptyMap"); @@ -103,10 +104,10 @@ public final class WriterConstants { public static final Type AUGMENTATION_TYPE = Type.getType(Augmentation.class); /** - * A Method instance for {@linkplain Pattern#compile}. This isn't available from Definition because we intentionally don't add it there - * so that the script can't create regexes without this syntax. Essentially, our static regex syntax has a monopoly on building regexes - * because it can do it statically. This is both faster and prevents the script from doing something super slow like building a regex - * per time it is run. + * A Method instance for {@linkplain Pattern#compile}. This isn't available from PainlessLookup because we intentionally don't add it + * there so that the script can't create regexes without this syntax. Essentially, our static regex syntax has a monopoly on building + * regexes because it can do it statically. 
This is both faster and prevents the script from doing something super slow like building a + * regex per time it is run. */ public static final Method PATTERN_COMPILE = getAsmMethod(Pattern.class, "compile", String.class, int.class); public static final Method PATTERN_MATCHER = getAsmMethod(Matcher.class, "matcher", CharSequence.class); @@ -118,7 +119,7 @@ public final class WriterConstants { static final Handle DEF_BOOTSTRAP_HANDLE = new Handle(Opcodes.H_INVOKESTATIC, CLASS_TYPE.getInternalName(), "$bootstrapDef", DEF_BOOTSTRAP_METHOD.getDescriptor(), false); public static final Type DEF_BOOTSTRAP_DELEGATE_TYPE = Type.getType(DefBootstrap.class); - public static final Method DEF_BOOTSTRAP_DELEGATE_METHOD = getAsmMethod(CallSite.class, "bootstrap", Definition.class, + public static final Method DEF_BOOTSTRAP_DELEGATE_METHOD = getAsmMethod(CallSite.class, "bootstrap", PainlessLookup.class, MethodHandles.Lookup.class, String.class, MethodType.class, int.class, int.class, Object[].class); public static final Type DEF_UTIL_TYPE = Type.getType(Def.class); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/EnhancedPainlessLexer.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/EnhancedPainlessLexer.java index add3aaabe51e0..e6c5da057980f 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/EnhancedPainlessLexer.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/EnhancedPainlessLexer.java @@ -23,7 +23,7 @@ import org.antlr.v4.runtime.LexerNoViableAltException; import org.antlr.v4.runtime.Token; import org.antlr.v4.runtime.misc.Interval; -import org.elasticsearch.painless.Definition; +import org.elasticsearch.painless.lookup.PainlessLookup; import org.elasticsearch.painless.Location; /** @@ -39,14 +39,14 @@ */ final class EnhancedPainlessLexer extends PainlessLexer { private final String sourceName; - private final Definition definition; + private final PainlessLookup painlessLookup; private Token current = null; - EnhancedPainlessLexer(CharStream charStream, String sourceName, Definition definition) { + EnhancedPainlessLexer(CharStream charStream, String sourceName, PainlessLookup painlessLookup) { super(charStream); this.sourceName = sourceName; - this.definition = definition; + this.painlessLookup = painlessLookup; } @Override @@ -75,7 +75,7 @@ public void recover(final LexerNoViableAltException lnvae) { @Override protected boolean isType(String name) { - return definition.isSimplePainlessType(name); + return painlessLookup.isSimplePainlessType(name); } @Override diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/Walker.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/Walker.java index a481c99a99d12..e2742ffb9936d 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/Walker.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/Walker.java @@ -29,7 +29,7 @@ import org.antlr.v4.runtime.atn.PredictionMode; import org.antlr.v4.runtime.tree.TerminalNode; import org.elasticsearch.painless.CompilerSettings; -import org.elasticsearch.painless.Definition; +import org.elasticsearch.painless.lookup.PainlessLookup; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.Operation; @@ -174,9 +174,9 @@ public final class Walker extends PainlessParserBaseVisitor { public static SSource buildPainlessTree(ScriptClassInfo 
mainMethod, MainMethodReserved reserved, String sourceName, - String sourceText, CompilerSettings settings, Definition definition, + String sourceText, CompilerSettings settings, PainlessLookup painlessLookup, Printer debugStream) { - return new Walker(mainMethod, reserved, sourceName, sourceText, settings, definition, debugStream).source; + return new Walker(mainMethod, reserved, sourceName, sourceText, settings, painlessLookup, debugStream).source; } private final ScriptClassInfo scriptClassInfo; @@ -185,14 +185,14 @@ public static SSource buildPainlessTree(ScriptClassInfo mainMethod, MainMethodRe private final Printer debugStream; private final String sourceName; private final String sourceText; - private final Definition definition; + private final PainlessLookup painlessLookup; private final Deque reserved = new ArrayDeque<>(); private final Globals globals; private int syntheticCounter = 0; private Walker(ScriptClassInfo scriptClassInfo, MainMethodReserved reserved, String sourceName, String sourceText, - CompilerSettings settings, Definition definition, Printer debugStream) { + CompilerSettings settings, PainlessLookup painlessLookup, Printer debugStream) { this.scriptClassInfo = scriptClassInfo; this.reserved.push(reserved); this.debugStream = debugStream; @@ -200,13 +200,13 @@ private Walker(ScriptClassInfo scriptClassInfo, MainMethodReserved reserved, Str this.sourceName = Location.computeSourceName(sourceName); this.sourceText = sourceText; this.globals = new Globals(new BitSet(sourceText.length())); - this.definition = definition; + this.painlessLookup = painlessLookup; this.source = (SSource)visit(buildAntlrTree(sourceText)); } private SourceContext buildAntlrTree(String source) { ANTLRInputStream stream = new ANTLRInputStream(source); - PainlessLexer lexer = new EnhancedPainlessLexer(stream, sourceName, definition); + PainlessLexer lexer = new EnhancedPainlessLexer(stream, sourceName, painlessLookup); PainlessParser parser = new PainlessParser(new CommonTokenStream(lexer)); ParserErrorStrategy strategy = new ParserErrorStrategy(sourceName); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessCast.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessCast.java new file mode 100644 index 0000000000000..2440fb45d4dfb --- /dev/null +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessCast.java @@ -0,0 +1,67 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.painless.lookup; + +public class PainlessCast { + + /** Create a standard cast with no boxing/unboxing. 
*/ + public static PainlessCast standard(Class from, Class to, boolean explicit) { + return new PainlessCast(from, to, explicit, null, null, null, null); + } + + /** Create a cast where the from type will be unboxed, and then the cast will be performed. */ + public static PainlessCast unboxFrom(Class from, Class to, boolean explicit, Class unboxFrom) { + return new PainlessCast(from, to, explicit, unboxFrom, null, null, null); + } + + /** Create a cast where the to type will be unboxed, and then the cast will be performed. */ + public static PainlessCast unboxTo(Class from, Class to, boolean explicit, Class unboxTo) { + return new PainlessCast(from, to, explicit, null, unboxTo, null, null); + } + + /** Create a cast where the from type will be boxed, and then the cast will be performed. */ + public static PainlessCast boxFrom(Class from, Class to, boolean explicit, Class boxFrom) { + return new PainlessCast(from, to, explicit, null, null, boxFrom, null); + } + + /** Create a cast where the to type will be boxed, and then the cast will be performed. */ + public static PainlessCast boxTo(Class from, Class to, boolean explicit, Class boxTo) { + return new PainlessCast(from, to, explicit, null, null, null, boxTo); + } + + public final Class from; + public final Class to; + public final boolean explicit; + public final Class unboxFrom; + public final Class unboxTo; + public final Class boxFrom; + public final Class boxTo; + + private PainlessCast(Class from, Class to, boolean explicit, + Class unboxFrom, Class unboxTo, Class boxFrom, Class boxTo) { + this.from = from; + this.to = to; + this.explicit = explicit; + this.unboxFrom = unboxFrom; + this.unboxTo = unboxTo; + this.boxFrom = boxFrom; + this.boxTo = boxTo; + } +} diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClass.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClass.java new file mode 100644 index 0000000000000..7d84899b00e58 --- /dev/null +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClass.java @@ -0,0 +1,103 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
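The five factory methods above are the only way to build a PainlessCast, one per cast shape the compiler distinguishes (plain, unbox-from, unbox-to, box-from, box-to). A minimal sketch of how they might be instantiated; the from/to pairs are illustrative examples, not the actual entries AnalyzerCaster registers:

    // Illustrative cast shapes (not taken from AnalyzerCaster):
    PainlessCast widen  = PainlessCast.standard(int.class, long.class, false);   // implicit primitive widening
    PainlessCast narrow = PainlessCast.standard(long.class, int.class, true);    // legal only with an explicit (int) cast
    PainlessCast unbox  = PainlessCast.unboxFrom(Integer.class, long.class, false, Integer.class); // Integer -> int -> long
    PainlessCast box    = PainlessCast.boxTo(int.class, Integer.class, false, Integer.class);      // int -> Integer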
+ */ + +package org.elasticsearch.painless.lookup; + +import java.lang.invoke.MethodHandle; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +public final class PainlessClass { + public final String name; + public final Class clazz; + public final org.objectweb.asm.Type type; + + public final Map constructors; + public final Map staticMethods; + public final Map methods; + + public final Map staticMembers; + public final Map members; + + public final Map getters; + public final Map setters; + + public final PainlessMethod functionalMethod; + + PainlessClass(String name, Class clazz, org.objectweb.asm.Type type) { + this.name = name; + this.clazz = clazz; + this.type = type; + + constructors = new HashMap<>(); + staticMethods = new HashMap<>(); + methods = new HashMap<>(); + + staticMembers = new HashMap<>(); + members = new HashMap<>(); + + getters = new HashMap<>(); + setters = new HashMap<>(); + + functionalMethod = null; + } + + private PainlessClass(PainlessClass struct, PainlessMethod functionalMethod) { + name = struct.name; + clazz = struct.clazz; + type = struct.type; + + constructors = Collections.unmodifiableMap(struct.constructors); + staticMethods = Collections.unmodifiableMap(struct.staticMethods); + methods = Collections.unmodifiableMap(struct.methods); + + staticMembers = Collections.unmodifiableMap(struct.staticMembers); + members = Collections.unmodifiableMap(struct.members); + + getters = Collections.unmodifiableMap(struct.getters); + setters = Collections.unmodifiableMap(struct.setters); + + this.functionalMethod = functionalMethod; + } + + public PainlessClass freeze(PainlessMethod functionalMethod) { + return new PainlessClass(this, functionalMethod); + } + + @Override + public boolean equals(Object object) { + if (this == object) { + return true; + } + + if (object == null || getClass() != object.getClass()) { + return false; + } + + PainlessClass struct = (PainlessClass)object; + + return name.equals(struct.name); + } + + @Override + public int hashCode() { + return name.hashCode(); + } +} diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessField.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessField.java new file mode 100644 index 0000000000000..7c85bd269b461 --- /dev/null +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessField.java @@ -0,0 +1,43 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
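PainlessClass above is deliberately two-phase: the package-private constructor leaves its maps mutable so the whitelist loader can populate them, and freeze then wraps everything in unmodifiable views. A sketch of that lifecycle, assuming package-local code and a sizeMethod value built elsewhere:

    // Two-phase lifecycle (the constructor is package-private, so this runs inside the lookup package):
    PainlessClass struct = new PainlessClass("ArrayList", java.util.ArrayList.class,
            org.objectweb.asm.Type.getType(java.util.ArrayList.class));
    struct.methods.put(new PainlessMethodKey("size", 0), sizeMethod); // mutable during whitelist loading
    PainlessClass frozen = struct.freeze(null);                       // maps become unmodifiable views
    // frozen.methods.put(...) would now throw UnsupportedOperationException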
+ */ + +package org.elasticsearch.painless.lookup; + +import java.lang.invoke.MethodHandle; + +public final class PainlessField { + public final String name; + public final PainlessClass owner; + public final Class clazz; + public final String javaName; + public final int modifiers; + public final MethodHandle getter; + public final MethodHandle setter; + + PainlessField(String name, String javaName, PainlessClass owner, Class clazz, int modifiers, + MethodHandle getter, MethodHandle setter) { + this.name = name; + this.javaName = javaName; + this.owner = owner; + this.clazz = clazz; + this.modifiers = modifiers; + this.getter = getter; + this.setter = setter; + } +} diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Definition.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java similarity index 71% rename from modules/lang-painless/src/main/java/org/elasticsearch/painless/Definition.java rename to modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java index 25145a44b5853..5833767fbd3d2 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Definition.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java @@ -17,15 +17,17 @@ * under the License. */ -package org.elasticsearch.painless; +package org.elasticsearch.painless.lookup; import org.elasticsearch.painless.spi.Whitelist; -import org.objectweb.asm.Opcodes; +import org.elasticsearch.painless.spi.WhitelistClass; +import org.elasticsearch.painless.spi.WhitelistConstructor; +import org.elasticsearch.painless.spi.WhitelistField; +import org.elasticsearch.painless.spi.WhitelistMethod; import org.objectweb.asm.Type; import java.lang.invoke.MethodHandle; import java.lang.invoke.MethodHandles; -import java.lang.invoke.MethodType; import java.lang.reflect.Modifier; import java.util.ArrayList; import java.util.Arrays; @@ -34,7 +36,6 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.Objects; import java.util.Stack; import java.util.regex.Pattern; @@ -42,10 +43,10 @@ * The entire API for Painless. Also used as a whitelist for checking for legal * methods and fields during at both compile-time and runtime. */ -public final class Definition { +public final class PainlessLookup { - private static final Map methodCache = new HashMap<>(); - private static final Map fieldCache = new HashMap<>(); + private static final Map methodCache = new HashMap<>(); + private static final Map fieldCache = new HashMap<>(); private static final Pattern TYPE_NAME_PATTERN = Pattern.compile("^[_a-zA-Z][._a-zA-Z0-9]*$"); @@ -56,306 +57,6 @@ private def() { } } - public static class Method { - public final String name; - public final Struct owner; - public final Class augmentation; - public final Class rtn; - public final List> arguments; - public final org.objectweb.asm.commons.Method method; - public final int modifiers; - public final MethodHandle handle; - - public Method(String name, Struct owner, Class augmentation, Class rtn, List> arguments, - org.objectweb.asm.commons.Method method, int modifiers, MethodHandle handle) { - this.name = name; - this.augmentation = augmentation; - this.owner = owner; - this.rtn = rtn; - this.arguments = Collections.unmodifiableList(arguments); - this.method = method; - this.modifiers = modifiers; - this.handle = handle; - } - - /** - * Returns MethodType for this method. - *

    - * This works even for user-defined Methods (where the MethodHandle is null). - */ - public MethodType getMethodType() { - // we have a methodhandle already (e.g. whitelisted class) - // just return its type - if (handle != null) { - return handle.type(); - } - // otherwise compute it - final Class params[]; - final Class returnValue; - if (augmentation != null) { - // static method disguised as virtual/interface method - params = new Class[1 + arguments.size()]; - params[0] = augmentation; - for (int i = 0; i < arguments.size(); i++) { - params[i + 1] = defClassToObjectClass(arguments.get(i)); - } - returnValue = defClassToObjectClass(rtn); - } else if (Modifier.isStatic(modifiers)) { - // static method: straightforward copy - params = new Class[arguments.size()]; - for (int i = 0; i < arguments.size(); i++) { - params[i] = defClassToObjectClass(arguments.get(i)); - } - returnValue = defClassToObjectClass(rtn); - } else if ("".equals(name)) { - // constructor: returns the owner class - params = new Class[arguments.size()]; - for (int i = 0; i < arguments.size(); i++) { - params[i] = defClassToObjectClass(arguments.get(i)); - } - returnValue = owner.clazz; - } else { - // virtual/interface method: add receiver class - params = new Class[1 + arguments.size()]; - params[0] = owner.clazz; - for (int i = 0; i < arguments.size(); i++) { - params[i + 1] = defClassToObjectClass(arguments.get(i)); - } - returnValue = defClassToObjectClass(rtn); - } - return MethodType.methodType(returnValue, params); - } - - public void write(MethodWriter writer) { - final org.objectweb.asm.Type type; - final Class clazz; - if (augmentation != null) { - assert java.lang.reflect.Modifier.isStatic(modifiers); - clazz = augmentation; - type = org.objectweb.asm.Type.getType(augmentation); - } else { - clazz = owner.clazz; - type = owner.type; - } - - if (java.lang.reflect.Modifier.isStatic(modifiers)) { - // invokeStatic assumes that the owner class is not an interface, so this is a - // special case for interfaces where the interface method boolean needs to be set to - // true to reference the appropriate class constant when calling a static interface - // method since java 8 did not check, but java 9 and 10 do - if (java.lang.reflect.Modifier.isInterface(clazz.getModifiers())) { - writer.visitMethodInsn(Opcodes.INVOKESTATIC, - type.getInternalName(), name, getMethodType().toMethodDescriptorString(), true); - } else { - writer.invokeStatic(type, method); - } - } else if (java.lang.reflect.Modifier.isInterface(clazz.getModifiers())) { - writer.invokeInterface(type, method); - } else { - writer.invokeVirtual(type, method); - } - } - } - - public static final class Field { - public final String name; - public final Struct owner; - public final Class clazz; - public final String javaName; - public final int modifiers; - private final MethodHandle getter; - private final MethodHandle setter; - - private Field(String name, String javaName, Struct owner, Class clazz, int modifiers, MethodHandle getter, MethodHandle setter) { - this.name = name; - this.javaName = javaName; - this.owner = owner; - this.clazz = clazz; - this.modifiers = modifiers; - this.getter = getter; - this.setter = setter; - } - } - - // TODO: instead of hashing on this, we could have a 'next' pointer in Method itself, but it would make code more complex - // please do *NOT* under any circumstances change this to be the crappy Tuple from elasticsearch! - /** - * Key for looking up a method. - *

    - * Methods are keyed on both name and arity, and can be overloaded once per arity. - * This allows signatures such as {@code String.indexOf(String) vs String.indexOf(String, int)}. - *

    - * It is less flexible than full signature overloading where types can differ too, but - * better than just the name, and overloading types adds complexity to users, too. - */ - public static final class MethodKey { - public final String name; - public final int arity; - - /** - * Create a new lookup key - * @param name name of the method - * @param arity number of parameters - */ - public MethodKey(String name, int arity) { - this.name = Objects.requireNonNull(name); - this.arity = arity; - } - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + arity; - result = prime * result + name.hashCode(); - return result; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) return true; - if (obj == null) return false; - if (getClass() != obj.getClass()) return false; - MethodKey other = (MethodKey) obj; - if (arity != other.arity) return false; - if (!name.equals(other.name)) return false; - return true; - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append(name); - sb.append('/'); - sb.append(arity); - return sb.toString(); - } - } - - public static final class Struct { - public final String name; - public final Class clazz; - public final org.objectweb.asm.Type type; - - public final Map constructors; - public final Map staticMethods; - public final Map methods; - - public final Map staticMembers; - public final Map members; - - public final Map getters; - public final Map setters; - - public final Method functionalMethod; - - private Struct(String name, Class clazz, org.objectweb.asm.Type type) { - this.name = name; - this.clazz = clazz; - this.type = type; - - constructors = new HashMap<>(); - staticMethods = new HashMap<>(); - methods = new HashMap<>(); - - staticMembers = new HashMap<>(); - members = new HashMap<>(); - - getters = new HashMap<>(); - setters = new HashMap<>(); - - functionalMethod = null; - } - - private Struct(Struct struct, Method functionalMethod) { - name = struct.name; - clazz = struct.clazz; - type = struct.type; - - constructors = Collections.unmodifiableMap(struct.constructors); - staticMethods = Collections.unmodifiableMap(struct.staticMethods); - methods = Collections.unmodifiableMap(struct.methods); - - staticMembers = Collections.unmodifiableMap(struct.staticMembers); - members = Collections.unmodifiableMap(struct.members); - - getters = Collections.unmodifiableMap(struct.getters); - setters = Collections.unmodifiableMap(struct.setters); - - this.functionalMethod = functionalMethod; - } - - private Struct freeze(Method functionalMethod) { - return new Struct(this, functionalMethod); - } - - @Override - public boolean equals(Object object) { - if (this == object) { - return true; - } - - if (object == null || getClass() != object.getClass()) { - return false; - } - - Struct struct = (Struct)object; - - return name.equals(struct.name); - } - - @Override - public int hashCode() { - return name.hashCode(); - } - } - - public static class Cast { - - /** Create a standard cast with no boxing/unboxing. */ - public static Cast standard(Class from, Class to, boolean explicit) { - return new Cast(from, to, explicit, null, null, null, null); - } - - /** Create a cast where the from type will be unboxed, and then the cast will be performed. 
*/ - public static Cast unboxFrom(Class from, Class to, boolean explicit, Class unboxFrom) { - return new Cast(from, to, explicit, unboxFrom, null, null, null); - } - - /** Create a cast where the to type will be unboxed, and then the cast will be performed. */ - public static Cast unboxTo(Class from, Class to, boolean explicit, Class unboxTo) { - return new Cast(from, to, explicit, null, unboxTo, null, null); - } - - /** Create a cast where the from type will be boxed, and then the cast will be performed. */ - public static Cast boxFrom(Class from, Class to, boolean explicit, Class boxFrom) { - return new Cast(from, to, explicit, null, null, boxFrom, null); - } - - /** Create a cast where the to type will be boxed, and then the cast will be performed. */ - public static Cast boxTo(Class from, Class to, boolean explicit, Class boxTo) { - return new Cast(from, to, explicit, null, null, null, boxTo); - } - - public final Class from; - public final Class to; - public final boolean explicit; - public final Class unboxFrom; - public final Class unboxTo; - public final Class boxFrom; - public final Class boxTo; - - private Cast(Class from, Class to, boolean explicit, Class unboxFrom, Class unboxTo, Class boxFrom, Class boxTo) { - this.from = from; - this.to = to; - this.explicit = explicit; - this.unboxFrom = unboxFrom; - this.unboxTo = unboxTo; - this.boxFrom = boxFrom; - this.boxTo = boxTo; - } - } - public static Class getBoxedType(Class clazz) { if (clazz == boolean.class) { return Boolean.class; @@ -520,29 +221,29 @@ private static String buildFieldCacheKey(String structName, String fieldName, St return structName + fieldName + typeName; } - public Collection getStructs() { + public Collection getStructs() { return javaClassesToPainlessStructs.values(); } private final Map> painlessTypesToJavaClasses; - private final Map, Struct> javaClassesToPainlessStructs; + private final Map, PainlessClass> javaClassesToPainlessStructs; - public Definition(List whitelists) { + public PainlessLookup(List whitelists) { painlessTypesToJavaClasses = new HashMap<>(); javaClassesToPainlessStructs = new HashMap<>(); String origin = null; painlessTypesToJavaClasses.put("def", def.class); - javaClassesToPainlessStructs.put(def.class, new Struct("def", Object.class, Type.getType(Object.class))); + javaClassesToPainlessStructs.put(def.class, new PainlessClass("def", Object.class, Type.getType(Object.class))); try { // first iteration collects all the Painless type names that // are used for validation during the second iteration for (Whitelist whitelist : whitelists) { - for (Whitelist.Struct whitelistStruct : whitelist.whitelistStructs) { + for (WhitelistClass whitelistStruct : whitelist.whitelistStructs) { String painlessTypeName = whitelistStruct.javaClassName.replace('$', '.'); - Struct painlessStruct = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(painlessTypeName)); + PainlessClass painlessStruct = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(painlessTypeName)); if (painlessStruct != null && painlessStruct.clazz.getName().equals(whitelistStruct.javaClassName) == false) { throw new IllegalArgumentException("struct [" + painlessStruct.name + "] cannot represent multiple classes " + @@ -561,20 +262,20 @@ public Definition(List whitelists) { // be available in Painless along with validating they exist and all their types have // been white-listed during the first iteration for (Whitelist whitelist : whitelists) { - for (Whitelist.Struct whitelistStruct : 
whitelist.whitelistStructs) { + for (WhitelistClass whitelistStruct : whitelist.whitelistStructs) { String painlessTypeName = whitelistStruct.javaClassName.replace('$', '.'); - for (Whitelist.Constructor whitelistConstructor : whitelistStruct.whitelistConstructors) { + for (WhitelistConstructor whitelistConstructor : whitelistStruct.whitelistConstructors) { origin = whitelistConstructor.origin; addConstructor(painlessTypeName, whitelistConstructor); } - for (Whitelist.Method whitelistMethod : whitelistStruct.whitelistMethods) { + for (WhitelistMethod whitelistMethod : whitelistStruct.whitelistMethods) { origin = whitelistMethod.origin; addMethod(whitelist.javaClassLoader, painlessTypeName, whitelistMethod); } - for (Whitelist.Field whitelistField : whitelistStruct.whitelistFields) { + for (WhitelistField whitelistField : whitelistStruct.whitelistFields) { origin = whitelistField.origin; addField(painlessTypeName, whitelistField); } @@ -587,7 +288,7 @@ public Definition(List whitelists) { // goes through each Painless struct and determines the inheritance list, // and then adds all inherited types to the Painless struct's whitelist for (Class javaClass : javaClassesToPainlessStructs.keySet()) { - Struct painlessStruct = javaClassesToPainlessStructs.get(javaClass); + PainlessClass painlessStruct = javaClassesToPainlessStructs.get(javaClass); List painlessSuperStructs = new ArrayList<>(); Class javaSuperClass = painlessStruct.clazz.getSuperclass(); @@ -598,7 +299,7 @@ public Definition(List whitelists) { // adds super classes to the inheritance list if (javaSuperClass != null && javaSuperClass.isInterface() == false) { while (javaSuperClass != null) { - Struct painlessSuperStruct = javaClassesToPainlessStructs.get(javaSuperClass); + PainlessClass painlessSuperStruct = javaClassesToPainlessStructs.get(javaSuperClass); if (painlessSuperStruct != null) { painlessSuperStructs.add(painlessSuperStruct.name); @@ -614,7 +315,7 @@ public Definition(List whitelists) { Class javaInterfaceLookup = javaInteraceLookups.pop(); for (Class javaSuperInterface : javaInterfaceLookup.getInterfaces()) { - Struct painlessInterfaceStruct = javaClassesToPainlessStructs.get(javaSuperInterface); + PainlessClass painlessInterfaceStruct = javaClassesToPainlessStructs.get(javaSuperInterface); if (painlessInterfaceStruct != null) { String painlessInterfaceStructName = painlessInterfaceStruct.name; @@ -635,7 +336,7 @@ public Definition(List whitelists) { // copies methods and fields from Object into interface types if (painlessStruct.clazz.isInterface() || (def.class.getSimpleName()).equals(painlessStruct.name)) { - Struct painlessObjectStruct = javaClassesToPainlessStructs.get(Object.class); + PainlessClass painlessObjectStruct = javaClassesToPainlessStructs.get(Object.class); if (painlessObjectStruct != null) { copyStruct(painlessStruct.name, Collections.singletonList(painlessObjectStruct.name)); @@ -644,17 +345,17 @@ public Definition(List whitelists) { } // precompute runtime classes - for (Struct painlessStruct : javaClassesToPainlessStructs.values()) { + for (PainlessClass painlessStruct : javaClassesToPainlessStructs.values()) { addRuntimeClass(painlessStruct); } // copy all structs to make them unmodifiable for outside users: - for (Map.Entry,Struct> entry : javaClassesToPainlessStructs.entrySet()) { + for (Map.Entry,PainlessClass> entry : javaClassesToPainlessStructs.entrySet()) { entry.setValue(entry.getValue().freeze(computeFunctionalInterfaceMethod(entry.getValue()))); } } - private void addStruct(ClassLoader 
whitelistClassLoader, Whitelist.Struct whitelistStruct) { + private void addStruct(ClassLoader whitelistClassLoader, WhitelistClass whitelistStruct) { String painlessTypeName = whitelistStruct.javaClassName.replace('$', '.'); String importedPainlessTypeName = painlessTypeName; @@ -688,10 +389,10 @@ private void addStruct(ClassLoader whitelistClassLoader, Whitelist.Struct whitel } } - Struct existingStruct = javaClassesToPainlessStructs.get(javaClass); + PainlessClass existingStruct = javaClassesToPainlessStructs.get(javaClass); if (existingStruct == null) { - Struct struct = new Struct(painlessTypeName, javaClass, org.objectweb.asm.Type.getType(javaClass)); + PainlessClass struct = new PainlessClass(painlessTypeName, javaClass, org.objectweb.asm.Type.getType(javaClass)); painlessTypesToJavaClasses.put(painlessTypeName, javaClass); javaClassesToPainlessStructs.put(javaClass, struct); } else if (existingStruct.clazz.equals(javaClass) == false) { @@ -725,8 +426,8 @@ private void addStruct(ClassLoader whitelistClassLoader, Whitelist.Struct whitel } } - private void addConstructor(String ownerStructName, Whitelist.Constructor whitelistConstructor) { - Struct ownerStruct = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(ownerStructName)); + private void addConstructor(String ownerStructName, WhitelistConstructor whitelistConstructor) { + PainlessClass ownerStruct = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(ownerStructName)); if (ownerStruct == null) { throw new IllegalArgumentException("owner struct [" + ownerStructName + "] not defined for constructor with " + @@ -760,8 +461,8 @@ private void addConstructor(String ownerStructName, Whitelist.Constructor whitel " with constructor parameters " + whitelistConstructor.painlessParameterTypeNames, exception); } - MethodKey painlessMethodKey = new MethodKey("", whitelistConstructor.painlessParameterTypeNames.size()); - Method painlessConstructor = ownerStruct.constructors.get(painlessMethodKey); + PainlessMethodKey painlessMethodKey = new PainlessMethodKey("", whitelistConstructor.painlessParameterTypeNames.size()); + PainlessMethod painlessConstructor = ownerStruct.constructors.get(painlessMethodKey); if (painlessConstructor == null) { org.objectweb.asm.commons.Method asmConstructor = org.objectweb.asm.commons.Method.getMethod(javaConstructor); @@ -775,7 +476,7 @@ private void addConstructor(String ownerStructName, Whitelist.Constructor whitel } painlessConstructor = methodCache.computeIfAbsent(buildMethodCacheKey(ownerStruct.name, "", painlessParametersTypes), - key -> new Method("", ownerStruct, null, void.class, painlessParametersTypes, + key -> new PainlessMethod("", ownerStruct, null, void.class, painlessParametersTypes, asmConstructor, javaConstructor.getModifiers(), javaHandle)); ownerStruct.constructors.put(painlessMethodKey, painlessConstructor); } else if (painlessConstructor.arguments.equals(painlessParametersTypes) == false){ @@ -785,8 +486,8 @@ private void addConstructor(String ownerStructName, Whitelist.Constructor whitel } } - private void addMethod(ClassLoader whitelistClassLoader, String ownerStructName, Whitelist.Method whitelistMethod) { - Struct ownerStruct = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(ownerStructName)); + private void addMethod(ClassLoader whitelistClassLoader, String ownerStructName, WhitelistMethod whitelistMethod) { + PainlessClass ownerStruct = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(ownerStructName)); if (ownerStruct == null) { 
throw new IllegalArgumentException("owner struct [" + ownerStructName + "] not defined for method with " + @@ -864,10 +565,11 @@ private void addMethod(ClassLoader whitelistClassLoader, String ownerStructName, "and parameters " + whitelistMethod.painlessParameterTypeNames); } - MethodKey painlessMethodKey = new MethodKey(whitelistMethod.javaMethodName, whitelistMethod.painlessParameterTypeNames.size()); + PainlessMethodKey painlessMethodKey = + new PainlessMethodKey(whitelistMethod.javaMethodName, whitelistMethod.painlessParameterTypeNames.size()); if (javaAugmentedClass == null && Modifier.isStatic(javaMethod.getModifiers())) { - Method painlessMethod = ownerStruct.staticMethods.get(painlessMethodKey); + PainlessMethod painlessMethod = ownerStruct.staticMethods.get(painlessMethodKey); if (painlessMethod == null) { org.objectweb.asm.commons.Method asmMethod = org.objectweb.asm.commons.Method.getMethod(javaMethod); @@ -882,8 +584,8 @@ private void addMethod(ClassLoader whitelistClassLoader, String ownerStructName, painlessMethod = methodCache.computeIfAbsent( buildMethodCacheKey(ownerStruct.name, whitelistMethod.javaMethodName, painlessParametersTypes), - key -> new Method(whitelistMethod.javaMethodName, ownerStruct, null, painlessReturnClass, painlessParametersTypes, - asmMethod, javaMethod.getModifiers(), javaMethodHandle)); + key -> new PainlessMethod(whitelistMethod.javaMethodName, ownerStruct, null, painlessReturnClass, + painlessParametersTypes, asmMethod, javaMethod.getModifiers(), javaMethodHandle)); ownerStruct.staticMethods.put(painlessMethodKey, painlessMethod); } else if ((painlessMethod.name.equals(whitelistMethod.javaMethodName) && painlessMethod.rtn == painlessReturnClass && painlessMethod.arguments.equals(painlessParametersTypes)) == false) { @@ -893,7 +595,7 @@ private void addMethod(ClassLoader whitelistClassLoader, String ownerStructName, "and parameters " + painlessParametersTypes + " and " + painlessMethod.arguments); } } else { - Method painlessMethod = ownerStruct.methods.get(painlessMethodKey); + PainlessMethod painlessMethod = ownerStruct.methods.get(painlessMethodKey); if (painlessMethod == null) { org.objectweb.asm.commons.Method asmMethod = org.objectweb.asm.commons.Method.getMethod(javaMethod); @@ -908,7 +610,7 @@ private void addMethod(ClassLoader whitelistClassLoader, String ownerStructName, painlessMethod = methodCache.computeIfAbsent( buildMethodCacheKey(ownerStruct.name, whitelistMethod.javaMethodName, painlessParametersTypes), - key -> new Method(whitelistMethod.javaMethodName, ownerStruct, javaAugmentedClass, painlessReturnClass, + key -> new PainlessMethod(whitelistMethod.javaMethodName, ownerStruct, javaAugmentedClass, painlessReturnClass, painlessParametersTypes, asmMethod, javaMethod.getModifiers(), javaMethodHandle)); ownerStruct.methods.put(painlessMethodKey, painlessMethod); } else if ((painlessMethod.name.equals(whitelistMethod.javaMethodName) && painlessMethod.rtn.equals(painlessReturnClass) && @@ -921,8 +623,8 @@ private void addMethod(ClassLoader whitelistClassLoader, String ownerStructName, } } - private void addField(String ownerStructName, Whitelist.Field whitelistField) { - Struct ownerStruct = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(ownerStructName)); + private void addField(String ownerStructName, WhitelistField whitelistField) { + PainlessClass ownerStruct = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(ownerStructName)); if (ownerStruct == null) { throw new IllegalArgumentException("owner struct 
[" + ownerStructName + "] not defined for method with " + @@ -958,12 +660,12 @@ private void addField(String ownerStructName, Whitelist.Field whitelistField) { "with owner struct [" + ownerStruct.name + "] is not final"); } - Field painlessField = ownerStruct.staticMembers.get(whitelistField.javaFieldName); + PainlessField painlessField = ownerStruct.staticMembers.get(whitelistField.javaFieldName); if (painlessField == null) { painlessField = fieldCache.computeIfAbsent( buildFieldCacheKey(ownerStruct.name, whitelistField.javaFieldName, painlessFieldClass.getName()), - key -> new Field(whitelistField.javaFieldName, javaField.getName(), + key -> new PainlessField(whitelistField.javaFieldName, javaField.getName(), ownerStruct, painlessFieldClass, javaField.getModifiers(), null, null)); ownerStruct.staticMembers.put(whitelistField.javaFieldName, painlessField); } else if (painlessField.clazz != painlessFieldClass) { @@ -987,12 +689,12 @@ private void addField(String ownerStructName, Whitelist.Field whitelistField) { " not found for class [" + ownerStruct.clazz.getName() + "]."); } - Field painlessField = ownerStruct.members.get(whitelistField.javaFieldName); + PainlessField painlessField = ownerStruct.members.get(whitelistField.javaFieldName); if (painlessField == null) { painlessField = fieldCache.computeIfAbsent( buildFieldCacheKey(ownerStruct.name, whitelistField.javaFieldName, painlessFieldClass.getName()), - key -> new Field(whitelistField.javaFieldName, javaField.getName(), + key -> new PainlessField(whitelistField.javaFieldName, javaField.getName(), ownerStruct, painlessFieldClass, javaField.getModifiers(), javaMethodHandleGetter, javaMethodHandleSetter)); ownerStruct.members.put(whitelistField.javaFieldName, painlessField); } else if (painlessField.clazz != painlessFieldClass) { @@ -1003,14 +705,14 @@ private void addField(String ownerStructName, Whitelist.Field whitelistField) { } private void copyStruct(String struct, List children) { - final Struct owner = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(struct)); + final PainlessClass owner = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(struct)); if (owner == null) { throw new IllegalArgumentException("Owner struct [" + struct + "] not defined for copy."); } for (int count = 0; count < children.size(); ++count) { - final Struct child = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(children.get(count))); + final PainlessClass child = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(children.get(count))); if (child == null) { throw new IllegalArgumentException("Child struct [" + children.get(count) + "]" + @@ -1022,9 +724,9 @@ private void copyStruct(String struct, List children) { " is not a super type of owner struct [" + owner.name + "] in copy."); } - for (Map.Entry kvPair : child.methods.entrySet()) { - MethodKey methodKey = kvPair.getKey(); - Method method = kvPair.getValue(); + for (Map.Entry kvPair : child.methods.entrySet()) { + PainlessMethodKey methodKey = kvPair.getKey(); + PainlessMethod method = kvPair.getValue(); if (owner.methods.get(methodKey) == null) { // TODO: some of these are no longer valid or outright don't work // TODO: since classes may not come from the Painless classloader @@ -1076,10 +778,10 @@ private void copyStruct(String struct, List children) { } } - for (Field field : child.members.values()) { + for (PainlessField field : child.members.values()) { if (owner.members.get(field.name) == null) { owner.members.put(field.name, - new 
Field(field.name, field.javaName, owner, field.clazz, field.modifiers, field.getter, field.setter)); + new PainlessField(field.name, field.javaName, owner, field.clazz, field.modifiers, field.getter, field.setter)); } } } @@ -1088,11 +790,11 @@ private void copyStruct(String struct, List children) { /** * Precomputes a more efficient structure for dynamic method/field access. */ - private void addRuntimeClass(final Struct struct) { + private void addRuntimeClass(final PainlessClass struct) { // add all getters/setters - for (Map.Entry method : struct.methods.entrySet()) { + for (Map.Entry method : struct.methods.entrySet()) { String name = method.getKey().name; - Method m = method.getValue(); + PainlessMethod m = method.getValue(); if (m.arguments.size() == 0 && name.startsWith("get") && @@ -1124,14 +826,14 @@ private void addRuntimeClass(final Struct struct) { } // add all members - for (Map.Entry member : struct.members.entrySet()) { + for (Map.Entry member : struct.members.entrySet()) { struct.getters.put(member.getKey(), member.getValue().getter); struct.setters.put(member.getKey(), member.getValue().setter); } } /** computes the functional interface method for a class, or returns null */ - private Method computeFunctionalInterfaceMethod(Struct clazz) { + private PainlessMethod computeFunctionalInterfaceMethod(PainlessClass clazz) { if (!clazz.clazz.isInterface()) { return null; } @@ -1166,7 +868,7 @@ private Method computeFunctionalInterfaceMethod(Struct clazz) { } // inspect the one method found from the reflection API, it should match the whitelist! java.lang.reflect.Method oneMethod = methods.get(0); - Method painless = clazz.methods.get(new Definition.MethodKey(oneMethod.getName(), oneMethod.getParameterCount())); + PainlessMethod painless = clazz.methods.get(new PainlessMethodKey(oneMethod.getName(), oneMethod.getParameterCount())); if (painless == null || painless.method.equals(org.objectweb.asm.commons.Method.getMethod(oneMethod)) == false) { throw new IllegalArgumentException("Class: " + clazz.name + " is functional but the functional " + "method is not whitelisted!"); @@ -1178,7 +880,7 @@ public boolean isSimplePainlessType(String painlessType) { return painlessTypesToJavaClasses.containsKey(painlessType); } - public Struct getPainlessStructFromJavaClass(Class clazz) { + public PainlessClass getPainlessStructFromJavaClass(Class clazz) { return javaClassesToPainlessStructs.get(clazz); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessMethod.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessMethod.java new file mode 100644 index 0000000000000..e87e1d4bf38fc --- /dev/null +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessMethod.java @@ -0,0 +1,130 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
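Stepping back to addRuntimeClass above: the getter/setter shortcut maps it precomputes are keyed by bean-style property names derived from method names. A hedged sketch of that derivation; the method names here are illustrative:

    // Bean-style key derivation mirroring the checks in addRuntimeClass:
    String getter = "getLength";   // zero-arg, non-void -> getters map
    String key = Character.toLowerCase(getter.charAt(3)) + getter.substring(4);            // "length"
    String boolGetter = "isEmpty"; // zero-arg, boolean  -> getters map
    String boolKey = Character.toLowerCase(boolGetter.charAt(2)) + boolGetter.substring(3); // "empty"
    String setter = "setLength";   // one-arg            -> setters map
    String setKey = Character.toLowerCase(setter.charAt(3)) + setter.substring(4);          // "length"
    // Plain fields are then added under their own names with their MethodHandle getter/setter.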
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.painless.lookup; + +import org.elasticsearch.painless.MethodWriter; +import org.objectweb.asm.Opcodes; + +import java.lang.invoke.MethodHandle; +import java.lang.invoke.MethodType; +import java.lang.reflect.Modifier; +import java.util.Collections; +import java.util.List; + +public class PainlessMethod { + public final String name; + public final PainlessClass owner; + public final Class augmentation; + public final Class rtn; + public final List> arguments; + public final org.objectweb.asm.commons.Method method; + public final int modifiers; + public final MethodHandle handle; + + public PainlessMethod(String name, PainlessClass owner, Class augmentation, Class rtn, List> arguments, + org.objectweb.asm.commons.Method method, int modifiers, MethodHandle handle) { + this.name = name; + this.augmentation = augmentation; + this.owner = owner; + this.rtn = rtn; + this.arguments = Collections.unmodifiableList(arguments); + this.method = method; + this.modifiers = modifiers; + this.handle = handle; + } + + /** + * Returns MethodType for this method. + *

    + * This works even for user-defined Methods (where the MethodHandle is null). + */ + public MethodType getMethodType() { + // we have a methodhandle already (e.g. whitelisted class) + // just return its type + if (handle != null) { + return handle.type(); + } + // otherwise compute it + final Class params[]; + final Class returnValue; + if (augmentation != null) { + // static method disguised as virtual/interface method + params = new Class[1 + arguments.size()]; + params[0] = augmentation; + for (int i = 0; i < arguments.size(); i++) { + params[i + 1] = PainlessLookup.defClassToObjectClass(arguments.get(i)); + } + returnValue = PainlessLookup.defClassToObjectClass(rtn); + } else if (Modifier.isStatic(modifiers)) { + // static method: straightforward copy + params = new Class[arguments.size()]; + for (int i = 0; i < arguments.size(); i++) { + params[i] = PainlessLookup.defClassToObjectClass(arguments.get(i)); + } + returnValue = PainlessLookup.defClassToObjectClass(rtn); + } else if ("".equals(name)) { + // constructor: returns the owner class + params = new Class[arguments.size()]; + for (int i = 0; i < arguments.size(); i++) { + params[i] = PainlessLookup.defClassToObjectClass(arguments.get(i)); + } + returnValue = owner.clazz; + } else { + // virtual/interface method: add receiver class + params = new Class[1 + arguments.size()]; + params[0] = owner.clazz; + for (int i = 0; i < arguments.size(); i++) { + params[i + 1] = PainlessLookup.defClassToObjectClass(arguments.get(i)); + } + returnValue = PainlessLookup.defClassToObjectClass(rtn); + } + return MethodType.methodType(returnValue, params); + } + + public void write(MethodWriter writer) { + final org.objectweb.asm.Type type; + final Class clazz; + if (augmentation != null) { + assert Modifier.isStatic(modifiers); + clazz = augmentation; + type = org.objectweb.asm.Type.getType(augmentation); + } else { + clazz = owner.clazz; + type = owner.type; + } + + if (Modifier.isStatic(modifiers)) { + // invokeStatic assumes that the owner class is not an interface, so this is a + // special case for interfaces where the interface method boolean needs to be set to + // true to reference the appropriate class constant when calling a static interface + // method since java 8 did not check, but java 9 and 10 do + if (Modifier.isInterface(clazz.getModifiers())) { + writer.visitMethodInsn(Opcodes.INVOKESTATIC, + type.getInternalName(), name, getMethodType().toMethodDescriptorString(), true); + } else { + writer.invokeStatic(type, method); + } + } else if (Modifier.isInterface(clazz.getModifiers())) { + writer.invokeInterface(type, method); + } else { + writer.invokeVirtual(type, method); + } + } +} diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessMethodKey.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessMethodKey.java new file mode 100644 index 0000000000000..49413ab0c5fef --- /dev/null +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessMethodKey.java @@ -0,0 +1,75 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
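The interface branch in write() above exists because the INVOKESTATIC instruction carries an "is interface" flag that the Java 9+ verifier enforces, while ASM's invokeStatic helper always emits it as false. A sketch of the raw visitor call it falls back to; the owner and descriptor here are illustrative, not from the whitelist:

    // Emitting a static interface method call with the itf flag set
    // (e.g. Comparator.naturalOrder(); owner/descriptor illustrative):
    writer.visitMethodInsn(Opcodes.INVOKESTATIC,
            "java/util/Comparator",        // internal name of the interface owner
            "naturalOrder",
            "()Ljava/util/Comparator;",
            true);                         // itf = true; invokeStatic would emit false here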
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.painless.lookup; + +import java.util.Objects; + +/** + * Key for looking up a method. + *

    + * Methods are keyed on both name and arity, and can be overloaded once per arity. + * This allows signatures such as {@code String.indexOf(String) vs String.indexOf(String, int)}. + *

    + * It is less flexible than full signature overloading where types can differ too, but + * better than just the name, and overloading types adds complexity to users, too. + */ +public final class PainlessMethodKey { + public final String name; + public final int arity; + + /** + * Create a new lookup key + * @param name name of the method + * @param arity number of parameters + */ + public PainlessMethodKey(String name, int arity) { + this.name = Objects.requireNonNull(name); + this.arity = arity; + } + + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + arity; + result = prime * result + name.hashCode(); + return result; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) return true; + if (obj == null) return false; + if (getClass() != obj.getClass()) return false; + PainlessMethodKey other = (PainlessMethodKey) obj; + if (arity != other.arity) return false; + if (!name.equals(other.name)) return false; + return true; + } + + @Override + public String toString() { + StringBuilder sb = new StringBuilder(); + sb.append(name); + sb.append('/'); + sb.append(arity); + return sb.toString(); + } +} diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AExpression.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AExpression.java index 550e9db9495b7..0dfcf4d409335 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AExpression.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AExpression.java @@ -20,8 +20,8 @@ package org.elasticsearch.painless.node; import org.elasticsearch.painless.AnalyzerCaster; -import org.elasticsearch.painless.Definition; -import org.elasticsearch.painless.Definition.Cast; +import org.elasticsearch.painless.lookup.PainlessLookup; +import org.elasticsearch.painless.lookup.PainlessCast; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; @@ -118,7 +118,7 @@ public abstract class AExpression extends ANode { * @return The new child node for the parent node calling this method. */ AExpression cast(Locals locals) { - Cast cast = AnalyzerCaster.getLegalCast(location, actual, expected, explicit, internal); + PainlessCast cast = AnalyzerCaster.getLegalCast(location, actual, expected, explicit, internal); if (cast == null) { if (constant == null || this instanceof EConstant) { @@ -157,7 +157,7 @@ AExpression cast(Locals locals) { return ecast; } else { - if (Definition.isConstantType(expected)) { + if (PainlessLookup.isConstantType(expected)) { // For the case where a cast is required, a constant is set, // and the constant can be immediately cast to the expected type. 
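A small sketch of the name-plus-arity keying documented above; both String.indexOf overloads coexist because their keys differ in arity, not in parameter types:

    PainlessMethodKey oneArg = new PainlessMethodKey("indexOf", 1); // String.indexOf(String)
    PainlessMethodKey twoArg = new PainlessMethodKey("indexOf", 2); // String.indexOf(String, int)
    assert oneArg.equals(twoArg) == false;        // distinct map entries
    assert "indexOf/1".equals(oneArg.toString()); // rendered as name '/' arity
    // A third overload differing only in parameter types (same arity) could not be whitelisted, by design.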
// An EConstant replaces this node with the constant cast appropriately diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EAssignment.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EAssignment.java index dce04661dbca8..dda246b5f6cda 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EAssignment.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EAssignment.java @@ -22,8 +22,8 @@ import org.elasticsearch.painless.AnalyzerCaster; import org.elasticsearch.painless.DefBootstrap; -import org.elasticsearch.painless.Definition.Cast; -import org.elasticsearch.painless.Definition.def; +import org.elasticsearch.painless.lookup.PainlessCast; +import org.elasticsearch.painless.lookup.PainlessLookup.def; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; @@ -49,8 +49,8 @@ public final class EAssignment extends AExpression { private boolean cat = false; private Class promote = null; private Class shiftDistance; // for shifts, the RHS is promoted independently - private Cast there = null; - private Cast back = null; + private PainlessCast there = null; + private PainlessCast back = null; public EAssignment(Location location, AExpression lhs, AExpression rhs, boolean pre, boolean post, Operation operation) { super(location); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBinary.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBinary.java index 00168549b783e..46fbeefd6f557 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBinary.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBinary.java @@ -21,8 +21,8 @@ import org.elasticsearch.painless.AnalyzerCaster; import org.elasticsearch.painless.DefBootstrap; -import org.elasticsearch.painless.Definition; -import org.elasticsearch.painless.Definition.def; +import org.elasticsearch.painless.lookup.PainlessLookup; +import org.elasticsearch.painless.lookup.PainlessLookup.def; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; @@ -106,7 +106,7 @@ private void analyzeMul(Locals variables) { if (promote == null) { throw createError(new ClassCastException("Cannot apply multiply [*] to types " + - "[" + Definition.ClassToName(left.actual) + "] and [" + Definition.ClassToName(right.actual) + "].")); + "[" + PainlessLookup.ClassToName(left.actual) + "] and [" + PainlessLookup.ClassToName(right.actual) + "].")); } actual = promote; @@ -148,7 +148,7 @@ private void analyzeDiv(Locals variables) { if (promote == null) { throw createError(new ClassCastException("Cannot apply divide [/] to types " + - "[" + Definition.ClassToName(left.actual) + "] and [" + Definition.ClassToName(right.actual) + "].")); + "[" + PainlessLookup.ClassToName(left.actual) + "] and [" + PainlessLookup.ClassToName(right.actual) + "].")); } actual = promote; @@ -195,7 +195,7 @@ private void analyzeRem(Locals variables) { if (promote == null) { throw createError(new ClassCastException("Cannot apply remainder [%] to types " + - "[" + Definition.ClassToName(left.actual) + "] and [" + Definition.ClassToName(right.actual) + "].")); + "[" + PainlessLookup.ClassToName(left.actual) + "] and [" + PainlessLookup.ClassToName(right.actual) + "].")); } actual = promote; @@ -242,7 +242,7 @@ private void analyzeAdd(Locals 
variables) { if (promote == null) { throw createError(new ClassCastException("Cannot apply add [+] to types " + - "[" + Definition.ClassToName(left.actual) + "] and [" + Definition.ClassToName(right.actual) + "].")); + "[" + PainlessLookup.ClassToName(left.actual) + "] and [" + PainlessLookup.ClassToName(right.actual) + "].")); } actual = promote; @@ -300,7 +300,7 @@ private void analyzeSub(Locals variables) { if (promote == null) { throw createError(new ClassCastException("Cannot apply subtract [-] to types " + - "[" + Definition.ClassToName(left.actual) + "] and [" + Definition.ClassToName(right.actual) + "].")); + "[" + PainlessLookup.ClassToName(left.actual) + "] and [" + PainlessLookup.ClassToName(right.actual) + "].")); } actual = promote; @@ -358,7 +358,7 @@ private void analyzeLSH(Locals variables) { if (lhspromote == null || rhspromote == null) { throw createError(new ClassCastException("Cannot apply left shift [<<] to types " + - "[" + Definition.ClassToName(left.actual) + "] and [" + Definition.ClassToName(right.actual) + "].")); + "[" + PainlessLookup.ClassToName(left.actual) + "] and [" + PainlessLookup.ClassToName(right.actual) + "].")); } actual = promote = lhspromote; @@ -405,7 +405,7 @@ private void analyzeRSH(Locals variables) { if (lhspromote == null || rhspromote == null) { throw createError(new ClassCastException("Cannot apply right shift [>>] to types " + - "[" + Definition.ClassToName(left.actual) + "] and [" + Definition.ClassToName(right.actual) + "].")); + "[" + PainlessLookup.ClassToName(left.actual) + "] and [" + PainlessLookup.ClassToName(right.actual) + "].")); } actual = promote = lhspromote; @@ -455,7 +455,7 @@ private void analyzeUSH(Locals variables) { if (lhspromote == null || rhspromote == null) { throw createError(new ClassCastException("Cannot apply unsigned shift [>>>] to types " + - "[" + Definition.ClassToName(left.actual) + "] and [" + Definition.ClassToName(right.actual) + "].")); + "[" + PainlessLookup.ClassToName(left.actual) + "] and [" + PainlessLookup.ClassToName(right.actual) + "].")); } if (lhspromote == def.class || rhspromote == def.class) { @@ -498,7 +498,7 @@ private void analyzeBWAnd(Locals variables) { if (promote == null) { throw createError(new ClassCastException("Cannot apply and [&] to types " + - "[" + Definition.ClassToName(left.actual) + "] and [" + Definition.ClassToName(right.actual) + "].")); + "[" + PainlessLookup.ClassToName(left.actual) + "] and [" + PainlessLookup.ClassToName(right.actual) + "].")); } actual = promote; @@ -537,7 +537,7 @@ private void analyzeXor(Locals variables) { if (promote == null) { throw createError(new ClassCastException("Cannot apply xor [^] to types " + - "[" + Definition.ClassToName(left.actual) + "] and [" + Definition.ClassToName(right.actual) + "].")); + "[" + PainlessLookup.ClassToName(left.actual) + "] and [" + PainlessLookup.ClassToName(right.actual) + "].")); } actual = promote; @@ -577,7 +577,7 @@ private void analyzeBWOr(Locals variables) { if (promote == null) { throw createError(new ClassCastException("Cannot apply or [|] to types " + - "[" + Definition.ClassToName(left.actual) + "] and [" + Definition.ClassToName(right.actual) + "].")); + "[" + PainlessLookup.ClassToName(left.actual) + "] and [" + PainlessLookup.ClassToName(right.actual) + "].")); } actual = promote; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECallLocal.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECallLocal.java index 4c977fa66e89a..ede1a2a6b9c36 
100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECallLocal.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECallLocal.java @@ -19,8 +19,8 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.Definition.Method; -import org.elasticsearch.painless.Definition.MethodKey; +import org.elasticsearch.painless.lookup.PainlessMethod; +import org.elasticsearch.painless.lookup.PainlessMethodKey; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; @@ -40,7 +40,7 @@ public final class ECallLocal extends AExpression { private final String name; private final List arguments; - private Method method = null; + private PainlessMethod method = null; public ECallLocal(Location location, String name, List arguments) { super(location); @@ -58,7 +58,7 @@ void extractVariables(Set variables) { @Override void analyze(Locals locals) { - MethodKey methodKey = new MethodKey(name, arguments.size()); + PainlessMethodKey methodKey = new PainlessMethodKey(name, arguments.size()); method = locals.getMethod(methodKey); if (method == null) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java index e6f2f7ebf91f9..a3e1b4bde6a86 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java @@ -21,8 +21,8 @@ import org.elasticsearch.painless.AnalyzerCaster; import org.elasticsearch.painless.DefBootstrap; -import org.elasticsearch.painless.Definition; -import org.elasticsearch.painless.Definition.def; +import org.elasticsearch.painless.lookup.PainlessLookup; +import org.elasticsearch.painless.lookup.PainlessLookup.def; import org.elasticsearch.painless.FunctionRef; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; @@ -69,7 +69,7 @@ void analyze(Locals locals) { defPointer = "D" + variable + "." + call + ",1"; } else { // typed implementation - defPointer = "S" + Definition.ClassToName(captured.clazz) + "." + call + ",1"; + defPointer = "S" + PainlessLookup.ClassToName(captured.clazz) + "." 
+ call + ",1"; } actual = String.class; } else { @@ -77,7 +77,7 @@ void analyze(Locals locals) { // static case if (captured.clazz != def.class) { try { - ref = new FunctionRef(locals.getDefinition(), expected, Definition.ClassToName(captured.clazz), call, 1); + ref = new FunctionRef(locals.getPainlessLookup(), expected, PainlessLookup.ClassToName(captured.clazz), call, 1); // check casts between the interface method and the delegate method are legal for (int i = 0; i < ref.interfaceMethod.arguments.size(); ++i) { @@ -109,7 +109,7 @@ void write(MethodWriter writer, Globals globals) { // typed interface, dynamic implementation writer.visitVarInsn(MethodWriter.getType(captured.clazz).getOpcode(Opcodes.ILOAD), captured.getSlot()); Type methodType = Type.getMethodType(MethodWriter.getType(expected), MethodWriter.getType(captured.clazz)); - writer.invokeDefCall(call, methodType, DefBootstrap.REFERENCE, Definition.ClassToName(expected)); + writer.invokeDefCall(call, methodType, DefBootstrap.REFERENCE, PainlessLookup.ClassToName(expected)); } else { // typed interface, typed implementation writer.visitVarInsn(MethodWriter.getType(captured.clazz).getOpcode(Opcodes.ILOAD), captured.getSlot()); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECast.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECast.java index 2f492fe6edb36..55a9dbf71c891 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECast.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECast.java @@ -19,8 +19,8 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.Definition; -import org.elasticsearch.painless.Definition.Cast; +import org.elasticsearch.painless.lookup.PainlessLookup; +import org.elasticsearch.painless.lookup.PainlessCast; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; @@ -35,9 +35,9 @@ final class ECast extends AExpression { private AExpression child; - private final Cast cast; + private final PainlessCast cast; - ECast(Location location, AExpression child, Cast cast) { + ECast(Location location, AExpression child, PainlessCast cast) { super(location); this.child = Objects.requireNonNull(child); @@ -63,6 +63,6 @@ void write(MethodWriter writer, Globals globals) { @Override public String toString() { - return singleLineToString(Definition.ClassToName(cast.to), child); + return singleLineToString(PainlessLookup.ClassToName(cast.to), child); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EComp.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EComp.java index 876229797ecb0..c0fccab8e8a8e 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EComp.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EComp.java @@ -21,8 +21,8 @@ import org.elasticsearch.painless.AnalyzerCaster; import org.elasticsearch.painless.DefBootstrap; -import org.elasticsearch.painless.Definition; -import org.elasticsearch.painless.Definition.def; +import org.elasticsearch.painless.lookup.PainlessLookup; +import org.elasticsearch.painless.lookup.PainlessLookup.def; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; @@ -93,7 +93,7 @@ private void analyzeEq(Locals variables) { if (promotedType == null) { throw createError(new 
ClassCastException("Cannot apply equals [==] to types " + - "[" + Definition.ClassToName(left.actual) + "] and [" + Definition.ClassToName(right.actual) + "].")); + "[" + PainlessLookup.ClassToName(left.actual) + "] and [" + PainlessLookup.ClassToName(right.actual) + "].")); } if (promotedType == def.class) { @@ -142,7 +142,7 @@ private void analyzeEqR(Locals variables) { if (promotedType == null) { throw createError(new ClassCastException("Cannot apply reference equals [===] to types " + - "[" + Definition.ClassToName(left.actual) + "] and [" + Definition.ClassToName(right.actual) + "].")); + "[" + PainlessLookup.ClassToName(left.actual) + "] and [" + PainlessLookup.ClassToName(right.actual) + "].")); } left.expected = promotedType; @@ -182,7 +182,7 @@ private void analyzeNE(Locals variables) { if (promotedType == null) { throw createError(new ClassCastException("Cannot apply not equals [!=] to types " + - "[" + Definition.ClassToName(left.actual) + "] and [" + Definition.ClassToName(right.actual) + "].")); + "[" + PainlessLookup.ClassToName(left.actual) + "] and [" + PainlessLookup.ClassToName(right.actual) + "].")); } if (promotedType == def.class) { @@ -231,7 +231,7 @@ private void analyzeNER(Locals variables) { if (promotedType == null) { throw createError(new ClassCastException("Cannot apply reference not equals [!==] to types " + - "[" + Definition.ClassToName(left.actual) + "] and [" + Definition.ClassToName(right.actual) + "].")); + "[" + PainlessLookup.ClassToName(left.actual) + "] and [" + PainlessLookup.ClassToName(right.actual) + "].")); } left.expected = promotedType; @@ -271,7 +271,7 @@ private void analyzeGTE(Locals variables) { if (promotedType == null) { throw createError(new ClassCastException("Cannot apply greater than or equals [>=] to types " + - "[" + Definition.ClassToName(left.actual) + "] and [" + Definition.ClassToName(right.actual) + "].")); + "[" + PainlessLookup.ClassToName(left.actual) + "] and [" + PainlessLookup.ClassToName(right.actual) + "].")); } if (promotedType == def.class) { @@ -310,7 +310,7 @@ private void analyzeGT(Locals variables) { if (promotedType == null) { throw createError(new ClassCastException("Cannot apply greater than [>] to types " + - "[" + Definition.ClassToName(left.actual) + "] and [" + Definition.ClassToName(right.actual) + "].")); + "[" + PainlessLookup.ClassToName(left.actual) + "] and [" + PainlessLookup.ClassToName(right.actual) + "].")); } if (promotedType == def.class) { @@ -349,7 +349,7 @@ private void analyzeLTE(Locals variables) { if (promotedType == null) { throw createError(new ClassCastException("Cannot apply less than or equals [<=] to types " + - "[" + Definition.ClassToName(left.actual) + "] and [" + Definition.ClassToName(right.actual) + "].")); + "[" + PainlessLookup.ClassToName(left.actual) + "] and [" + PainlessLookup.ClassToName(right.actual) + "].")); } if (promotedType == def.class) { @@ -388,7 +388,7 @@ private void analyzeLT(Locals variables) { if (promotedType == null) { throw createError(new ClassCastException("Cannot apply less than [>=] to types " + - "[" + Definition.ClassToName(left.actual) + "] and [" + Definition.ClassToName(right.actual) + "].")); + "[" + PainlessLookup.ClassToName(left.actual) + "] and [" + PainlessLookup.ClassToName(right.actual) + "].")); } if (promotedType == def.class) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EExplicit.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EExplicit.java index 
5a897e04a8d98..d19068f8fa6a2 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EExplicit.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EExplicit.java @@ -50,7 +50,7 @@ void extractVariables(Set variables) { @Override void analyze(Locals locals) { try { - actual = locals.getDefinition().getJavaClassFromPainlessType(type); + actual = locals.getPainlessLookup().getJavaClassFromPainlessType(type); } catch (IllegalArgumentException exception) { throw createError(new IllegalArgumentException("Not a type [" + type + "].")); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java index 21bef9aa2ed5d..82c24e27c5d16 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java @@ -20,9 +20,9 @@ package org.elasticsearch.painless.node; import org.elasticsearch.painless.AnalyzerCaster; -import org.elasticsearch.painless.Definition; -import org.elasticsearch.painless.Definition.Method; -import org.elasticsearch.painless.Definition.MethodKey; +import org.elasticsearch.painless.lookup.PainlessLookup; +import org.elasticsearch.painless.lookup.PainlessMethod; +import org.elasticsearch.painless.lookup.PainlessMethodKey; import org.elasticsearch.painless.FunctionRef; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; @@ -66,15 +66,15 @@ void analyze(Locals locals) { try { if ("this".equals(type)) { // user's own function - Method interfaceMethod = locals.getDefinition().getPainlessStructFromJavaClass(expected).functionalMethod; + PainlessMethod interfaceMethod = locals.getPainlessLookup().getPainlessStructFromJavaClass(expected).functionalMethod; if (interfaceMethod == null) { throw new IllegalArgumentException("Cannot convert function reference [" + type + "::" + call + "] " + - "to [" + Definition.ClassToName(expected) + "], not a functional interface"); + "to [" + PainlessLookup.ClassToName(expected) + "], not a functional interface"); } - Method delegateMethod = locals.getMethod(new MethodKey(call, interfaceMethod.arguments.size())); + PainlessMethod delegateMethod = locals.getMethod(new PainlessMethodKey(call, interfaceMethod.arguments.size())); if (delegateMethod == null) { throw new IllegalArgumentException("Cannot convert function reference [" + type + "::" + call + "] " + - "to [" + Definition.ClassToName(expected) + "], function not found"); + "to [" + PainlessLookup.ClassToName(expected) + "], function not found"); } ref = new FunctionRef(expected, interfaceMethod, delegateMethod, 0); @@ -90,7 +90,7 @@ void analyze(Locals locals) { } } else { // whitelist lookup - ref = new FunctionRef(locals.getDefinition(), expected, type, call, 0); + ref = new FunctionRef(locals.getPainlessLookup(), expected, type, call, 0); } } catch (IllegalArgumentException e) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EInstanceof.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EInstanceof.java index 5296d79e214ed..a3835cbc21372 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EInstanceof.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EInstanceof.java @@ -19,7 +19,7 @@ package org.elasticsearch.painless.node; -import 
org.elasticsearch.painless.Definition; +import org.elasticsearch.painless.lookup.PainlessLookup; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; @@ -58,13 +58,13 @@ void analyze(Locals locals) { // ensure the specified type is part of the definition try { - clazz = locals.getDefinition().getJavaClassFromPainlessType(this.type); + clazz = locals.getPainlessLookup().getJavaClassFromPainlessType(this.type); } catch (IllegalArgumentException exception) { throw createError(new IllegalArgumentException("Not a type [" + this.type + "].")); } // map to wrapped type for primitive types - resolvedType = clazz.isPrimitive() ? Definition.getBoxedType(clazz) : Definition.defClassToObjectClass(clazz); + resolvedType = clazz.isPrimitive() ? PainlessLookup.getBoxedType(clazz) : PainlessLookup.defClassToObjectClass(clazz); // analyze and cast the expression expression.analyze(locals); @@ -75,7 +75,7 @@ void analyze(Locals locals) { primitiveExpression = expression.actual.isPrimitive(); // map to wrapped type for primitive types expressionType = expression.actual.isPrimitive() ? - Definition.getBoxedType(expression.actual) : Definition.defClassToObjectClass(clazz); + PainlessLookup.getBoxedType(expression.actual) : PainlessLookup.defClassToObjectClass(clazz); actual = boolean.class; } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java index e40d21ab110ab..a7b7a41fe051a 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java @@ -20,9 +20,9 @@ package org.elasticsearch.painless.node; import org.elasticsearch.painless.AnalyzerCaster; -import org.elasticsearch.painless.Definition; -import org.elasticsearch.painless.Definition.Method; -import org.elasticsearch.painless.Definition.def; +import org.elasticsearch.painless.lookup.PainlessLookup; +import org.elasticsearch.painless.lookup.PainlessMethod; +import org.elasticsearch.painless.lookup.PainlessLookup.def; import org.elasticsearch.painless.FunctionRef; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; @@ -103,7 +103,7 @@ void extractVariables(Set variables) { void analyze(Locals locals) { Class returnType; List actualParamTypeStrs; - Method interfaceMethod; + PainlessMethod interfaceMethod; // inspect the target first, set interface method if we know it. 
if (expected == null) { interfaceMethod = null; @@ -120,15 +120,15 @@ void analyze(Locals locals) { } } else { // we know the method statically, infer return type and any unknown/def types - interfaceMethod = locals.getDefinition().getPainlessStructFromJavaClass(expected).functionalMethod; + interfaceMethod = locals.getPainlessLookup().getPainlessStructFromJavaClass(expected).functionalMethod; if (interfaceMethod == null) { - throw createError(new IllegalArgumentException("Cannot pass lambda to [" + Definition.ClassToName(expected) + + throw createError(new IllegalArgumentException("Cannot pass lambda to [" + PainlessLookup.ClassToName(expected) + "], not a functional interface")); } // check arity before we manipulate parameters if (interfaceMethod.arguments.size() != paramTypeStrs.size()) throw new IllegalArgumentException("Incorrect number of parameters for [" + interfaceMethod.name + - "] in [" + Definition.ClassToName(expected) + "]"); + "] in [" + PainlessLookup.ClassToName(expected) + "]"); // for method invocation, it's allowed to ignore the return value if (interfaceMethod.rtn == void.class) { returnType = def.class; @@ -140,7 +140,7 @@ void analyze(Locals locals) { for (int i = 0; i < paramTypeStrs.size(); i++) { String paramType = paramTypeStrs.get(i); if (paramType == null) { - actualParamTypeStrs.add(Definition.ClassToName(interfaceMethod.arguments.get(i))); + actualParamTypeStrs.add(PainlessLookup.ClassToName(interfaceMethod.arguments.get(i))); } else { actualParamTypeStrs.add(paramType); } @@ -162,16 +162,16 @@ void analyze(Locals locals) { List paramTypes = new ArrayList<>(captures.size() + actualParamTypeStrs.size()); List paramNames = new ArrayList<>(captures.size() + paramNameStrs.size()); for (Variable var : captures) { - paramTypes.add(Definition.ClassToName(var.clazz)); + paramTypes.add(PainlessLookup.ClassToName(var.clazz)); paramNames.add(var.name); } paramTypes.addAll(actualParamTypeStrs); paramNames.addAll(paramNameStrs); // desugar lambda body into a synthetic method - desugared = new SFunction(reserved, location, Definition.ClassToName(returnType), name, + desugared = new SFunction(reserved, location, PainlessLookup.ClassToName(returnType), name, paramTypes, paramNames, statements, true); - desugared.generateSignature(locals.getDefinition()); + desugared.generateSignature(locals.getPainlessLookup()); desugared.analyze(Locals.newLambdaScope(locals.getProgramScope(), returnType, desugared.parameters, captures.size(), reserved.getMaxLoopCounter())); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java index 05b10796cb4f9..518f1953525a6 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java @@ -19,9 +19,9 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.Definition.Method; -import org.elasticsearch.painless.Definition.MethodKey; -import org.elasticsearch.painless.Definition.def; +import org.elasticsearch.painless.lookup.PainlessMethod; +import org.elasticsearch.painless.lookup.PainlessMethodKey; +import org.elasticsearch.painless.lookup.PainlessLookup.def; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; @@ -37,8 +37,8 @@ public final class EListInit extends AExpression { private final List values; -
private Method constructor = null; - private Method method = null; + private PainlessMethod constructor = null; + private PainlessMethod method = null; public EListInit(Location location, List values) { super(location); @@ -61,13 +61,14 @@ void analyze(Locals locals) { actual = ArrayList.class; - constructor = locals.getDefinition().getPainlessStructFromJavaClass(actual).constructors.get(new MethodKey("<init>", 0)); + constructor = + locals.getPainlessLookup().getPainlessStructFromJavaClass(actual).constructors.get(new PainlessMethodKey("<init>", 0)); if (constructor == null) { throw createError(new IllegalStateException("Illegal tree structure.")); } - method = locals.getDefinition().getPainlessStructFromJavaClass(actual).methods.get(new MethodKey("add", 1)); + method = locals.getPainlessLookup().getPainlessStructFromJavaClass(actual).methods.get(new PainlessMethodKey("add", 1)); if (method == null) { throw createError(new IllegalStateException("Illegal tree structure. diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java index f5763042b8191..45158aedcf787 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java @@ -19,9 +19,9 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.Definition.Method; -import org.elasticsearch.painless.Definition.MethodKey; -import org.elasticsearch.painless.Definition.def; +import org.elasticsearch.painless.lookup.PainlessMethod; +import org.elasticsearch.painless.lookup.PainlessMethodKey; +import org.elasticsearch.painless.lookup.PainlessLookup.def; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; @@ -38,8 +38,8 @@ public final class EMapInit extends AExpression { private final List keys; private final List values; - private Method constructor = null; - private Method method = null; + private PainlessMethod constructor = null; + private PainlessMethod method = null; public EMapInit(Location location, List keys, List values) { super(location); @@ -67,13 +67,14 @@ void analyze(Locals locals) { actual = HashMap.class; - constructor = locals.getDefinition().getPainlessStructFromJavaClass(actual).constructors.get(new MethodKey("<init>", 0)); + constructor = + locals.getPainlessLookup().getPainlessStructFromJavaClass(actual).constructors.get(new PainlessMethodKey("<init>", 0)); if (constructor == null) { throw createError(new IllegalStateException("Illegal tree structure.")); } - method = locals.getDefinition().getPainlessStructFromJavaClass(actual).methods.get(new MethodKey("put", 2)); + method = locals.getPainlessLookup().getPainlessStructFromJavaClass(actual).methods.get(new PainlessMethodKey("put", 2)); if (method == null) { throw createError(new IllegalStateException("Illegal tree structure. diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewArray.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewArray.java index 1a0a718ae7fc8..f9bd4cebc3fed 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewArray.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewArray.java @@ -61,7 +61,7 @@ void analyze(Locals locals) { Class clazz; try { - clazz = locals.getDefinition().getJavaClassFromPainlessType(this.type);
+ clazz = locals.getPainlessLookup().getJavaClassFromPainlessType(this.type); } catch (IllegalArgumentException exception) { throw createError(new IllegalArgumentException("Not a type [" + this.type + "].")); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java index e3a926ef2244b..197e2fcb7fd42 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java @@ -19,13 +19,13 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.Definition; -import org.elasticsearch.painless.Definition.Method; -import org.elasticsearch.painless.Definition.Struct; +import org.elasticsearch.painless.lookup.PainlessMethod; +import org.elasticsearch.painless.lookup.PainlessClass; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.lookup.PainlessMethodKey; import java.util.List; import java.util.Objects; @@ -39,7 +39,7 @@ public final class ENewObj extends AExpression { private final String type; private final List arguments; - private Method constructor; + private PainlessMethod constructor; public ENewObj(Location location, String type, List arguments) { super(location); @@ -58,13 +58,13 @@ void extractVariables(Set variables) { @Override void analyze(Locals locals) { try { - actual = locals.getDefinition().getJavaClassFromPainlessType(this.type); + actual = locals.getPainlessLookup().getJavaClassFromPainlessType(this.type); } catch (IllegalArgumentException exception) { throw createError(new IllegalArgumentException("Not a type [" + this.type + "].")); } - Struct struct = locals.getDefinition().getPainlessStructFromJavaClass(actual); - constructor = struct.constructors.get(new Definition.MethodKey("<init>", arguments.size())); + PainlessClass struct = locals.getPainlessLookup().getPainlessStructFromJavaClass(actual); + constructor = struct.constructors.get(new PainlessMethodKey("<init>", arguments.size())); if (constructor != null) { Class[] types = new Class[constructor.arguments.size()]; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENull.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENull.java index ae682d5f7be3c..983819b6b2bf9 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENull.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENull.java @@ -19,7 +19,7 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.Definition; +import org.elasticsearch.painless.lookup.PainlessLookup; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; @@ -53,7 +53,7 @@ void analyze(Locals locals) { if (expected != null) { if (expected.isPrimitive()) { throw createError(new IllegalArgumentException( - "Cannot cast null to a primitive type [" + Definition.ClassToName(expected) + "].")); + "Cannot cast null to a primitive type [" + PainlessLookup.ClassToName(expected) + "].")); } actual = expected; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EStatic.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EStatic.java index
5ebf30f5781cf..a556b3ad315c6 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EStatic.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EStatic.java @@ -48,7 +48,7 @@ void extractVariables(Set variables) { @Override void analyze(Locals locals) { try { - actual = locals.getDefinition().getJavaClassFromPainlessType(type); + actual = locals.getPainlessLookup().getJavaClassFromPainlessType(type); } catch (IllegalArgumentException exception) { throw createError(new IllegalArgumentException("Not a type [" + type + "].")); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EUnary.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EUnary.java index a746ade26a15c..8e293556eac01 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EUnary.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EUnary.java @@ -21,8 +21,8 @@ import org.elasticsearch.painless.AnalyzerCaster; import org.elasticsearch.painless.DefBootstrap; -import org.elasticsearch.painless.Definition; -import org.elasticsearch.painless.Definition.def; +import org.elasticsearch.painless.lookup.PainlessLookup; +import org.elasticsearch.painless.lookup.PainlessLookup.def; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; @@ -93,7 +93,7 @@ void analyzeBWNot(Locals variables) { promote = AnalyzerCaster.promoteNumeric(child.actual, false); if (promote == null) { - throw createError(new ClassCastException("Cannot apply not [~] to type [" + Definition.ClassToName(child.actual) + "].")); + throw createError(new ClassCastException("Cannot apply not [~] to type [" + PainlessLookup.ClassToName(child.actual) + "].")); } child.expected = promote; @@ -122,7 +122,8 @@ void analyzerAdd(Locals variables) { promote = AnalyzerCaster.promoteNumeric(child.actual, true); if (promote == null) { - throw createError(new ClassCastException("Cannot apply positive [+] to type [" + Definition.ClassToName(child.actual) + "].")); + throw createError( + new ClassCastException("Cannot apply positive [+] to type [" + PainlessLookup.ClassToName(child.actual) + "].")); } child.expected = promote; @@ -155,7 +156,8 @@ void analyzerSub(Locals variables) { promote = AnalyzerCaster.promoteNumeric(child.actual, true); if (promote == null) { - throw createError(new ClassCastException("Cannot apply negative [-] to type [" + Definition.ClassToName(child.actual) + "].")); + throw createError( + new ClassCastException("Cannot apply negative [-] to type [" + PainlessLookup.ClassToName(child.actual) + "].")); } child.expected = promote; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PBrace.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PBrace.java index 0e2ab70897fe5..ec7d0f6d7bb7a 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PBrace.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PBrace.java @@ -19,8 +19,8 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.Definition; -import org.elasticsearch.painless.Definition.def; +import org.elasticsearch.painless.lookup.PainlessLookup; +import org.elasticsearch.painless.lookup.PainlessLookup.def; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; @@ -63,12 +63,12 @@ void 
analyze(Locals locals) { } else if (prefix.actual == def.class) { sub = new PSubDefArray(location, index); } else if (Map.class.isAssignableFrom(prefix.actual)) { - sub = new PSubMapShortcut(location, locals.getDefinition().getPainlessStructFromJavaClass(prefix.actual), index); + sub = new PSubMapShortcut(location, locals.getPainlessLookup().getPainlessStructFromJavaClass(prefix.actual), index); } else if (List.class.isAssignableFrom(prefix.actual)) { - sub = new PSubListShortcut(location, locals.getDefinition().getPainlessStructFromJavaClass(prefix.actual), index); + sub = new PSubListShortcut(location, locals.getPainlessLookup().getPainlessStructFromJavaClass(prefix.actual), index); } else { throw createError( - new IllegalArgumentException("Illegal array access on type [" + Definition.ClassToName(prefix.actual) + "].")); + new IllegalArgumentException("Illegal array access on type [" + PainlessLookup.ClassToName(prefix.actual) + "].")); } sub.write = write; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java index 6fff5a8e93f3e..12ff483248367 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java @@ -19,11 +19,11 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.Definition; -import org.elasticsearch.painless.Definition.Method; -import org.elasticsearch.painless.Definition.MethodKey; -import org.elasticsearch.painless.Definition.Struct; -import org.elasticsearch.painless.Definition.def; +import org.elasticsearch.painless.lookup.PainlessLookup; +import org.elasticsearch.painless.lookup.PainlessMethod; +import org.elasticsearch.painless.lookup.PainlessMethodKey; +import org.elasticsearch.painless.lookup.PainlessClass; +import org.elasticsearch.painless.lookup.PainlessLookup.def; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; @@ -71,14 +71,14 @@ void analyze(Locals locals) { throw createError(new IllegalArgumentException("Illegal call [" + name + "] on array type.")); } - Struct struct = locals.getDefinition().getPainlessStructFromJavaClass(prefix.actual); + PainlessClass struct = locals.getPainlessLookup().getPainlessStructFromJavaClass(prefix.actual); if (prefix.actual.isPrimitive()) { - struct = locals.getDefinition().getPainlessStructFromJavaClass(Definition.getBoxedType(prefix.actual)); + struct = locals.getPainlessLookup().getPainlessStructFromJavaClass(PainlessLookup.getBoxedType(prefix.actual)); } - MethodKey methodKey = new MethodKey(name, arguments.size()); - Method method = prefix instanceof EStatic ? struct.staticMethods.get(methodKey) : struct.methods.get(methodKey); + PainlessMethodKey methodKey = new PainlessMethodKey(name, arguments.size()); + PainlessMethod method = prefix instanceof EStatic ? 
struct.staticMethods.get(methodKey) : struct.methods.get(methodKey); if (method != null) { sub = new PSubCallInvoke(location, method, prefix.actual, arguments); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java index de2c05dfa9b28..8d27162fc367b 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java @@ -19,15 +19,16 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.Definition; -import org.elasticsearch.painless.Definition.Field; -import org.elasticsearch.painless.Definition.Method; -import org.elasticsearch.painless.Definition.Struct; -import org.elasticsearch.painless.Definition.def; +import org.elasticsearch.painless.lookup.PainlessLookup; +import org.elasticsearch.painless.lookup.PainlessField; +import org.elasticsearch.painless.lookup.PainlessMethod; +import org.elasticsearch.painless.lookup.PainlessClass; +import org.elasticsearch.painless.lookup.PainlessLookup.def; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.lookup.PainlessMethodKey; import java.util.List; import java.util.Map; @@ -63,29 +64,29 @@ void analyze(Locals locals) { prefix = prefix.cast(locals); if (prefix.actual.isArray()) { - sub = new PSubArrayLength(location, Definition.ClassToName(prefix.actual), value); + sub = new PSubArrayLength(location, PainlessLookup.ClassToName(prefix.actual), value); } else if (prefix.actual == def.class) { sub = new PSubDefField(location, value); } else { - Struct struct = locals.getDefinition().getPainlessStructFromJavaClass(prefix.actual); - Field field = prefix instanceof EStatic ? struct.staticMembers.get(value) : struct.members.get(value); + PainlessClass struct = locals.getPainlessLookup().getPainlessStructFromJavaClass(prefix.actual); + PainlessField field = prefix instanceof EStatic ? 
struct.staticMembers.get(value) : struct.members.get(value); if (field != null) { sub = new PSubField(location, field); } else { - Method getter = struct.methods.get( - new Definition.MethodKey("get" + Character.toUpperCase(value.charAt(0)) + value.substring(1), 0)); + PainlessMethod getter = struct.methods.get( + new PainlessMethodKey("get" + Character.toUpperCase(value.charAt(0)) + value.substring(1), 0)); if (getter == null) { getter = struct.methods.get( - new Definition.MethodKey("is" + Character.toUpperCase(value.charAt(0)) + value.substring(1), 0)); + new PainlessMethodKey("is" + Character.toUpperCase(value.charAt(0)) + value.substring(1), 0)); } - Method setter = struct.methods.get( - new Definition.MethodKey("set" + Character.toUpperCase(value.charAt(0)) + value.substring(1), 1)); + PainlessMethod setter = struct.methods.get( + new PainlessMethodKey("set" + Character.toUpperCase(value.charAt(0)) + value.substring(1), 1)); if (getter != null || setter != null) { - sub = new PSubShortcut(location, value, Definition.ClassToName(prefix.actual), getter, setter); + sub = new PSubShortcut(location, value, PainlessLookup.ClassToName(prefix.actual), getter, setter); } else { EConstant index = new EConstant(location, value); index.analyze(locals); @@ -103,7 +104,7 @@ void analyze(Locals locals) { if (sub == null) { throw createError(new IllegalArgumentException( - "Unknown field [" + value + "] for type [" + Definition.ClassToName(prefix.actual) + "].")); + "Unknown field [" + value + "] for type [" + PainlessLookup.ClassToName(prefix.actual) + "].")); } if (nullSafe) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubCallInvoke.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubCallInvoke.java index e13fe0d85c143..66ad0ecff1b89 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubCallInvoke.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubCallInvoke.java @@ -19,7 +19,7 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.Definition.Method; +import org.elasticsearch.painless.lookup.PainlessMethod; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; @@ -34,11 +34,11 @@ */ final class PSubCallInvoke extends AExpression { - private final Method method; + private final PainlessMethod method; private final Class box; private final List arguments; - PSubCallInvoke(Location location, Method method, Class box, List arguments) { + PSubCallInvoke(Location location, PainlessMethod method, Class box, List arguments) { super(location); this.method = Objects.requireNonNull(method); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefArray.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefArray.java index 27087928d4fec..8e30d43432953 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefArray.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefArray.java @@ -20,7 +20,7 @@ package org.elasticsearch.painless.node; import org.elasticsearch.painless.DefBootstrap; -import org.elasticsearch.painless.Definition.def; +import org.elasticsearch.painless.lookup.PainlessLookup.def; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; diff --git 
a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefCall.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefCall.java index 6428e47d1bacc..0882f19177006 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefCall.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefCall.java @@ -20,7 +20,7 @@ package org.elasticsearch.painless.node; import org.elasticsearch.painless.DefBootstrap; -import org.elasticsearch.painless.Definition.def; +import org.elasticsearch.painless.lookup.PainlessLookup.def; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefField.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefField.java index fbdfc47a65e66..41fcf563d241c 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefField.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefField.java @@ -20,7 +20,7 @@ package org.elasticsearch.painless.node; import org.elasticsearch.painless.DefBootstrap; -import org.elasticsearch.painless.Definition.def; +import org.elasticsearch.painless.lookup.PainlessLookup.def; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubField.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubField.java index b386feacb846d..d6c367cfeabec 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubField.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubField.java @@ -19,8 +19,8 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.Definition; -import org.elasticsearch.painless.Definition.Field; +import org.elasticsearch.painless.lookup.PainlessLookup; +import org.elasticsearch.painless.lookup.PainlessField; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; @@ -35,9 +35,9 @@ */ final class PSubField extends AStoreable { - private final Field field; + private final PainlessField field; - PSubField(Location location, Field field) { + PSubField(Location location, PainlessField field) { super(location); this.field = Objects.requireNonNull(field); @@ -52,7 +52,7 @@ void extractVariables(Set variables) { void analyze(Locals locals) { if (write && Modifier.isFinal(field.modifiers)) { throw createError(new IllegalArgumentException( - "Cannot write to read-only field [" + field.name + "] for type [" + Definition.ClassToName(field.clazz) + "].")); + "Cannot write to read-only field [" + field.name + "] for type [" + PainlessLookup.ClassToName(field.clazz) + "].")); } actual = field.clazz; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubListShortcut.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubListShortcut.java index 194a43e16dd16..5d881b30db22d 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubListShortcut.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubListShortcut.java @@ -19,14 +19,14 @@ package org.elasticsearch.painless.node; -import 
org.elasticsearch.painless.Definition; -import org.elasticsearch.painless.Definition.Method; -import org.elasticsearch.painless.Definition.Struct; +import org.elasticsearch.painless.lookup.PainlessMethod; +import org.elasticsearch.painless.lookup.PainlessClass; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.WriterConstants; +import org.elasticsearch.painless.lookup.PainlessMethodKey; import java.util.Objects; import java.util.Set; @@ -36,13 +36,13 @@ */ final class PSubListShortcut extends AStoreable { - private final Struct struct; + private final PainlessClass struct; private AExpression index; - private Method getter; - private Method setter; + private PainlessMethod getter; + private PainlessMethod setter; - PSubListShortcut(Location location, Struct struct, AExpression index) { + PSubListShortcut(Location location, PainlessClass struct, AExpression index) { super(location); this.struct = Objects.requireNonNull(struct); @@ -56,8 +56,8 @@ void extractVariables(Set variables) { @Override void analyze(Locals locals) { - getter = struct.methods.get(new Definition.MethodKey("get", 1)); - setter = struct.methods.get(new Definition.MethodKey("set", 2)); + getter = struct.methods.get(new PainlessMethodKey("get", 1)); + setter = struct.methods.get(new PainlessMethodKey("set", 2)); if (getter != null && (getter.rtn == void.class || getter.arguments.size() != 1 || getter.arguments.get(0) != int.class)) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubMapShortcut.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubMapShortcut.java index 033db404640e4..4875d55cbeb58 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubMapShortcut.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubMapShortcut.java @@ -19,13 +19,13 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.Definition; -import org.elasticsearch.painless.Definition.Method; -import org.elasticsearch.painless.Definition.Struct; +import org.elasticsearch.painless.lookup.PainlessMethod; +import org.elasticsearch.painless.lookup.PainlessClass; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.lookup.PainlessMethodKey; import java.util.Objects; import java.util.Set; @@ -35,13 +35,13 @@ */ final class PSubMapShortcut extends AStoreable { - private final Struct struct; + private final PainlessClass struct; private AExpression index; - private Method getter; - private Method setter; + private PainlessMethod getter; + private PainlessMethod setter; - PSubMapShortcut(Location location, Struct struct, AExpression index) { + PSubMapShortcut(Location location, PainlessClass struct, AExpression index) { super(location); this.struct = Objects.requireNonNull(struct); @@ -55,8 +55,8 @@ void extractVariables(Set variables) { @Override void analyze(Locals locals) { - getter = struct.methods.get(new Definition.MethodKey("get", 1)); - setter = struct.methods.get(new Definition.MethodKey("put", 2)); + getter = struct.methods.get(new PainlessMethodKey("get", 1)); + setter = struct.methods.get(new PainlessMethodKey("put", 2)); if (getter != null && (getter.rtn == void.class || 
getter.arguments.size() != 1)) { throw createError(new IllegalArgumentException("Illegal map get shortcut for type [" + struct.name + "].")); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubShortcut.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubShortcut.java index ff88f0018556c..4b2910dbc010e 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubShortcut.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubShortcut.java @@ -19,7 +19,7 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.Definition.Method; +import org.elasticsearch.painless.lookup.PainlessMethod; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; @@ -34,10 +34,10 @@ final class PSubShortcut extends AStoreable { private final String value; private final String type; - private final Method getter; - private final Method setter; + private final PainlessMethod getter; + private final PainlessMethod setter; - PSubShortcut(Location location, String value, String type, Method getter, Method setter) { + PSubShortcut(Location location, String value, String type, PainlessMethod getter, PainlessMethod setter) { super(location); this.value = value; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SCatch.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SCatch.java index 98e45ca29f416..8a703c80cba2f 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SCatch.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SCatch.java @@ -67,7 +67,7 @@ void analyze(Locals locals) { Class clazz; try { - clazz = locals.getDefinition().getJavaClassFromPainlessType(this.type); + clazz = locals.getPainlessLookup().getJavaClassFromPainlessType(this.type); } catch (IllegalArgumentException exception) { throw createError(new IllegalArgumentException("Not a type [" + this.type + "].")); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDeclaration.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDeclaration.java index 9f3f86abf438b..fb92c20e89e01 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDeclaration.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SDeclaration.java @@ -62,7 +62,7 @@ void analyze(Locals locals) { Class clazz; try { - clazz = locals.getDefinition().getJavaClassFromPainlessType(this.type); + clazz = locals.getPainlessLookup().getJavaClassFromPainlessType(this.type); } catch (IllegalArgumentException exception) { throw createError(new IllegalArgumentException("Not a type [" + this.type + "].")); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SEach.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SEach.java index a3c8319825a26..e7d18ece0590d 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SEach.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SEach.java @@ -19,8 +19,8 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.Definition; -import org.elasticsearch.painless.Definition.def; +import org.elasticsearch.painless.lookup.PainlessLookup; +import org.elasticsearch.painless.lookup.PainlessLookup.def; import 
org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Locals.Variable; @@ -71,7 +71,7 @@ void analyze(Locals locals) { Class clazz; try { - clazz = locals.getDefinition().getJavaClassFromPainlessType(this.type); + clazz = locals.getPainlessLookup().getJavaClassFromPainlessType(this.type); } catch (IllegalArgumentException exception) { throw createError(new IllegalArgumentException("Not a type [" + this.type + "].")); } @@ -84,7 +84,8 @@ void analyze(Locals locals) { } else if (expression.actual == def.class || Iterable.class.isAssignableFrom(expression.actual)) { sub = new SSubEachIterable(location, variable, expression, block); } else { - throw createError(new IllegalArgumentException("Illegal for each type [" + Definition.ClassToName(expression.actual) + "].")); + throw createError( + new IllegalArgumentException("Illegal for each type [" + PainlessLookup.ClassToName(expression.actual) + "].")); } sub.analyze(locals); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java index 1b1e6bd2ef84b..628bb1d32d59e 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java @@ -22,8 +22,8 @@ import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Constant; import org.elasticsearch.painless.Def; -import org.elasticsearch.painless.Definition; -import org.elasticsearch.painless.Definition.Method; +import org.elasticsearch.painless.lookup.PainlessLookup; +import org.elasticsearch.painless.lookup.PainlessMethod; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Locals.Parameter; @@ -93,7 +93,7 @@ public int getMaxLoopCounter() { Class rtnType = null; List parameters = new ArrayList<>(); - Method method = null; + PainlessMethod method = null; private Variable loop = null; @@ -117,9 +117,9 @@ void extractVariables(Set variables) { throw new IllegalStateException("Illegal tree structure"); } - void generateSignature(Definition definition) { + void generateSignature(PainlessLookup painlessLookup) { try { - rtnType = definition.getJavaClassFromPainlessType(rtnTypeStr); + rtnType = painlessLookup.getJavaClassFromPainlessType(rtnTypeStr); } catch (IllegalArgumentException exception) { throw createError(new IllegalArgumentException("Illegal return type [" + rtnTypeStr + "] for function [" + name + "].")); } @@ -133,9 +133,9 @@ void generateSignature(Definition definition) { for (int param = 0; param < this.paramTypeStrs.size(); ++param) { try { - Class paramType = definition.getJavaClassFromPainlessType(this.paramTypeStrs.get(param)); + Class paramType = painlessLookup.getJavaClassFromPainlessType(this.paramTypeStrs.get(param)); - paramClasses[param] = Definition.defClassToObjectClass(paramType); + paramClasses[param] = PainlessLookup.defClassToObjectClass(paramType); paramTypes.add(paramType); parameters.add(new Parameter(location, paramNameStrs.get(param), paramType)); } catch (IllegalArgumentException exception) { @@ -145,8 +145,8 @@ void generateSignature(Definition definition) { } org.objectweb.asm.commons.Method method = new org.objectweb.asm.commons.Method( - name, MethodType.methodType(Definition.defClassToObjectClass(rtnType), paramClasses).toMethodDescriptorString()); - this.method = new 
Method(name, null, null, rtnType, paramTypes, method, Modifier.STATIC | Modifier.PRIVATE, null); + name, MethodType.methodType(PainlessLookup.defClassToObjectClass(rtnType), paramClasses).toMethodDescriptorString()); + this.method = new PainlessMethod(name, null, null, rtnType, paramTypes, method, Modifier.STATIC | Modifier.PRIVATE, null); } @Override diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java index efb6db278140d..4781457a57dfa 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java @@ -21,9 +21,9 @@ import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Constant; -import org.elasticsearch.painless.Definition; -import org.elasticsearch.painless.Definition.Method; -import org.elasticsearch.painless.Definition.MethodKey; +import org.elasticsearch.painless.lookup.PainlessLookup; +import org.elasticsearch.painless.lookup.PainlessMethod; +import org.elasticsearch.painless.lookup.PainlessMethodKey; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Locals.Variable; @@ -167,20 +167,20 @@ void extractVariables(Set variables) { throw new IllegalStateException("Illegal tree structure."); } - public void analyze(Definition definition) { - Map<MethodKey, Method> methods = new HashMap<>(); + public void analyze(PainlessLookup painlessLookup) { + Map<PainlessMethodKey, PainlessMethod> methods = new HashMap<>(); for (SFunction function : functions) { - function.generateSignature(definition); + function.generateSignature(painlessLookup); - MethodKey key = new MethodKey(function.name, function.parameters.size()); + PainlessMethodKey key = new PainlessMethodKey(function.name, function.parameters.size()); if (methods.put(key, function.method) != null) { throw createError(new IllegalArgumentException("Duplicate functions with name [" + function.name + "].")); } } - analyze(Locals.newProgramScope(definition, methods.values())); + analyze(Locals.newProgramScope(painlessLookup, methods.values())); } @Override diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachArray.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachArray.java index e0c9476ba640a..5db161b8002a4 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachArray.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachArray.java @@ -20,8 +20,8 @@ package org.elasticsearch.painless.node; import org.elasticsearch.painless.AnalyzerCaster; -import org.elasticsearch.painless.Definition; -import org.elasticsearch.painless.Definition.Cast; +import org.elasticsearch.painless.lookup.PainlessLookup; +import org.elasticsearch.painless.lookup.PainlessCast; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Locals.Variable; @@ -41,7 +41,7 @@ final class SSubEachArray extends AStatement { private AExpression expression; private final SBlock block; - private Cast cast = null; + private PainlessCast cast = null; private Variable array = null; private Variable index = null; private Class indexed = null; @@ -109,6 +109,6 @@ void write(MethodWriter writer, Globals globals) { @Override public String toString() { - return
singleLineToString(Definition.ClassToName(variable.clazz), variable.name, expression, block); + return singleLineToString(PainlessLookup.ClassToName(variable.clazz), variable.name, expression, block); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachIterable.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachIterable.java index 11e0f15d7e4f8..faee2ed74a6d0 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachIterable.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachIterable.java @@ -21,11 +21,11 @@ import org.elasticsearch.painless.AnalyzerCaster; import org.elasticsearch.painless.DefBootstrap; -import org.elasticsearch.painless.Definition; -import org.elasticsearch.painless.Definition.Cast; -import org.elasticsearch.painless.Definition.Method; -import org.elasticsearch.painless.Definition.MethodKey; -import org.elasticsearch.painless.Definition.def; +import org.elasticsearch.painless.lookup.PainlessLookup; +import org.elasticsearch.painless.lookup.PainlessCast; +import org.elasticsearch.painless.lookup.PainlessMethod; +import org.elasticsearch.painless.lookup.PainlessMethodKey; +import org.elasticsearch.painless.lookup.PainlessLookup.def; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Locals.Variable; @@ -51,9 +51,9 @@ final class SSubEachIterable extends AStatement { private final SBlock block; private final Variable variable; - private Cast cast = null; + private PainlessCast cast = null; private Variable iterator = null; - private Method method = null; + private PainlessMethod method = null; SSubEachIterable(Location location, Variable variable, AExpression expression, SBlock block) { super(location); @@ -77,11 +77,12 @@ void analyze(Locals locals) { if (expression.actual == def.class) { method = null; } else { - method = locals.getDefinition().getPainlessStructFromJavaClass(expression.actual).methods.get(new MethodKey("iterator", 0)); + method = locals.getPainlessLookup(). 
+ getPainlessStructFromJavaClass(expression.actual).methods.get(new PainlessMethodKey("iterator", 0)); if (method == null) { throw createError(new IllegalArgumentException( - "Unable to create iterator for the type [" + Definition.ClassToName(expression.actual) + "].")); + "Unable to create iterator for the type [" + PainlessLookup.ClassToName(expression.actual) + "].")); } } @@ -132,6 +133,6 @@ void write(MethodWriter writer, Globals globals) { @Override public String toString() { - return singleLineToString(Definition.ClassToName(variable.clazz), variable.name, expression, block); + return singleLineToString(PainlessLookup.ClassToName(variable.clazz), variable.name, expression, block); } } diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/AnalyzerCasterTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/AnalyzerCasterTests.java index b369d8beca764..a702490fff9d4 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/AnalyzerCasterTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/AnalyzerCasterTests.java @@ -19,7 +19,7 @@ package org.elasticsearch.painless; -import org.elasticsearch.painless.Definition.Cast; +import org.elasticsearch.painless.lookup.PainlessCast; import org.elasticsearch.test.ESTestCase; @@ -35,7 +35,7 @@ private static void assertCast(Class actual, Class expected, boolean mustB return; } - Cast cast = AnalyzerCaster.getLegalCast(location, actual, expected, true, false); + PainlessCast cast = AnalyzerCaster.getLegalCast(location, actual, expected, true, false); assertEquals(actual, cast.from); assertEquals(expected, cast.to); diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/BaseClassTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BaseClassTests.java index 59cafa96ddcb9..78e5814e963f7 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/BaseClassTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BaseClassTests.java @@ -23,6 +23,7 @@ import java.util.HashMap; import java.util.Map; +import org.elasticsearch.painless.lookup.PainlessLookup; import org.elasticsearch.painless.spi.Whitelist; import static java.util.Collections.emptyMap; @@ -36,7 +37,7 @@ */ public class BaseClassTests extends ScriptTestCase { - private final Definition definition = new Definition(Whitelist.BASE_WHITELISTS); + private final PainlessLookup painlessLookup = new PainlessLookup(Whitelist.BASE_WHITELISTS); public abstract static class Gets { @@ -67,7 +68,7 @@ public Map getTestMap() { } public void testGets() { - Compiler compiler = new Compiler(Gets.class, definition); + Compiler compiler = new Compiler(Gets.class, painlessLookup); Map map = new HashMap<>(); map.put("s", 1); @@ -85,7 +86,7 @@ public abstract static class NoArgs { public abstract Object execute(); } public void testNoArgs() { - Compiler compiler = new Compiler(NoArgs.class, definition); + Compiler compiler = new Compiler(NoArgs.class, painlessLookup); assertEquals(1, ((NoArgs)scriptEngine.compile(compiler, null, "1", emptyMap())).execute()); assertEquals("foo", ((NoArgs)scriptEngine.compile(compiler, null, "'foo'", emptyMap())).execute()); @@ -109,13 +110,13 @@ public abstract static class OneArg { public abstract Object execute(Object arg); } public void testOneArg() { - Compiler compiler = new Compiler(OneArg.class, definition); + Compiler compiler = new Compiler(OneArg.class, painlessLookup); Object rando = randomInt(); 
assertEquals(rando, ((OneArg)scriptEngine.compile(compiler, null, "arg", emptyMap())).execute(rando)); rando = randomAlphaOfLength(5); assertEquals(rando, ((OneArg)scriptEngine.compile(compiler, null, "arg", emptyMap())).execute(rando)); - Compiler noargs = new Compiler(NoArgs.class, definition); + Compiler noargs = new Compiler(NoArgs.class, painlessLookup); Exception e = expectScriptThrows(IllegalArgumentException.class, () -> scriptEngine.compile(noargs, null, "doc", emptyMap())); assertEquals("Variable [doc] is not defined.", e.getMessage()); @@ -130,7 +131,7 @@ public abstract static class ArrayArg { public abstract Object execute(String[] arg); } public void testArrayArg() { - Compiler compiler = new Compiler(ArrayArg.class, definition); + Compiler compiler = new Compiler(ArrayArg.class, painlessLookup); String rando = randomAlphaOfLength(5); assertEquals(rando, ((ArrayArg)scriptEngine.compile(compiler, null, "arg[0]", emptyMap())).execute(new String[] {rando, "foo"})); } @@ -140,7 +141,7 @@ public abstract static class PrimitiveArrayArg { public abstract Object execute(int[] arg); } public void testPrimitiveArrayArg() { - Compiler compiler = new Compiler(PrimitiveArrayArg.class, definition); + Compiler compiler = new Compiler(PrimitiveArrayArg.class, painlessLookup); int rando = randomInt(); assertEquals(rando, ((PrimitiveArrayArg)scriptEngine.compile(compiler, null, "arg[0]", emptyMap())).execute(new int[] {rando, 10})); } @@ -150,7 +151,7 @@ public abstract static class DefArrayArg { public abstract Object execute(Object[] arg); } public void testDefArrayArg() { - Compiler compiler = new Compiler(DefArrayArg.class, definition); + Compiler compiler = new Compiler(DefArrayArg.class, painlessLookup); Object rando = randomInt(); assertEquals(rando, ((DefArrayArg)scriptEngine.compile(compiler, null, "arg[0]", emptyMap())).execute(new Object[] {rando, 10})); rando = randomAlphaOfLength(5); @@ -168,7 +169,7 @@ public abstract static class ManyArgs { public abstract boolean needsD(); } public void testManyArgs() { - Compiler compiler = new Compiler(ManyArgs.class, definition); + Compiler compiler = new Compiler(ManyArgs.class, painlessLookup); int rando = randomInt(); assertEquals(rando, ((ManyArgs)scriptEngine.compile(compiler, null, "a", emptyMap())).execute(rando, 0, 0, 0)); assertEquals(10, ((ManyArgs)scriptEngine.compile(compiler, null, "a + b + c + d", emptyMap())).execute(1, 2, 3, 4)); @@ -196,7 +197,7 @@ public abstract static class VarargTest { public abstract Object execute(String... 
arg); } public void testVararg() { - Compiler compiler = new Compiler(VarargTest.class, definition); + Compiler compiler = new Compiler(VarargTest.class, painlessLookup); assertEquals("foo bar baz", ((VarargTest)scriptEngine.compile(compiler, null, "String.join(' ', Arrays.asList(arg))", emptyMap())) .execute("foo", "bar", "baz")); } @@ -212,7 +213,7 @@ public Object executeWithASingleOne(int a, int b, int c) { } } public void testDefaultMethods() { - Compiler compiler = new Compiler(DefaultMethods.class, definition); + Compiler compiler = new Compiler(DefaultMethods.class, painlessLookup); int rando = randomInt(); assertEquals(rando, ((DefaultMethods)scriptEngine.compile(compiler, null, "a", emptyMap())).execute(rando, 0, 0, 0)); assertEquals(rando, ((DefaultMethods)scriptEngine.compile(compiler, null, "a", emptyMap())).executeWithASingleOne(rando, 0, 0)); @@ -226,7 +227,7 @@ public abstract static class ReturnsVoid { public abstract void execute(Map map); } public void testReturnsVoid() { - Compiler compiler = new Compiler(ReturnsVoid.class, definition); + Compiler compiler = new Compiler(ReturnsVoid.class, painlessLookup); Map map = new HashMap<>(); ((ReturnsVoid)scriptEngine.compile(compiler, null, "map.a = 'foo'", emptyMap())).execute(map); assertEquals(singletonMap("a", "foo"), map); @@ -245,7 +246,7 @@ public abstract static class ReturnsPrimitiveBoolean { public abstract boolean execute(); } public void testReturnsPrimitiveBoolean() { - Compiler compiler = new Compiler(ReturnsPrimitiveBoolean.class, definition); + Compiler compiler = new Compiler(ReturnsPrimitiveBoolean.class, painlessLookup); assertEquals(true, ((ReturnsPrimitiveBoolean)scriptEngine.compile(compiler, null, "true", emptyMap())).execute()); assertEquals(false, ((ReturnsPrimitiveBoolean)scriptEngine.compile(compiler, null, "false", emptyMap())).execute()); @@ -287,7 +288,7 @@ public abstract static class ReturnsPrimitiveInt { public abstract int execute(); } public void testReturnsPrimitiveInt() { - Compiler compiler = new Compiler(ReturnsPrimitiveInt.class, definition); + Compiler compiler = new Compiler(ReturnsPrimitiveInt.class, painlessLookup); assertEquals(1, ((ReturnsPrimitiveInt)scriptEngine.compile(compiler, null, "1", emptyMap())).execute()); assertEquals(1, ((ReturnsPrimitiveInt)scriptEngine.compile(compiler, null, "(int) 1L", emptyMap())).execute()); @@ -329,7 +330,7 @@ public abstract static class ReturnsPrimitiveFloat { public abstract float execute(); } public void testReturnsPrimitiveFloat() { - Compiler compiler = new Compiler(ReturnsPrimitiveFloat.class, definition); + Compiler compiler = new Compiler(ReturnsPrimitiveFloat.class, painlessLookup); assertEquals(1.1f, ((ReturnsPrimitiveFloat)scriptEngine.compile(compiler, null, "1.1f", emptyMap())).execute(), 0); assertEquals(1.1f, ((ReturnsPrimitiveFloat)scriptEngine.compile(compiler, null, "(float) 1.1d", emptyMap())).execute(), 0); @@ -360,7 +361,7 @@ public abstract static class ReturnsPrimitiveDouble { public abstract double execute(); } public void testReturnsPrimitiveDouble() { - Compiler compiler = new Compiler(ReturnsPrimitiveDouble.class, definition); + Compiler compiler = new Compiler(ReturnsPrimitiveDouble.class, painlessLookup); assertEquals(1.0, ((ReturnsPrimitiveDouble)scriptEngine.compile(compiler, null, "1", emptyMap())).execute(), 0); assertEquals(1.0, ((ReturnsPrimitiveDouble)scriptEngine.compile(compiler, null, "1L", emptyMap())).execute(), 0); @@ -394,7 +395,7 @@ public abstract static class NoArgumentsConstant { public abstract 
Object execute(String foo); } public void testNoArgumentsConstant() { - Compiler compiler = new Compiler(NoArgumentsConstant.class, definition); + Compiler compiler = new Compiler(NoArgumentsConstant.class, painlessLookup); Exception e = expectScriptThrows(IllegalArgumentException.class, false, () -> scriptEngine.compile(compiler, null, "1", emptyMap())); assertThat(e.getMessage(), startsWith( @@ -407,7 +408,7 @@ public abstract static class WrongArgumentsConstant { public abstract Object execute(String foo); } public void testWrongArgumentsConstant() { - Compiler compiler = new Compiler(WrongArgumentsConstant.class, definition); + Compiler compiler = new Compiler(WrongArgumentsConstant.class, painlessLookup); Exception e = expectScriptThrows(IllegalArgumentException.class, false, () -> scriptEngine.compile(compiler, null, "1", emptyMap())); assertThat(e.getMessage(), startsWith( @@ -420,7 +421,7 @@ public abstract static class WrongLengthOfArgumentConstant { public abstract Object execute(String foo); } public void testWrongLengthOfArgumentConstant() { - Compiler compiler = new Compiler(WrongLengthOfArgumentConstant.class, definition); + Compiler compiler = new Compiler(WrongLengthOfArgumentConstant.class, painlessLookup); Exception e = expectScriptThrows(IllegalArgumentException.class, false, () -> scriptEngine.compile(compiler, null, "1", emptyMap())); assertThat(e.getMessage(), startsWith("[" + WrongLengthOfArgumentConstant.class.getName() + "#ARGUMENTS] has length [2] but [" @@ -432,7 +433,7 @@ public abstract static class UnknownArgType { public abstract Object execute(UnknownArgType foo); } public void testUnknownArgType() { - Compiler compiler = new Compiler(UnknownArgType.class, definition); + Compiler compiler = new Compiler(UnknownArgType.class, painlessLookup); Exception e = expectScriptThrows(IllegalArgumentException.class, false, () -> scriptEngine.compile(compiler, null, "1", emptyMap())); assertEquals("[foo] is of unknown type [" + UnknownArgType.class.getName() + ". Painless interfaces can only accept arguments " @@ -444,7 +445,7 @@ public abstract static class UnknownReturnType { public abstract UnknownReturnType execute(String foo); } public void testUnknownReturnType() { - Compiler compiler = new Compiler(UnknownReturnType.class, definition); + Compiler compiler = new Compiler(UnknownReturnType.class, painlessLookup); Exception e = expectScriptThrows(IllegalArgumentException.class, false, () -> scriptEngine.compile(compiler, null, "1", emptyMap())); assertEquals("Painless can only implement execute methods returning a whitelisted type but [" + UnknownReturnType.class.getName() @@ -456,7 +457,7 @@ public abstract static class UnknownArgTypeInArray { public abstract Object execute(UnknownArgTypeInArray[] foo); } public void testUnknownArgTypeInArray() { - Compiler compiler = new Compiler(UnknownArgTypeInArray.class, definition); + Compiler compiler = new Compiler(UnknownArgTypeInArray.class, painlessLookup); Exception e = expectScriptThrows(IllegalArgumentException.class, false, () -> scriptEngine.compile(compiler, null, "1", emptyMap())); assertEquals("[foo] is of unknown type [" + UnknownArgTypeInArray.class.getName() + ". 
Painless interfaces can only accept " @@ -468,7 +469,7 @@ public abstract static class TwoExecuteMethods { public abstract Object execute(boolean foo); } public void testTwoExecuteMethods() { - Compiler compiler = new Compiler(TwoExecuteMethods.class, definition); + Compiler compiler = new Compiler(TwoExecuteMethods.class, painlessLookup); Exception e = expectScriptThrows(IllegalArgumentException.class, false, () -> scriptEngine.compile(compiler, null, "null", emptyMap())); assertEquals("Painless can only implement interfaces that have a single method named [execute] but [" diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/DebugTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/DebugTests.java index 279438e74a7c3..987eef31eeeaf 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/DebugTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/DebugTests.java @@ -22,6 +22,7 @@ import org.elasticsearch.ElasticsearchException; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.painless.lookup.PainlessLookup; import org.elasticsearch.painless.spi.Whitelist; import org.elasticsearch.script.ScriptException; @@ -35,7 +36,7 @@ import static org.hamcrest.Matchers.not; public class DebugTests extends ScriptTestCase { - private final Definition definition = new Definition(Whitelist.BASE_WHITELISTS); + private final PainlessLookup painlessLookup = new PainlessLookup(Whitelist.BASE_WHITELISTS); public void testExplain() { // Debug.explain can explain an object @@ -43,16 +44,16 @@ public void testExplain() { PainlessExplainError e = expectScriptThrows(PainlessExplainError.class, () -> exec( "Debug.explain(params.a)", singletonMap("a", dummy), true)); assertSame(dummy, e.getObjectToExplain()); - assertThat(e.getHeaders(definition), hasEntry("es.to_string", singletonList(dummy.toString()))); - assertThat(e.getHeaders(definition), hasEntry("es.java_class", singletonList("java.lang.Object"))); - assertThat(e.getHeaders(definition), hasEntry("es.painless_class", singletonList("java.lang.Object"))); + assertThat(e.getHeaders(painlessLookup), hasEntry("es.to_string", singletonList(dummy.toString()))); + assertThat(e.getHeaders(painlessLookup), hasEntry("es.java_class", singletonList("java.lang.Object"))); + assertThat(e.getHeaders(painlessLookup), hasEntry("es.painless_class", singletonList("java.lang.Object"))); // Null should be ok e = expectScriptThrows(PainlessExplainError.class, () -> exec("Debug.explain(null)")); assertNull(e.getObjectToExplain()); - assertThat(e.getHeaders(definition), hasEntry("es.to_string", singletonList("null"))); - assertThat(e.getHeaders(definition), not(hasKey("es.java_class"))); - assertThat(e.getHeaders(definition), not(hasKey("es.painless_class"))); + assertThat(e.getHeaders(painlessLookup), hasEntry("es.to_string", singletonList("null"))); + assertThat(e.getHeaders(painlessLookup), not(hasKey("es.java_class"))); + assertThat(e.getHeaders(painlessLookup), not(hasKey("es.painless_class"))); // You can't catch the explain exception e = expectScriptThrows(PainlessExplainError.class, () -> exec( diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/Debugger.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/Debugger.java index d17b9e55ab0c1..0d5e2748b7b32 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/Debugger.java +++ 
b/modules/lang-painless/src/test/java/org/elasticsearch/painless/Debugger.java @@ -19,6 +19,7 @@ package org.elasticsearch.painless; +import org.elasticsearch.painless.lookup.PainlessLookup; import org.elasticsearch.painless.spi.Whitelist; import org.objectweb.asm.util.Textifier; @@ -39,7 +40,7 @@ static String toString(Class iface, String source, CompilerSettings settings) PrintWriter outputWriter = new PrintWriter(output); Textifier textifier = new Textifier(); try { - new Compiler(iface, new Definition(Whitelist.BASE_WHITELISTS)) + new Compiler(iface, new PainlessLookup(Whitelist.BASE_WHITELISTS)) .compile("", source, settings, textifier); } catch (RuntimeException e) { textifier.print(outputWriter); diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/DefBootstrapTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/DefBootstrapTests.java index 52528c358fc82..ab4844dd58bd9 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/DefBootstrapTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/DefBootstrapTests.java @@ -27,15 +27,16 @@ import java.util.Collections; import java.util.HashMap; +import org.elasticsearch.painless.lookup.PainlessLookup; import org.elasticsearch.painless.spi.Whitelist; import org.elasticsearch.test.ESTestCase; public class DefBootstrapTests extends ESTestCase { - private final Definition definition = new Definition(Whitelist.BASE_WHITELISTS); + private final PainlessLookup painlessLookup = new PainlessLookup(Whitelist.BASE_WHITELISTS); /** calls toString() on integers, twice */ public void testOneType() throws Throwable { - CallSite site = DefBootstrap.bootstrap(definition, + CallSite site = DefBootstrap.bootstrap(painlessLookup, MethodHandles.publicLookup(), "toString", MethodType.methodType(String.class, Object.class), @@ -55,7 +56,7 @@ public void testOneType() throws Throwable { } public void testTwoTypes() throws Throwable { - CallSite site = DefBootstrap.bootstrap(definition, + CallSite site = DefBootstrap.bootstrap(painlessLookup, MethodHandles.publicLookup(), "toString", MethodType.methodType(String.class, Object.class), @@ -80,7 +81,7 @@ public void testTwoTypes() throws Throwable { public void testTooManyTypes() throws Throwable { // if this changes, test must be rewritten assertEquals(5, DefBootstrap.PIC.MAX_DEPTH); - CallSite site = DefBootstrap.bootstrap(definition, + CallSite site = DefBootstrap.bootstrap(painlessLookup, MethodHandles.publicLookup(), "toString", MethodType.methodType(String.class, Object.class), @@ -106,7 +107,7 @@ public void testTooManyTypes() throws Throwable { /** test that we revert to the megamorphic classvalue cache and that it works as expected */ public void testMegamorphic() throws Throwable { - DefBootstrap.PIC site = (DefBootstrap.PIC) DefBootstrap.bootstrap(definition, + DefBootstrap.PIC site = (DefBootstrap.PIC) DefBootstrap.bootstrap(painlessLookup, MethodHandles.publicLookup(), "size", MethodType.methodType(int.class, Object.class), @@ -138,7 +139,7 @@ public void testMegamorphic() throws Throwable { // test operators with null guards public void testNullGuardAdd() throws Throwable { - DefBootstrap.MIC site = (DefBootstrap.MIC) DefBootstrap.bootstrap(definition, + DefBootstrap.MIC site = (DefBootstrap.MIC) DefBootstrap.bootstrap(painlessLookup, MethodHandles.publicLookup(), "add", MethodType.methodType(Object.class, Object.class, Object.class), @@ -150,7 +151,7 @@ public void testNullGuardAdd() throws Throwable { } public 
void testNullGuardAddWhenCached() throws Throwable { - DefBootstrap.MIC site = (DefBootstrap.MIC) DefBootstrap.bootstrap(definition, + DefBootstrap.MIC site = (DefBootstrap.MIC) DefBootstrap.bootstrap(painlessLookup, MethodHandles.publicLookup(), "add", MethodType.methodType(Object.class, Object.class, Object.class), @@ -163,7 +164,7 @@ public void testNullGuardAddWhenCached() throws Throwable { } public void testNullGuardEq() throws Throwable { - DefBootstrap.MIC site = (DefBootstrap.MIC) DefBootstrap.bootstrap(definition, + DefBootstrap.MIC site = (DefBootstrap.MIC) DefBootstrap.bootstrap(painlessLookup, MethodHandles.publicLookup(), "eq", MethodType.methodType(boolean.class, Object.class, Object.class), @@ -176,7 +177,7 @@ public void testNullGuardEq() throws Throwable { } public void testNullGuardEqWhenCached() throws Throwable { - DefBootstrap.MIC site = (DefBootstrap.MIC) DefBootstrap.bootstrap(definition, + DefBootstrap.MIC site = (DefBootstrap.MIC) DefBootstrap.bootstrap(painlessLookup, MethodHandles.publicLookup(), "eq", MethodType.methodType(boolean.class, Object.class, Object.class), @@ -194,7 +195,7 @@ public void testNullGuardEqWhenCached() throws Throwable { // and can be disabled in some circumstances. public void testNoNullGuardAdd() throws Throwable { - DefBootstrap.MIC site = (DefBootstrap.MIC) DefBootstrap.bootstrap(definition, + DefBootstrap.MIC site = (DefBootstrap.MIC) DefBootstrap.bootstrap(painlessLookup, MethodHandles.publicLookup(), "add", MethodType.methodType(Object.class, int.class, Object.class), @@ -208,7 +209,7 @@ public void testNoNullGuardAdd() throws Throwable { } public void testNoNullGuardAddWhenCached() throws Throwable { - DefBootstrap.MIC site = (DefBootstrap.MIC) DefBootstrap.bootstrap(definition, + DefBootstrap.MIC site = (DefBootstrap.MIC) DefBootstrap.bootstrap(painlessLookup, MethodHandles.publicLookup(), "add", MethodType.methodType(Object.class, int.class, Object.class), diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java index 309b6be97f20b..5177d64cbdb06 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java @@ -23,9 +23,10 @@ import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.logging.ESLoggerFactory; -import org.elasticsearch.painless.Definition.Field; -import org.elasticsearch.painless.Definition.Method; -import org.elasticsearch.painless.Definition.Struct; +import org.elasticsearch.painless.lookup.PainlessLookup; +import org.elasticsearch.painless.lookup.PainlessField; +import org.elasticsearch.painless.lookup.PainlessMethod; +import org.elasticsearch.painless.lookup.PainlessClass; import java.io.IOException; import java.io.PrintStream; import java.lang.reflect.Modifier; @@ -44,15 +45,15 @@ import static org.elasticsearch.painless.spi.Whitelist.BASE_WHITELISTS; /** - * Generates an API reference from the method and type whitelists in {@link Definition}. + * Generates an API reference from the method and type whitelists in {@link PainlessLookup}. 
*/ public class PainlessDocGenerator { - private static final Definition definition = new Definition(BASE_WHITELISTS); + private static final PainlessLookup PAINLESS_LOOKUP = new PainlessLookup(BASE_WHITELISTS); private static final Logger logger = ESLoggerFactory.getLogger(PainlessDocGenerator.class); - private static final Comparator FIELD_NAME = comparing(f -> f.name); - private static final Comparator METHOD_NAME = comparing(m -> m.name); - private static final Comparator NUMBER_OF_ARGS = comparing(m -> m.arguments.size()); + private static final Comparator FIELD_NAME = comparing(f -> f.name); + private static final Comparator METHOD_NAME = comparing(m -> m.name); + private static final Comparator NUMBER_OF_ARGS = comparing(m -> m.arguments.size()); public static void main(String[] args) throws IOException { Path apiRootPath = PathUtils.get(args[0]); @@ -67,8 +68,8 @@ public static void main(String[] args) throws IOException { Files.newOutputStream(indexPath, StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE), false, StandardCharsets.UTF_8.name())) { emitGeneratedWarning(indexStream); - List structs = definition.getStructs().stream().sorted(comparing(t -> t.name)).collect(toList()); - for (Struct struct : structs) { + List structs = PAINLESS_LOOKUP.getStructs().stream().sorted(comparing(t -> t.name)).collect(toList()); + for (PainlessClass struct : structs) { if (struct.clazz.isPrimitive()) { // Primitives don't have methods to reference continue; @@ -93,13 +94,13 @@ public static void main(String[] args) throws IOException { typeStream.print(struct.name); typeStream.println("++::"); - Consumer documentField = field -> PainlessDocGenerator.documentField(typeStream, field); - Consumer documentMethod = method -> PainlessDocGenerator.documentMethod(typeStream, method); + Consumer documentField = field -> PainlessDocGenerator.documentField(typeStream, field); + Consumer documentMethod = method -> PainlessDocGenerator.documentMethod(typeStream, method); struct.staticMembers.values().stream().sorted(FIELD_NAME).forEach(documentField); struct.members.values().stream().sorted(FIELD_NAME).forEach(documentField); struct.staticMethods.values().stream().sorted(METHOD_NAME.thenComparing(NUMBER_OF_ARGS)).forEach(documentMethod); struct.constructors.values().stream().sorted(NUMBER_OF_ARGS).forEach(documentMethod); - Map inherited = new TreeMap<>(); + Map inherited = new TreeMap<>(); struct.methods.values().stream().sorted(METHOD_NAME.thenComparing(NUMBER_OF_ARGS)).forEach(method -> { if (method.owner == struct) { documentMethod(typeStream, method); @@ -111,7 +112,7 @@ public static void main(String[] args) throws IOException { if (false == inherited.isEmpty()) { typeStream.print("* Inherits methods from "); boolean first = true; - for (Struct inheritsFrom : inherited.values()) { + for (PainlessClass inheritsFrom : inherited.values()) { if (first) { first = false; } else { @@ -129,7 +130,7 @@ public static void main(String[] args) throws IOException { logger.info("Done writing [index.asciidoc]"); } - private static void documentField(PrintStream stream, Field field) { + private static void documentField(PrintStream stream, PainlessField field) { stream.print("** [["); emitAnchor(stream, field); stream.print("]]"); @@ -159,7 +160,7 @@ private static void documentField(PrintStream stream, Field field) { /** * Document a method. 
*/ - private static void documentMethod(PrintStream stream, Method method) { + private static void documentMethod(PrintStream stream, PainlessMethod method) { stream.print("* ++[["); emitAnchor(stream, method); stream.print("]]"); @@ -201,17 +202,17 @@ private static void documentMethod(PrintStream stream, Method method) { } /** - * Anchor text for a {@link Struct}. + * Anchor text for a {@link PainlessClass}. */ - private static void emitAnchor(PrintStream stream, Struct struct) { + private static void emitAnchor(PrintStream stream, PainlessClass struct) { stream.print("painless-api-reference-"); stream.print(struct.name.replace('.', '-')); } /** - * Anchor text for a {@link Method}. + * Anchor text for a {@link PainlessMethod}. */ - private static void emitAnchor(PrintStream stream, Method method) { + private static void emitAnchor(PrintStream stream, PainlessMethod method) { emitAnchor(stream, method.owner); stream.print('-'); stream.print(methodName(method)); @@ -220,15 +221,15 @@ private static void emitAnchor(PrintStream stream, Method method) { } /** - * Anchor text for a {@link Field}. + * Anchor text for a {@link PainlessField}. */ - private static void emitAnchor(PrintStream stream, Field field) { + private static void emitAnchor(PrintStream stream, PainlessField field) { emitAnchor(stream, field.owner); stream.print('-'); stream.print(field.name); } - private static String methodName(Method method) { + private static String methodName(PainlessMethod method) { return method.name.equals("") ? method.owner.name : method.name; } @@ -237,17 +238,17 @@ private static String methodName(Method method) { an internal link with the text. */ private static void emitType(PrintStream stream, Class clazz) { - emitStruct(stream, definition.getPainlessStructFromJavaClass(clazz)); + emitStruct(stream, PAINLESS_LOOKUP.getPainlessStructFromJavaClass(clazz)); while ((clazz = clazz.getComponentType()) != null) { stream.print("[]"); } } /** - * Emit a {@link Struct}. If the {@linkplain Struct} is primitive or def this just emits the name of the struct. Otherwise this emits - * an internal link with the name. + * Emit a {@link PainlessClass}. If the {@linkplain PainlessClass} is primitive or def this just emits the name of the struct. + * Otherwise this emits an internal link with the name. */ - private static void emitStruct(PrintStream stream, Struct struct) { + private static void emitStruct(PrintStream stream, PainlessClass struct) { if (false == struct.clazz.isPrimitive() && false == struct.name.equals("def")) { stream.print("<<"); emitAnchor(stream, struct); @@ -260,11 +261,11 @@ private static void emitStruct(PrintStream stream, Struct struct) { } /** - * Emit an external link to Javadoc for a {@link Method}. + * Emit an external link to Javadoc for a {@link PainlessMethod}. * * @param root name of the root uri variable */ - private static void emitJavadocLink(PrintStream stream, String root, Method method) { + private static void emitJavadocLink(PrintStream stream, String root, PainlessMethod method) { stream.print("link:{"); stream.print(root); stream.print("-javadoc}/"); @@ -292,11 +293,11 @@ private static void emitJavadocLink(PrintStream stream, String root, Method meth } /** - * Emit an external link to Javadoc for a {@link Field}. + * Emit an external link to Javadoc for a {@link PainlessField}. 
* * @param root name of the root uri variable */ - private static void emitJavadocLink(PrintStream stream, String root, Field field) { + private static void emitJavadocLink(PrintStream stream, String root, PainlessField field) { stream.print("link:{"); stream.print(root); stream.print("-javadoc}/"); @@ -306,9 +307,9 @@ private static void emitJavadocLink(PrintStream stream, String root, Field field } /** - * Pick the javadoc root for a {@link Method}. + * Pick the javadoc root for a {@link PainlessMethod}. */ - private static String javadocRoot(Method method) { + private static String javadocRoot(PainlessMethod method) { if (method.augmentation != null) { return "painless"; } @@ -316,16 +317,16 @@ private static String javadocRoot(Method method) { } /** - * Pick the javadoc root for a {@link Field}. + * Pick the javadoc root for a {@link PainlessField}. */ - private static String javadocRoot(Field field) { + private static String javadocRoot(PainlessField field) { return javadocRoot(field.owner); } /** - * Pick the javadoc root for a {@link Struct}. + * Pick the javadoc root for a {@link PainlessClass}. */ - private static String javadocRoot(Struct struct) { + private static String javadocRoot(PainlessClass struct) { String classPackage = struct.clazz.getPackage().getName(); if (classPackage.startsWith("java")) { return "java8"; diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptTestCase.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptTestCase.java index ea1d2275b3e8d..1a4770e560a7e 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptTestCase.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptTestCase.java @@ -24,6 +24,7 @@ import org.elasticsearch.common.lucene.ScorerAware; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.painless.antlr.Walker; +import org.elasticsearch.painless.lookup.PainlessLookup; import org.elasticsearch.painless.spi.Whitelist; import org.elasticsearch.script.ExecutableScript; import org.elasticsearch.script.ScriptContext; @@ -90,12 +91,12 @@ public Object exec(String script, Map vars, boolean picky) { public Object exec(String script, Map vars, Map compileParams, Scorer scorer, boolean picky) { // test for ambiguity errors before running the actual script if picky is true if (picky) { - Definition definition = new Definition(Whitelist.BASE_WHITELISTS); - ScriptClassInfo scriptClassInfo = new ScriptClassInfo(definition, GenericElasticsearchScript.class); + PainlessLookup painlessLookup = new PainlessLookup(Whitelist.BASE_WHITELISTS); + ScriptClassInfo scriptClassInfo = new ScriptClassInfo(painlessLookup, GenericElasticsearchScript.class); CompilerSettings pickySettings = new CompilerSettings(); pickySettings.setPicky(true); pickySettings.setRegexesEnabled(CompilerSettings.REGEX_ENABLED.get(scriptEngineSettings())); - Walker.buildPainlessTree(scriptClassInfo, new MainMethodReserved(), getTestName(), script, pickySettings, definition, null); + Walker.buildPainlessTree(scriptClassInfo, new MainMethodReserved(), getTestName(), script, pickySettings, painlessLookup, null); } // test actual script execution ExecutableScript.Factory factory = scriptEngine.compile(null, script, ExecutableScript.CONTEXT, compileParams); diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/node/NodeToStringTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/node/NodeToStringTests.java index 
fd8190aa2c2eb..3e9f724743faa 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/node/NodeToStringTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/node/NodeToStringTests.java @@ -20,12 +20,12 @@ package org.elasticsearch.painless.node; import org.elasticsearch.painless.CompilerSettings; -import org.elasticsearch.painless.Definition; -import org.elasticsearch.painless.Definition.Cast; -import org.elasticsearch.painless.Definition.Field; -import org.elasticsearch.painless.Definition.Method; -import org.elasticsearch.painless.Definition.MethodKey; -import org.elasticsearch.painless.Definition.Struct; +import org.elasticsearch.painless.lookup.PainlessLookup; +import org.elasticsearch.painless.lookup.PainlessCast; +import org.elasticsearch.painless.lookup.PainlessField; +import org.elasticsearch.painless.lookup.PainlessMethod; +import org.elasticsearch.painless.lookup.PainlessMethodKey; +import org.elasticsearch.painless.lookup.PainlessClass; import org.elasticsearch.painless.FeatureTest; import org.elasticsearch.painless.GenericElasticsearchScript; import org.elasticsearch.painless.Locals.Variable; @@ -48,7 +48,7 @@ * Tests {@link Object#toString} implementations on all extensions of {@link ANode}. */ public class NodeToStringTests extends ESTestCase { - private final Definition definition = new Definition(Whitelist.BASE_WHITELISTS); + private final PainlessLookup painlessLookup = new PainlessLookup(Whitelist.BASE_WHITELISTS); public void testEAssignment() { assertToString( @@ -161,12 +161,12 @@ public void testECapturingFunctionRef() { public void testECast() { Location l = new Location(getTestName(), 0); AExpression child = new EConstant(l, "test"); - Cast cast = Cast.standard(String.class, Integer.class, true); + PainlessCast cast = PainlessCast.standard(String.class, Integer.class, true); assertEquals("(ECast java.lang.Integer (EConstant String 'test'))", new ECast(l, child, cast).toString()); l = new Location(getTestName(), 1); child = new EBinary(l, Operation.ADD, new EConstant(l, "test"), new EConstant(l, 12)); - cast = Cast.standard(Integer.class, Boolean.class, true); + cast = PainlessCast.standard(Integer.class, Boolean.class, true); assertEquals("(ECast java.lang.Boolean (EBinary (EConstant String 'test') + (EConstant Integer 12)))", new ECast(l, child, cast).toString()); } @@ -403,15 +403,15 @@ public void testPSubBrace() { public void testPSubCallInvoke() { Location l = new Location(getTestName(), 0); - Struct c = definition.getPainlessStructFromJavaClass(Integer.class); - Method m = c.methods.get(new MethodKey("toString", 0)); + PainlessClass c = painlessLookup.getPainlessStructFromJavaClass(Integer.class); + PainlessMethod m = c.methods.get(new PainlessMethodKey("toString", 0)); PSubCallInvoke node = new PSubCallInvoke(l, m, null, emptyList()); node.prefix = new EVariable(l, "a"); assertEquals("(PSubCallInvoke (EVariable a) toString)", node.toString()); assertEquals("(PSubNullSafeCallInvoke (PSubCallInvoke (EVariable a) toString))", new PSubNullSafeCallInvoke(l, node).toString()); l = new Location(getTestName(), 1); - m = c.methods.get(new MethodKey("equals", 1)); + m = c.methods.get(new PainlessMethodKey("equals", 1)); node = new PSubCallInvoke(l, m, null, singletonList(new EVariable(l, "b"))); node.prefix = new EVariable(l, "a"); assertEquals("(PSubCallInvoke (EVariable a) equals (Args (EVariable b)))", node.toString()); @@ -458,8 +458,8 @@ public void testPSubDefField() { public void testPSubField() { Location l = new 
Location(getTestName(), 0); - Struct s = definition.getPainlessStructFromJavaClass(Boolean.class); - Field f = s.staticMembers.get("TRUE"); + PainlessClass s = painlessLookup.getPainlessStructFromJavaClass(Boolean.class); + PainlessField f = s.staticMembers.get("TRUE"); PSubField node = new PSubField(l, f); node.prefix = new EStatic(l, "Boolean"); assertEquals("(PSubField (EStatic Boolean) TRUE)", node.toString()); @@ -468,7 +468,7 @@ public void testPSubField() { public void testPSubListShortcut() { Location l = new Location(getTestName(), 0); - Struct s = definition.getPainlessStructFromJavaClass(List.class); + PainlessClass s = painlessLookup.getPainlessStructFromJavaClass(List.class); PSubListShortcut node = new PSubListShortcut(l, s, new EConstant(l, 1)); node.prefix = new EVariable(l, "a"); assertEquals("(PSubListShortcut (EVariable a) (EConstant Integer 1))", node.toString()); @@ -476,7 +476,7 @@ public void testPSubListShortcut() { new PSubNullSafeCallInvoke(l, node).toString()); l = new Location(getTestName(), 0); - s = definition.getPainlessStructFromJavaClass(List.class); + s = painlessLookup.getPainlessStructFromJavaClass(List.class); node = new PSubListShortcut(l, s, new EBinary(l, Operation.ADD, new EConstant(l, 1), new EConstant(l, 4))); node.prefix = new EVariable(l, "a"); assertEquals("(PSubListShortcut (EVariable a) (EBinary (EConstant Integer 1) + (EConstant Integer 4)))", node.toString()); @@ -484,7 +484,7 @@ public void testPSubListShortcut() { public void testPSubMapShortcut() { Location l = new Location(getTestName(), 0); - Struct s = definition.getPainlessStructFromJavaClass(Map.class); + PainlessClass s = painlessLookup.getPainlessStructFromJavaClass(Map.class); PSubMapShortcut node = new PSubMapShortcut(l, s, new EConstant(l, "cat")); node.prefix = new EVariable(l, "a"); assertEquals("(PSubMapShortcut (EVariable a) (EConstant String 'cat'))", node.toString()); @@ -492,7 +492,7 @@ public void testPSubMapShortcut() { new PSubNullSafeCallInvoke(l, node).toString()); l = new Location(getTestName(), 1); - s = definition.getPainlessStructFromJavaClass(Map.class); + s = painlessLookup.getPainlessStructFromJavaClass(Map.class); node = new PSubMapShortcut(l, s, new EBinary(l, Operation.ADD, new EConstant(l, 1), new EConstant(l, 4))); node.prefix = new EVariable(l, "a"); assertEquals("(PSubMapShortcut (EVariable a) (EBinary (EConstant Integer 1) + (EConstant Integer 4)))", node.toString()); @@ -500,9 +500,9 @@ public void testPSubMapShortcut() { public void testPSubShortcut() { Location l = new Location(getTestName(), 0); - Struct s = definition.getPainlessStructFromJavaClass(FeatureTest.class); - Method getter = s.methods.get(new MethodKey("getX", 0)); - Method setter = s.methods.get(new MethodKey("setX", 1)); + PainlessClass s = painlessLookup.getPainlessStructFromJavaClass(FeatureTest.class); + PainlessMethod getter = s.methods.get(new PainlessMethodKey("getX", 0)); + PainlessMethod setter = s.methods.get(new PainlessMethodKey("setX", 1)); PSubShortcut node = new PSubShortcut(l, "x", FeatureTest.class.getName(), getter, setter); node.prefix = new EVariable(l, "a"); assertEquals("(PSubShortcut (EVariable a) x)", node.toString()); @@ -900,12 +900,12 @@ private void assertToString(String expected, String code) { } private SSource walk(String code) { - ScriptClassInfo scriptClassInfo = new ScriptClassInfo(definition, GenericElasticsearchScript.class); + ScriptClassInfo scriptClassInfo = new ScriptClassInfo(painlessLookup, GenericElasticsearchScript.class); CompilerSettings 
compilerSettings = new CompilerSettings(); compilerSettings.setRegexesEnabled(true); try { return Walker.buildPainlessTree( - scriptClassInfo, new MainMethodReserved(), getTestName(), code, compilerSettings, definition, null); + scriptClassInfo, new MainMethodReserved(), getTestName(), code, compilerSettings, painlessLookup, null); } catch (Exception e) { throw new AssertionError("Failed to compile: " + code, e); } diff --git a/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/20_scriptfield.yml b/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/20_scriptfield.yml index 02c17ce0e3714..2914e8a916ec6 100644 --- a/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/20_scriptfield.yml +++ b/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/20_scriptfield.yml @@ -88,14 +88,13 @@ setup: --- "Scripted Field with a null safe dereference (null)": - # Change this to ?: once we have it implemented - do: search: body: script_fields: bar: script: - source: "(doc['missing'].value?.length() ?: 0) + params.x;" + source: "(doc['missing'].size() == 0 ? 0 : doc['missing'].value.length()) + params.x;" params: x: 5 diff --git a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/DiscountedCumulativeGain.java b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/DiscountedCumulativeGain.java index 01a6e35299b29..cab3237732301 100644 --- a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/DiscountedCumulativeGain.java +++ b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/DiscountedCumulativeGain.java @@ -326,9 +326,9 @@ public boolean equals(Object obj) { return false; } DiscountedCumulativeGain.Detail other = (DiscountedCumulativeGain.Detail) obj; - return (this.dcg == other.dcg && - this.idcg == other.idcg && - this.unratedDocs == other.unratedDocs); + return Double.compare(this.dcg, other.dcg) == 0 && + Double.compare(this.idcg, other.idcg) == 0 && + this.unratedDocs == other.unratedDocs; } @Override diff --git a/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/ExpectedReciprocalRank.java b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/ExpectedReciprocalRank.java new file mode 100644 index 0000000000000..4aac29f299d67 --- /dev/null +++ b/modules/rank-eval/src/main/java/org/elasticsearch/index/rankeval/ExpectedReciprocalRank.java @@ -0,0 +1,295 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.rankeval; + +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.search.SearchHit; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Objects; +import java.util.Optional; + +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; +import static org.elasticsearch.index.rankeval.EvaluationMetric.joinHitsWithRatings; + +/** + * Implementation of the Expected Reciprocal Rank metric described in:

    + * + * Chapelle, O., Metzler, D., Zhang, Y., & Grinspan, P. (2009).
    + * Expected reciprocal rank for graded relevance.
    + * Proceedings of the 18th ACM Conference on Information and Knowledge Management - CIKM ’09, 621.
    + * https://doi.org/10.1145/1645953.1646033 + */ +public class ExpectedReciprocalRank implements EvaluationMetric { + + /** the default search window size */ + private static final int DEFAULT_K = 10; + + /** the search window size */ + private final int k; + + /** + * Optional. If set, this will be the rating for docs that are unrated in the ranking evaluation request + */ + private final Integer unknownDocRating; + + private final int maxRelevance; + + private final double two_pow_maxRelevance; + + public static final String NAME = "expected_reciprocal_rank"; + + public ExpectedReciprocalRank(int maxRelevance) { + this(maxRelevance, null, DEFAULT_K); + } + + /** + * @param maxRelevance + * the maximal relevance judgment in the evaluation dataset + * @param unknownDocRating + * the rating for documents the user hasn't supplied an explicit + * rating for. Can be {@code null}, in which case document is + * skipped. + * @param k + * the search window size all request use. + */ + public ExpectedReciprocalRank(int maxRelevance, @Nullable Integer unknownDocRating, int k) { + this.maxRelevance = maxRelevance; + this.unknownDocRating = unknownDocRating; + this.k = k; + // we can pre-calculate the constant used in metric calculation + this.two_pow_maxRelevance = Math.pow(2, this.maxRelevance); + } + + ExpectedReciprocalRank(StreamInput in) throws IOException { + this.maxRelevance = in.readVInt(); + this.unknownDocRating = in.readOptionalVInt(); + this.k = in.readVInt(); + this.two_pow_maxRelevance = Math.pow(2, this.maxRelevance); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVInt(maxRelevance); + out.writeOptionalVInt(unknownDocRating); + out.writeVInt(k); + } + + @Override + public String getWriteableName() { + return NAME; + } + + int getK() { + return this.k; + } + + int getMaxRelevance() { + return this.maxRelevance; + } + + /** + * get the rating used for unrated documents + */ + public Integer getUnknownDocRating() { + return this.unknownDocRating; + } + + + @Override + public Optional forcedSearchSize() { + return Optional.of(k); + } + + @Override + public EvalQueryQuality evaluate(String taskId, SearchHit[] hits, List ratedDocs) { + List ratedHits = joinHitsWithRatings(hits, ratedDocs); + if (ratedHits.size() > this.k) { + ratedHits = ratedHits.subList(0, k); + } + List ratingsInSearchHits = new ArrayList<>(ratedHits.size()); + int unratedResults = 0; + for (RatedSearchHit hit : ratedHits) { + // unknownDocRating might be null, in which case unrated will be ignored in the calculation. 
+ // we still need to add them as a placeholder so the rank of the subsequent ratings is correct + ratingsInSearchHits.add(hit.getRating().orElse(unknownDocRating)); + if (hit.getRating().isPresent() == false) { + unratedResults++; + } + } + + double p = 1; + double err = 0; + int rank = 1; + for (Integer rating : ratingsInSearchHits) { + if (rating != null) { + double probR = probabilityOfRelevance(rating); + err = err + (p * probR / rank); + p = p * (1 - probR); + } + rank++; + } + + EvalQueryQuality evalQueryQuality = new EvalQueryQuality(taskId, err); + evalQueryQuality.addHitsAndRatings(ratedHits); + evalQueryQuality.setMetricDetails(new Detail(unratedResults)); + return evalQueryQuality; + } + + double probabilityOfRelevance(Integer rating) { + return (Math.pow(2, rating) - 1) / this.two_pow_maxRelevance; + } + + private static final ParseField K_FIELD = new ParseField("k"); + private static final ParseField UNKNOWN_DOC_RATING_FIELD = new ParseField("unknown_doc_rating"); + private static final ParseField MAX_RELEVANCE_FIELD = new ParseField("maximum_relevance"); + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>("dcg", false, + args -> { + int maxRelevance = (Integer) args[0]; + Integer optK = (Integer) args[2]; + return new ExpectedReciprocalRank(maxRelevance, (Integer) args[1], + optK == null ? DEFAULT_K : optK); + }); + + + static { + PARSER.declareInt(constructorArg(), MAX_RELEVANCE_FIELD); + PARSER.declareInt(optionalConstructorArg(), UNKNOWN_DOC_RATING_FIELD); + PARSER.declareInt(optionalConstructorArg(), K_FIELD); + } + + public static ExpectedReciprocalRank fromXContent(XContentParser parser) { + return PARSER.apply(parser, null); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + builder.startObject(NAME); + builder.field(MAX_RELEVANCE_FIELD.getPreferredName(), this.maxRelevance); + if (unknownDocRating != null) { + builder.field(UNKNOWN_DOC_RATING_FIELD.getPreferredName(), this.unknownDocRating); + } + builder.field(K_FIELD.getPreferredName(), this.k); + builder.endObject(); + builder.endObject(); + return builder; + } + + @Override + public final boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + ExpectedReciprocalRank other = (ExpectedReciprocalRank) obj; + return this.k == other.k && + this.maxRelevance == other.maxRelevance + && Objects.equals(unknownDocRating, other.unknownDocRating); + } + + @Override + public final int hashCode() { + return Objects.hash(unknownDocRating, k, maxRelevance); + } + + public static final class Detail implements MetricDetail { + + private static ParseField UNRATED_FIELD = new ParseField("unrated_docs"); + private final int unratedDocs; + + Detail(int unratedDocs) { + this.unratedDocs = unratedDocs; + } + + Detail(StreamInput in) throws IOException { + this.unratedDocs = in.readVInt(); + } + + @Override + public + String getMetricName() { + return NAME; + } + + @Override + public XContentBuilder innerToXContent(XContentBuilder builder, Params params) throws IOException { + return builder.field(UNRATED_FIELD.getPreferredName(), this.unratedDocs); + } + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>(NAME, true, args -> { + return new Detail((Integer) args[0]); + }); + + static { + PARSER.declareInt(constructorArg(), UNRATED_FIELD); + } + + public static Detail 
fromXContent(XContentParser parser) { + return PARSER.apply(parser, null); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVInt(this.unratedDocs); + } + + @Override + public String getWriteableName() { + return NAME; + } + + /** + * @return the number of unrated documents in the search results + */ + public Object getUnratedDocs() { + return this.unratedDocs; + } + + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + ExpectedReciprocalRank.Detail other = (ExpectedReciprocalRank.Detail) obj; + return this.unratedDocs == other.unratedDocs; + } + + @Override + public int hashCode() { + return Objects.hash(this.unratedDocs); + } + } +} + diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/DiscountedCumulativeGainTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/DiscountedCumulativeGainTests.java index 24ac600a11398..56b0c692c411a 100644 --- a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/DiscountedCumulativeGainTests.java +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/DiscountedCumulativeGainTests.java @@ -55,7 +55,7 @@ public class DiscountedCumulativeGainTests extends ESTestCase { /** * Assuming the docs are ranked in the following order: * - * rank | rel_rank | 2^(rel_rank) - 1 | log_2(rank + 1) | (2^(rel_rank) - 1) / log_2(rank + 1) + * rank | relevance | 2^(relevance) - 1 | log_2(rank + 1) | (2^(relevance) - 1) / log_2(rank + 1) * ------------------------------------------------------------------------------------------- * 1 | 3 | 7.0 | 1.0 | 7.0 | 7.0 |  * 2 | 2 | 3.0 | 1.5849625007211563 | 1.8927892607143721 @@ -82,7 +82,7 @@ public void testDCGAt() { * Check with normalization: to get the maximal possible dcg, sort documents by * relevance in descending order * - * rank | rel_rank | 2^(rel_rank) - 1 | log_2(rank + 1) | (2^(rel_rank) - 1) / log_2(rank + 1) + * rank | relevance | 2^(relevance) - 1 | log_2(rank + 1) | (2^(relevance) - 1) / log_2(rank + 1) * --------------------------------------------------------------------------------------- * 1 | 3 | 7.0 | 1.0  | 7.0 * 2 | 3 | 7.0 | 1.5849625007211563 | 4.416508275000202 @@ -101,7 +101,7 @@ public void testDCGAt() { * This tests metric when some documents in the search result don't have a * rating provided by the user. 
* - * rank | rel_rank | 2^(rel_rank) - 1 | log_2(rank + 1) | (2^(rel_rank) - 1) / log_2(rank + 1) + * rank | relevance | 2^(relevance) - 1 | log_2(rank + 1) | (2^(relevance) - 1) / log_2(rank + 1) * ------------------------------------------------------------------------------------------- * 1 | 3 | 7.0 | 1.0 | 7.0 2 |  * 2 | 3.0 | 1.5849625007211563 | 1.8927892607143721 @@ -134,7 +134,7 @@ public void testDCGAtSixMissingRatings() { * Check with normalization: to get the maximal possible dcg, sort documents by * relevance in descending order * - * rank | rel_rank | 2^(rel_rank) - 1 | log_2(rank + 1) | (2^(rel_rank) - 1) / log_2(rank + 1) + * rank | relevance | 2^(relevance) - 1 | log_2(rank + 1) | (2^(relevance) - 1) / log_2(rank + 1) * ---------------------------------------------------------------------------------------- * 1 | 3 | 7.0 | 1.0  | 7.0 * 2 | 3 | 7.0 | 1.5849625007211563 | 4.416508275000202 @@ -154,7 +154,7 @@ public void testDCGAtSixMissingRatings() { * documents than search hits because we restrict DCG to be calculated at the * fourth position * - * rank | rel_rank | 2^(rel_rank) - 1 | log_2(rank + 1) | (2^(rel_rank) - 1) / log_2(rank + 1) + * rank | relevance | 2^(relevance) - 1 | log_2(rank + 1) | (2^(relevance) - 1) / log_2(rank + 1) * ------------------------------------------------------------------------------------------- * 1 | 3 | 7.0 | 1.0 | 7.0 2 |  * 2 | 3.0 | 1.5849625007211563 | 1.8927892607143721 @@ -191,7 +191,7 @@ public void testDCGAtFourMoreRatings() { * Check with normalization: to get the maximal possible dcg, sort documents by * relevance in descending order * - * rank | rel_rank | 2^(rel_rank) - 1 | log_2(rank + 1) | (2^(rel_rank) - 1) / log_2(rank + 1) + * rank | relevance | 2^(relevance) - 1 | log_2(rank + 1) | (2^(relevance) - 1) / log_2(rank + 1) * --------------------------------------------------------------------------------------- * 1 | 3 | 7.0 | 1.0  | 7.0 * 2 | 3 | 7.0 | 1.5849625007211563 | 4.416508275000202 diff --git a/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/ExpectedReciprocalRankTests.java b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/ExpectedReciprocalRankTests.java new file mode 100644 index 0000000000000..a888bf010f9e2 --- /dev/null +++ b/modules/rank-eval/src/test/java/org/elasticsearch/index/rankeval/ExpectedReciprocalRankTests.java @@ -0,0 +1,217 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.rankeval; + +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.text.Text; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentFactory; +import org.elasticsearch.common.xcontent.XContentParseException; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.index.Index; +import org.elasticsearch.search.SearchHit; +import org.elasticsearch.search.SearchShardTarget; +import org.elasticsearch.test.ESTestCase; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; + +import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode; +import static org.elasticsearch.test.XContentTestUtils.insertRandomFields; +import static org.hamcrest.CoreMatchers.containsString; + +public class ExpectedReciprocalRankTests extends ESTestCase { + + private static final double DELTA = 10E-14; + + public void testProbabilityOfRelevance() { + ExpectedReciprocalRank err = new ExpectedReciprocalRank(5); + assertEquals(0.0, err.probabilityOfRelevance(0), 0.0); + assertEquals(1d/32d, err.probabilityOfRelevance(1), 0.0); + assertEquals(3d/32d, err.probabilityOfRelevance(2), 0.0); + assertEquals(7d/32d, err.probabilityOfRelevance(3), 0.0); + assertEquals(15d/32d, err.probabilityOfRelevance(4), 0.0); + assertEquals(31d/32d, err.probabilityOfRelevance(5), 0.0); + } + + /** + * Assuming the result ranking is + * + *

+     * <pre>{@code
+     * rank | relevance | probR / r | p        | p * probR / r
+     * -------------------------------------------------------
+     * 1    | 3         | 0.875     | 1        | 0.875
+     * 2    | 2         | 0.1875    | 0.125    | 0.0234375
+     * 3    | 0         | 0         | 0.078125 | 0
+     * 4    | 1         | 0.03125   | 0.078125 | 0.00244140625
+     * }</pre>
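+     *
+     * Here probR is the probability that the user finds the document at that rank
+     * relevant: probR(g) = (2^g - 1) / 2^maxRelevance, so with maxRelevance = 3 a
+     * rating of 3 gives probR = 7/8 = 0.875. p is the probability that the user
+     * reaches that rank at all: p(1) = 1 and p(r + 1) = p(r) * (1 - probR(r)).
+     * A sketch of the recurrence behind the table (illustrative only, assuming a
+     * ratings array and maxRelevance as above, not the production code):
+     *
+     * <pre>{@code
+     * double p = 1.0, err = 0.0;
+     * for (int r = 1; r <= ratings.length; r++) {
+     *     double probR = (Math.pow(2, ratings[r - 1]) - 1) / Math.pow(2, maxRelevance);
+     *     err += p * probR / r;
+     *     p *= 1 - probR;
+     * }
+     * }</pre>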
+     *
+     * err => sum of last column
+     */
+    public void testERRAt() {
+        List<RatedDocument> rated = new ArrayList<>();
+        Integer[] relevanceRatings = new Integer[] { 3, 2, 0, 1};
+        SearchHit[] hits = createSearchHits(rated, relevanceRatings);
+        ExpectedReciprocalRank err = new ExpectedReciprocalRank(3, 0, 3);
+        assertEquals(0.8984375, err.evaluate("id", hits, rated).getQualityLevel(), DELTA);
+        // take 4th rank into window
+        err = new ExpectedReciprocalRank(3, 0, 4);
+        assertEquals(0.8984375 + 0.00244140625, err.evaluate("id", hits, rated).getQualityLevel(), DELTA);
+    }
+
+    /**
+     * Assuming the result ranking is
+     *
+     * <pre>{@code
+     * rank | relevance | probR / r | p        | p * probR / r
+     * -------------------------------------------------------
+     * 1    | 3         | 0.875     | 1        | 0.875
+     * 2    | n/a       | n/a       | 0.125    | n/a
+     * 3    | 0         | 0         | 0.125    | 0
+     * 4    | 1         | 0.03125   | 0.125    | 0.00390625
+     * }</pre>
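+     *
+     * The hit at rank 2 is unrated and no unknown_doc_rating is supplied, so it
+     * counts as an unrated document and contributes nothing: its probR is treated
+     * as 0, which also leaves the probability p of reaching the following ranks
+     * unchanged at 0.125.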
+     *
+     * err => sum of last column
+     */
+    public void testERRMissingRatings() {
+        List<RatedDocument> rated = new ArrayList<>();
+        Integer[] relevanceRatings = new Integer[] { 3, null, 0, 1};
+        SearchHit[] hits = createSearchHits(rated, relevanceRatings);
+        ExpectedReciprocalRank err = new ExpectedReciprocalRank(3, null, 4);
+        EvalQueryQuality evaluation = err.evaluate("id", hits, rated);
+        assertEquals(0.875 + 0.00390625, evaluation.getQualityLevel(), DELTA);
+        assertEquals(1, ((ExpectedReciprocalRank.Detail) evaluation.getMetricDetails()).getUnratedDocs());
+        // if we supply e.g. 2 as unknown docs rating, it should be the same as in the other test above
+        err = new ExpectedReciprocalRank(3, 2, 4);
+        assertEquals(0.8984375 + 0.00244140625, err.evaluate("id", hits, rated).getQualityLevel(), DELTA);
+    }
+
+    private SearchHit[] createSearchHits(List<RatedDocument> rated, Integer[] relevanceRatings) {
+        SearchHit[] hits = new SearchHit[relevanceRatings.length];
+        for (int i = 0; i < relevanceRatings.length; i++) {
+            if (relevanceRatings[i] != null) {
+                rated.add(new RatedDocument("index", Integer.toString(i), relevanceRatings[i]));
+            }
+            hits[i] = new SearchHit(i, Integer.toString(i), new Text("type"), Collections.emptyMap());
+            hits[i].shard(new SearchShardTarget("testnode", new Index("index", "uuid"), 0, null));
+        }
+        return hits;
+    }
+
+    /**
+     * test that metric returns 0.0 when there are no search results
+     */
+    public void testNoResults() throws Exception {
+        ExpectedReciprocalRank err = new ExpectedReciprocalRank(5, 0, 10);
+        assertEquals(0.0, err.evaluate("id", new SearchHit[0], Collections.emptyList()).getQualityLevel(), DELTA);
+    }
+
+    public void testParseFromXContent() throws IOException {
+        assertParsedCorrect("{ \"unknown_doc_rating\": 2, \"maximum_relevance\": 5, \"k\" : 15 }", 2, 5, 15);
+        assertParsedCorrect("{ \"unknown_doc_rating\": 2, \"maximum_relevance\": 4 }", 2, 4, 10);
+        assertParsedCorrect("{ \"maximum_relevance\": 4, \"k\": 23 }", null, 4, 23);
+    }
+
+    private void assertParsedCorrect(String xContent, Integer expectedUnknownDocRating, int expectedMaxRelevance, int expectedK)
+            throws IOException {
+        try (XContentParser parser = createParser(JsonXContent.jsonXContent, xContent)) {
+            ExpectedReciprocalRank errAt = ExpectedReciprocalRank.fromXContent(parser);
+            assertEquals(expectedUnknownDocRating, errAt.getUnknownDocRating());
+            assertEquals(expectedK, errAt.getK());
+            assertEquals(expectedMaxRelevance, errAt.getMaxRelevance());
+        }
+    }
+
+    public static ExpectedReciprocalRank createTestItem() {
+        Integer unknownDocRating = frequently() ? Integer.valueOf(randomIntBetween(0, 10)) : null;
+        int maxRelevance = randomIntBetween(1, 10);
+        return new ExpectedReciprocalRank(maxRelevance, unknownDocRating, randomIntBetween(1, 10));
+    }
+
+    public void testXContentRoundtrip() throws IOException {
+        ExpectedReciprocalRank testItem = createTestItem();
+        XContentBuilder builder = XContentFactory.contentBuilder(randomFrom(XContentType.values()));
+        XContentBuilder shuffled = shuffleXContent(testItem.toXContent(builder, ToXContent.EMPTY_PARAMS));
+        try (XContentParser itemParser = createParser(shuffled)) {
+            itemParser.nextToken();
+            itemParser.nextToken();
+            ExpectedReciprocalRank parsedItem = ExpectedReciprocalRank.fromXContent(itemParser);
+            assertNotSame(testItem, parsedItem);
+            assertEquals(testItem, parsedItem);
+            assertEquals(testItem.hashCode(), parsedItem.hashCode());
+        }
+    }
+
+    public void testXContentParsingIsNotLenient() throws IOException {
+        ExpectedReciprocalRank testItem = createTestItem();
+        XContentType xContentType = randomFrom(XContentType.values());
+        BytesReference originalBytes = toShuffledXContent(testItem, xContentType, ToXContent.EMPTY_PARAMS, randomBoolean());
+        BytesReference withRandomFields = insertRandomFields(xContentType, originalBytes, null, random());
+        try (XContentParser parser = createParser(xContentType.xContent(), withRandomFields)) {
+            parser.nextToken();
+            parser.nextToken();
+            XContentParseException exception = expectThrows(XContentParseException.class,
+                    () -> ExpectedReciprocalRank.fromXContent(parser));
+            assertThat(exception.getMessage(), containsString("unknown field"));
+        }
+    }
+
+    public void testMetricDetails() {
+        int unratedDocs = randomIntBetween(0, 100);
+        ExpectedReciprocalRank.Detail detail = new ExpectedReciprocalRank.Detail(unratedDocs);
+        assertEquals(unratedDocs, detail.getUnratedDocs());
+    }
+
+    public void testSerialization() throws IOException {
+        ExpectedReciprocalRank original = createTestItem();
+        ExpectedReciprocalRank deserialized = ESTestCase.copyWriteable(original, new NamedWriteableRegistry(Collections.emptyList()),
+                ExpectedReciprocalRank::new);
+        assertEquals(deserialized, original);
+        assertEquals(deserialized.hashCode(), original.hashCode());
+        assertNotSame(deserialized, original);
+    }
+
+    public void testEqualsAndHash() throws IOException {
+        checkEqualsAndHashCode(createTestItem(), original -> {
+            return new ExpectedReciprocalRank(original.getMaxRelevance(), original.getUnknownDocRating(), original.getK());
+        }, ExpectedReciprocalRankTests::mutateTestItem);
+    }
+
+    private static ExpectedReciprocalRank mutateTestItem(ExpectedReciprocalRank original) {
+        switch (randomIntBetween(0, 2)) {
+        case 0:
+            return new ExpectedReciprocalRank(original.getMaxRelevance() + 1, original.getUnknownDocRating(), original.getK());
+        case 1:
+            return new ExpectedReciprocalRank(original.getMaxRelevance(),
+                    randomValueOtherThan(original.getUnknownDocRating(), () -> randomIntBetween(0, 10)), original.getK());
+        case 2:
+            return new ExpectedReciprocalRank(original.getMaxRelevance(), original.getUnknownDocRating(),
+                    randomValueOtherThan(original.getK(), () -> randomIntBetween(1, 10)));
+        default:
+            throw new IllegalArgumentException("mutation variant not allowed");
+        }
+    }
+}
diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ManyDocumentsIT.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ManyDocumentsIT.java
index e9082c96fd163..6aa1046492ccc 100644
---
a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ManyDocumentsIT.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/ManyDocumentsIT.java @@ -19,19 +19,13 @@ package org.elasticsearch.index.reindex; -import org.apache.http.entity.ContentType; -import org.apache.http.entity.StringEntity; -import org.elasticsearch.client.Response; -import org.elasticsearch.common.xcontent.XContentHelper; -import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.client.Request; import org.elasticsearch.test.rest.ESRestTestCase; import org.junit.Before; import java.io.IOException; import java.util.Map; -import static java.util.Collections.emptyMap; -import static java.util.Collections.singletonMap; import static org.hamcrest.Matchers.hasEntry; /** @@ -50,48 +44,69 @@ public void setupTestIndex() throws IOException { bulk.append("{\"index\":{}}\n"); bulk.append("{\"test\":\"test\"}\n"); } - client().performRequest("POST", "/test/test/_bulk", singletonMap("refresh", "true"), - new StringEntity(bulk.toString(), ContentType.APPLICATION_JSON)); + Request request = new Request("POST", "/test/test/_bulk"); + request.addParameter("refresh", "true"); + request.setJsonEntity(bulk.toString()); + client().performRequest(request); } public void testReindex() throws IOException { - Map response = toMap(client().performRequest("POST", "/_reindex", emptyMap(), new StringEntity( - "{\"source\":{\"index\":\"test\"}, \"dest\":{\"index\":\"des\"}}", - ContentType.APPLICATION_JSON))); + Request request = new Request("POST", "/_reindex"); + request.setJsonEntity( + "{\n" + + " \"source\":{\n" + + " \"index\":\"test\"\n" + + " },\n" + + " \"dest\":{\n" + + " \"index\":\"des\"\n" + + " }\n" + + "}"); + Map response = entityAsMap(client().performRequest(request)); assertThat(response, hasEntry("total", count)); assertThat(response, hasEntry("created", count)); } public void testReindexFromRemote() throws IOException { - Map nodesInfo = toMap(client().performRequest("GET", "/_nodes/http")); + Map nodesInfo = entityAsMap(client().performRequest(new Request("GET", "/_nodes/http"))); nodesInfo = (Map) nodesInfo.get("nodes"); Map nodeInfo = (Map) nodesInfo.values().iterator().next(); Map http = (Map) nodeInfo.get("http"); String remote = "http://"+ http.get("publish_address"); - Map response = toMap(client().performRequest("POST", "/_reindex", emptyMap(), new StringEntity( - "{\"source\":{\"index\":\"test\",\"remote\":{\"host\":\"" + remote + "\"}}, \"dest\":{\"index\":\"des\"}}", - ContentType.APPLICATION_JSON))); + Request request = new Request("POST", "/_reindex"); + request.setJsonEntity( + "{\n" + + " \"source\":{\n" + + " \"index\":\"test\",\n" + + " \"remote\":{\n" + + " \"host\":\"" + remote + "\"\n" + + " }\n" + + " }\n," + + " \"dest\":{\n" + + " \"index\":\"des\"\n" + + " }\n" + + "}"); + Map response = entityAsMap(client().performRequest(request)); assertThat(response, hasEntry("total", count)); assertThat(response, hasEntry("created", count)); } public void testUpdateByQuery() throws IOException { - Map response = toMap(client().performRequest("POST", "/test/_update_by_query")); + Map response = entityAsMap(client().performRequest(new Request("POST", "/test/_update_by_query"))); assertThat(response, hasEntry("total", count)); assertThat(response, hasEntry("updated", count)); } public void testDeleteByQuery() throws IOException { - Map response = toMap(client().performRequest("POST", "/test/_delete_by_query", emptyMap(), new StringEntity( - 
"{\"query\":{\"match_all\":{}}}", - ContentType.APPLICATION_JSON))); + Request request = new Request("POST", "/test/_delete_by_query"); + request.setJsonEntity( + "{\n" + + " \"query\":{\n" + + " \"match_all\": {}\n" + + " }\n" + + "}"); + Map response = entityAsMap(client().performRequest(request)); assertThat(response, hasEntry("total", count)); assertThat(response, hasEntry("deleted", count)); } - - static Map toMap(Response response) throws IOException { - return XContentHelper.convertToMap(JsonXContent.jsonXContent, response.getEntity().getContent(), false); - } - } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/ReindexFromOldRemoteIT.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/ReindexFromOldRemoteIT.java index 5d359053a6668..9feed83595ff1 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/ReindexFromOldRemoteIT.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/ReindexFromOldRemoteIT.java @@ -19,25 +19,24 @@ package org.elasticsearch.index.reindex.remote; -import org.apache.http.HttpEntity; import org.apache.http.HttpHost; -import org.apache.http.entity.ContentType; -import org.apache.http.entity.StringEntity; import org.apache.http.util.EntityUtils; +import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; -import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.RestClient; import org.elasticsearch.common.Booleans; import org.elasticsearch.test.rest.ESRestTestCase; import java.io.IOException; -import java.util.Map; -import java.util.TreeMap; -import static java.util.Collections.singletonMap; import static org.hamcrest.Matchers.containsString; public class ReindexFromOldRemoteIT extends ESRestTestCase { + /** + * Number of documents to test when reindexing from an old version. 
+ */ + private static final int DOCS = 5; + private void oldEsTestCase(String portPropertyName, String requestsPerSecond) throws IOException { boolean enabled = Booleans.parseBoolean(System.getProperty("tests.fromOld")); assumeTrue("test is disabled, probably because this is windows", enabled); @@ -45,17 +44,19 @@ private void oldEsTestCase(String portPropertyName, String requestsPerSecond) th int oldEsPort = Integer.parseInt(System.getProperty(portPropertyName)); try (RestClient oldEs = RestClient.builder(new HttpHost("127.0.0.1", oldEsPort)).build()) { try { - HttpEntity entity = new StringEntity("{\"settings\":{\"number_of_shards\": 1}}", ContentType.APPLICATION_JSON); - oldEs.performRequest("PUT", "/test", singletonMap("refresh", "true"), entity); - - entity = new StringEntity("{\"test\":\"test\"}", ContentType.APPLICATION_JSON); - oldEs.performRequest("PUT", "/test/doc/testdoc1", singletonMap("refresh", "true"), entity); - oldEs.performRequest("PUT", "/test/doc/testdoc2", singletonMap("refresh", "true"), entity); - oldEs.performRequest("PUT", "/test/doc/testdoc3", singletonMap("refresh", "true"), entity); - oldEs.performRequest("PUT", "/test/doc/testdoc4", singletonMap("refresh", "true"), entity); - oldEs.performRequest("PUT", "/test/doc/testdoc5", singletonMap("refresh", "true"), entity); + Request createIndex = new Request("PUT", "/test"); + createIndex.setJsonEntity("{\"settings\":{\"number_of_shards\": 1}}"); + oldEs.performRequest(createIndex); + + for (int i = 0; i < DOCS; i++) { + Request doc = new Request("PUT", "/test/doc/testdoc" + i); + doc.addParameter("refresh", "true"); + doc.setJsonEntity("{\"test\":\"test\"}"); + oldEs.performRequest(doc); + } - entity = new StringEntity( + Request reindex = new Request("POST", "/_reindex"); + reindex.setJsonEntity( "{\n" + " \"source\":{\n" + " \"index\": \"test\",\n" @@ -67,36 +68,23 @@ private void oldEsTestCase(String portPropertyName, String requestsPerSecond) th + " \"dest\": {\n" + " \"index\": \"test\"\n" + " }\n" - + "}", - ContentType.APPLICATION_JSON); - Map params = new TreeMap<>(); - params.put("refresh", "true"); - params.put("pretty", "true"); + + "}"); + reindex.addParameter("refresh", "true"); + reindex.addParameter("pretty", "true"); if (requestsPerSecond != null) { - params.put("requests_per_second", requestsPerSecond); + reindex.addParameter("requests_per_second", requestsPerSecond); } - client().performRequest("POST", "/_reindex", params, entity); + client().performRequest(reindex); - Response response = client().performRequest("POST", "test/_search", singletonMap("pretty", "true")); + Request search = new Request("POST", "/test/_search"); + search.addParameter("pretty", "true"); + Response response = client().performRequest(search); String result = EntityUtils.toString(response.getEntity()); - assertThat(result, containsString("\"_id\" : \"testdoc1\"")); - } finally { - try { - oldEs.performRequest("DELETE", "/test"); - } catch (ResponseException e) { - /* Try not to throw ResponseException for as it'll eat the - * real exception. This is because the rest client throws - * exceptions in a "funny" way that isn't compatible with - * `suppressed`. In the case of 404s we'll just log something - * and move on because that just means that a previous - * failure caused the index not to be created. 
*/ - if (e.getResponse().getStatusLine().getStatusCode() == 404) { - logger.warn("old index not deleted because it doesn't exist"); - } else { - logger.error("failed to remove old index", e); - fail("failed to remove old index, see log"); - } + for (int i = 0; i < DOCS; i++) { + assertThat(result, containsString("\"_id\" : \"testdoc" + i + "\"")); } + } finally { + oldEs.performRequest(new Request("DELETE", "/test")); } } } diff --git a/modules/repository-url/src/test/java/org/elasticsearch/repositories/url/RepositoryURLClientYamlTestSuiteIT.java b/modules/repository-url/src/test/java/org/elasticsearch/repositories/url/RepositoryURLClientYamlTestSuiteIT.java index f33fa98f0e3be..65d9b87b07d4c 100644 --- a/modules/repository-url/src/test/java/org/elasticsearch/repositories/url/RepositoryURLClientYamlTestSuiteIT.java +++ b/modules/repository-url/src/test/java/org/elasticsearch/repositories/url/RepositoryURLClientYamlTestSuiteIT.java @@ -24,6 +24,7 @@ import org.apache.http.HttpEntity; import org.apache.http.entity.ContentType; import org.apache.http.nio.entity.NStringEntity; +import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.PathUtils; @@ -44,7 +45,6 @@ import java.util.List; import java.util.Map; -import static java.util.Collections.emptyMap; import static org.elasticsearch.common.xcontent.XContentFactory.jsonBuilder; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasSize; @@ -70,8 +70,10 @@ public static Iterable parameters() throws Exception { **/ @Before public void registerRepositories() throws IOException { - Response clusterSettingsResponse = client().performRequest("GET", "/_cluster/settings?include_defaults=true" + - "&filter_path=defaults.path.repo,defaults.repositories.url.allowed_urls"); + Request clusterSettingsRequest = new Request("GET", "/_cluster/settings"); + clusterSettingsRequest.addParameter("include_defaults", "true"); + clusterSettingsRequest.addParameter("filter_path", "defaults.path.repo,defaults.repositories.url.allowed_urls"); + Response clusterSettingsResponse = client().performRequest(clusterSettingsRequest); Map clusterSettings = entityAsMap(clusterSettingsResponse); @SuppressWarnings("unchecked") @@ -83,13 +85,17 @@ public void registerRepositories() throws IOException { final URI pathRepoUri = PathUtils.get(pathRepo).toUri().normalize(); // Create a FS repository using the path.repo location - Response createFsRepositoryResponse = client().performRequest("PUT", "_snapshot/repository-fs", emptyMap(), - buildRepositorySettings(FsRepository.TYPE, Settings.builder().put("location", pathRepo).build())); + Request createFsRepositoryRequest = new Request("PUT", "/_snapshot/repository-fs"); + createFsRepositoryRequest.setEntity(buildRepositorySettings(FsRepository.TYPE, + Settings.builder().put("location", pathRepo).build())); + Response createFsRepositoryResponse = client().performRequest(createFsRepositoryRequest); assertThat(createFsRepositoryResponse.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus())); // Create a URL repository using the file://{path.repo} URL - Response createFileRepositoryResponse = client().performRequest("PUT", "_snapshot/repository-file", emptyMap(), - buildRepositorySettings(URLRepository.TYPE, Settings.builder().put("url", pathRepoUri.toString()).build())); + Request createFileRepositoryRequest = new Request("PUT", "/_snapshot/repository-file"); + 
createFileRepositoryRequest.setEntity(buildRepositorySettings(URLRepository.TYPE, + Settings.builder().put("url", pathRepoUri.toString()).build())); + Response createFileRepositoryResponse = client().performRequest(createFileRepositoryRequest); assertThat(createFileRepositoryResponse.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus())); // Create a URL repository using the http://{fixture} URL @@ -99,8 +105,10 @@ public void registerRepositories() throws IOException { try { InetAddress inetAddress = InetAddress.getByName(new URL(allowedUrl).getHost()); if (inetAddress.isAnyLocalAddress() || inetAddress.isLoopbackAddress()) { - Response createUrlRepositoryResponse = client().performRequest("PUT", "_snapshot/repository-url", emptyMap(), - buildRepositorySettings(URLRepository.TYPE, Settings.builder().put("url", allowedUrl).build())); + Request createUrlRepositoryRequest = new Request("PUT", "/_snapshot/repository-url"); + createUrlRepositoryRequest.setEntity(buildRepositorySettings(URLRepository.TYPE, + Settings.builder().put("url", allowedUrl).build())); + Response createUrlRepositoryResponse = client().performRequest(createUrlRepositoryRequest); assertThat(createUrlRepositoryResponse.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus())); break; } @@ -126,4 +134,3 @@ private static HttpEntity buildRepositorySettings(final String type, final Setti } } } - diff --git a/plugins/repository-s3/build.gradle b/plugins/repository-s3/build.gradle index dc2140a6086a4..5af0a412b4cc0 100644 --- a/plugins/repository-s3/build.gradle +++ b/plugins/repository-s3/build.gradle @@ -92,23 +92,26 @@ String s3TemporaryBasePath = System.getenv("amazon_s3_base_path_temporary") // If all these variables are missing then we are testing against the internal fixture instead, which has the following // credentials hard-coded in. 
-if (!s3PermanentAccessKey && !s3PermanentSecretKey && !s3PermanentBucket && !s3PermanentBasePath - && !s3TemporaryAccessKey && !s3TemporarySecretKey && !s3TemporaryBucket && !s3TemporaryBasePath && !s3TemporarySessionToken) { - +if (!s3PermanentAccessKey && !s3PermanentSecretKey && !s3PermanentBucket && !s3PermanentBasePath) { s3PermanentAccessKey = 's3_integration_test_permanent_access_key' s3PermanentSecretKey = 's3_integration_test_permanent_secret_key' s3PermanentBucket = 'permanent-bucket-test' s3PermanentBasePath = 'integration_test' + useFixture = true + +} else if (!s3PermanentAccessKey || !s3PermanentSecretKey || !s3PermanentBucket || !s3PermanentBasePath) { + throw new IllegalArgumentException("not all options specified to run against external S3 service") +} + +if (!s3TemporaryAccessKey && !s3TemporarySecretKey && !s3TemporaryBucket && !s3TemporaryBasePath && !s3TemporarySessionToken) { s3TemporaryAccessKey = 's3_integration_test_temporary_access_key' s3TemporarySecretKey = 's3_integration_test_temporary_secret_key' s3TemporaryBucket = 'temporary-bucket-test' s3TemporaryBasePath = 'integration_test' s3TemporarySessionToken = 's3_integration_test_temporary_session_token' - useFixture = true -} else if (!s3PermanentAccessKey || !s3PermanentSecretKey || !s3PermanentBucket || !s3PermanentBasePath - || !s3TemporaryAccessKey || !s3TemporarySecretKey || !s3TemporaryBucket || !s3TemporaryBasePath || !s3TemporarySessionToken) { +} else if (!s3TemporaryAccessKey || !s3TemporarySecretKey || !s3TemporaryBucket || !s3TemporaryBasePath || !s3TemporarySessionToken) { throw new IllegalArgumentException("not all options specified to run against external S3 service") } @@ -296,6 +299,13 @@ processTestResources { MavenFilteringHack.filter(it, expansions) } +project.afterEvaluate { + if (useFixture == false) { + // 30_repository_temporary_credentials is not ready for CI yet + integTestRunner.systemProperty 'tests.rest.blacklist', 'repository_s3/30_repository_temporary_credentials/*' + } +} + integTestCluster { keystoreSetting 's3.client.integration_test_permanent.access_key', s3PermanentAccessKey keystoreSetting 's3.client.integration_test_permanent.secret_key', s3PermanentSecretKey diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AmazonS3Fixture.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AmazonS3Fixture.java index 26bc7c962c450..9b38669da2563 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AmazonS3Fixture.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AmazonS3Fixture.java @@ -18,14 +18,15 @@ */ package org.elasticsearch.repositories.s3; -import org.elasticsearch.test.fixture.AbstractHttpFixture; import com.amazonaws.util.DateUtils; + import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.Streams; import org.elasticsearch.common.path.PathTrie; import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.RestUtils; +import org.elasticsearch.test.fixture.AbstractHttpFixture; import java.io.BufferedInputStream; import java.io.ByteArrayInputStream; @@ -93,23 +94,21 @@ protected Response handle(final Request request) throws IOException { return newError(request.getId(), RestStatus.FORBIDDEN, "AccessDenied", "Bad access key", ""); } - if (handler != null) { - final String bucket = request.getParam("bucket"); - if (bucket != null && permittedBucket.equals(bucket) == 
false) { - // allow a null bucket to support the multi-object-delete API which - // passes the bucket name in the host header instead of the URL. - if (buckets.containsKey(bucket)) { - return newError(request.getId(), RestStatus.FORBIDDEN, "AccessDenied", "Bad bucket", ""); - } else { - return newBucketNotFoundError(request.getId(), bucket); - } + final String bucket = request.getParam("bucket"); + if (bucket != null && permittedBucket.equals(bucket) == false) { + // allow a null bucket to support the multi-object-delete API which + // passes the bucket name in the host header instead of the URL. + if (buckets.containsKey(bucket)) { + return newError(request.getId(), RestStatus.FORBIDDEN, "AccessDenied", "Bad bucket", ""); + } else { + return newBucketNotFoundError(request.getId(), bucket); } - return handler.handle(request); - } else { - return newInternalError(request.getId(), "No handler defined for request [" + request + "]"); } + return handler.handle(request); + + } else { + return newInternalError(request.getId(), "No handler defined for request [" + request + "]"); } - return null; } public static void main(final String[] args) throws Exception { diff --git a/qa/ccs-unavailable-clusters/build.gradle b/qa/ccs-unavailable-clusters/build.gradle index 86d0cb64f65a2..d9de422bb43e1 100644 --- a/qa/ccs-unavailable-clusters/build.gradle +++ b/qa/ccs-unavailable-clusters/build.gradle @@ -21,5 +21,5 @@ apply plugin: 'elasticsearch.rest-test' apply plugin: 'elasticsearch.test-with-dependencies' dependencies { - testCompile project(path: ':client:rest-high-level', configuration: 'runtime') -} \ No newline at end of file + testCompile project(path: ':client:rest-high-level', configuration: 'shadow') +} diff --git a/qa/vagrant/build.gradle b/qa/vagrant/build.gradle index 23d171f312512..704136eb4cf27 100644 --- a/qa/vagrant/build.gradle +++ b/qa/vagrant/build.gradle @@ -31,6 +31,12 @@ dependencies { compile "org.hamcrest:hamcrest-core:${versions.hamcrest}" compile "org.hamcrest:hamcrest-library:${versions.hamcrest}" + compile "org.apache.httpcomponents:httpcore:${versions.httpcore}" + compile "org.apache.httpcomponents:httpclient:${versions.httpclient}" + compile "org.apache.httpcomponents:fluent-hc:${versions.httpclient}" + compile "commons-codec:commons-codec:${versions.commonscodec}" + compile "commons-logging:commons-logging:${versions.commonslogging}" + compile project(':libs:core') // pulls in the jar built by this project and its dependencies @@ -73,3 +79,17 @@ tasks.test.enabled = false // this project doesn't get published tasks.dependencyLicenses.enabled = false tasks.dependenciesInfo.enabled = false + +tasks.thirdPartyAudit.excludes = [ + //commons-logging optional dependencies + 'org.apache.avalon.framework.logger.Logger', + 'org.apache.log.Hierarchy', + 'org.apache.log.Logger', + 'org.apache.log4j.Category', + 'org.apache.log4j.Level', + 'org.apache.log4j.Logger', + 'org.apache.log4j.Priority', + //commons-logging provided dependencies + 'javax.servlet.ServletContextEvent', + 'javax.servlet.ServletContextListener' +] diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/ArchiveTestCase.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/ArchiveTestCase.java index ab4a11922cc21..df5e8cf995d86 100644 --- a/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/ArchiveTestCase.java +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/test/ArchiveTestCase.java @@ -19,6 +19,12 @@ package org.elasticsearch.packaging.test; +import 
org.apache.http.client.fluent.Request; +import org.elasticsearch.packaging.util.Archives; +import org.elasticsearch.packaging.util.Platforms; +import org.elasticsearch.packaging.util.ServerUtils; +import org.elasticsearch.packaging.util.Shell; +import org.elasticsearch.packaging.util.Shell.Result; import org.junit.Before; import org.junit.BeforeClass; import org.junit.FixMethodOrder; @@ -28,9 +34,33 @@ import org.elasticsearch.packaging.util.Distribution; import org.elasticsearch.packaging.util.Installation; +import java.io.IOException; +import java.nio.file.Files; +import java.nio.file.Path; +import java.util.Arrays; +import java.util.stream.Stream; + +import static java.util.stream.Collectors.joining; +import static org.elasticsearch.packaging.util.Archives.ARCHIVE_OWNER; import static org.elasticsearch.packaging.util.Cleanup.cleanEverything; import static org.elasticsearch.packaging.util.Archives.installArchive; import static org.elasticsearch.packaging.util.Archives.verifyArchiveInstallation; +import static org.elasticsearch.packaging.util.FileMatcher.Fileness.File; +import static org.elasticsearch.packaging.util.FileMatcher.file; +import static org.elasticsearch.packaging.util.FileMatcher.p660; +import static org.elasticsearch.packaging.util.FileUtils.append; +import static org.elasticsearch.packaging.util.FileUtils.cp; +import static org.elasticsearch.packaging.util.FileUtils.getTempDir; +import static org.elasticsearch.packaging.util.FileUtils.mkdir; +import static org.elasticsearch.packaging.util.FileUtils.rm; +import static org.elasticsearch.packaging.util.ServerUtils.makeRequest; +import static org.hamcrest.CoreMatchers.containsString; +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.CoreMatchers.notNullValue; +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.isEmptyString; +import static org.junit.Assert.assertTrue; +import static org.junit.Assume.assumeThat; import static org.junit.Assume.assumeTrue; /** @@ -61,4 +91,226 @@ public void test10Install() { installation = installArchive(distribution()); verifyArchiveInstallation(installation, distribution()); } + + @Test + public void test20PluginsListWithNoPlugins() { + assumeThat(installation, is(notNullValue())); + + final Installation.Executables bin = installation.executables(); + final Shell sh = new Shell(); + final Result r = sh.run(bin.elasticsearchPlugin + " list"); + + assertThat(r.stdout, isEmptyString()); + } + + @Test + public void test30AbortWhenJavaMissing() { + assumeThat(installation, is(notNullValue())); + + final Installation.Executables bin = installation.executables(); + final Shell sh = new Shell(); + + Platforms.onWindows(() -> { + // on windows, removing java from PATH and removing JAVA_HOME is less involved than changing the permissions of the java + // executable. 
we also don't check permissions in the windows scripts anyway + final String originalPath = sh.run("$Env:PATH").stdout.trim(); + final String newPath = Arrays.stream(originalPath.split(";")) + .filter(path -> path.contains("Java") == false) + .collect(joining(";")); + + // note the lack of a $ when clearing the JAVA_HOME env variable - with a $ it deletes the java home directory + // https://docs.microsoft.com/en-us/powershell/module/microsoft.powershell.core/providers/environment-provider?view=powershell-6 + // + // this won't persist to another session so we don't have to reset anything + final Result runResult = sh.runIgnoreExitCode( + "$Env:PATH = '" + newPath + "'; " + + "Remove-Item Env:JAVA_HOME; " + + bin.elasticsearch + ); + + assertThat(runResult.exitCode, is(1)); + assertThat(runResult.stderr, containsString("could not find java; set JAVA_HOME or ensure java is in PATH")); + }); + + Platforms.onLinux(() -> { + final String javaPath = sh.run("which java").stdout.trim(); + + try { + sh.run("chmod -x '" + javaPath + "'"); + final Result runResult = sh.runIgnoreExitCode(bin.elasticsearch.toString()); + assertThat(runResult.exitCode, is(1)); + assertThat(runResult.stdout, containsString("could not find java; set JAVA_HOME or ensure java is in PATH")); + } finally { + sh.run("chmod +x '" + javaPath + "'"); + } + }); + } + + @Test + public void test40CreateKeystoreManually() { + assumeThat(installation, is(notNullValue())); + + final Installation.Executables bin = installation.executables(); + final Shell sh = new Shell(); + + Platforms.onLinux(() -> sh.run("sudo -u " + ARCHIVE_OWNER + " " + bin.elasticsearchKeystore + " create")); + + // this is a hack around the fact that we can't run a command in the same session as the same user but not as administrator. + // the keystore ends up being owned by the Administrators group, so we manually set it to be owned by the vagrant user here. + // from the server's perspective the permissions aren't really different, this is just to reflect what we'd expect in the tests. + // when we run these commands as a role user we won't have to do this + Platforms.onWindows(() -> sh.run( + bin.elasticsearchKeystore + " create; " + + "$account = New-Object System.Security.Principal.NTAccount 'vagrant'; " + + "$acl = Get-Acl '" + installation.config("elasticsearch.keystore") + "'; " + + "$acl.SetOwner($account); " + + "Set-Acl '" + installation.config("elasticsearch.keystore") + "' $acl" + )); + + assertThat(installation.config("elasticsearch.keystore"), file(File, ARCHIVE_OWNER, ARCHIVE_OWNER, p660)); + + Platforms.onLinux(() -> { + final Result r = sh.run("sudo -u " + ARCHIVE_OWNER + " " + bin.elasticsearchKeystore + " list"); + assertThat(r.stdout, containsString("keystore.seed")); + }); + + Platforms.onWindows(() -> { + final Result r = sh.run(bin.elasticsearchKeystore + " list"); + assertThat(r.stdout, containsString("keystore.seed")); + }); + } + + @Test + public void test50StartAndStop() throws IOException { + assumeThat(installation, is(notNullValue())); + + // cleanup from previous test + rm(installation.config("elasticsearch.keystore")); + + Archives.runElasticsearch(installation); + + final String gcLogName = Platforms.LINUX + ? 
"gc.log.0.current" + : "gc.log"; + assertTrue("gc logs exist", Files.exists(installation.logs.resolve(gcLogName))); + ServerUtils.runElasticsearchTests(); + + Archives.stopElasticsearch(installation); + } + + @Test + public void test60AutoCreateKeystore() { + assumeThat(installation, is(notNullValue())); + + assertThat(installation.config("elasticsearch.keystore"), file(File, ARCHIVE_OWNER, ARCHIVE_OWNER, p660)); + + final Installation.Executables bin = installation.executables(); + final Shell sh = new Shell(); + + Platforms.onLinux(() -> { + final Result result = sh.run("sudo -u " + ARCHIVE_OWNER + " " + bin.elasticsearchKeystore + " list"); + assertThat(result.stdout, containsString("keystore.seed")); + }); + + Platforms.onWindows(() -> { + final Result result = sh.run(bin.elasticsearchKeystore + " list"); + assertThat(result.stdout, containsString("keystore.seed")); + }); + } + + @Test + public void test70CustomPathConfAndJvmOptions() throws IOException { + assumeThat(installation, is(notNullValue())); + + final Path tempConf = getTempDir().resolve("esconf-alternate"); + + try { + mkdir(tempConf); + cp(installation.config("elasticsearch.yml"), tempConf.resolve("elasticsearch.yml")); + cp(installation.config("log4j2.properties"), tempConf.resolve("log4j2.properties")); + + // we have to disable Log4j from using JMX lest it will hit a security + // manager exception before we have configured logging; this will fail + // startup since we detect usages of logging before it is configured + final String jvmOptions = + "-Xms512m\n" + + "-Xmx512m\n" + + "-Dlog4j2.disable.jmx=true\n"; + append(tempConf.resolve("jvm.options"), jvmOptions); + + final Shell sh = new Shell(); + Platforms.onLinux(() -> sh.run("chown -R elasticsearch:elasticsearch " + tempConf)); + Platforms.onWindows(() -> sh.run( + "$account = New-Object System.Security.Principal.NTAccount 'vagrant'; " + + "$tempConf = Get-ChildItem '" + tempConf + "' -Recurse; " + + "$tempConf += Get-Item '" + tempConf + "'; " + + "$tempConf | ForEach-Object { " + + "$acl = Get-Acl $_.FullName; " + + "$acl.SetOwner($account); " + + "Set-Acl $_.FullName $acl " + + "}" + )); + + final Shell serverShell = new Shell(); + serverShell.getEnv().put("ES_PATH_CONF", tempConf.toString()); + serverShell.getEnv().put("ES_JAVA_OPTS", "-XX:-UseCompressedOops"); + + Archives.runElasticsearch(installation, serverShell); + + final String nodesResponse = makeRequest(Request.Get("http://localhost:9200/_nodes")); + assertThat(nodesResponse, containsString("\"heap_init_in_bytes\":536870912")); + assertThat(nodesResponse, containsString("\"using_compressed_ordinary_object_pointers\":\"false\"")); + + Archives.stopElasticsearch(installation); + + } finally { + rm(tempConf); + } + } + + @Test + public void test80RelativePathConf() throws IOException { + assumeThat(installation, is(notNullValue())); + + final Path temp = getTempDir().resolve("esconf-alternate"); + final Path tempConf = temp.resolve("config"); + + try { + mkdir(tempConf); + Stream.of( + "elasticsearch.yml", + "log4j2.properties", + "jvm.options" + ).forEach(file -> cp(installation.config(file), tempConf.resolve(file))); + + append(tempConf.resolve("elasticsearch.yml"), "node.name: relative"); + + final Shell sh = new Shell(); + Platforms.onLinux(() -> sh.run("chown -R elasticsearch:elasticsearch " + temp)); + Platforms.onWindows(() -> sh.run( + "$account = New-Object System.Security.Principal.NTAccount 'vagrant'; " + + "$tempConf = Get-ChildItem '" + temp + "' -Recurse; " + + "$tempConf += Get-Item '" + 
temp + "'; " + + "$tempConf | ForEach-Object { " + + "$acl = Get-Acl $_.FullName; " + + "$acl.SetOwner($account); " + + "Set-Acl $_.FullName $acl " + + "}" + )); + + final Shell serverShell = new Shell(temp); + serverShell.getEnv().put("ES_PATH_CONF", "config"); + Archives.runElasticsearch(installation, serverShell); + + final String nodesResponse = makeRequest(Request.Get("http://localhost:9200/_nodes")); + assertThat(nodesResponse, containsString("\"name\":\"relative\"")); + + Archives.stopElasticsearch(installation); + + } finally { + rm(tempConf); + } + } + + } diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Archives.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Archives.java index c4d3655a55398..6ffec813eb041 100644 --- a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Archives.java +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Archives.java @@ -19,11 +19,14 @@ package org.elasticsearch.packaging.util; +import java.io.IOException; import java.nio.file.Files; import java.nio.file.Path; +import java.nio.file.Paths; import java.util.List; import java.util.stream.Stream; +import static java.util.stream.Collectors.joining; import static org.elasticsearch.packaging.util.FileMatcher.Fileness.Directory; import static org.elasticsearch.packaging.util.FileMatcher.Fileness.File; import static org.elasticsearch.packaging.util.FileMatcher.file; @@ -36,17 +39,26 @@ import static org.elasticsearch.packaging.util.FileUtils.lsGlob; import static org.elasticsearch.packaging.util.FileUtils.mv; +import static org.elasticsearch.packaging.util.FileUtils.slurp; import static org.elasticsearch.packaging.util.Platforms.isDPKG; import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.isEmptyOrNullString; import static org.hamcrest.core.Is.is; import static org.hamcrest.collection.IsEmptyCollection.empty; import static org.hamcrest.collection.IsCollectionWithSize.hasSize; +import static org.hamcrest.core.IsNot.not; +import static org.junit.Assert.assertTrue; /** * Installation and verification logic for archive distributions */ public class Archives { + // in the future we'll run as a role user on Windows + public static final String ARCHIVE_OWNER = Platforms.WINDOWS + ? 
"vagrant" + : "elasticsearch"; + public static Installation installArchive(Distribution distribution) { return installArchive(distribution, getDefaultArchiveInstallPath(), getCurrentVersion()); } @@ -63,22 +75,20 @@ public static Installation installArchive(Distribution distribution, Path fullIn if (distribution.packaging == Distribution.Packaging.TAR) { - if (Platforms.LINUX) { - sh.bash("tar -C " + baseInstallPath + " -xzpf " + distributionFile); - } else { + Platforms.onLinux(() -> sh.run("tar -C " + baseInstallPath + " -xzpf " + distributionFile)); + + if (Platforms.WINDOWS) { throw new RuntimeException("Distribution " + distribution + " is not supported on windows"); } } else if (distribution.packaging == Distribution.Packaging.ZIP) { - if (Platforms.LINUX) { - sh.bash("unzip " + distributionFile + " -d " + baseInstallPath); - } else { - sh.powershell( - "Add-Type -AssemblyName 'System.IO.Compression.Filesystem'; " + - "[IO.Compression.ZipFile]::ExtractToDirectory('" + distributionFile + "', '" + baseInstallPath + "')" - ); - } + Platforms.onLinux(() -> sh.run("unzip " + distributionFile + " -d " + baseInstallPath)); + + Platforms.onWindows(() -> sh.run( + "Add-Type -AssemblyName 'System.IO.Compression.Filesystem'; " + + "[IO.Compression.ZipFile]::ExtractToDirectory('" + distributionFile + "', '" + baseInstallPath + "')" + )); } else { throw new RuntimeException("Distribution " + distribution + " is not a known archive type"); @@ -93,9 +103,8 @@ public static Installation installArchive(Distribution distribution, Path fullIn assertThat("only the intended installation exists", installations, hasSize(1)); assertThat("only the intended installation exists", installations.get(0), is(fullInstallPath)); - if (Platforms.LINUX) { - setupArchiveUsersLinux(fullInstallPath); - } + Platforms.onLinux(() -> setupArchiveUsersLinux(fullInstallPath)); + Platforms.onWindows(() -> setupArchiveUsersWindows(fullInstallPath)); return new Installation(fullInstallPath); } @@ -103,17 +112,17 @@ public static Installation installArchive(Distribution distribution, Path fullIn private static void setupArchiveUsersLinux(Path installPath) { final Shell sh = new Shell(); - if (sh.bashIgnoreExitCode("getent group elasticsearch").isSuccess() == false) { + if (sh.runIgnoreExitCode("getent group elasticsearch").isSuccess() == false) { if (isDPKG()) { - sh.bash("addgroup --system elasticsearch"); + sh.run("addgroup --system elasticsearch"); } else { - sh.bash("groupadd -r elasticsearch"); + sh.run("groupadd -r elasticsearch"); } } - if (sh.bashIgnoreExitCode("id elasticsearch").isSuccess() == false) { + if (sh.runIgnoreExitCode("id elasticsearch").isSuccess() == false) { if (isDPKG()) { - sh.bash("adduser " + + sh.run("adduser " + "--quiet " + "--system " + "--no-create-home " + @@ -122,7 +131,7 @@ private static void setupArchiveUsersLinux(Path installPath) { "--shell /bin/false " + "elasticsearch"); } else { - sh.bash("useradd " + + sh.run("useradd " + "--system " + "-M " + "--gid elasticsearch " + @@ -131,20 +140,29 @@ private static void setupArchiveUsersLinux(Path installPath) { "elasticsearch"); } } - sh.bash("chown -R elasticsearch:elasticsearch " + installPath); + sh.run("chown -R elasticsearch:elasticsearch " + installPath); + } + + private static void setupArchiveUsersWindows(Path installPath) { + // we want the installation to be owned as the vagrant user rather than the Administrators group + + final Shell sh = new Shell(); + sh.run( + "$account = New-Object System.Security.Principal.NTAccount 'vagrant'; " + 
+ "$install = Get-ChildItem -Path '" + installPath + "' -Recurse; " + + "$install += Get-Item -Path '" + installPath + "'; " + + "$install | ForEach-Object { " + + "$acl = Get-Acl $_.FullName; " + + "$acl.SetOwner($account); " + + "Set-Acl $_.FullName $acl " + + "}" + ); } public static void verifyArchiveInstallation(Installation installation, Distribution distribution) { - // on Windows for now we leave the installation owned by the vagrant user that the tests run as. Since the vagrant account - // is a local administrator, the files really end up being owned by the local administrators group. In the future we'll - // install and run elasticesearch with a role user on Windows - final String owner = Platforms.WINDOWS - ? "BUILTIN\\Administrators" - : "elasticsearch"; - - verifyOssInstallation(installation, distribution, owner); + verifyOssInstallation(installation, distribution, ARCHIVE_OWNER); if (distribution.flavor == Distribution.Flavor.DEFAULT) { - verifyDefaultInstallation(installation, distribution, owner); + verifyDefaultInstallation(installation, distribution, ARCHIVE_OWNER); } } @@ -160,38 +178,38 @@ private static void verifyOssInstallation(Installation es, Distribution distribu assertThat(Files.exists(es.data), is(false)); assertThat(Files.exists(es.scripts), is(false)); - assertThat(es.home.resolve("bin"), file(Directory, owner, owner, p755)); - assertThat(es.home.resolve("lib"), file(Directory, owner, owner, p755)); - assertThat(Files.exists(es.config.resolve("elasticsearch.keystore")), is(false)); + assertThat(es.bin, file(Directory, owner, owner, p755)); + assertThat(es.lib, file(Directory, owner, owner, p755)); + assertThat(Files.exists(es.config("elasticsearch.keystore")), is(false)); Stream.of( - "bin/elasticsearch", - "bin/elasticsearch-env", - "bin/elasticsearch-keystore", - "bin/elasticsearch-plugin", - "bin/elasticsearch-translog" + "elasticsearch", + "elasticsearch-env", + "elasticsearch-keystore", + "elasticsearch-plugin", + "elasticsearch-translog" ).forEach(executable -> { - assertThat(es.home.resolve(executable), file(File, owner, owner, p755)); + assertThat(es.bin(executable), file(File, owner, owner, p755)); if (distribution.packaging == Distribution.Packaging.ZIP) { - assertThat(es.home.resolve(executable + ".bat"), file(File, owner)); + assertThat(es.bin(executable + ".bat"), file(File, owner)); } }); if (distribution.packaging == Distribution.Packaging.ZIP) { Stream.of( - "bin/elasticsearch-service.bat", - "bin/elasticsearch-service-mgr.exe", - "bin/elasticsearch-service-x64.exe" - ).forEach(executable -> assertThat(es.home.resolve(executable), file(File, owner))); + "elasticsearch-service.bat", + "elasticsearch-service-mgr.exe", + "elasticsearch-service-x64.exe" + ).forEach(executable -> assertThat(es.bin(executable), file(File, owner))); } Stream.of( "elasticsearch.yml", "jvm.options", "log4j2.properties" - ).forEach(config -> assertThat(es.config.resolve(config), file(File, owner, owner, p660))); + ).forEach(config -> assertThat(es.config(config), file(File, owner, owner, p660))); Stream.of( "NOTICE.txt", @@ -203,30 +221,30 @@ private static void verifyOssInstallation(Installation es, Distribution distribu private static void verifyDefaultInstallation(Installation es, Distribution distribution, String owner) { Stream.of( - "bin/elasticsearch-certgen", - "bin/elasticsearch-certutil", - "bin/elasticsearch-croneval", - "bin/elasticsearch-migrate", - "bin/elasticsearch-saml-metadata", - "bin/elasticsearch-setup-passwords", - "bin/elasticsearch-sql-cli", - 
"bin/elasticsearch-syskeygen", - "bin/elasticsearch-users", - "bin/x-pack-env", - "bin/x-pack-security-env", - "bin/x-pack-watcher-env" + "elasticsearch-certgen", + "elasticsearch-certutil", + "elasticsearch-croneval", + "elasticsearch-migrate", + "elasticsearch-saml-metadata", + "elasticsearch-setup-passwords", + "elasticsearch-sql-cli", + "elasticsearch-syskeygen", + "elasticsearch-users", + "x-pack-env", + "x-pack-security-env", + "x-pack-watcher-env" ).forEach(executable -> { - assertThat(es.home.resolve(executable), file(File, owner, owner, p755)); + assertThat(es.bin(executable), file(File, owner, owner, p755)); if (distribution.packaging == Distribution.Packaging.ZIP) { - assertThat(es.home.resolve(executable + ".bat"), file(File, owner)); + assertThat(es.bin(executable + ".bat"), file(File, owner)); } }); // at this time we only install the current version of archive distributions, but if that changes we'll need to pass // the version through here - assertThat(es.home.resolve("bin/elasticsearch-sql-cli-" + getCurrentVersion() + ".jar"), file(File, owner, owner, p755)); + assertThat(es.bin("elasticsearch-sql-cli-" + getCurrentVersion() + ".jar"), file(File, owner, owner, p755)); Stream.of( "users", @@ -234,7 +252,72 @@ private static void verifyDefaultInstallation(Installation es, Distribution dist "roles.yml", "role_mapping.yml", "log4j2.properties" - ).forEach(config -> assertThat(es.config.resolve(config), file(File, owner, owner, p660))); + ).forEach(config -> assertThat(es.config(config), file(File, owner, owner, p660))); + } + + public static void runElasticsearch(Installation installation) throws IOException { + runElasticsearch(installation, new Shell()); + } + + public static void runElasticsearch(Installation installation, Shell sh) throws IOException { + final Path pidFile = installation.home.resolve("elasticsearch.pid"); + + final Installation.Executables bin = installation.executables(); + + Platforms.onLinux(() -> { + // If jayatana is installed then we try to use it. Elasticsearch should ignore it even when we try. + // If it doesn't ignore it then Elasticsearch will fail to start because of security errors. + // This line is attempting to emulate the on login behavior of /usr/share/upstart/sessions/jayatana.conf + if (Files.exists(Paths.get("/usr/share/java/jayatanaag.jar"))) { + sh.getEnv().put("JAVA_TOOL_OPTIONS", "-javaagent:/usr/share/java/jayatanaag.jar"); + } + sh.run("sudo -E -u " + ARCHIVE_OWNER + " " + + bin.elasticsearch + " -d -p " + installation.home.resolve("elasticsearch.pid")); + }); + + Platforms.onWindows(() -> { + // this starts the server in the background. the -d flag is unsupported on windows + // these tests run as Administrator. we don't want to run the server as Administrator, so we provide the current user's + // username and password to the process which has the effect of starting it not as Administrator. 
+ sh.run( + "$password = ConvertTo-SecureString 'vagrant' -AsPlainText -Force; " + + "$processInfo = New-Object System.Diagnostics.ProcessStartInfo; " + + "$processInfo.FileName = '" + bin.elasticsearch + "'; " + + "$processInfo.Arguments = '-p " + installation.home.resolve("elasticsearch.pid") + "'; " + + "$processInfo.Username = 'vagrant'; " + + "$processInfo.Password = $password; " + + "$processInfo.RedirectStandardOutput = $true; " + + "$processInfo.RedirectStandardError = $true; " + + sh.env.entrySet().stream() + .map(entry -> "$processInfo.Environment.Add('" + entry.getKey() + "', '" + entry.getValue() + "'); ") + .collect(joining()) + + "$processInfo.UseShellExecute = $false; " + + "$process = New-Object System.Diagnostics.Process; " + + "$process.StartInfo = $processInfo; " + + "$process.Start() | Out-Null; " + + "$process.Id;" + ); + }); + + ServerUtils.waitForElasticsearch(); + + assertTrue(Files.exists(pidFile)); + String pid = slurp(pidFile).trim(); + assertThat(pid, not(isEmptyOrNullString())); + + Platforms.onLinux(() -> sh.run("ps " + pid)); + Platforms.onWindows(() -> sh.run("Get-Process -Id " + pid)); + } + + public static void stopElasticsearch(Installation installation) { + Path pidFile = installation.home.resolve("elasticsearch.pid"); + assertTrue(Files.exists(pidFile)); + String pid = slurp(pidFile).trim(); + assertThat(pid, not(isEmptyOrNullString())); + + final Shell sh = new Shell(); + Platforms.onLinux(() -> sh.run("kill -SIGTERM " + pid)); + Platforms.onWindows(() -> sh.run("Get-Process -Id " + pid + " | Stop-Process -Force")); } } diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Cleanup.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Cleanup.java index a775a23a19490..4ff2998988c5f 100644 --- a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Cleanup.java +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Cleanup.java @@ -56,31 +56,28 @@ public static void cleanEverything() { final Shell sh = new Shell(); // kill elasticsearch processes - if (Platforms.WINDOWS) { + Platforms.onLinux(() -> { + sh.runIgnoreExitCode("pkill -u elasticsearch"); + sh.runIgnoreExitCode("ps aux | grep -i 'org.elasticsearch.bootstrap.Elasticsearch' | awk {'print $2'} | xargs kill -9"); + }); + Platforms.onWindows(() -> { // the view of processes returned by Get-Process doesn't expose command line arguments, so we use WMI here - sh.powershellIgnoreExitCode( + sh.runIgnoreExitCode( "Get-WmiObject Win32_Process | " + "Where-Object { $_.CommandLine -Match 'org.elasticsearch.bootstrap.Elasticsearch' } | " + "ForEach-Object { $_.Terminate() }" ); + }); - } else { - - sh.bashIgnoreExitCode("pkill -u elasticsearch"); - sh.bashIgnoreExitCode("ps aux | grep -i 'org.elasticsearch.bootstrap.Elasticsearch' | awk {'print $2'} | xargs kill -9"); - - } - - if (Platforms.LINUX) { - purgePackagesLinux(); - } + Platforms.onLinux(Cleanup::purgePackagesLinux); // remove elasticsearch users - if (Platforms.LINUX) { - sh.bashIgnoreExitCode("userdel elasticsearch"); - sh.bashIgnoreExitCode("groupdel elasticsearch"); - } + Platforms.onLinux(() -> { + sh.runIgnoreExitCode("userdel elasticsearch"); + sh.runIgnoreExitCode("groupdel elasticsearch"); + }); + // when we run es as a role user on windows, add the equivalent here // delete files that may still exist lsGlob(getTempDir(), "elasticsearch*").forEach(FileUtils::rm); @@ -95,7 +92,7 @@ public static void cleanEverything() { // disable elasticsearch service // todo add this for windows when adding tests 
for service installation
         if (Platforms.LINUX && isSystemd()) {
-            sh.bash("systemctl unmask systemd-sysctl.service");
+            sh.run("systemctl unmask systemd-sysctl.service");
         }
     }

@@ -103,19 +100,19 @@ private static void purgePackagesLinux() {
         final Shell sh = new Shell();

         if (isRPM()) {
-            sh.bashIgnoreExitCode("rpm --quiet -e elasticsearch elasticsearch-oss");
+            sh.runIgnoreExitCode("rpm --quiet -e elasticsearch elasticsearch-oss");
         }
         if (isYUM()) {
-            sh.bashIgnoreExitCode("yum remove -y elasticsearch elasticsearch-oss");
+            sh.runIgnoreExitCode("yum remove -y elasticsearch elasticsearch-oss");
         }
         if (isDPKG()) {
-            sh.bashIgnoreExitCode("dpkg --purge elasticsearch elasticsearch-oss");
+            sh.runIgnoreExitCode("dpkg --purge elasticsearch elasticsearch-oss");
         }
         if (isAptGet()) {
-            sh.bashIgnoreExitCode("apt-get --quiet --yes purge elasticsearch elasticsearch-oss");
+            sh.runIgnoreExitCode("apt-get --quiet --yes purge elasticsearch elasticsearch-oss");
         }
     }
 }
diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/FileUtils.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/FileUtils.java
index ad826675244a0..315dc6ffee1f9 100644
--- a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/FileUtils.java
+++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/FileUtils.java
@@ -21,11 +21,14 @@

 import org.elasticsearch.core.internal.io.IOUtils;

+import java.io.BufferedWriter;
 import java.io.IOException;
+import java.nio.charset.StandardCharsets;
 import java.nio.file.DirectoryStream;
 import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.Paths;
+import java.nio.file.StandardOpenOption;
 import java.nio.file.attribute.BasicFileAttributes;
 import java.nio.file.attribute.FileOwnerAttributeView;
 import java.nio.file.attribute.PosixFileAttributes;
@@ -63,6 +66,22 @@ public static void rm(Path...
paths) { } } + public static Path mkdir(Path path) { + try { + return Files.createDirectories(path); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + + public static Path cp(Path source, Path target) { + try { + return Files.copy(source, target); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + public static Path mv(Path source, Path target) { try { return Files.move(source, target); @@ -71,9 +90,19 @@ public static Path mv(Path source, Path target) { } } + public static void append(Path file, String text) { + try (BufferedWriter writer = Files.newBufferedWriter(file, StandardCharsets.UTF_8, + StandardOpenOption.CREATE, StandardOpenOption.APPEND)) { + + writer.write(text); + } catch (IOException e) { + throw new RuntimeException(e); + } + } + public static String slurp(Path file) { try { - return String.join("\n", Files.readAllLines(file)); + return String.join("\n", Files.readAllLines(file, StandardCharsets.UTF_8)); } catch (IOException e) { throw new RuntimeException(e); } diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Installation.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Installation.java index d231762d06227..68da440400a36 100644 --- a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Installation.java +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Installation.java @@ -27,6 +27,8 @@ public class Installation { public final Path home; + public final Path bin; // this isn't a first-class installation feature but we include it for convenience + public final Path lib; // same public final Path config; public final Path data; public final Path logs; @@ -36,6 +38,9 @@ public class Installation { public Installation(Path home, Path config, Path data, Path logs, Path plugins, Path modules, Path scripts) { this.home = home; + this.bin = home.resolve("bin"); + this.lib = home.resolve("lib"); + this.config = config; this.data = data; this.logs = logs; @@ -55,4 +60,31 @@ public Installation(Path home) { home.resolve("scripts") ); } + + public Path bin(String executableName) { + return bin.resolve(executableName); + } + + public Path config(String configFileName) { + return config.resolve(configFileName); + } + + public Executables executables() { + return new Executables(); + } + + public class Executables { + + public final Path elasticsearch = platformExecutable("elasticsearch"); + public final Path elasticsearchPlugin = platformExecutable("elasticsearch-plugin"); + public final Path elasticsearchKeystore = platformExecutable("elasticsearch-keystore"); + public final Path elasticsearchTranslog = platformExecutable("elasticsearch-translog"); + + private Path platformExecutable(String name) { + final String platformExecutableName = Platforms.WINDOWS + ? 
name + ".bat" + : name; + return bin(platformExecutableName); + } + } } diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Platforms.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Platforms.java index 202c025ae8a55..5ffbc31820022 100644 --- a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Platforms.java +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Platforms.java @@ -28,41 +28,61 @@ public static boolean isDPKG() { if (WINDOWS) { return false; } - return new Shell().bashIgnoreExitCode("which dpkg").isSuccess(); + return new Shell().runIgnoreExitCode("which dpkg").isSuccess(); } public static boolean isAptGet() { if (WINDOWS) { return false; } - return new Shell().bashIgnoreExitCode("which apt-get").isSuccess(); + return new Shell().runIgnoreExitCode("which apt-get").isSuccess(); } public static boolean isRPM() { if (WINDOWS) { return false; } - return new Shell().bashIgnoreExitCode("which rpm").isSuccess(); + return new Shell().runIgnoreExitCode("which rpm").isSuccess(); } public static boolean isYUM() { if (WINDOWS) { return false; } - return new Shell().bashIgnoreExitCode("which yum").isSuccess(); + return new Shell().runIgnoreExitCode("which yum").isSuccess(); } public static boolean isSystemd() { if (WINDOWS) { return false; } - return new Shell().bashIgnoreExitCode("which systemctl").isSuccess(); + return new Shell().runIgnoreExitCode("which systemctl").isSuccess(); } public static boolean isSysVInit() { if (WINDOWS) { return false; } - return new Shell().bashIgnoreExitCode("which service").isSuccess(); + return new Shell().runIgnoreExitCode("which service").isSuccess(); + } + + public static void onWindows(PlatformAction action) { + if (WINDOWS) { + action.run(); + } + } + + public static void onLinux(PlatformAction action) { + if (LINUX) { + action.run(); + } + } + + /** + * Essentially a Runnable, but we make the distinction so it's more clear that these are synchronous + */ + @FunctionalInterface + public interface PlatformAction { + void run(); } } diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/ServerUtils.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/ServerUtils.java new file mode 100644 index 0000000000000..ff006a34e6892 --- /dev/null +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/ServerUtils.java @@ -0,0 +1,123 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
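A rough usage sketch of the Platforms.onLinux/onWindows helpers above, combined with the platform-dispatching Shell changes further down in this patch (the `sh` variable and the commands are illustrative, not part of this change):

    Shell sh = new Shell();
    // Shell.run dispatches to `bash -c` on Linux and `powershell.exe -Command` on Windows
    Platforms.onLinux(() -> sh.run("systemctl list-units --type=service"));
    Platforms.onWindows(() -> sh.run("Get-Service"));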
+ */ + +package org.elasticsearch.packaging.util; + +import org.apache.commons.logging.Log; +import org.apache.commons.logging.LogFactory; +import org.apache.http.HttpResponse; +import org.apache.http.client.fluent.Request; +import org.apache.http.conn.HttpHostConnectException; +import org.apache.http.entity.ContentType; +import org.apache.http.util.EntityUtils; + +import java.io.IOException; +import java.util.Objects; +import java.util.concurrent.TimeUnit; + +import static org.hamcrest.MatcherAssert.assertThat; +import static org.hamcrest.Matchers.containsString; + +public class ServerUtils { + + private static final Log LOG = LogFactory.getLog(ServerUtils.class); + + private static final long waitTime = TimeUnit.SECONDS.toMillis(60); + private static final long timeoutLength = TimeUnit.SECONDS.toMillis(10); + + public static void waitForElasticsearch() throws IOException { + waitForElasticsearch("green", null); + } + + public static void waitForElasticsearch(String status, String index) throws IOException { + + Objects.requireNonNull(status); + + // we loop here rather than letting httpclient handle retries so we can measure the entire waiting time + final long startTime = System.currentTimeMillis(); + long timeElapsed = 0; + boolean started = false; + while (started == false && timeElapsed < waitTime) { + try { + + final HttpResponse response = Request.Get("http://localhost:9200/_cluster/health") + .connectTimeout((int) timeoutLength) + .socketTimeout((int) timeoutLength) + .execute() + .returnResponse(); + + if (response.getStatusLine().getStatusCode() >= 300) { + final String statusLine = response.getStatusLine().toString(); + final String body = EntityUtils.toString(response.getEntity()); + throw new RuntimeException("Connecting to elasticsearch cluster health API failed:\n" + statusLine+ "\n" + body); + } + + started = true; + + } catch (HttpHostConnectException e) { + // we want to retry if the connection is refused + LOG.info("Got connection refused when waiting for cluster health", e); + } + + timeElapsed = System.currentTimeMillis() - startTime; + } + + if (started == false) { + throw new RuntimeException("Elasticsearch did not start"); + } + + final String url; + if (index == null) { + url = "http://localhost:9200/_cluster/health?wait_for_status=" + status + "&timeout=60s&pretty"; + } else { + url = "http://localhost:9200/_cluster/health/" + index + "?wait_for_status=" + status + "&timeout=60s&pretty"; + + } + + final String body = makeRequest(Request.Get(url)); + assertThat("cluster health response must contain desired status", body, containsString(status)); + } + + public static void runElasticsearchTests() throws IOException { + makeRequest( + Request.Post("http://localhost:9200/library/book/1?refresh=true&pretty") + .bodyString("{ \"title\": \"Book #1\", \"pages\": 123 }", ContentType.APPLICATION_JSON)); + + makeRequest( + Request.Post("http://localhost:9200/library/book/2?refresh=true&pretty") + .bodyString("{ \"title\": \"Book #2\", \"pages\": 456 }", ContentType.APPLICATION_JSON)); + + String count = makeRequest(Request.Get("http://localhost:9200/_count?pretty")); + assertThat(count, containsString("\"count\" : 2")); + + makeRequest(Request.Delete("http://localhost:9200/_all")); + } + + public static String makeRequest(Request request) throws IOException { + final HttpResponse response = request.execute().returnResponse(); + final String body = EntityUtils.toString(response.getEntity()); + + if (response.getStatusLine().getStatusCode() >= 300) { + throw new 
RuntimeException("Request failed:\n" + response.getStatusLine().toString() + "\n" + body); + } + + return body; + + } +} diff --git a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Shell.java b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Shell.java index 9a908e2d680eb..5853bc2daa148 100644 --- a/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Shell.java +++ b/qa/vagrant/src/main/java/org/elasticsearch/packaging/util/Shell.java @@ -58,58 +58,50 @@ public Shell(Map env, Path workingDirectory) { this.workingDirectory = workingDirectory; } - /** - * Runs a script in a bash shell, throwing an exception if its exit code is nonzero - */ - public Result bash(String script) { - return run(bashCommand(script)); + public Map getEnv() { + return env; } /** - * Runs a script in a bash shell + * Run the provided string as a shell script. On Linux the {@code bash -c [script]} syntax will be used, and on Windows + * the {@code powershell.exe -Command [script]} syntax will be used. Throws an exception if the exit code of the script is nonzero */ - public Result bashIgnoreExitCode(String script) { - return runIgnoreExitCode(bashCommand(script)); - } - - private static String[] bashCommand(String script) { - return Stream.concat(Stream.of("bash", "-c"), Stream.of(script)).toArray(String[]::new); + public Result run(String script) { + return runScript(getScriptCommand(script)); } /** - * Runs a script in a powershell shell, throwing an exception if its exit code is nonzero + * Same as {@link #run(String)}, but does not throw an exception if the exit code of the script is nonzero */ - public Result powershell(String script) { - return run(powershellCommand(script)); + public Result runIgnoreExitCode(String script) { + return runScriptIgnoreExitCode(getScriptCommand(script)); } - /** - * Runs a script in a powershell shell - */ - public Result powershellIgnoreExitCode(String script) { - return runIgnoreExitCode(powershellCommand(script)); + private String[] getScriptCommand(String script) { + if (Platforms.WINDOWS) { + return powershellCommand(script); + } else { + return bashCommand(script); + } + } + + private static String[] bashCommand(String script) { + return Stream.concat(Stream.of("bash", "-c"), Stream.of(script)).toArray(String[]::new); } private static String[] powershellCommand(String script) { return Stream.concat(Stream.of("powershell.exe", "-Command"), Stream.of(script)).toArray(String[]::new); } - /** - * Runs an executable file, passing all elements of {@code command} after the first as arguments. 
Throws an exception if the process' - * exit code is nonzero - */ - private Result run(String[] command) { - Result result = runIgnoreExitCode(command); + private Result runScript(String[] command) { + Result result = runScriptIgnoreExitCode(command); if (result.isSuccess() == false) { throw new RuntimeException("Command was not successful: [" + String.join(" ", command) + "] result: " + result.toString()); } return result; } - /** - * Runs an executable file, passing all elements of {@code command} after the first as arguments - */ - private Result runIgnoreExitCode(String[] command) { + private Result runScriptIgnoreExitCode(String[] command) { ProcessBuilder builder = new ProcessBuilder(); builder.command(command); diff --git a/qa/vagrant/src/test/resources/packaging/tests/20_tar_package.bats b/qa/vagrant/src/test/resources/packaging/tests/20_tar_package.bats deleted file mode 100644 index 3607e4ab45ac6..0000000000000 --- a/qa/vagrant/src/test/resources/packaging/tests/20_tar_package.bats +++ /dev/null @@ -1,178 +0,0 @@ -#!/usr/bin/env bats - -# This file is used to test the tar gz package. - -# WARNING: This testing file must be executed as root and can -# dramatically change your system. It should only be executed -# in a throw-away VM like those made by the Vagrantfile at -# the root of the Elasticsearch source code. This should -# cause the script to fail if it is executed any other way: -[ -f /etc/is_vagrant_vm ] || { - >&2 echo "must be run on a vagrant VM" - exit 1 -} - -# The test case can be executed with the Bash Automated -# Testing System tool available at https://github.com/sstephenson/bats -# Thanks to Sam Stephenson! - -# Licensed to Elasticsearch under one or more contributor -# license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright -# ownership. Elasticsearch licenses this file to you under -# the Apache License, Version 2.0 (the "License"); you may -# not use this file except in compliance with the License. -# You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -# Load test utilities -load $BATS_UTILS/utils.bash -load $BATS_UTILS/tar.bash -load $BATS_UTILS/plugins.bash - -setup() { - skip_not_tar_gz - export ESHOME=/tmp/elasticsearch - export_elasticsearch_paths -} - -################################## -# Install TAR GZ package -################################## -@test "[TAR] tar command is available" { - # Cleans everything for the 1st execution - clean_before_test - run tar --version - [ "$status" -eq 0 ] -} - -@test "[TAR] archive is available" { - local version=$(cat version) - count=$(find . 
-type f -name "${PACKAGE_NAME}-${version}.tar.gz" | wc -l) - [ "$count" -eq 1 ] -} - -@test "[TAR] archive is not installed" { - count=$(find /tmp -type d -name 'elasticsearch*' | wc -l) - [ "$count" -eq 0 ] -} - -@test "[TAR] install archive" { - # Install the archive - install_archive - set_debug_logging - - count=$(find /tmp -type d -name 'elasticsearch*' | wc -l) - [ "$count" -eq 1 ] - - # Its simpler to check that the install was correct in this test rather - # than in another test because install_archive sets a number of path - # variables that verify_archive_installation reads. To separate this into - # another test you'd have to recreate the variables. - verify_archive_installation -} - -@test "[TAR] verify elasticsearch-plugin list runs without any plugins installed" { - # previously this would fail because the archive installations did - # not create an empty plugins directory - local plugins_list=`$ESHOME/bin/elasticsearch-plugin list` - [[ -z $plugins_list ]] -} - -@test "[TAR] elasticsearch fails if java executable is not found" { - local JAVA=$(which java) - - sudo chmod -x $JAVA - run "$ESHOME/bin/elasticsearch" - sudo chmod +x $JAVA - - [ "$status" -eq 1 ] - local expected="could not find java; set JAVA_HOME or ensure java is in PATH" - [[ "$output" == *"$expected"* ]] || { - echo "Expected error message [$expected] but found: $output" - false - } -} - -@test "[TAR] test creating elasticearch.keystore" { - sudo -E -u elasticsearch "$ESHOME/bin/elasticsearch-keystore" create - assert_file "$ESCONFIG/elasticsearch.keystore" f elasticsearch elasticsearch 660 - sudo -E -u elasticsearch "$ESHOME/bin/elasticsearch-keystore" list | grep "keystore.seed" - # cleanup for the next test - rm -rf "$ESCONFIG/elasticsearch.keystore" -} - -################################## -# Check that Elasticsearch is working -################################## -@test "[TAR] test elasticsearch" { - start_elasticsearch_service - run_elasticsearch_tests - stop_elasticsearch_service -} - -@test "[TAR] test auto-creating elasticearch.keystore" { - # a keystore should automatically be created after the service is started - assert_file "$ESCONFIG/elasticsearch.keystore" f elasticsearch elasticsearch 660 - # the keystore should be seeded - sudo -E -u elasticsearch "$ESHOME/bin/elasticsearch-keystore" list | grep "keystore.seed" -} - -@test "[TAR] start Elasticsearch with custom JVM options" { - local es_java_opts=$ES_JAVA_OPTS - local es_path_conf=$ES_PATH_CONF - local temp=`mktemp -d` - cp "$ESCONFIG"/elasticsearch.yml "$temp" - cp "$ESCONFIG"/log4j2.properties "$temp" - touch "$temp/jvm.options" - chown -R elasticsearch:elasticsearch "$temp" - echo "-Xms512m" >> "$temp/jvm.options" - echo "-Xmx512m" >> "$temp/jvm.options" - # we have to disable Log4j from using JMX lest it will hit a security - # manager exception before we have configured logging; this will fail - # startup since we detect usages of logging before it is configured - echo "-Dlog4j2.disable.jmx=true" >> "$temp/jvm.options" - export ES_PATH_CONF="$temp" - export ES_JAVA_OPTS="-XX:-UseCompressedOops" - start_elasticsearch_service - curl -s -XGET localhost:9200/_nodes | fgrep '"heap_init_in_bytes":536870912' - curl -s -XGET localhost:9200/_nodes | fgrep '"using_compressed_ordinary_object_pointers":"false"' - stop_elasticsearch_service - export ES_PATH_CONF=$es_path_conf - export ES_JAVA_OPTS=$es_java_opts -} - -@test "[TAR] GC logs exist" { - start_elasticsearch_service - assert_file_exist $ESHOME/logs/gc.log.0.current - 
stop_elasticsearch_service -} - -@test "[TAR] relative ES_PATH_CONF" { - local es_path_conf=$ES_PATH_CONF - local temp=`mktemp -d` - mkdir "$temp"/config - cp "$ESCONFIG"/elasticsearch.yml "$temp"/config - cp "$ESCONFIG"/log4j2.properties "$temp"/config - cp "$ESCONFIG/jvm.options" "$temp/config" - chown -R elasticsearch:elasticsearch "$temp" - echo "node.name: relative" >> "$temp"/config/elasticsearch.yml - cd "$temp" - export ES_PATH_CONF=config - start_elasticsearch_service - curl -s -XGET localhost:9200/_nodes | fgrep '"name":"relative"' - stop_elasticsearch_service - export ES_PATH_CONF=$es_path_conf -} - -@test "[TAR] remove tar" { - rm -rf "/tmp/elasticsearch" -} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_alias/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_alias/10_basic.yml index 9978bb219f699..dd0628ea993ee 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_alias/10_basic.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.put_alias/10_basic.yml @@ -56,3 +56,27 @@ indices.put_alias: index: test_index name: foo + +--- +"Can set is_write_index": + + - skip: + version: " - 6.3.99" + reason: "is_write_index is only available from 6.4.0 on" + + - do: + indices.create: + index: test_index + + - do: + indices.put_alias: + index: test_index + name: test_alias + body: + is_write_index: true + + - do: + indices.get_alias: + index: test_index + name: test_alias + - match: {test_index.aliases.test_alias: { 'is_write_index': true }} diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/suggest/30_context.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/suggest/30_context.yml index dfb849fff5700..1bf70e7dc428f 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/suggest/30_context.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/suggest/30_context.yml @@ -337,11 +337,10 @@ setup: - match: { suggest.result.0.options.0.text: "foo" } --- -"Indexing and Querying without contexts is deprecated": +"Indexing and Querying without contexts is forbidden": - skip: version: " - 6.99.99" - reason: this feature was deprecated in 7.0 - features: "warnings" + reason: this feature was removed in 7.0 - do: index: @@ -359,8 +358,7 @@ setup: color: "blue" - do: - warnings: - - "The ability to index a suggestion with no context on a context enabled completion field is deprecated and will be removed in the next major release." + catch: /Contexts are mandatory in context enabled completion field \[suggest_context\]/ index: index: test type: test @@ -373,9 +371,9 @@ setup: indices.refresh: {} - do: - warnings: - - "The ability to query with no context on a context enabled completion field is deprecated and will be removed in the next major release." + catch: /Missing mandatory contexts in context query/ search: + allow_partial_search_results: false body: suggest: result: @@ -383,12 +381,10 @@ setup: completion: field: suggest_context - - length: { suggest.result: 1 } - - do: - warnings: - - "The ability to query with no context on a context enabled completion field is deprecated and will be removed in the next major release." 
+ catch: /Missing mandatory contexts in context query/ search: + allow_partial_search_results: false body: suggest: result: @@ -397,12 +393,10 @@ setup: field: suggest_context contexts: {} - - length: { suggest.result: 1 } - - do: - warnings: - - "The ability to query with no context on a context enabled completion field is deprecated and will be removed in the next major release." + catch: /Missing mandatory contexts in context query/ search: + allow_partial_search_results: false body: suggest: result: @@ -411,5 +405,3 @@ setup: field: suggest_multi_contexts contexts: location: [] - - - length: { suggest.result: 1 } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/suggest/40_typed_keys.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/suggest/40_typed_keys.yml index 6e799c2bfc500..44cd6d589c26a 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/suggest/40_typed_keys.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/suggest/40_typed_keys.yml @@ -19,8 +19,6 @@ setup: "type" : "category" - do: - warnings: - - "The ability to index a suggestion with no context on a context enabled completion field is deprecated and will be removed in the next major release." bulk: refresh: true index: test @@ -29,20 +27,12 @@ setup: - '{"index": {}}' - '{"title": "Elasticsearch in Action", "suggestions": {"input": "ELK in Action", "contexts": {"format": "ebook"}}}' - '{"index": {}}' - - '{"title": "Elasticsearch - The Definitive Guide", "suggestions": {"input": ["Elasticsearch in Action"]}}' + - '{"title": "Elasticsearch - The Definitive Guide", "suggestions": {"input": ["Elasticsearch in Action"], "contexts": {"format": "ebook"}}}' --- "Test typed keys parameter for suggesters": - - skip: -# version: " - 6.99.99" -# reason: querying a context suggester with no context was deprecated in 7.0 - version: "all" - reason: "Awaiting a fix: https://github.com/elastic/elasticsearch/issues/31698" - features: "warnings" - do: - warnings: - - "The ability to query with no context on a context enabled completion field is deprecated and will be removed in the next major release." 
search: typed_keys: true body: @@ -53,10 +43,6 @@ setup: term_suggester: term: field: title - completion_suggester: - prefix: "Elastic" - completion: - field: suggestions context_suggester: prefix: "Elastic" completion: @@ -68,6 +54,5 @@ setup: field: title - is_true: suggest.term#term_suggester - - is_true: suggest.completion#completion_suggester - is_true: suggest.completion#context_suggester - is_true: suggest.phrase#phrase_suggester diff --git a/server/build.gradle b/server/build.gradle index 3055c625ea914..da60bca5a3e81 100644 --- a/server/build.gradle +++ b/server/build.gradle @@ -329,7 +329,7 @@ if (isEclipse == false || project.path == ":server-tests") { task integTest(type: RandomizedTestingTask, group: JavaBasePlugin.VERIFICATION_GROUP, description: 'Multi-node tests', - dependsOn: test.dependsOn) { + dependsOn: test.dependsOn.collect()) { configure(BuildPlugin.commonTestConfig(project)) classpath = project.test.classpath testClassesDirs = project.test.testClassesDirs @@ -338,3 +338,13 @@ if (isEclipse == false || project.path == ":server-tests") { check.dependsOn integTest integTest.mustRunAfter test } + +// TODO: remove ScriptDocValuesMissingV6BehaviourTests in 7.0 +additionalTest('testScriptDocValuesMissingV6Behaviour'){ + include '**/ScriptDocValuesMissingV6BehaviourTests.class' + systemProperty 'es.scripting.exception_for_missing_value', 'false' +} +test { + // these are tested explicitly in separate test tasks + exclude '**/*ScriptDocValuesMissingV6BehaviourTests.class' +} diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexShardStatus.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexShardStatus.java index 39abd8613caa4..834e238e4a0d3 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexShardStatus.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexShardStatus.java @@ -19,16 +19,27 @@ package org.elasticsearch.action.admin.cluster.snapshots.status; +import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.action.support.broadcast.BroadcastShardResponse; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentParserUtils; +import org.elasticsearch.index.Index; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.snapshots.IndexShardSnapshotStatus; import java.io.IOException; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; + public class SnapshotIndexShardStatus extends BroadcastShardResponse implements ToXContentFragment { private SnapshotIndexShardStage stage = SnapshotIndexShardStage.INIT; @@ -80,6 +91,14 @@ private SnapshotIndexShardStatus() { this.nodeId = nodeId; } + SnapshotIndexShardStatus(ShardId shardId, SnapshotIndexShardStage stage, SnapshotStats stats, String nodeId, String failure) { + 
super(shardId); + this.stage = stage; + this.stats = stats; + this.nodeId = nodeId; + this.failure = failure; + } + /** * Returns snapshot stage */ @@ -143,7 +162,7 @@ static final class Fields { public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(Integer.toString(getShardId().getId())); builder.field(Fields.STAGE, getStage()); - stats.toXContent(builder, params); + builder.field(SnapshotStats.Fields.STATS, stats, params); if (getNodeId() != null) { builder.field(Fields.NODE, getNodeId()); } @@ -153,4 +172,72 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.endObject(); return builder; } + + static final ObjectParser.NamedObjectParser PARSER; + static { + ConstructingObjectParser innerParser = new ConstructingObjectParser<>( + "snapshot_index_shard_status", true, + (Object[] parsedObjects, ShardId shard) -> { + int i = 0; + String rawStage = (String) parsedObjects[i++]; + String nodeId = (String) parsedObjects[i++]; + String failure = (String) parsedObjects[i++]; + SnapshotStats stats = (SnapshotStats) parsedObjects[i]; + + SnapshotIndexShardStage stage; + try { + stage = SnapshotIndexShardStage.valueOf(rawStage); + } catch (IllegalArgumentException iae) { + throw new ElasticsearchParseException( + "failed to parse snapshot index shard status [{}][{}], unknown stage [{}]", + shard.getIndex().getName(), shard.getId(), rawStage); + } + return new SnapshotIndexShardStatus(shard, stage, stats, nodeId, failure); + } + ); + innerParser.declareString(constructorArg(), new ParseField(Fields.STAGE)); + innerParser.declareString(optionalConstructorArg(), new ParseField(Fields.NODE)); + innerParser.declareString(optionalConstructorArg(), new ParseField(Fields.REASON)); + innerParser.declareObject(constructorArg(), (p, c) -> SnapshotStats.fromXContent(p), new ParseField(SnapshotStats.Fields.STATS)); + PARSER = (p, indexId, shardName) -> { + // Combine the index name in the context with the shard name passed in for the named object parser + // into a ShardId to pass as context for the inner parser. + int shard; + try { + shard = Integer.parseInt(shardName); + } catch (NumberFormatException nfe) { + throw new ElasticsearchParseException( + "failed to parse snapshot index shard status [{}], expected numeric shard id but got [{}]", indexId, shardName); + } + ShardId shardId = new ShardId(new Index(indexId, IndexMetaData.INDEX_UUID_NA_VALUE), shard); + return innerParser.parse(p, shardId); + }; + } + + public static SnapshotIndexShardStatus fromXContent(XContentParser parser, String indexId) throws IOException { + XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser::getTokenLocation); + return PARSER.parse(parser, indexId, parser.currentName()); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + SnapshotIndexShardStatus that = (SnapshotIndexShardStatus) o; + + if (stage != that.stage) return false; + if (stats != null ? !stats.equals(that.stats) : that.stats != null) return false; + if (nodeId != null ? !nodeId.equals(that.nodeId) : that.nodeId != null) return false; + return failure != null ? failure.equals(that.failure) : that.failure == null; + } + + @Override + public int hashCode() { + int result = stage != null ? stage.hashCode() : 0; + result = 31 * result + (stats != null ? stats.hashCode() : 0); + result = 31 * result + (nodeId != null ? 
nodeId.hashCode() : 0); + result = 31 * result + (failure != null ? failure.hashCode() : 0); + return result; + } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexStatus.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexStatus.java index 1605e41dc61be..ba85849598060 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexStatus.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexStatus.java @@ -19,17 +19,24 @@ package org.elasticsearch.action.admin.cluster.snapshots.status; -import org.elasticsearch.common.xcontent.ToXContent.Params; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContentFragment; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentParserUtils; import java.io.IOException; import java.util.Collection; import java.util.HashMap; import java.util.Iterator; +import java.util.List; import java.util.Map; +import static java.util.Collections.emptyMap; import static java.util.Collections.unmodifiableMap; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; /** * Represents snapshot status of all shards in the index @@ -57,6 +64,14 @@ public class SnapshotIndexStatus implements Iterable, this.indexShards = unmodifiableMap(indexShards); } + public SnapshotIndexStatus(String index, Map indexShards, SnapshotShardsStats shardsStats, + SnapshotStats stats) { + this.index = index; + this.indexShards = indexShards; + this.shardsStats = shardsStats; + this.stats = stats; + } + /** * Returns the index name */ @@ -97,8 +112,8 @@ static final class Fields { @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(getIndex()); - shardsStats.toXContent(builder, params); - stats.toXContent(builder, params); + builder.field(SnapshotShardsStats.Fields.SHARDS_STATS, shardsStats, params); + builder.field(SnapshotStats.Fields.STATS, stats, params); builder.startObject(Fields.SHARDS); for (SnapshotIndexShardStatus shard : indexShards.values()) { shard.toXContent(builder, params); @@ -107,4 +122,61 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws builder.endObject(); return builder; } + + static final ObjectParser.NamedObjectParser PARSER; + static { + ConstructingObjectParser innerParser = new ConstructingObjectParser<>( + "snapshot_index_status", true, + (Object[] parsedObjects, String index) -> { + int i = 0; + SnapshotShardsStats shardsStats = ((SnapshotShardsStats) parsedObjects[i++]); + SnapshotStats stats = ((SnapshotStats) parsedObjects[i++]); + @SuppressWarnings("unchecked") List shardStatuses = + (List) parsedObjects[i]; + + final Map indexShards; + if (shardStatuses == null || shardStatuses.isEmpty()) { + indexShards = emptyMap(); + } else { + indexShards = new HashMap<>(shardStatuses.size()); + for (SnapshotIndexShardStatus shardStatus : shardStatuses) { + indexShards.put(shardStatus.getShardId().getId(), shardStatus); + } + } + return new SnapshotIndexStatus(index, indexShards, shardsStats, stats); + }); + innerParser.declareObject(constructorArg(), (p, c) -> 
SnapshotShardsStats.PARSER.apply(p, null), + new ParseField(SnapshotShardsStats.Fields.SHARDS_STATS)); + innerParser.declareObject(constructorArg(), (p, c) -> SnapshotStats.fromXContent(p), + new ParseField(SnapshotStats.Fields.STATS)); + innerParser.declareNamedObjects(constructorArg(), SnapshotIndexShardStatus.PARSER, new ParseField(Fields.SHARDS)); + PARSER = ((p, c, name) -> innerParser.apply(p, name)); + } + + public static SnapshotIndexStatus fromXContent(XContentParser parser) throws IOException { + XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.currentToken(), parser::getTokenLocation); + return PARSER.parse(parser, null, parser.currentName()); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + SnapshotIndexStatus that = (SnapshotIndexStatus) o; + + if (index != null ? !index.equals(that.index) : that.index != null) return false; + if (indexShards != null ? !indexShards.equals(that.indexShards) : that.indexShards != null) return false; + if (shardsStats != null ? !shardsStats.equals(that.shardsStats) : that.shardsStats != null) return false; + return stats != null ? stats.equals(that.stats) : that.stats == null; + } + + @Override + public int hashCode() { + int result = index != null ? index.hashCode() : 0; + result = 31 * result + (indexShards != null ? indexShards.hashCode() : 0); + result = 31 * result + (shardsStats != null ? shardsStats.hashCode() : 0); + result = 31 * result + (stats != null ? stats.hashCode() : 0); + return result; + } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotShardsStats.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotShardsStats.java index c74dd5af1eec9..c0ac432292ddc 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotShardsStats.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotShardsStats.java @@ -19,17 +19,22 @@ package org.elasticsearch.action.admin.cluster.snapshots.status; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ToXContent; -import org.elasticsearch.common.xcontent.ToXContentFragment; +import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; import java.util.Collection; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; + /** * Status of a snapshot shards */ -public class SnapshotShardsStats implements ToXContentFragment { +public class SnapshotShardsStats implements ToXContentObject { private int initializingShards; private int startedShards; @@ -63,6 +68,16 @@ public class SnapshotShardsStats implements ToXContentFragment { } } + public SnapshotShardsStats(int initializingShards, int startedShards, int finalizingShards, int doneShards, int failedShards, + int totalShards) { + this.initializingShards = initializingShards; + this.startedShards = startedShards; + this.finalizingShards = finalizingShards; + this.doneShards = doneShards; + this.failedShards = failedShards; + this.totalShards = totalShards; + } + /** * Number of shards with the snapshot in the initializing stage */ @@ -117,15 +132,68 @@ static final class Fields { @Override 
public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(Fields.SHARDS_STATS); - builder.field(Fields.INITIALIZING, getInitializingShards()); - builder.field(Fields.STARTED, getStartedShards()); - builder.field(Fields.FINALIZING, getFinalizingShards()); - builder.field(Fields.DONE, getDoneShards()); - builder.field(Fields.FAILED, getFailedShards()); - builder.field(Fields.TOTAL, getTotalShards()); + builder.startObject(); + { + builder.field(Fields.INITIALIZING, getInitializingShards()); + builder.field(Fields.STARTED, getStartedShards()); + builder.field(Fields.FINALIZING, getFinalizingShards()); + builder.field(Fields.DONE, getDoneShards()); + builder.field(Fields.FAILED, getFailedShards()); + builder.field(Fields.TOTAL, getTotalShards()); + } builder.endObject(); return builder; } + static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + Fields.SHARDS_STATS, true, + (Object[] parsedObjects) -> { + int i = 0; + int initializingShards = (int) parsedObjects[i++]; + int startedShards = (int) parsedObjects[i++]; + int finalizingShards = (int) parsedObjects[i++]; + int doneShards = (int) parsedObjects[i++]; + int failedShards = (int) parsedObjects[i++]; + int totalShards = (int) parsedObjects[i]; + return new SnapshotShardsStats(initializingShards, startedShards, finalizingShards, doneShards, failedShards, totalShards); + } + ); + static { + PARSER.declareInt(constructorArg(), new ParseField(Fields.INITIALIZING)); + PARSER.declareInt(constructorArg(), new ParseField(Fields.STARTED)); + PARSER.declareInt(constructorArg(), new ParseField(Fields.FINALIZING)); + PARSER.declareInt(constructorArg(), new ParseField(Fields.DONE)); + PARSER.declareInt(constructorArg(), new ParseField(Fields.FAILED)); + PARSER.declareInt(constructorArg(), new ParseField(Fields.TOTAL)); + } + + public static SnapshotShardsStats fromXContent(XContentParser parser) throws IOException { + return PARSER.apply(parser, null); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + SnapshotShardsStats that = (SnapshotShardsStats) o; + + if (initializingShards != that.initializingShards) return false; + if (startedShards != that.startedShards) return false; + if (finalizingShards != that.finalizingShards) return false; + if (doneShards != that.doneShards) return false; + if (failedShards != that.failedShards) return false; + return totalShards == that.totalShards; + } + + @Override + public int hashCode() { + int result = initializingShards; + result = 31 * result + startedShards; + result = 31 * result + finalizingShards; + result = 31 * result + doneShards; + result = 31 * result + failedShards; + result = 31 * result + totalShards; + return result; + } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStats.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStats.java index 76f6b2191840d..6cb56bd88dcd9 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStats.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStats.java @@ -26,12 +26,14 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.common.xcontent.ToXContent; -import org.elasticsearch.common.xcontent.ToXContentFragment; +import 
org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentParserUtils; import java.io.IOException; -public class SnapshotStats implements Streamable, ToXContentFragment { +public class SnapshotStats implements Streamable, ToXContentObject { private long startTime; private long time; @@ -176,35 +178,132 @@ static final class Fields { @Override public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException { - builder.startObject(Fields.STATS) - // incremental starts - .startObject(Fields.INCREMENTAL) - .field(Fields.FILE_COUNT, getIncrementalFileCount()) - .humanReadableField(Fields.SIZE_IN_BYTES, Fields.SIZE, new ByteSizeValue(getIncrementalSize())) - // incremental ends - .endObject(); - - if (getProcessedFileCount() != getIncrementalFileCount()) { - // processed starts - builder.startObject(Fields.PROCESSED) - .field(Fields.FILE_COUNT, getProcessedFileCount()) - .humanReadableField(Fields.SIZE_IN_BYTES, Fields.SIZE, new ByteSizeValue(getProcessedSize())) - // processed ends - .endObject(); + builder.startObject(); + { + builder.startObject(Fields.INCREMENTAL); + { + builder.field(Fields.FILE_COUNT, getIncrementalFileCount()); + builder.humanReadableField(Fields.SIZE_IN_BYTES, Fields.SIZE, new ByteSizeValue(getIncrementalSize())); + } + builder.endObject(); + + if (getProcessedFileCount() != getIncrementalFileCount()) { + builder.startObject(Fields.PROCESSED); + { + builder.field(Fields.FILE_COUNT, getProcessedFileCount()); + builder.humanReadableField(Fields.SIZE_IN_BYTES, Fields.SIZE, new ByteSizeValue(getProcessedSize())); + } + builder.endObject(); + } + + builder.startObject(Fields.TOTAL); + { + builder.field(Fields.FILE_COUNT, getTotalFileCount()); + builder.humanReadableField(Fields.SIZE_IN_BYTES, Fields.SIZE, new ByteSizeValue(getTotalSize())); + } + builder.endObject(); + + // timings stats + builder.field(Fields.START_TIME_IN_MILLIS, getStartTime()); + builder.humanReadableField(Fields.TIME_IN_MILLIS, Fields.TIME, new TimeValue(getTime())); } - // total starts - builder.startObject(Fields.TOTAL) - .field(Fields.FILE_COUNT, getTotalFileCount()) - .humanReadableField(Fields.SIZE_IN_BYTES, Fields.SIZE, new ByteSizeValue(getTotalSize())) - // total ends - .endObject(); - // timings stats - builder.field(Fields.START_TIME_IN_MILLIS, getStartTime()) - .humanReadableField(Fields.TIME_IN_MILLIS, Fields.TIME, new TimeValue(getTime())); - return builder.endObject(); } + public static SnapshotStats fromXContent(XContentParser parser) throws IOException { + // Parse this old school style instead of using the ObjectParser since there's an impedance mismatch between how the + // object has historically been written as JSON versus how it is structured in Java. 
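// As a reference for the hand-rolled parsing below: the JSON consumed here looks roughly like
// the following sketch (field names per the Fields constants above; the "processed" object only
// appears when the processed counts differ from the incremental ones; values are illustrative):
//
//   {
//     "incremental": { "file_count": 1, "size_in_bytes": 10 },
//     "processed":   { "file_count": 1, "size_in_bytes": 10 },
//     "total":       { "file_count": 2, "size_in_bytes": 20 },
//     "start_time_in_millis": 0,
//     "time_in_millis": 0
//   }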
+ XContentParser.Token token = parser.currentToken(); + if (token == null) { + token = parser.nextToken(); + } + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser::getTokenLocation); + long startTime = 0; + long time = 0; + int incrementalFileCount = 0; + int totalFileCount = 0; + int processedFileCount = 0; + long incrementalSize = 0; + long totalSize = 0; + long processedSize = 0; + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser::getTokenLocation); + String currentName = parser.currentName(); + token = parser.nextToken(); + if (currentName.equals(Fields.INCREMENTAL)) { + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser::getTokenLocation); + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser::getTokenLocation); + String innerName = parser.currentName(); + token = parser.nextToken(); + if (innerName.equals(Fields.FILE_COUNT)) { + XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, token, parser::getTokenLocation); + incrementalFileCount = parser.intValue(); + } else if (innerName.equals(Fields.SIZE_IN_BYTES)) { + XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, token, parser::getTokenLocation); + incrementalSize = parser.longValue(); + } else { + // Unknown sub field, skip + if (token == XContentParser.Token.START_OBJECT || token == XContentParser.Token.START_ARRAY) { + parser.skipChildren(); + } + } + } + } else if (currentName.equals(Fields.PROCESSED)) { + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser::getTokenLocation); + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser::getTokenLocation); + String innerName = parser.currentName(); + token = parser.nextToken(); + if (innerName.equals(Fields.FILE_COUNT)) { + XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, token, parser::getTokenLocation); + processedFileCount = parser.intValue(); + } else if (innerName.equals(Fields.SIZE_IN_BYTES)) { + XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, token, parser::getTokenLocation); + processedSize = parser.longValue(); + } else { + // Unknown sub field, skip + if (token == XContentParser.Token.START_OBJECT || token == XContentParser.Token.START_ARRAY) { + parser.skipChildren(); + } + } + } + } else if (currentName.equals(Fields.TOTAL)) { + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, token, parser::getTokenLocation); + while ((token = parser.nextToken()) != XContentParser.Token.END_OBJECT) { + XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, token, parser::getTokenLocation); + String innerName = parser.currentName(); + token = parser.nextToken(); + if (innerName.equals(Fields.FILE_COUNT)) { + XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, token, parser::getTokenLocation); + totalFileCount = parser.intValue(); + } else if (innerName.equals(Fields.SIZE_IN_BYTES)) { + XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, token, parser::getTokenLocation); + totalSize = parser.longValue(); + } else { + // Unknown sub field, skip + if (token == 
XContentParser.Token.START_OBJECT || token == XContentParser.Token.START_ARRAY) { + parser.skipChildren(); + } + } + } + } else if (currentName.equals(Fields.START_TIME_IN_MILLIS)) { + XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, token, parser::getTokenLocation); + startTime = parser.longValue(); + } else if (currentName.equals(Fields.TIME_IN_MILLIS)) { + XContentParserUtils.ensureExpectedToken(XContentParser.Token.VALUE_NUMBER, token, parser::getTokenLocation); + time = parser.longValue(); + } else { + // Unknown field, skip + if (token == XContentParser.Token.START_OBJECT || token == XContentParser.Token.START_ARRAY) { + parser.skipChildren(); + } + } + } + return new SnapshotStats(startTime, time, incrementalFileCount, totalFileCount, processedFileCount, incrementalSize, totalSize, + processedSize); + } + void add(SnapshotStats stats) { incrementalFileCount += stats.incrementalFileCount; totalFileCount += stats.totalFileCount; @@ -229,4 +328,34 @@ void add(SnapshotStats stats) { time = endTime - startTime; } } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + SnapshotStats that = (SnapshotStats) o; + + if (startTime != that.startTime) return false; + if (time != that.time) return false; + if (incrementalFileCount != that.incrementalFileCount) return false; + if (totalFileCount != that.totalFileCount) return false; + if (processedFileCount != that.processedFileCount) return false; + if (incrementalSize != that.incrementalSize) return false; + if (totalSize != that.totalSize) return false; + return processedSize == that.processedSize; + } + + @Override + public int hashCode() { + int result = (int) (startTime ^ (startTime >>> 32)); + result = 31 * result + (int) (time ^ (time >>> 32)); + result = 31 * result + incrementalFileCount; + result = 31 * result + totalFileCount; + result = 31 * result + processedFileCount; + result = 31 * result + (int) (incrementalSize ^ (incrementalSize >>> 32)); + result = 31 * result + (int) (totalSize ^ (totalSize >>> 32)); + result = 31 * result + (int) (processedSize ^ (processedSize >>> 32)); + return result; + } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatus.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatus.java index f7545ea0236a7..618bb54c9015d 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatus.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatus.java @@ -20,15 +20,21 @@ package org.elasticsearch.action.admin.cluster.snapshots.status; import org.elasticsearch.Version; +import org.elasticsearch.cluster.SnapshotsInProgress; import org.elasticsearch.cluster.SnapshotsInProgress.State; +import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.snapshots.Snapshot; +import 
org.elasticsearch.snapshots.SnapshotId; import java.io.IOException; import java.util.ArrayList; @@ -40,7 +46,11 @@ import java.util.Objects; import java.util.Set; +import static java.util.Collections.emptyList; +import static java.util.Collections.emptyMap; import static java.util.Collections.unmodifiableMap; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; /** * Status of a snapshot @@ -72,6 +82,18 @@ public class SnapshotStatus implements ToXContentObject, Streamable { updateShardStats(); } + private SnapshotStatus(Snapshot snapshot, State state, List shards, + Map indicesStatus, SnapshotShardsStats shardsStats, + SnapshotStats stats, Boolean includeGlobalState) { + this.snapshot = snapshot; + this.state = state; + this.shards = shards; + this.indicesStatus = indicesStatus; + this.shardsStats = shardsStats; + this.stats = stats; + this.includeGlobalState = includeGlobalState; + } + SnapshotStatus() { } @@ -207,8 +229,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (includeGlobalState != null) { builder.field(INCLUDE_GLOBAL_STATE, includeGlobalState); } - shardsStats.toXContent(builder, params); - stats.toXContent(builder, params); + builder.field(SnapshotShardsStats.Fields.SHARDS_STATS, shardsStats, params); + builder.field(SnapshotStats.Fields.STATS, stats, params); builder.startObject(INDICES); for (SnapshotIndexStatus indexStatus : getIndices().values()) { indexStatus.toXContent(builder, params); @@ -218,6 +240,52 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } + static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "snapshot_status", true, + (Object[] parsedObjects) -> { + int i = 0; + String name = (String) parsedObjects[i++]; + String repository = (String) parsedObjects[i++]; + String uuid = (String) parsedObjects[i++]; + String rawState = (String) parsedObjects[i++]; + Boolean includeGlobalState = (Boolean) parsedObjects[i++]; + SnapshotStats stats = ((SnapshotStats) parsedObjects[i++]); + SnapshotShardsStats shardsStats = ((SnapshotShardsStats) parsedObjects[i++]); + @SuppressWarnings("unchecked") List indices = ((List) parsedObjects[i]); + + Snapshot snapshot = new Snapshot(repository, new SnapshotId(name, uuid)); + SnapshotsInProgress.State state = SnapshotsInProgress.State.valueOf(rawState); + Map indicesStatus; + List shards; + if (indices == null || indices.isEmpty()) { + indicesStatus = emptyMap(); + shards = emptyList(); + } else { + indicesStatus = new HashMap<>(indices.size()); + shards = new ArrayList<>(); + for (SnapshotIndexStatus index : indices) { + indicesStatus.put(index.getIndex(), index); + shards.addAll(index.getShards().values()); + } + } + return new SnapshotStatus(snapshot, state, shards, indicesStatus, shardsStats, stats, includeGlobalState); + }); + static { + PARSER.declareString(constructorArg(), new ParseField(SNAPSHOT)); + PARSER.declareString(constructorArg(), new ParseField(REPOSITORY)); + PARSER.declareString(constructorArg(), new ParseField(UUID)); + PARSER.declareString(constructorArg(), new ParseField(STATE)); + PARSER.declareBoolean(optionalConstructorArg(), new ParseField(INCLUDE_GLOBAL_STATE)); + PARSER.declareField(constructorArg(), SnapshotStats::fromXContent, new ParseField(SnapshotStats.Fields.STATS), + ObjectParser.ValueType.OBJECT); + PARSER.declareObject(constructorArg(), 
SnapshotShardsStats.PARSER, new ParseField(SnapshotShardsStats.Fields.SHARDS_STATS)); + PARSER.declareNamedObjects(constructorArg(), SnapshotIndexStatus.PARSER, new ParseField(INDICES)); + } + + public static SnapshotStatus fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + private void updateShardStats() { stats = new SnapshotStats(); shardsStats = new SnapshotShardsStats(shards); @@ -225,4 +293,31 @@ private void updateShardStats() { stats.add(shard.getStats()); } } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + SnapshotStatus that = (SnapshotStatus) o; + + if (snapshot != null ? !snapshot.equals(that.snapshot) : that.snapshot != null) return false; + if (state != that.state) return false; + if (indicesStatus != null ? !indicesStatus.equals(that.indicesStatus) : that.indicesStatus != null) + return false; + if (shardsStats != null ? !shardsStats.equals(that.shardsStats) : that.shardsStats != null) return false; + if (stats != null ? !stats.equals(that.stats) : that.stats != null) return false; + return includeGlobalState != null ? includeGlobalState.equals(that.includeGlobalState) : that.includeGlobalState == null; + } + + @Override + public int hashCode() { + int result = snapshot != null ? snapshot.hashCode() : 0; + result = 31 * result + (state != null ? state.hashCode() : 0); + result = 31 * result + (indicesStatus != null ? indicesStatus.hashCode() : 0); + result = 31 * result + (shardsStats != null ? shardsStats.hashCode() : 0); + result = 31 * result + (stats != null ? stats.hashCode() : 0); + result = 31 * result + (includeGlobalState != null ? includeGlobalState.hashCode() : 0); + return result; + } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusResponse.java index d44a490680c9b..ef1435e41080c 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusResponse.java @@ -20,16 +20,21 @@ package org.elasticsearch.action.admin.cluster.snapshots.status; import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.List; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; + /** * Snapshot status response */ @@ -85,4 +90,33 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "snapshots_status_response", true, + (Object[] parsedObjects) -> { + @SuppressWarnings("unchecked") List snapshots = (List) parsedObjects[0]; + return new SnapshotsStatusResponse(snapshots); + } + ); + static { + PARSER.declareObjectArray(constructorArg(), SnapshotStatus.PARSER, new 
ParseField("snapshots")); + } + + public static SnapshotsStatusResponse fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + + SnapshotsStatusResponse response = (SnapshotsStatusResponse) o; + + return snapshots != null ? snapshots.equals(response.snapshots) : response.snapshots == null; + } + + @Override + public int hashCode() { + return snapshots != null ? snapshots.hashCode() : 0; + } } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java b/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java index dd43b82f8b862..133d0291df597 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java @@ -409,7 +409,7 @@ public void onFailure(Exception e) { }); TransportActionProxy.registerProxyAction(transportService, FETCH_ID_SCROLL_ACTION_NAME, FetchSearchResult::new); - transportService.registerRequestHandler(FETCH_ID_ACTION_NAME, ThreadPool.Names.SEARCH, ShardFetchSearchRequest::new, + transportService.registerRequestHandler(FETCH_ID_ACTION_NAME, ThreadPool.Names.SEARCH, true, true, ShardFetchSearchRequest::new, (request, channel, task) -> { FetchSearchResult result = searchService.executeFetchPhase(request, (SearchTask)task); channel.sendResponse(result); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java index d978e214fc96c..903802050127e 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java +++ b/server/src/main/java/org/elasticsearch/cluster/metadata/IndexMetaData.java @@ -23,7 +23,6 @@ import com.carrotsearch.hppc.cursors.IntObjectCursor; import com.carrotsearch.hppc.cursors.ObjectCursor; import com.carrotsearch.hppc.cursors.ObjectObjectCursor; - import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.rollover.RolloverInfo; import org.elasticsearch.action.support.ActiveShardCount; @@ -56,10 +55,11 @@ import org.elasticsearch.index.mapper.MapperService; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.rest.RestStatus; -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; import java.io.IOException; +import java.time.Instant; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; import java.util.Arrays; import java.util.Collections; import java.util.EnumSet; @@ -1345,7 +1345,7 @@ public static Settings addHumanReadableSettings(Settings settings) { } Long creationDate = settings.getAsLong(SETTING_CREATION_DATE, null); if (creationDate != null) { - DateTime creationDateTime = new DateTime(creationDate, DateTimeZone.UTC); + ZonedDateTime creationDateTime = ZonedDateTime.ofInstant(Instant.ofEpochMilli(creationDate), ZoneOffset.UTC); builder.put(SETTING_CREATION_DATE_STRING, creationDateTime.toString()); } return builder.build(); diff --git a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java index be9db5262b00c..b19d65090c6b6 100644 --- a/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java +++ 
b/server/src/main/java/org/elasticsearch/cluster/metadata/MetaDataCreateIndexService.java @@ -73,11 +73,10 @@ import org.elasticsearch.indices.InvalidIndexNameException; import org.elasticsearch.indices.cluster.IndicesClusterStateService.AllocatedIndices.IndexRemovalReason; import org.elasticsearch.threadpool.ThreadPool; -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; import java.io.UnsupportedEncodingException; import java.nio.file.Path; +import java.time.Instant; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; @@ -383,7 +382,7 @@ public ClusterState execute(ClusterState currentState) throws Exception { } if (indexSettingsBuilder.get(SETTING_CREATION_DATE) == null) { - indexSettingsBuilder.put(SETTING_CREATION_DATE, new DateTime(DateTimeZone.UTC).getMillis()); + indexSettingsBuilder.put(SETTING_CREATION_DATE, Instant.now().toEpochMilli()); } indexSettingsBuilder.put(IndexMetaData.SETTING_INDEX_PROVIDED_NAME, request.getProvidedName()); indexSettingsBuilder.put(SETTING_INDEX_UUID, UUIDs.randomBase64UUID()); diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java index 7998a1d27dd66..ec0af211ecca5 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/allocator/BalancedShardsAllocator.java @@ -1009,7 +1009,7 @@ private boolean tryRelocateShard(ModelNode minNode, ModelNode maxNode, String id // simulate moving shard from maxNode to minNode final float delta = weight.weightShardAdded(this, minNode, idx) - weight.weightShardRemoved(this, maxNode, idx); if (delta < minCost || - (candidate != null && delta == minCost && candidate.id() > shard.id())) { + (candidate != null && Float.compare(delta, minCost) == 0 && candidate.id() > shard.id())) { /* this last line is a tie-breaker to make the shard allocation alg deterministic * otherwise we rely on the iteration order of the index.getAllShards() which is a set.*/ minCost = delta; diff --git a/server/src/main/java/org/elasticsearch/common/time/CompoundDateTimeFormatter.java b/server/src/main/java/org/elasticsearch/common/time/CompoundDateTimeFormatter.java new file mode 100644 index 0000000000000..df459679c22b4 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/common/time/CompoundDateTimeFormatter.java @@ -0,0 +1,73 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.common.time; + +import java.time.ZoneId; +import java.time.format.DateTimeFormatter; +import java.time.format.DateTimeParseException; +import java.time.temporal.TemporalAccessor; + +/** + * Wrapper class around java.time.DateTimeFormatter that supports multiple formats for easier parsing, + * and one specific format for printing + */ +public class CompoundDateTimeFormatter { + + final DateTimeFormatter printer; + final DateTimeFormatter[] parsers; + + CompoundDateTimeFormatter(DateTimeFormatter... parsers) { + if (parsers.length == 0) { + throw new IllegalArgumentException("at least one date time formatter is required"); + } + this.printer = parsers[0]; + this.parsers = parsers; + } + + public TemporalAccessor parse(String input) { + DateTimeParseException failure = null; + for (int i = 0; i < parsers.length; i++) { + try { + return parsers[i].parse(input); + } catch (DateTimeParseException e) { + if (failure == null) { + failure = e; + } else { + failure.addSuppressed(e); + } + } + } + + // rethrow the first failure with the other parsers' exceptions attached as suppressed ones, + // so that no parsing attempt is silently lost + throw failure; + } + + public CompoundDateTimeFormatter withZone(ZoneId zoneId) { + final DateTimeFormatter[] parsersWithZone = new DateTimeFormatter[parsers.length]; + for (int i = 0; i < parsers.length; i++) { + parsersWithZone[i] = parsers[i].withZone(zoneId); + } + + return new CompoundDateTimeFormatter(parsersWithZone); + } + + public String format(TemporalAccessor accessor) { + return printer.format(accessor); + } +} diff --git a/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java b/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java new file mode 100644 index 0000000000000..eef2ab5558789 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/common/time/DateFormatters.java @@ -0,0 +1,1072 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.common.time; + +import org.elasticsearch.common.Strings; + +import java.time.DateTimeException; +import java.time.DayOfWeek; +import java.time.Instant; +import java.time.LocalDate; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; +import java.time.format.DateTimeFormatter; +import java.time.format.DateTimeFormatterBuilder; +import java.time.format.ResolverStyle; +import java.time.format.SignStyle; +import java.time.temporal.ChronoField; +import java.time.temporal.IsoFields; +import java.time.temporal.TemporalAccessor; +import java.time.temporal.TemporalAdjusters; +import java.time.temporal.WeekFields; +import java.util.Arrays; +import java.util.Collection; +import java.util.LinkedHashSet; +import java.util.Locale; + +import static java.time.temporal.ChronoField.DAY_OF_MONTH; +import static java.time.temporal.ChronoField.DAY_OF_WEEK; +import static java.time.temporal.ChronoField.DAY_OF_YEAR; +import static java.time.temporal.ChronoField.HOUR_OF_DAY; +import static java.time.temporal.ChronoField.MILLI_OF_SECOND; +import static java.time.temporal.ChronoField.MINUTE_OF_HOUR; +import static java.time.temporal.ChronoField.MONTH_OF_YEAR; +import static java.time.temporal.ChronoField.SECOND_OF_MINUTE; + +public class DateFormatters { + + private static final DateTimeFormatter TIME_ZONE_FORMATTER_ZONE_ID = new DateTimeFormatterBuilder() + .appendZoneId() + .toFormatter(Locale.ROOT); + + private static final DateTimeFormatter TIME_ZONE_FORMATTER_WITHOUT_COLON = new DateTimeFormatterBuilder() + .appendOffset("+HHmm", "Z") + .toFormatter(Locale.ROOT); + + private static final DateTimeFormatter TIME_ZONE_FORMATTER_WITH_COLON = new DateTimeFormatterBuilder() + .appendOffset("+HH:mm", "Z") + .toFormatter(Locale.ROOT); + + private static final DateTimeFormatter TIME_ZONE_FORMATTER = new DateTimeFormatterBuilder() + .optionalStart().appendZoneId().optionalEnd() + .optionalStart().appendOffset("+HHmm", "Z").optionalEnd() + .optionalStart().appendOffset("+HH:mm", "Z").optionalEnd() + .toFormatter(Locale.ROOT); + + private static final DateTimeFormatter OPTIONAL_TIME_ZONE_FORMATTER = new DateTimeFormatterBuilder() + .optionalStart() + .append(TIME_ZONE_FORMATTER) + .optionalEnd() + .toFormatter(Locale.ROOT); + + private static final DateTimeFormatter STRICT_YEAR_MONTH_DAY_FORMATTER = new DateTimeFormatterBuilder() + .appendValue(ChronoField.YEAR, 4, 10, SignStyle.EXCEEDS_PAD) + .appendLiteral("-") + .appendValue(MONTH_OF_YEAR, 2, 2, SignStyle.NOT_NEGATIVE) + .appendLiteral('-') + .appendValue(DAY_OF_MONTH, 2, 2, SignStyle.NOT_NEGATIVE) + .toFormatter(Locale.ROOT); + + private static final DateTimeFormatter STRICT_HOUR_MINUTE_SECOND_FORMATTER = new DateTimeFormatterBuilder() + .appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE) + .appendLiteral(':') + .appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE) + .appendLiteral(':') + .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE) + .toFormatter(Locale.ROOT); + + private static final DateTimeFormatter STRICT_DATE_OPTIONAL_TIME_FORMATTER_1 = new DateTimeFormatterBuilder() + .append(STRICT_YEAR_MONTH_DAY_FORMATTER) + .optionalStart() + .appendLiteral('T') + .append(STRICT_HOUR_MINUTE_SECOND_FORMATTER) + .optionalStart() + .appendFraction(MILLI_OF_SECOND, 3, 3, true) + .optionalEnd() + .optionalStart() + .append(TIME_ZONE_FORMATTER_WITHOUT_COLON) + .optionalEnd() + .optionalEnd() + .toFormatter(Locale.ROOT); + + private static final DateTimeFormatter STRICT_DATE_OPTIONAL_TIME_FORMATTER_2 = new 
DateTimeFormatterBuilder() + .append(STRICT_YEAR_MONTH_DAY_FORMATTER) + .optionalStart() + .appendLiteral('T') + .append(STRICT_HOUR_MINUTE_SECOND_FORMATTER) + .optionalStart() + .appendFraction(MILLI_OF_SECOND, 3, 3, true) + .optionalEnd() + .optionalStart() + .append(TIME_ZONE_FORMATTER_WITH_COLON) + .optionalEnd() + .optionalEnd() + .toFormatter(Locale.ROOT); + + private static final DateTimeFormatter STRICT_DATE_OPTIONAL_TIME_FORMATTER_3 = new DateTimeFormatterBuilder() + .append(STRICT_YEAR_MONTH_DAY_FORMATTER) + .optionalStart() + .appendLiteral('T') + .append(STRICT_HOUR_MINUTE_SECOND_FORMATTER) + .optionalStart() + .appendFraction(MILLI_OF_SECOND, 3, 3, true) + .optionalEnd() + .optionalStart() + .append(TIME_ZONE_FORMATTER_ZONE_ID) + .optionalEnd() + .optionalEnd() + .toFormatter(Locale.ROOT); + + private static final CompoundDateTimeFormatter STRICT_DATE_OPTIONAL_TIME = + new CompoundDateTimeFormatter(STRICT_DATE_OPTIONAL_TIME_FORMATTER_1, STRICT_DATE_OPTIONAL_TIME_FORMATTER_2, + STRICT_DATE_OPTIONAL_TIME_FORMATTER_3); + + private static final DateTimeFormatter BASIC_TIME_NO_MILLIS_FORMATTER = new DateTimeFormatterBuilder() + .appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE) + .appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE) + .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE) + .append(OPTIONAL_TIME_ZONE_FORMATTER) + .toFormatter(Locale.ROOT); + + private static final CompoundDateTimeFormatter BASIC_TIME_NO_MILLIS = new CompoundDateTimeFormatter(BASIC_TIME_NO_MILLIS_FORMATTER); + + private static final DateTimeFormatter BASIC_TIME_FORMATTER = new DateTimeFormatterBuilder() + .appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE) + .appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE) + .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE) + .appendFraction(MILLI_OF_SECOND, 1, 3, true) + .append(OPTIONAL_TIME_ZONE_FORMATTER) + .toFormatter(Locale.ROOT); + + private static final CompoundDateTimeFormatter BASIC_TIME = new CompoundDateTimeFormatter(BASIC_TIME_FORMATTER); + + private static final DateTimeFormatter BASIC_T_TIME_FORMATTER = new DateTimeFormatterBuilder() + .appendLiteral("T") + .append(BASIC_TIME_FORMATTER) + .toFormatter(Locale.ROOT); + + private static final CompoundDateTimeFormatter BASIC_T_TIME = new CompoundDateTimeFormatter(BASIC_T_TIME_FORMATTER); + + private static final CompoundDateTimeFormatter BASIC_T_TIME_NO_MILLIS = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() + .appendLiteral("T") + .append(BASIC_TIME_NO_MILLIS_FORMATTER) + .toFormatter(Locale.ROOT)); + + private static final CompoundDateTimeFormatter BASIC_DATE_TIME = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() + .appendValue(ChronoField.YEAR, 4, 4, SignStyle.NORMAL) + .appendValue(MONTH_OF_YEAR, 2, 2, SignStyle.NOT_NEGATIVE) + .appendValue(DAY_OF_MONTH, 2, 2, SignStyle.NOT_NEGATIVE) + .append(BASIC_T_TIME_FORMATTER) + .toFormatter(Locale.ROOT)); + + private static final CompoundDateTimeFormatter BASIC_DATE_TIME_NO_MILLIS = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() + .appendValue(ChronoField.YEAR, 4, 4, SignStyle.NORMAL) + .appendValue(MONTH_OF_YEAR, 2, 2, SignStyle.NOT_NEGATIVE) + .appendValue(DAY_OF_MONTH, 2, 2, SignStyle.NOT_NEGATIVE) + .appendLiteral("T") + .append(BASIC_TIME_NO_MILLIS_FORMATTER) + .toFormatter(Locale.ROOT)); + + private static final CompoundDateTimeFormatter BASIC_ORDINAL_DATE = new CompoundDateTimeFormatter( + DateTimeFormatter.ofPattern("yyyyDDD", Locale.ROOT)); + + private static final 
CompoundDateTimeFormatter BASIC_ORDINAL_DATE_TIME = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() + .appendPattern("yyyyDDD") + .append(BASIC_T_TIME_FORMATTER) + .toFormatter(Locale.ROOT)); + + private static final CompoundDateTimeFormatter BASIC_ORDINAL_DATE_TIME_NO_MILLIS = new CompoundDateTimeFormatter( + new DateTimeFormatterBuilder() + .appendPattern("yyyyDDD") + .appendLiteral("T") + .append(BASIC_TIME_NO_MILLIS_FORMATTER) + .toFormatter(Locale.ROOT)); + + private static final DateTimeFormatter BASIC_WEEK_DATE_FORMATTER = new DateTimeFormatterBuilder() + .appendValue(IsoFields.WEEK_BASED_YEAR) + .appendLiteral("W") + .appendValue(IsoFields.WEEK_OF_WEEK_BASED_YEAR, 1, 2, SignStyle.NEVER) + .appendValue(ChronoField.DAY_OF_WEEK) + .toFormatter(Locale.ROOT); + + private static final CompoundDateTimeFormatter BASIC_WEEK_DATE = new CompoundDateTimeFormatter(BASIC_WEEK_DATE_FORMATTER); + + private static final CompoundDateTimeFormatter BASIC_WEEK_DATE_TIME_NO_MILLIS = new CompoundDateTimeFormatter( + new DateTimeFormatterBuilder() + .append(BASIC_WEEK_DATE_FORMATTER) + .appendLiteral("T") + .append(BASIC_TIME_NO_MILLIS_FORMATTER) + .toFormatter(Locale.ROOT)); + + private static final CompoundDateTimeFormatter BASIC_WEEK_DATE_TIME = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() + .append(BASIC_WEEK_DATE_FORMATTER) + .append(BASIC_T_TIME_FORMATTER) + .toFormatter(Locale.ROOT)); + + private static final DateTimeFormatter DATE_FORMATTER = new DateTimeFormatterBuilder() + .appendValue(ChronoField.YEAR, 1, 4, SignStyle.NORMAL) + .appendLiteral('-') + .appendValue(MONTH_OF_YEAR, 1, 2, SignStyle.NOT_NEGATIVE) + .appendLiteral('-') + .appendValue(DAY_OF_MONTH, 1, 2, SignStyle.NOT_NEGATIVE) + .toFormatter(Locale.ROOT); + + private static final CompoundDateTimeFormatter DATE = new CompoundDateTimeFormatter(DATE_FORMATTER); + + private static final CompoundDateTimeFormatter HOUR = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() + .appendValue(HOUR_OF_DAY, 1, 2, SignStyle.NOT_NEGATIVE) + .toFormatter(Locale.ROOT)); + + private static final CompoundDateTimeFormatter DATE_HOUR = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() + .append(DATE_FORMATTER) + .appendLiteral("T") + .appendValue(HOUR_OF_DAY, 1, 2, SignStyle.NOT_NEGATIVE) + .toFormatter(Locale.ROOT)); + + private static final DateTimeFormatter HOUR_MINUTE_FORMATTER = new DateTimeFormatterBuilder() + .appendValue(HOUR_OF_DAY, 1, 2, SignStyle.NOT_NEGATIVE) + .appendLiteral(':') + .appendValue(MINUTE_OF_HOUR, 1, 2, SignStyle.NOT_NEGATIVE) + .toFormatter(Locale.ROOT); + + private static final CompoundDateTimeFormatter HOUR_MINUTE = new CompoundDateTimeFormatter(HOUR_MINUTE_FORMATTER); + + private static final DateTimeFormatter DATE_TIME_PREFIX = new DateTimeFormatterBuilder() + .append(DATE_FORMATTER) + .appendLiteral('T') + .append(HOUR_MINUTE_FORMATTER) + .optionalStart() + .appendLiteral(':') + .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE) + .optionalEnd() + .toFormatter(Locale.ROOT); + + // only the formatter, nothing optional here + private static final DateTimeFormatter DATE_TIME_NO_MILLIS_FORMATTER = new DateTimeFormatterBuilder() + .append(DATE_FORMATTER) + .appendLiteral('T') + .append(HOUR_MINUTE_FORMATTER) + .appendLiteral(':') + .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE) + .appendZoneId() + .toFormatter(Locale.ROOT); + + private static final DateTimeFormatter DATE_TIME_NO_MILLIS_1 = new DateTimeFormatterBuilder() + .append(DATE_TIME_PREFIX) + 
.append(TIME_ZONE_FORMATTER_WITH_COLON) + .toFormatter(Locale.ROOT); + + private static final DateTimeFormatter DATE_TIME_NO_MILLIS_2 = new DateTimeFormatterBuilder() + .append(DATE_TIME_PREFIX) + .append(TIME_ZONE_FORMATTER_WITHOUT_COLON) + .toFormatter(Locale.ROOT); + + private static final DateTimeFormatter DATE_TIME_NO_MILLIS_3 = new DateTimeFormatterBuilder() + .append(DATE_TIME_PREFIX) + .append(TIME_ZONE_FORMATTER_ZONE_ID) + .toFormatter(Locale.ROOT); + + private static final DateTimeFormatter DATE_TIME_NO_MILLIS_4 = new DateTimeFormatterBuilder() + .append(DATE_TIME_PREFIX) + .optionalStart() + .append(TIME_ZONE_FORMATTER_WITH_COLON) + .optionalEnd() + .toFormatter(Locale.ROOT); + + private static final DateTimeFormatter DATE_TIME_NO_MILLIS_5 = new DateTimeFormatterBuilder() + .append(DATE_TIME_PREFIX) + .optionalStart() + .append(TIME_ZONE_FORMATTER_WITHOUT_COLON) + .optionalEnd() + .toFormatter(Locale.ROOT); + + private static final DateTimeFormatter DATE_TIME_NO_MILLIS_6 = new DateTimeFormatterBuilder() + .append(DATE_TIME_PREFIX) + .optionalStart() + .append(TIME_ZONE_FORMATTER_ZONE_ID) + .optionalEnd() + .toFormatter(Locale.ROOT); + + private static final CompoundDateTimeFormatter DATE_TIME_NO_MILLIS = new CompoundDateTimeFormatter(DATE_TIME_NO_MILLIS_FORMATTER, + DATE_TIME_NO_MILLIS_1, DATE_TIME_NO_MILLIS_2, DATE_TIME_NO_MILLIS_3, DATE_TIME_NO_MILLIS_4, DATE_TIME_NO_MILLIS_5, + DATE_TIME_NO_MILLIS_6); + + private static final CompoundDateTimeFormatter DATE_TIME = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() + .append(DATE_FORMATTER) + .appendLiteral('T') + .append(HOUR_MINUTE_FORMATTER) + .optionalStart() + .appendLiteral(':') + .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE) + .appendFraction(MILLI_OF_SECOND, 1, 3, true) + .optionalEnd() + .append(OPTIONAL_TIME_ZONE_FORMATTER) + .toFormatter(Locale.ROOT)); + + private static final CompoundDateTimeFormatter DATE_OPTIONAL_TIME = new CompoundDateTimeFormatter(STRICT_DATE_OPTIONAL_TIME.printer, + new DateTimeFormatterBuilder() + .append(DATE_FORMATTER) + .parseLenient() + .optionalStart() + .appendLiteral('T') + .append(HOUR_MINUTE_FORMATTER) + .optionalStart() + .appendLiteral(':') + .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE) + .appendFraction(MILLI_OF_SECOND, 1, 3, true) + .optionalEnd() + .append(OPTIONAL_TIME_ZONE_FORMATTER) + .optionalEnd() + .toFormatter(Locale.ROOT)); + + private static final DateTimeFormatter HOUR_MINUTE_SECOND_FORMATTER = new DateTimeFormatterBuilder() + .append(HOUR_MINUTE_FORMATTER) + .appendLiteral(":") + .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE) + .toFormatter(Locale.ROOT); + + private static final CompoundDateTimeFormatter HOUR_MINUTE_SECOND = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() + .append(HOUR_MINUTE_FORMATTER) + .appendLiteral(":") + .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE) + .toFormatter(Locale.ROOT)); + + private static final CompoundDateTimeFormatter DATE_HOUR_MINUTE_SECOND = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() + .append(DATE_FORMATTER) + .appendLiteral("T") + .append(HOUR_MINUTE_SECOND_FORMATTER) + .toFormatter(Locale.ROOT)); + + private static final CompoundDateTimeFormatter DATE_HOUR_MINUTE = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() + .append(DATE_FORMATTER) + .appendLiteral("T") + .append(HOUR_MINUTE_FORMATTER) + .toFormatter(Locale.ROOT)); + + private static final DateTimeFormatter HOUR_MINUTE_SECOND_MILLIS_FORMATTER = new 
DateTimeFormatterBuilder() + .appendValue(HOUR_OF_DAY, 1, 2, SignStyle.NOT_NEGATIVE) + .appendLiteral(':') + .appendValue(MINUTE_OF_HOUR, 1, 2, SignStyle.NOT_NEGATIVE) + .appendLiteral(':') + .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE) + .appendFraction(MILLI_OF_SECOND, 1, 3, true) + .toFormatter(Locale.ROOT); + + private static final CompoundDateTimeFormatter HOUR_MINUTE_SECOND_MILLIS = + new CompoundDateTimeFormatter(HOUR_MINUTE_SECOND_MILLIS_FORMATTER); + + private static final CompoundDateTimeFormatter DATE_HOUR_MINUTE_SECOND_MILLIS = + new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() + .append(DATE_FORMATTER) + .appendLiteral("T") + .append(HOUR_MINUTE_SECOND_MILLIS_FORMATTER) + .toFormatter(Locale.ROOT)); + + private static final CompoundDateTimeFormatter DATE_HOUR_MINUTE_SECOND_FRACTION = + new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() + .append(DATE_FORMATTER) + .appendLiteral("T") + .append(HOUR_MINUTE_SECOND_MILLIS_FORMATTER) + .toFormatter(Locale.ROOT)); + + private static final DateTimeFormatter ORDINAL_DATE_FORMATTER = new DateTimeFormatterBuilder() + .appendValue(ChronoField.YEAR, 4, 10, SignStyle.EXCEEDS_PAD) + .appendLiteral('-') + .appendValue(DAY_OF_YEAR, 1, 3, SignStyle.NOT_NEGATIVE) + .toFormatter(Locale.ROOT); + + private static final CompoundDateTimeFormatter ORDINAL_DATE = new CompoundDateTimeFormatter(ORDINAL_DATE_FORMATTER); + + private static final CompoundDateTimeFormatter ORDINAL_DATE_TIME_NO_MILLIS = new CompoundDateTimeFormatter( + new DateTimeFormatterBuilder() + .append(ORDINAL_DATE_FORMATTER) + .appendLiteral('T') + .append(HOUR_MINUTE_SECOND_FORMATTER) + .append(OPTIONAL_TIME_ZONE_FORMATTER) + .toFormatter(Locale.ROOT)); + + private static final CompoundDateTimeFormatter ORDINAL_DATE_TIME = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() + .append(ORDINAL_DATE_FORMATTER) + .appendLiteral('T') + .append(HOUR_MINUTE_FORMATTER) + .optionalStart() + .appendLiteral(':') + .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE) + .appendFraction(MILLI_OF_SECOND, 1, 3, true) + .optionalEnd() + .append(TIME_ZONE_FORMATTER) + .toFormatter(Locale.ROOT)); + + private static final DateTimeFormatter TIME_FORMATTER_1 = new DateTimeFormatterBuilder() + .appendValue(HOUR_OF_DAY, 1, 2, SignStyle.NOT_NEGATIVE) + .appendLiteral(':') + .appendValue(MINUTE_OF_HOUR, 1, 2, SignStyle.NOT_NEGATIVE) + .appendLiteral(':') + .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE) + .appendFraction(MILLI_OF_SECOND, 1, 3, true) + .append(TIME_ZONE_FORMATTER_ZONE_ID) + .toFormatter(Locale.ROOT); + + private static final DateTimeFormatter TIME_FORMATTER_2 = new DateTimeFormatterBuilder() + .appendValue(HOUR_OF_DAY, 1, 2, SignStyle.NOT_NEGATIVE) + .appendLiteral(':') + .appendValue(MINUTE_OF_HOUR, 1, 2, SignStyle.NOT_NEGATIVE) + .appendLiteral(':') + .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE) + .appendFraction(MILLI_OF_SECOND, 1, 3, true) + .append(TIME_ZONE_FORMATTER_WITH_COLON) + .toFormatter(Locale.ROOT); + + private static final DateTimeFormatter TIME_FORMATTER_3 = new DateTimeFormatterBuilder() + .appendValue(HOUR_OF_DAY, 1, 2, SignStyle.NOT_NEGATIVE) + .appendLiteral(':') + .appendValue(MINUTE_OF_HOUR, 1, 2, SignStyle.NOT_NEGATIVE) + .appendLiteral(':') + .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE) + .appendFraction(MILLI_OF_SECOND, 1, 3, true) + .append(TIME_ZONE_FORMATTER_WITHOUT_COLON) + .toFormatter(Locale.ROOT); + + private static final DateTimeFormatter TIME_PREFIX = new 
DateTimeFormatterBuilder() + .appendValue(HOUR_OF_DAY, 1, 2, SignStyle.NOT_NEGATIVE) + .appendLiteral(':') + .appendValue(MINUTE_OF_HOUR, 1, 2, SignStyle.NOT_NEGATIVE) + .appendLiteral(':') + .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE) + .appendFraction(MILLI_OF_SECOND, 1, 3, true) + .toFormatter(Locale.ROOT); + + private static final DateTimeFormatter TIME_ZONE_ID = new DateTimeFormatterBuilder() + .append(TIME_PREFIX) + .append(TIME_ZONE_FORMATTER_ZONE_ID) + .toFormatter(Locale.ROOT); + + private static final DateTimeFormatter TIME_ZONE_WITH_COLON = new DateTimeFormatterBuilder() + .append(TIME_PREFIX) + .append(TIME_ZONE_FORMATTER_WITH_COLON) + .toFormatter(Locale.ROOT); + + private static final DateTimeFormatter TIME_ZONE_WITHOUT_COLON = new DateTimeFormatterBuilder() + .append(TIME_PREFIX) + .append(TIME_ZONE_FORMATTER_WITHOUT_COLON) + .toFormatter(Locale.ROOT); + + private static final CompoundDateTimeFormatter T_TIME = new CompoundDateTimeFormatter( + new DateTimeFormatterBuilder().appendLiteral("T").append(TIME_FORMATTER_1).toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().appendLiteral("T").append(TIME_FORMATTER_2).toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().appendLiteral("T").append(TIME_FORMATTER_3).toFormatter(Locale.ROOT) + ); + + private static final DateTimeFormatter TIME_NO_MILLIS_FORMATTER_1 = new DateTimeFormatterBuilder() + .appendValue(HOUR_OF_DAY, 1, 2, SignStyle.NOT_NEGATIVE) + .appendLiteral(':') + .appendValue(MINUTE_OF_HOUR, 1, 2, SignStyle.NOT_NEGATIVE) + .appendLiteral(':') + .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE) + .append(TIME_ZONE_FORMATTER_ZONE_ID) + .toFormatter(Locale.ROOT); + + private static final DateTimeFormatter TIME_NO_MILLIS_FORMATTER_2 = new DateTimeFormatterBuilder() + .appendValue(HOUR_OF_DAY, 1, 2, SignStyle.NOT_NEGATIVE) + .appendLiteral(':') + .appendValue(MINUTE_OF_HOUR, 1, 2, SignStyle.NOT_NEGATIVE) + .appendLiteral(':') + .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE) + .append(TIME_ZONE_FORMATTER_WITH_COLON) + .toFormatter(Locale.ROOT); + + private static final DateTimeFormatter TIME_NO_MILLIS_FORMATTER_3 = new DateTimeFormatterBuilder() + .appendValue(HOUR_OF_DAY, 1, 2, SignStyle.NOT_NEGATIVE) + .appendLiteral(':') + .appendValue(MINUTE_OF_HOUR, 1, 2, SignStyle.NOT_NEGATIVE) + .appendLiteral(':') + .appendValue(SECOND_OF_MINUTE, 1, 2, SignStyle.NOT_NEGATIVE) + .append(TIME_ZONE_FORMATTER_WITHOUT_COLON) + .toFormatter(Locale.ROOT); + + private static final CompoundDateTimeFormatter TIME = new CompoundDateTimeFormatter(TIME_ZONE_ID, TIME_ZONE_WITH_COLON, + TIME_ZONE_WITHOUT_COLON); + + private static final CompoundDateTimeFormatter TIME_NO_MILLIS = + new CompoundDateTimeFormatter(TIME_NO_MILLIS_FORMATTER_1, TIME_NO_MILLIS_FORMATTER_2, TIME_NO_MILLIS_FORMATTER_3); + + private static final DateTimeFormatter T_TIME_NO_MILLIS_FORMATTER_1 = new DateTimeFormatterBuilder() + .appendLiteral("T") + .append(TIME_NO_MILLIS_FORMATTER_1) + .toFormatter(Locale.ROOT); + + private static final DateTimeFormatter T_TIME_NO_MILLIS_FORMATTER_2 = new DateTimeFormatterBuilder() + .appendLiteral("T") + .append(TIME_NO_MILLIS_FORMATTER_2) + .toFormatter(Locale.ROOT); + + private static final DateTimeFormatter T_TIME_NO_MILLIS_FORMATTER_3 = new DateTimeFormatterBuilder() + .appendLiteral("T") + .append(TIME_NO_MILLIS_FORMATTER_3) + .toFormatter(Locale.ROOT); + + private static final CompoundDateTimeFormatter T_TIME_NO_MILLIS = + new CompoundDateTimeFormatter(T_TIME_NO_MILLIS_FORMATTER_1, 
T_TIME_NO_MILLIS_FORMATTER_2, T_TIME_NO_MILLIS_FORMATTER_3); + + private static final DateTimeFormatter WEEK_DATE_FORMATTER = new DateTimeFormatterBuilder() + .appendValue(IsoFields.WEEK_BASED_YEAR, 4, 10, SignStyle.EXCEEDS_PAD) + .appendLiteral("-W") + .appendValue(IsoFields.WEEK_OF_WEEK_BASED_YEAR, 1, 2, SignStyle.NOT_NEGATIVE) + .appendLiteral('-') + .appendValue(DAY_OF_WEEK, 1) + .toFormatter(Locale.ROOT); + + private static final CompoundDateTimeFormatter WEEK_DATE = new CompoundDateTimeFormatter(WEEK_DATE_FORMATTER); + + private static final CompoundDateTimeFormatter WEEK_DATE_TIME_NO_MILLIS = new CompoundDateTimeFormatter( + new DateTimeFormatterBuilder().append(WEEK_DATE_FORMATTER).append(T_TIME_NO_MILLIS_FORMATTER_1).toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().append(WEEK_DATE_FORMATTER).append(T_TIME_NO_MILLIS_FORMATTER_2).toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().append(WEEK_DATE_FORMATTER).append(T_TIME_NO_MILLIS_FORMATTER_3).toFormatter(Locale.ROOT) + ); + + private static final CompoundDateTimeFormatter WEEK_DATE_TIME = new CompoundDateTimeFormatter( + new DateTimeFormatterBuilder().append(WEEK_DATE_FORMATTER).appendLiteral("T").append(TIME_FORMATTER_1).toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().append(WEEK_DATE_FORMATTER).appendLiteral("T").append(TIME_FORMATTER_2).toFormatter(Locale.ROOT), + new DateTimeFormatterBuilder().append(WEEK_DATE_FORMATTER).appendLiteral("T").append(TIME_FORMATTER_3).toFormatter(Locale.ROOT) + ); + + private static final CompoundDateTimeFormatter WEEK_YEAR = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() + .appendValue(WeekFields.ISO.weekBasedYear()) + .toFormatter(Locale.ROOT)); + + private static final CompoundDateTimeFormatter WEEKYEAR_WEEK = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() + .appendValue(WeekFields.ISO.weekBasedYear()) + .appendLiteral("-W") + .appendValue(WeekFields.ISO.weekOfWeekBasedYear()) + .toFormatter(Locale.ROOT)); + + private static final CompoundDateTimeFormatter WEEKYEAR_WEEK_DAY = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() + .appendValue(WeekFields.ISO.weekBasedYear()) + .appendLiteral("-W") + .appendValue(WeekFields.ISO.weekOfWeekBasedYear()) + .appendLiteral("-") + .appendValue(WeekFields.ISO.dayOfWeek()) + .toFormatter(Locale.ROOT)); + + private static final CompoundDateTimeFormatter YEAR = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() + .appendValue(ChronoField.YEAR) + .toFormatter(Locale.ROOT)); + + private static final CompoundDateTimeFormatter YEAR_MONTH = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() + .appendValue(ChronoField.YEAR) + .appendLiteral("-") + .appendValue(MONTH_OF_YEAR) + .toFormatter(Locale.ROOT)); + + private static final CompoundDateTimeFormatter YEAR_MONTH_DAY = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() + .appendValue(ChronoField.YEAR) + .appendLiteral("-") + .appendValue(MONTH_OF_YEAR) + .appendLiteral("-") + .appendValue(DAY_OF_MONTH) + .toFormatter(Locale.ROOT)); + + private static final CompoundDateTimeFormatter EPOCH_SECOND = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() + .appendValue(ChronoField.INSTANT_SECONDS) + .toFormatter(Locale.ROOT)); + + private static final CompoundDateTimeFormatter EPOCH_MILLIS = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() + .appendValue(ChronoField.INSTANT_SECONDS, 1, 19, SignStyle.NEVER) + .appendValue(ChronoField.MILLI_OF_SECOND, 3) + .toFormatter(Locale.ROOT)); + + private static 
final DateTimeFormatter STRICT_BASIC_WEEK_DATE_FORMATTER = new DateTimeFormatterBuilder() + .parseStrict() + .appendValue(IsoFields.WEEK_BASED_YEAR, 4) + .appendLiteral("W") + .appendValue(IsoFields.WEEK_OF_WEEK_BASED_YEAR, 1, 2, SignStyle.NEVER) + .appendValue(ChronoField.DAY_OF_WEEK) + .toFormatter(Locale.ROOT); + + private static final CompoundDateTimeFormatter STRICT_BASIC_WEEK_DATE = new CompoundDateTimeFormatter(STRICT_BASIC_WEEK_DATE_FORMATTER); + + private static final CompoundDateTimeFormatter STRICT_BASIC_WEEK_DATE_TIME_NO_MILLIS = new CompoundDateTimeFormatter( + new DateTimeFormatterBuilder() + .append(STRICT_BASIC_WEEK_DATE_FORMATTER) + .append(DateTimeFormatter.ofPattern("'T'HHmmssX", Locale.ROOT)) + .toFormatter(Locale.ROOT)); + + private static final CompoundDateTimeFormatter STRICT_BASIC_WEEK_DATE_TIME = new CompoundDateTimeFormatter( + new DateTimeFormatterBuilder() + .append(STRICT_BASIC_WEEK_DATE_FORMATTER) + .append(DateTimeFormatter.ofPattern("'T'HHmmss.SSSX", Locale.ROOT)) + .toFormatter(Locale.ROOT)); + + private static final CompoundDateTimeFormatter STRICT_DATE = new CompoundDateTimeFormatter( + DateTimeFormatter.ISO_LOCAL_DATE.withResolverStyle(ResolverStyle.LENIENT)); + + private static final CompoundDateTimeFormatter STRICT_DATE_HOUR = new CompoundDateTimeFormatter( + DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH", Locale.ROOT)); + + private static final CompoundDateTimeFormatter STRICT_DATE_HOUR_MINUTE = new CompoundDateTimeFormatter( + DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm", Locale.ROOT)); + + private static final CompoundDateTimeFormatter STRICT_YEAR_MONTH_DAY = new CompoundDateTimeFormatter(STRICT_YEAR_MONTH_DAY_FORMATTER); + + private static final CompoundDateTimeFormatter STRICT_YEAR_MONTH = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() + .appendValue(ChronoField.YEAR, 4, 10, SignStyle.EXCEEDS_PAD) + .appendLiteral("-") + .appendValue(MONTH_OF_YEAR, 2, 2, SignStyle.NOT_NEGATIVE) + .toFormatter(Locale.ROOT)); + + private static final CompoundDateTimeFormatter STRICT_YEAR = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() + .appendValue(ChronoField.YEAR, 4, 10, SignStyle.EXCEEDS_PAD) + .toFormatter(Locale.ROOT)); + + private static final CompoundDateTimeFormatter STRICT_HOUR_MINUTE_SECOND = + new CompoundDateTimeFormatter(STRICT_HOUR_MINUTE_SECOND_FORMATTER); + + private static final CompoundDateTimeFormatter STRICT_DATE_TIME = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() + .append(STRICT_YEAR_MONTH_DAY_FORMATTER) + .appendLiteral('T') + .append(STRICT_HOUR_MINUTE_SECOND_FORMATTER) + .optionalStart() + .appendFraction(MILLI_OF_SECOND, 3, 3, true) + .optionalEnd() + .append(OPTIONAL_TIME_ZONE_FORMATTER) + .toFormatter(Locale.ROOT)); + + private static final CompoundDateTimeFormatter STRICT_ORDINAL_DATE_TIME_NO_MILLIS = new CompoundDateTimeFormatter( + new DateTimeFormatterBuilder() + .appendValue(ChronoField.YEAR, 4, 10, SignStyle.EXCEEDS_PAD) + .appendLiteral('-') + .appendValue(DAY_OF_YEAR, 3, 3, SignStyle.NOT_NEGATIVE) + .appendLiteral('T') + .append(STRICT_HOUR_MINUTE_SECOND_FORMATTER) + .append(OPTIONAL_TIME_ZONE_FORMATTER) + .toFormatter(Locale.ROOT)); + + private static final CompoundDateTimeFormatter STRICT_DATE_TIME_NO_MILLIS = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() + .append(STRICT_YEAR_MONTH_DAY_FORMATTER) + .appendLiteral('T') + .append(STRICT_HOUR_MINUTE_SECOND_FORMATTER) + .append(OPTIONAL_TIME_ZONE_FORMATTER) + .toFormatter(Locale.ROOT)); + + private static final 
DateTimeFormatter STRICT_HOUR_MINUTE_SECOND_MILLIS_FORMATTER = new DateTimeFormatterBuilder() + .append(STRICT_HOUR_MINUTE_SECOND_FORMATTER) + .appendFraction(MILLI_OF_SECOND, 1, 3, true) + .toFormatter(Locale.ROOT); + + private static final CompoundDateTimeFormatter STRICT_HOUR_MINUTE_SECOND_MILLIS = + new CompoundDateTimeFormatter(STRICT_HOUR_MINUTE_SECOND_MILLIS_FORMATTER); + + private static final CompoundDateTimeFormatter STRICT_HOUR_MINUTE_SECOND_FRACTION = STRICT_HOUR_MINUTE_SECOND_MILLIS; + + private static final CompoundDateTimeFormatter STRICT_DATE_HOUR_MINUTE_SECOND_FRACTION = new CompoundDateTimeFormatter( + new DateTimeFormatterBuilder() + .append(STRICT_YEAR_MONTH_DAY_FORMATTER) + .appendLiteral("T") + .append(STRICT_HOUR_MINUTE_SECOND_MILLIS_FORMATTER) + .toFormatter(Locale.ROOT)); + + private static final CompoundDateTimeFormatter STRICT_DATE_HOUR_MINUTE_SECOND_MILLIS = STRICT_DATE_HOUR_MINUTE_SECOND_FRACTION; + + private static final CompoundDateTimeFormatter STRICT_HOUR = + new CompoundDateTimeFormatter(DateTimeFormatter.ofPattern("HH", Locale.ROOT)); + + private static final CompoundDateTimeFormatter STRICT_HOUR_MINUTE = + new CompoundDateTimeFormatter(DateTimeFormatter.ofPattern("HH:mm", Locale.ROOT)); + + private static final CompoundDateTimeFormatter STRICT_ORDINAL_DATE_TIME = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() + .appendValue(ChronoField.YEAR, 4, 10, SignStyle.EXCEEDS_PAD) + .appendLiteral('-') + .appendValue(DAY_OF_YEAR, 3, 3, SignStyle.NOT_NEGATIVE) + .appendLiteral('T') + .appendPattern("HH:mm") + .optionalStart() + .appendLiteral(':') + .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE) + .appendFraction(MILLI_OF_SECOND, 1, 3, true) + .optionalEnd() + .append(OPTIONAL_TIME_ZONE_FORMATTER) + .toFormatter(Locale.ROOT)); + + private static final DateTimeFormatter STRICT_TIME_FORMATTER = new DateTimeFormatterBuilder() + .appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE) + .appendLiteral(':') + .appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE) + .appendLiteral(':') + .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE) + .appendFraction(MILLI_OF_SECOND, 1, 3, true) + .append(TIME_ZONE_FORMATTER) + .toFormatter(Locale.ROOT); + + private static final CompoundDateTimeFormatter STRICT_TIME = new CompoundDateTimeFormatter(STRICT_TIME_FORMATTER); + + private static final DateTimeFormatter STRICT_T_TIME_FORMATTER = new DateTimeFormatterBuilder() + .appendLiteral("T") + .append(STRICT_TIME_FORMATTER) + .toFormatter(Locale.ROOT); + + private static final CompoundDateTimeFormatter STRICT_T_TIME = new CompoundDateTimeFormatter(STRICT_T_TIME_FORMATTER); + + private static final DateTimeFormatter STRICT_TIME_NO_MILLIS_FORMATTER = new DateTimeFormatterBuilder() + .appendValue(HOUR_OF_DAY, 2, 2, SignStyle.NOT_NEGATIVE) + .appendLiteral(':') + .appendValue(MINUTE_OF_HOUR, 2, 2, SignStyle.NOT_NEGATIVE) + .appendLiteral(':') + .appendValue(SECOND_OF_MINUTE, 2, 2, SignStyle.NOT_NEGATIVE) + .append(TIME_ZONE_FORMATTER) + .toFormatter(Locale.ROOT); + + private static final CompoundDateTimeFormatter STRICT_TIME_NO_MILLIS = new CompoundDateTimeFormatter(STRICT_TIME_NO_MILLIS_FORMATTER); + + private static final DateTimeFormatter STRICT_T_TIME_NO_MILLIS_FORMATTER = new DateTimeFormatterBuilder() + .appendLiteral("T") + .append(STRICT_TIME_NO_MILLIS_FORMATTER) + .toFormatter(Locale.ROOT); + + private static final CompoundDateTimeFormatter STRICT_T_TIME_NO_MILLIS = + new CompoundDateTimeFormatter(STRICT_T_TIME_NO_MILLIS_FORMATTER); + + 
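// usage sketch, with names from this file: several patterns can be combined with "||", in which + // case parsing tries every candidate parser in order while printing always uses the first one: + // CompoundDateTimeFormatter formatter = forPattern("strict_date_optional_time||epoch_millis"); + // TemporalAccessor accessor = formatter.parse("2018-05-15T17:14:56.123Z"); // or "1526404496123" + // String printed = formatter.format(accessor); // printed with the strict_date_optional_time format + + 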
private static final CompoundDateTimeFormatter STRICT_WEEK_DATE = new CompoundDateTimeFormatter(DateTimeFormatter.ISO_WEEK_DATE); + + private static final CompoundDateTimeFormatter STRICT_WEEK_DATE_TIME_NO_MILLIS = new CompoundDateTimeFormatter( + new DateTimeFormatterBuilder() + .append(DateTimeFormatter.ISO_WEEK_DATE) + .append(STRICT_T_TIME_NO_MILLIS_FORMATTER) + .toFormatter(Locale.ROOT)); + + private static final CompoundDateTimeFormatter STRICT_WEEK_DATE_TIME = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() + .append(DateTimeFormatter.ISO_WEEK_DATE) + .append(STRICT_T_TIME_FORMATTER) + .toFormatter(Locale.ROOT)); + + private static final CompoundDateTimeFormatter STRICT_WEEKYEAR = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() + .appendValue(WeekFields.ISO.weekBasedYear(), 4, 10, SignStyle.EXCEEDS_PAD) + .toFormatter(Locale.ROOT)); + + private static final DateTimeFormatter STRICT_WEEKYEAR_WEEK_FORMATTER = new DateTimeFormatterBuilder() + .appendValue(WeekFields.ISO.weekBasedYear(), 4, 10, SignStyle.EXCEEDS_PAD) + .appendLiteral("-W") + .appendValue(WeekFields.ISO.weekOfWeekBasedYear(), 2, 2, SignStyle.NOT_NEGATIVE) + .toFormatter(Locale.ROOT); + + private static final CompoundDateTimeFormatter STRICT_WEEKYEAR_WEEK = new CompoundDateTimeFormatter(STRICT_WEEKYEAR_WEEK_FORMATTER); + + private static final CompoundDateTimeFormatter STRICT_WEEKYEAR_WEEK_DAY = new CompoundDateTimeFormatter(new DateTimeFormatterBuilder() + .append(STRICT_WEEKYEAR_WEEK_FORMATTER) + .appendLiteral("-") + .appendValue(WeekFields.ISO.dayOfWeek()) + .toFormatter(Locale.ROOT)); + + private static final CompoundDateTimeFormatter BASIC_ISO_DATE = new CompoundDateTimeFormatter(DateTimeFormatter.BASIC_ISO_DATE); + private static final CompoundDateTimeFormatter ISO_ORDINAL_DATE = new CompoundDateTimeFormatter(DateTimeFormatter.ISO_ORDINAL_DATE); + private static final CompoundDateTimeFormatter STRICT_DATE_HOUR_MINUTE_SECOND = + new CompoundDateTimeFormatter(DateTimeFormatter.ofPattern("yyyy-MM-dd'T'HH:mm:ss", Locale.ROOT)); + + public static CompoundDateTimeFormatter forPattern(String input) { + return forPattern(input, Locale.ROOT); + } + + public static CompoundDateTimeFormatter forPattern(String input, Locale locale) { + if (Strings.hasLength(input)) { + input = input.trim(); + } + if (input == null || input.length() == 0) { + throw new IllegalArgumentException("No date pattern provided"); + } + + if ("basicDate".equals(input) || "basic_date".equals(input)) { + return BASIC_ISO_DATE; + } else if ("basicDateTime".equals(input) || "basic_date_time".equals(input)) { + return BASIC_DATE_TIME; + } else if ("basicDateTimeNoMillis".equals(input) || "basic_date_time_no_millis".equals(input)) { + return BASIC_DATE_TIME_NO_MILLIS; + } else if ("basicOrdinalDate".equals(input) || "basic_ordinal_date".equals(input)) { + return BASIC_ORDINAL_DATE; + } else if ("basicOrdinalDateTime".equals(input) || "basic_ordinal_date_time".equals(input)) { + return BASIC_ORDINAL_DATE_TIME; + } else if ("basicOrdinalDateTimeNoMillis".equals(input) || "basic_ordinal_date_time_no_millis".equals(input)) { + return BASIC_ORDINAL_DATE_TIME_NO_MILLIS; + } else if ("basicTime".equals(input) || "basic_time".equals(input)) { + return BASIC_TIME; + } else if ("basicTimeNoMillis".equals(input) || "basic_time_no_millis".equals(input)) { + return BASIC_TIME_NO_MILLIS; + } else if ("basicTTime".equals(input) || "basic_t_time".equals(input)) { + return BASIC_T_TIME; + } else if ("basicTTimeNoMillis".equals(input) || 
"basic_t_time_no_millis".equals(input)) { + return BASIC_T_TIME_NO_MILLIS; + } else if ("basicWeekDate".equals(input) || "basic_week_date".equals(input)) { + return BASIC_WEEK_DATE; + } else if ("basicWeekDateTime".equals(input) || "basic_week_date_time".equals(input)) { + return BASIC_WEEK_DATE_TIME; + } else if ("basicWeekDateTimeNoMillis".equals(input) || "basic_week_date_time_no_millis".equals(input)) { + return BASIC_WEEK_DATE_TIME_NO_MILLIS; + } else if ("date".equals(input)) { + return DATE; + } else if ("dateHour".equals(input) || "date_hour".equals(input)) { + return DATE_HOUR; + } else if ("dateHourMinute".equals(input) || "date_hour_minute".equals(input)) { + return DATE_HOUR_MINUTE; + } else if ("dateHourMinuteSecond".equals(input) || "date_hour_minute_second".equals(input)) { + return DATE_HOUR_MINUTE_SECOND; + } else if ("dateHourMinuteSecondFraction".equals(input) || "date_hour_minute_second_fraction".equals(input)) { + return DATE_HOUR_MINUTE_SECOND_FRACTION; + } else if ("dateHourMinuteSecondMillis".equals(input) || "date_hour_minute_second_millis".equals(input)) { + return DATE_HOUR_MINUTE_SECOND_MILLIS; + } else if ("dateOptionalTime".equals(input) || "date_optional_time".equals(input)) { + return DATE_OPTIONAL_TIME; + } else if ("dateTime".equals(input) || "date_time".equals(input)) { + return DATE_TIME; + } else if ("dateTimeNoMillis".equals(input) || "date_time_no_millis".equals(input)) { + return DATE_TIME_NO_MILLIS; + } else if ("hour".equals(input)) { + return HOUR; + } else if ("hourMinute".equals(input) || "hour_minute".equals(input)) { + return HOUR_MINUTE; + } else if ("hourMinuteSecond".equals(input) || "hour_minute_second".equals(input)) { + return HOUR_MINUTE_SECOND; + } else if ("hourMinuteSecondFraction".equals(input) || "hour_minute_second_fraction".equals(input)) { + return HOUR_MINUTE_SECOND_MILLIS; + } else if ("hourMinuteSecondMillis".equals(input) || "hour_minute_second_millis".equals(input)) { + return HOUR_MINUTE_SECOND_MILLIS; + } else if ("ordinalDate".equals(input) || "ordinal_date".equals(input)) { + return ORDINAL_DATE; + } else if ("ordinalDateTime".equals(input) || "ordinal_date_time".equals(input)) { + return ORDINAL_DATE_TIME; + } else if ("ordinalDateTimeNoMillis".equals(input) || "ordinal_date_time_no_millis".equals(input)) { + return ORDINAL_DATE_TIME_NO_MILLIS; + } else if ("time".equals(input)) { + return TIME; + } else if ("timeNoMillis".equals(input) || "time_no_millis".equals(input)) { + return TIME_NO_MILLIS; + } else if ("tTime".equals(input) || "t_time".equals(input)) { + return T_TIME; + } else if ("tTimeNoMillis".equals(input) || "t_time_no_millis".equals(input)) { + return T_TIME_NO_MILLIS; + } else if ("weekDate".equals(input) || "week_date".equals(input)) { + return WEEK_DATE; + } else if ("weekDateTime".equals(input) || "week_date_time".equals(input)) { + return WEEK_DATE_TIME; + } else if ("weekDateTimeNoMillis".equals(input) || "week_date_time_no_millis".equals(input)) { + return WEEK_DATE_TIME_NO_MILLIS; + } else if ("weekyear".equals(input) || "week_year".equals(input)) { + return WEEK_YEAR; + } else if ("weekyearWeek".equals(input) || "weekyear_week".equals(input)) { + return WEEKYEAR_WEEK; + } else if ("weekyearWeekDay".equals(input) || "weekyear_week_day".equals(input)) { + return WEEKYEAR_WEEK_DAY; + } else if ("year".equals(input)) { + return YEAR; + } else if ("yearMonth".equals(input) || "year_month".equals(input)) { + return YEAR_MONTH; + } else if ("yearMonthDay".equals(input) || 
"year_month_day".equals(input)) { + return YEAR_MONTH_DAY; + } else if ("epoch_second".equals(input)) { + return EPOCH_SECOND; + } else if ("epoch_millis".equals(input)) { + return EPOCH_MILLIS; + // strict date formats here, must be at least 4 digits for year and two for months and two for day + } else if ("strictBasicWeekDate".equals(input) || "strict_basic_week_date".equals(input)) { + return STRICT_BASIC_WEEK_DATE; + } else if ("strictBasicWeekDateTime".equals(input) || "strict_basic_week_date_time".equals(input)) { + return STRICT_BASIC_WEEK_DATE_TIME; + } else if ("strictBasicWeekDateTimeNoMillis".equals(input) || "strict_basic_week_date_time_no_millis".equals(input)) { + return STRICT_BASIC_WEEK_DATE_TIME_NO_MILLIS; + } else if ("strictDate".equals(input) || "strict_date".equals(input)) { + return STRICT_DATE; + } else if ("strictDateHour".equals(input) || "strict_date_hour".equals(input)) { + return STRICT_DATE_HOUR; + } else if ("strictDateHourMinute".equals(input) || "strict_date_hour_minute".equals(input)) { + return STRICT_DATE_HOUR_MINUTE; + } else if ("strictDateHourMinuteSecond".equals(input) || "strict_date_hour_minute_second".equals(input)) { + return STRICT_DATE_HOUR_MINUTE_SECOND; + } else if ("strictDateHourMinuteSecondFraction".equals(input) || "strict_date_hour_minute_second_fraction".equals(input)) { + return STRICT_DATE_HOUR_MINUTE_SECOND_FRACTION; + } else if ("strictDateHourMinuteSecondMillis".equals(input) || "strict_date_hour_minute_second_millis".equals(input)) { + return STRICT_DATE_HOUR_MINUTE_SECOND_MILLIS; + } else if ("strictDateOptionalTime".equals(input) || "strict_date_optional_time".equals(input)) { + return STRICT_DATE_OPTIONAL_TIME; + } else if ("strictDateTime".equals(input) || "strict_date_time".equals(input)) { + return STRICT_DATE_TIME; + } else if ("strictDateTimeNoMillis".equals(input) || "strict_date_time_no_millis".equals(input)) { + return STRICT_DATE_TIME_NO_MILLIS; + } else if ("strictHour".equals(input) || "strict_hour".equals(input)) { + return STRICT_HOUR; + } else if ("strictHourMinute".equals(input) || "strict_hour_minute".equals(input)) { + return STRICT_HOUR_MINUTE; + } else if ("strictHourMinuteSecond".equals(input) || "strict_hour_minute_second".equals(input)) { + return STRICT_HOUR_MINUTE_SECOND; + } else if ("strictHourMinuteSecondFraction".equals(input) || "strict_hour_minute_second_fraction".equals(input)) { + return STRICT_HOUR_MINUTE_SECOND_FRACTION; + } else if ("strictHourMinuteSecondMillis".equals(input) || "strict_hour_minute_second_millis".equals(input)) { + return STRICT_HOUR_MINUTE_SECOND_MILLIS; + } else if ("strictOrdinalDate".equals(input) || "strict_ordinal_date".equals(input)) { + return ISO_ORDINAL_DATE; + } else if ("strictOrdinalDateTime".equals(input) || "strict_ordinal_date_time".equals(input)) { + return STRICT_ORDINAL_DATE_TIME; + } else if ("strictOrdinalDateTimeNoMillis".equals(input) || "strict_ordinal_date_time_no_millis".equals(input)) { + return STRICT_ORDINAL_DATE_TIME_NO_MILLIS; + } else if ("strictTime".equals(input) || "strict_time".equals(input)) { + return STRICT_TIME; + } else if ("strictTimeNoMillis".equals(input) || "strict_time_no_millis".equals(input)) { + return STRICT_TIME_NO_MILLIS; + } else if ("strictTTime".equals(input) || "strict_t_time".equals(input)) { + return STRICT_T_TIME; + } else if ("strictTTimeNoMillis".equals(input) || "strict_t_time_no_millis".equals(input)) { + return STRICT_T_TIME_NO_MILLIS; + } else if ("strictWeekDate".equals(input) || 
"strict_week_date".equals(input)) { + return STRICT_WEEK_DATE; + } else if ("strictWeekDateTime".equals(input) || "strict_week_date_time".equals(input)) { + return STRICT_WEEK_DATE_TIME; + } else if ("strictWeekDateTimeNoMillis".equals(input) || "strict_week_date_time_no_millis".equals(input)) { + return STRICT_WEEK_DATE_TIME_NO_MILLIS; + } else if ("strictWeekyear".equals(input) || "strict_weekyear".equals(input)) { + return STRICT_WEEKYEAR; + } else if ("strictWeekyearWeek".equals(input) || "strict_weekyear_week".equals(input)) { + return STRICT_WEEKYEAR_WEEK; + } else if ("strictWeekyearWeekDay".equals(input) || "strict_weekyear_week_day".equals(input)) { + return STRICT_WEEKYEAR_WEEK_DAY; + } else if ("strictYear".equals(input) || "strict_year".equals(input)) { + return STRICT_YEAR; + } else if ("strictYearMonth".equals(input) || "strict_year_month".equals(input)) { + return STRICT_YEAR_MONTH; + } else if ("strictYearMonthDay".equals(input) || "strict_year_month_day".equals(input)) { + return STRICT_YEAR_MONTH_DAY; + } else if (Strings.hasLength(input) && input.contains("||")) { + String[] formats = Strings.delimitedListToStringArray(input, "||"); + if (formats.length == 1) { + return forPattern(formats[0], locale); + } else { + Collection parsers = new LinkedHashSet<>(formats.length); + for (String format : formats) { + CompoundDateTimeFormatter dateTimeFormatter = forPattern(format, locale); + try { + parsers.addAll(Arrays.asList(dateTimeFormatter.parsers)); + } catch (IllegalArgumentException e) { + throw new IllegalArgumentException("Invalid format: [" + input + "]: " + e.getMessage(), e); + } + } + + return new CompoundDateTimeFormatter(parsers.toArray(new DateTimeFormatter[0])); + } + } else { + try { + return new CompoundDateTimeFormatter(new DateTimeFormatterBuilder().appendPattern(input).toFormatter(locale)); + } catch (IllegalArgumentException e) { + throw new IllegalArgumentException("Invalid format: [" + input + "]: " + e.getMessage(), e); + } + } + } + + private static final ZonedDateTime EPOCH_ZONED_DATE_TIME = Instant.EPOCH.atZone(ZoneOffset.UTC); + + public static ZonedDateTime toZonedDateTime(TemporalAccessor accessor) { + return toZonedDateTime(accessor, EPOCH_ZONED_DATE_TIME); + } + + public static ZonedDateTime toZonedDateTime(TemporalAccessor accessor, ZonedDateTime defaults) { + try { + return ZonedDateTime.from(accessor); + } catch (DateTimeException e ) { + } + + ZonedDateTime result = defaults; + + // special case epoch seconds + if (accessor.isSupported(ChronoField.INSTANT_SECONDS)) { + result = result.with(ChronoField.INSTANT_SECONDS, accessor.getLong(ChronoField.INSTANT_SECONDS)); + if (accessor.isSupported(ChronoField.NANO_OF_SECOND)) { + result = result.with(ChronoField.NANO_OF_SECOND, accessor.getLong(ChronoField.NANO_OF_SECOND)); + } + return result; + } + + // try to set current year + if (accessor.isSupported(ChronoField.YEAR)) { + result = result.with(ChronoField.YEAR, accessor.getLong(ChronoField.YEAR)); + } else if (accessor.isSupported(ChronoField.YEAR_OF_ERA)) { + result = result.with(ChronoField.YEAR_OF_ERA, accessor.getLong(ChronoField.YEAR_OF_ERA)); + } else if (accessor.isSupported(WeekFields.ISO.weekBasedYear())) { + if (accessor.isSupported(WeekFields.ISO.weekOfWeekBasedYear())) { + return LocalDate.from(result) + .with(WeekFields.ISO.weekBasedYear(), accessor.getLong(WeekFields.ISO.weekBasedYear())) + .withDayOfMonth(1) // makes this compatible with joda + .with(WeekFields.ISO.weekOfWeekBasedYear(), 
+ return LocalDate.from(result) + .with(WeekFields.ISO.weekBasedYear(), accessor.getLong(WeekFields.ISO.weekBasedYear())) + .withDayOfMonth(1) // makes this compatible with joda + .with(WeekFields.ISO.weekOfWeekBasedYear(), accessor.getLong(WeekFields.ISO.weekOfWeekBasedYear())) + .atStartOfDay(ZoneOffset.UTC); + } else { + return LocalDate.from(result) + .with(WeekFields.ISO.weekBasedYear(), accessor.getLong(WeekFields.ISO.weekBasedYear())) + // this adjuster exists solely to be BWC compatible with joda + .with(TemporalAdjusters.firstInMonth(DayOfWeek.MONDAY)) + .atStartOfDay(defaults.getZone()); + } + } else if (accessor.isSupported(IsoFields.WEEK_BASED_YEAR)) { + // special case weekbased year + result = result.with(IsoFields.WEEK_BASED_YEAR, accessor.getLong(IsoFields.WEEK_BASED_YEAR)); + if (accessor.isSupported(IsoFields.WEEK_OF_WEEK_BASED_YEAR)) { + result = result.with(IsoFields.WEEK_OF_WEEK_BASED_YEAR, accessor.getLong(IsoFields.WEEK_OF_WEEK_BASED_YEAR)); + } + return result; + } + + // month + if (accessor.isSupported(ChronoField.MONTH_OF_YEAR)) { + result = result.with(ChronoField.MONTH_OF_YEAR, accessor.getLong(ChronoField.MONTH_OF_YEAR)); + } + + // day of month + if (accessor.isSupported(ChronoField.DAY_OF_MONTH)) { + result = result.with(ChronoField.DAY_OF_MONTH, accessor.getLong(ChronoField.DAY_OF_MONTH)); + } + + // hour + if (accessor.isSupported(ChronoField.HOUR_OF_DAY)) { + result = result.with(ChronoField.HOUR_OF_DAY, accessor.getLong(ChronoField.HOUR_OF_DAY)); + } + + // minute + if (accessor.isSupported(ChronoField.MINUTE_OF_HOUR)) { + result = result.with(ChronoField.MINUTE_OF_HOUR, accessor.getLong(ChronoField.MINUTE_OF_HOUR)); + } + + // second + if (accessor.isSupported(ChronoField.SECOND_OF_MINUTE)) { + result = result.with(ChronoField.SECOND_OF_MINUTE, accessor.getLong(ChronoField.SECOND_OF_MINUTE)); + } + + if (accessor.isSupported(ChronoField.OFFSET_SECONDS)) { + result = result.withZoneSameLocal(ZoneOffset.ofTotalSeconds(accessor.get(ChronoField.OFFSET_SECONDS))); + } + + // millis + if (accessor.isSupported(ChronoField.MILLI_OF_SECOND)) { + result = result.with(ChronoField.MILLI_OF_SECOND, accessor.getLong(ChronoField.MILLI_OF_SECOND)); + } + + if (accessor.isSupported(ChronoField.NANO_OF_SECOND)) { + result = result.with(ChronoField.NANO_OF_SECOND, accessor.getLong(ChronoField.NANO_OF_SECOND)); + } + + return result; + } +} diff --git a/server/src/main/java/org/elasticsearch/index/analysis/ESSolrSynonymParser.java b/server/src/main/java/org/elasticsearch/index/analysis/ESSolrSynonymParser.java new file mode 100644 index 0000000000000..bcc249f8a8a51 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/index/analysis/ESSolrSynonymParser.java @@ -0,0 +1,68 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.analysis; + +import org.apache.logging.log4j.Logger; +import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.analysis.synonym.SolrSynonymParser; +import org.apache.lucene.util.CharsRef; +import org.apache.lucene.util.CharsRefBuilder; +import org.elasticsearch.common.logging.Loggers; + +import java.io.IOException; + +public class ESSolrSynonymParser extends SolrSynonymParser { + + private final boolean lenient; + private static final Logger logger = + Loggers.getLogger(ESSolrSynonymParser.class, "ESSolrSynonymParser"); + + public ESSolrSynonymParser(boolean dedup, boolean expand, boolean lenient, Analyzer analyzer) { + super(dedup, expand, analyzer); + this.lenient = lenient; + } + + @Override + public void add(CharsRef input, CharsRef output, boolean includeOrig) { + // This check complements the overridden analyze method below. When lenient is true and + // super.analyze throws, a zero-length CharsRef is returned for the offending word; here we + // skip every mapping whose input or output was emptied that way, i.e. with lenient set only + // non-zero-length combinations are added, and an empty input or output is quietly ignored. + // For the surrounding control flow see SolrSynonymParser::addInternal. + if (lenient == false || (input.length > 0 && output.length > 0)) { + super.add(input, output, includeOrig); + } + } + + @Override + public CharsRef analyze(String text, CharsRefBuilder reuse) throws IOException { + try { + return super.analyze(text, reuse); + } catch (IllegalArgumentException ex) { + if (lenient) { + logger.info("Synonym rule for [" + text + "] was ignored"); + return new CharsRef(""); + } else { + throw ex; + } + } + } +}
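+ + // behaviour sketch: with lenient set, a rule such as "foo, bar => baz" whose terms are all + // removed by the analyzer (for instance a stop-word-only rule) is logged at INFO and dropped + // rather than aborting the synonym map build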
+ */ + +package org.elasticsearch.index.analysis; + +import org.apache.logging.log4j.Logger; +import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.analysis.synonym.WordnetSynonymParser; +import org.apache.lucene.util.CharsRef; +import org.apache.lucene.util.CharsRefBuilder; +import org.elasticsearch.common.logging.Loggers; + +import java.io.IOException; + +public class ESWordnetSynonymParser extends WordnetSynonymParser { + + private final boolean lenient; + private static final Logger logger = + Loggers.getLogger(ESWordnetSynonymParser.class, "ESWordnetSynonymParser"); + + public ESWordnetSynonymParser(boolean dedup, boolean expand, boolean lenient, Analyzer analyzer) { + super(dedup, expand, analyzer); + this.lenient = lenient; + } + + @Override + public void add(CharsRef input, CharsRef output, boolean includeOrig) { + // This condition follows up on the overridden analyze method. In case lenient was set to true and there was an + // exception during super.analyze we return a zero-length CharsRef for that word which caused an exception. When + // the synonym mappings for the words are added using the add method we skip the ones that were left empty by + // analyze i.e., in the case when lenient is set we only add those combinations which are non-zero-length. The + // else would happen only in the case when the input or output is empty and lenient is set, in which case we + // quietly ignore it. For more details on the control-flow see WordnetSynonymParser::parseSynonym. + if (lenient == false || (input.length > 0 && output.length > 0)) { + super.add(input, output, includeOrig); + } + } + + @Override + public CharsRef analyze(String text, CharsRefBuilder reuse) throws IOException { + try { + return super.analyze(text, reuse); + } catch (IllegalArgumentException ex) { + if (lenient) { + logger.info("Synonym rule for [" + text + "] was ignored"); + return new CharsRef(""); + } else { + throw ex; + } + } + } +} diff --git a/server/src/main/java/org/elasticsearch/index/analysis/SynonymGraphTokenFilterFactory.java b/server/src/main/java/org/elasticsearch/index/analysis/SynonymGraphTokenFilterFactory.java index 2f7964f63d632..24dcb6d33fe84 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/SynonymGraphTokenFilterFactory.java +++ b/server/src/main/java/org/elasticsearch/index/analysis/SynonymGraphTokenFilterFactory.java @@ -21,10 +21,8 @@ import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.TokenStream; -import org.apache.lucene.analysis.synonym.SolrSynonymParser; import org.apache.lucene.analysis.synonym.SynonymGraphFilter; import org.apache.lucene.analysis.synonym.SynonymMap; -import org.apache.lucene.analysis.synonym.WordnetSynonymParser; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; @@ -58,11 +56,11 @@ public Factory(String name, final Analyzer analyzerForParseSynonym, Reader rules try { SynonymMap.Builder parser; if ("wordnet".equalsIgnoreCase(format)) { - parser = new WordnetSynonymParser(true, expand, analyzerForParseSynonym); - ((WordnetSynonymParser) parser).parse(rulesReader); + parser = new ESWordnetSynonymParser(true, expand, lenient, analyzerForParseSynonym); + ((ESWordnetSynonymParser) parser).parse(rulesReader); } else { - parser = new SolrSynonymParser(true, expand, analyzerForParseSynonym); - ((SolrSynonymParser) parser).parse(rulesReader); + parser = new ESSolrSynonymParser(true, expand, lenient, analyzerForParseSynonym); +
((ESSolrSynonymParser) parser).parse(rulesReader); } synonymMap = parser.build(); } catch (Exception e) { diff --git a/server/src/main/java/org/elasticsearch/index/analysis/SynonymTokenFilterFactory.java b/server/src/main/java/org/elasticsearch/index/analysis/SynonymTokenFilterFactory.java index 56bae57198829..61c9aba7a3eaf 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/SynonymTokenFilterFactory.java +++ b/server/src/main/java/org/elasticsearch/index/analysis/SynonymTokenFilterFactory.java @@ -21,10 +21,8 @@ import org.apache.lucene.analysis.Analyzer; import org.apache.lucene.analysis.TokenStream; -import org.apache.lucene.analysis.synonym.SolrSynonymParser; import org.apache.lucene.analysis.synonym.SynonymFilter; import org.apache.lucene.analysis.synonym.SynonymMap; -import org.apache.lucene.analysis.synonym.WordnetSynonymParser; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; import org.elasticsearch.index.IndexSettings; @@ -38,6 +36,7 @@ public class SynonymTokenFilterFactory extends AbstractTokenFilterFactory { protected final String format; protected final boolean expand; + protected final boolean lenient; protected final Settings settings; public SynonymTokenFilterFactory(IndexSettings indexSettings, Environment env, AnalysisRegistry analysisRegistry, @@ -52,6 +51,7 @@ public SynonymTokenFilterFactory(IndexSettings indexSettings, Environment env, A } this.expand = settings.getAsBoolean("expand", true); + this.lenient = settings.getAsBoolean("lenient", false); this.format = settings.get("format", ""); } @@ -93,11 +93,11 @@ public Factory(String name, Analyzer analyzerForParseSynonym, Reader rulesReader try { SynonymMap.Builder parser; if ("wordnet".equalsIgnoreCase(format)) { - parser = new WordnetSynonymParser(true, expand, analyzerForParseSynonym); - ((WordnetSynonymParser) parser).parse(rulesReader); + parser = new ESWordnetSynonymParser(true, expand, lenient, analyzerForParseSynonym); + ((ESWordnetSynonymParser) parser).parse(rulesReader); } else { - parser = new SolrSynonymParser(true, expand, analyzerForParseSynonym); - ((SolrSynonymParser) parser).parse(rulesReader); + parser = new ESSolrSynonymParser(true, expand, lenient, analyzerForParseSynonym); + ((ESSolrSynonymParser) parser).parse(rulesReader); } synonymMap = parser.build(); } catch (Exception e) {
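Both factories now thread the new lenient index setting (read above via settings.getAsBoolean("lenient", false)) into the ES-specific parsers. A minimal sketch of what the flag changes when driving the Solr-format parser directly; the analyzer choice and rule text are illustrative, and the checked exceptions of parse() are left to the caller:

    Analyzer analyzer = new StandardAnalyzer(StandardAnalyzer.STOP_WORDS_SET);
    ESSolrSynonymParser parser = new ESSolrSynonymParser(true, true, true, analyzer);
    // "the, a" analyzes to zero-length tokens; with lenient=true the rule is logged and skipped
    // instead of aborting the whole synonym set with an IllegalArgumentException
    parser.parse(new StringReader("the, a\nfoo, bar"));
    SynonymMap synonyms = parser.build();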
" + + "Use doc[].size()==0 to check if a document is missing a field!"); + } return 0L; } return values[0]; @@ -167,6 +172,10 @@ public Dates(SortedNumericDocValues in) { */ public ReadableDateTime getValue() { if (count == 0) { + if (ScriptModule.EXCEPTION_FOR_MISSING_VALUE) { + throw new IllegalStateException("A document doesn't have a value for a field! " + + "Use doc[].size()==0 to check if a document is missing a field!"); + } return EPOCH; } return get(0); @@ -268,6 +277,10 @@ public SortedNumericDoubleValues getInternalValues() { public double getValue() { if (count == 0) { + if (ScriptModule.EXCEPTION_FOR_MISSING_VALUE) { + throw new IllegalStateException("A document doesn't have a value for a field! " + + "Use doc[].size()==0 to check if a document is missing a field!"); + } return 0d; } return values[0]; @@ -324,6 +337,10 @@ protected void resize(int newSize) { public GeoPoint getValue() { if (count == 0) { + if (ScriptModule.EXCEPTION_FOR_MISSING_VALUE) { + throw new IllegalStateException("A document doesn't have a value for a field! " + + "Use doc[].size()==0 to check if a document is missing a field!"); + } return null; } return values[0]; @@ -436,7 +453,14 @@ protected void resize(int newSize) { } public boolean getValue() { - return count != 0 && values[0]; + if (count == 0) { + if (ScriptModule.EXCEPTION_FOR_MISSING_VALUE) { + throw new IllegalStateException("A document doesn't have a value for a field! " + + "Use doc[].size()==0 to check if a document is missing a field!"); + } + return false; + } + return values[0]; } @Override @@ -519,7 +543,14 @@ public String get(int index) { } public String getValue() { - return count == 0 ? null : get(0); + if (count == 0) { + if (ScriptModule.EXCEPTION_FOR_MISSING_VALUE) { + throw new IllegalStateException("A document doesn't have a value for a field! " + + "Use doc[].size()==0 to check if a document is missing a field!"); + } + return null; + } + return get(0); } } @@ -540,7 +571,14 @@ public BytesRef get(int index) { } public BytesRef getValue() { - return count == 0 ? new BytesRef() : get(0); + if (count == 0) { + if (ScriptModule.EXCEPTION_FOR_MISSING_VALUE) { + throw new IllegalStateException("A document doesn't have a value for a field! 
" + + "Use doc[].size()==0 to check if a document is missing a field!"); + } + return new BytesRef(); + } + return get(0); } } diff --git a/server/src/main/java/org/elasticsearch/monitor/jvm/HotThreads.java b/server/src/main/java/org/elasticsearch/monitor/jvm/HotThreads.java index 3b6415437f97c..10a3e81163ab6 100644 --- a/server/src/main/java/org/elasticsearch/monitor/jvm/HotThreads.java +++ b/server/src/main/java/org/elasticsearch/monitor/jvm/HotThreads.java @@ -21,13 +21,15 @@ import org.apache.lucene.util.CollectionUtil; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.common.joda.FormatDateTimeFormatter; -import org.elasticsearch.common.joda.Joda; +import org.elasticsearch.common.time.CompoundDateTimeFormatter; +import org.elasticsearch.common.time.DateFormatters; import org.elasticsearch.common.unit.TimeValue; import java.lang.management.ManagementFactory; import java.lang.management.ThreadInfo; import java.lang.management.ThreadMXBean; +import java.time.Clock; +import java.time.LocalDateTime; import java.util.ArrayList; import java.util.Comparator; import java.util.HashMap; @@ -41,7 +43,7 @@ public class HotThreads { private static final Object mutex = new Object(); - private static final FormatDateTimeFormatter DATE_TIME_FORMATTER = Joda.forPattern("dateOptionalTime"); + private static final CompoundDateTimeFormatter DATE_TIME_FORMATTER = DateFormatters.forPattern("dateOptionalTime"); private int busiestThreads = 3; private TimeValue interval = new TimeValue(500, TimeUnit.MILLISECONDS); @@ -136,7 +138,7 @@ private String innerDetect() throws Exception { StringBuilder sb = new StringBuilder(); sb.append("Hot threads at "); - sb.append(DATE_TIME_FORMATTER.printer().print(System.currentTimeMillis())); + sb.append(DATE_TIME_FORMATTER.format(LocalDateTime.now(Clock.systemUTC()))); sb.append(", interval="); sb.append(interval); sb.append(", busiestThreads="); diff --git a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndexPutAliasAction.java b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndexPutAliasAction.java index 73e9bad45a57b..6091994db96b7 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndexPutAliasAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/admin/indices/RestIndexPutAliasAction.java @@ -66,6 +66,7 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC String routing = null; String indexRouting = null; String searchRouting = null; + Boolean writeIndex = null; if (request.hasContent()) { try (XContentParser parser = request.contentParser()) { @@ -90,6 +91,8 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC } else if ("searchRouting".equals(currentFieldName) || "search-routing".equals(currentFieldName) || "search_routing".equals(currentFieldName)) { searchRouting = parser.textOrNull(); + } else if ("is_write_index".equals(currentFieldName)) { + writeIndex = parser.booleanValue(); } } else if (token == XContentParser.Token.START_OBJECT) { if ("filter".equals(currentFieldName)) { @@ -117,6 +120,9 @@ public RestChannelConsumer prepareRequest(final RestRequest request, final NodeC if (filter != null) { aliasAction.filter(filter); } + if (writeIndex != null) { + aliasAction.writeIndex(writeIndex); + } indicesAliasesRequest.addAliasAction(aliasAction); return channel -> client.admin().indices().aliases(indicesAliasesRequest, new RestToXContentListener<>(channel)); } diff --git 
diff --git a/server/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java b/server/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java index 52da10a378576..3a76c7ca0c952 100644 --- a/server/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java +++ b/server/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java @@ -45,9 +45,10 @@ import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.RestActionListener; import org.elasticsearch.rest.action.RestResponseListener; -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; +import java.time.Instant; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; import java.util.Arrays; import java.util.Collections; import java.util.HashSet; @@ -379,7 +380,7 @@ Table buildTable(RestRequest request, Index[] indices, ClusterHealthResponse res table.addCell(primaryStats.getDocs() == null ? null : primaryStats.getDocs().getDeleted()); table.addCell(indexMetaData.getCreationDate()); - table.addCell(new DateTime(indexMetaData.getCreationDate(), DateTimeZone.UTC)); + table.addCell(ZonedDateTime.ofInstant(Instant.ofEpochMilli(indexMetaData.getCreationDate()), ZoneOffset.UTC)); table.addCell(totalStats.getStore() == null ? null : totalStats.getStore().size()); table.addCell(primaryStats.getStore() == null ? null : primaryStats.getStore().size()); diff --git a/server/src/main/java/org/elasticsearch/script/ScriptModule.java b/server/src/main/java/org/elasticsearch/script/ScriptModule.java index f0e075eac7d93..042953117c5a5 100644 --- a/server/src/main/java/org/elasticsearch/script/ScriptModule.java +++ b/server/src/main/java/org/elasticsearch/script/ScriptModule.java @@ -31,6 +31,9 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.plugins.ScriptPlugin; import org.elasticsearch.search.aggregations.pipeline.movfn.MovingFunctionScript; +import org.elasticsearch.common.Booleans; +import org.elasticsearch.common.logging.DeprecationLogger; +import org.elasticsearch.common.logging.Loggers; /** * Manages building {@link ScriptService}. @@ -61,6 +64,11 @@ public class ScriptModule { ).collect(Collectors.toMap(c -> c.name, Function.identity())); } + public static final boolean EXCEPTION_FOR_MISSING_VALUE = + Booleans.parseBoolean(System.getProperty("es.scripting.exception_for_missing_value", "false")); + + private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(Loggers.getLogger(ScriptModule.class)); + private final ScriptService scriptService; public ScriptModule(Settings settings, List<ScriptPlugin> scriptPlugins) { @@ -84,6 +92,10 @@ public ScriptModule(Settings settings, List<ScriptPlugin> scriptPlugins) { } } } + if (EXCEPTION_FOR_MISSING_VALUE == false) { + DEPRECATION_LOGGER.deprecated("Script: returning default values for missing document values is deprecated. " + + "Set system property '-Des.scripting.exception_for_missing_value=true' " + + "to make behaviour compatible with future major versions."); + } scriptService = new ScriptService(settings, Collections.unmodifiableMap(engines), Collections.unmodifiableMap(contexts)); }
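EXCEPTION_FOR_MISSING_VALUE is read once from a JVM system property at class-load time, so it is a node-wide, opt-in switch rather than an index or cluster setting. A sketch of what it changes for script authors (the field name is invented):

    // started with: -Des.scripting.exception_for_missing_value=true
    // doc['price'].value on a document without the field now throws IllegalStateException
    // instead of silently returning a type-dependent default (0L, 0d, false, EPOCH, null, "");
    // the guard the exception message recommends:
    String script = "if (doc['price'].size() == 0) { return 0; } return doc['price'].value";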
" + + "Set system property '-Des.scripting.exception_for_missing_value=true' " + + "to make behaviour compatible with future major versions."); scriptService = new ScriptService(settings, Collections.unmodifiableMap(engines), Collections.unmodifiableMap(contexts)); } diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovingFunctions.java b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovingFunctions.java index c38ce27cd8ad2..020189d461935 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovingFunctions.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovingFunctions.java @@ -85,7 +85,7 @@ public static double unweightedAvg(double[] values) { * The average is based on the count of non-null, non-NaN values. */ public static double stdDev(double[] values, double avg) { - if (avg == Double.NaN) { + if (Double.isNaN(avg)) { return Double.NaN; } else { long count = 0; diff --git a/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMappings.java b/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMappings.java index 961d7fd9f59a7..3c0f0e80cebdb 100644 --- a/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMappings.java +++ b/server/src/main/java/org/elasticsearch/search/suggest/completion/context/ContextMappings.java @@ -25,8 +25,6 @@ import org.apache.lucene.util.CharsRefBuilder; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.Version; -import org.elasticsearch.common.logging.DeprecationLogger; -import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.index.mapper.CompletionFieldMapper; @@ -54,9 +52,6 @@ */ public class ContextMappings implements ToXContent { - private static final DeprecationLogger DEPRECATION_LOGGER = - new DeprecationLogger(Loggers.getLogger(ContextMappings.class)); - private final List> contextMappings; private final Map> contextNameMap; @@ -124,7 +119,7 @@ private class TypedContextField extends ContextSuggestField { private final ParseContext.Document document; TypedContextField(String name, String value, int weight, Map> contexts, - ParseContext.Document document) { + ParseContext.Document document) { super(name, value, weight); this.contexts = contexts; this.document = document; @@ -150,8 +145,7 @@ protected Iterable contexts() { } } if (typedContexts.isEmpty()) { - DEPRECATION_LOGGER.deprecated("The ability to index a suggestion with no context on a context enabled completion field" + - " is deprecated and will be removed in the next major release."); + throw new IllegalArgumentException("Contexts are mandatory in context enabled completion field [" + name + "]"); } return typedContexts; } @@ -186,8 +180,7 @@ public ContextQuery toContextQuery(CompletionQuery query, Map, ToXContent, public static final String CONTEXT_MODE_PARAM = "context_mode"; public static final String CONTEXT_MODE_SNAPSHOT = "SNAPSHOT"; - private static final FormatDateTimeFormatter DATE_TIME_FORMATTER = Joda.forPattern("strictDateOptionalTime"); + private static final CompoundDateTimeFormatter DATE_TIME_FORMATTER = DateFormatters.forPattern("strictDateOptionalTime"); private static final String SNAPSHOT = "snapshot"; private static final String UUID = "uuid"; private static final String INDICES = "indices"; @@ -530,11 
@@ -530,11 +532,11 @@ public XContentBuilder toXContent(final XContentBuilder builder, final Params pa builder.field(REASON, reason); } if (verbose || startTime != 0) { - builder.field(START_TIME, DATE_TIME_FORMATTER.printer().print(startTime)); + builder.field(START_TIME, DATE_TIME_FORMATTER.format(Instant.ofEpochMilli(startTime).atZone(ZoneOffset.UTC))); builder.field(START_TIME_IN_MILLIS, startTime); } if (verbose || endTime != 0) { - builder.field(END_TIME, DATE_TIME_FORMATTER.printer().print(endTime)); + builder.field(END_TIME, DATE_TIME_FORMATTER.format(Instant.ofEpochMilli(endTime).atZone(ZoneOffset.UTC))); builder.field(END_TIME_IN_MILLIS, endTime); builder.humanReadableField(DURATION_IN_MILLIS, DURATION, new TimeValue(endTime - startTime)); }
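The SnapshotInfo hunk shows the joda-to-java.time migration pattern that recurs throughout this change: CompoundDateTimeFormatter formats temporal objects rather than raw epoch millis, so stored millis are lifted to a ZonedDateTime first. A minimal standalone sketch (the millis value is invented):

    CompoundDateTimeFormatter formatter = DateFormatters.forPattern("strictDateOptionalTime");
    long startTime = 1526380800000L;
    // the joda equivalent was: formatter.printer().print(startTime)
    String formatted = formatter.format(Instant.ofEpochMilli(startTime).atZone(ZoneOffset.UTC));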
diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexShardStatusTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexShardStatusTests.java new file mode 100644 index 0000000000000..490319ef84074 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexShardStatusTests.java @@ -0,0 +1,70 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.admin.cluster.snapshots.status; + +import java.io.IOException; +import java.util.function.Predicate; + +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentParserUtils; +import org.elasticsearch.index.Index; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.test.AbstractXContentTestCase; + +public class SnapshotIndexShardStatusTests extends AbstractXContentTestCase<SnapshotIndexShardStatus> { + + @Override + protected SnapshotIndexShardStatus createTestInstance() { + return createForIndex(randomAlphaOfLength(10)); + } + + protected SnapshotIndexShardStatus createForIndex(String indexName) { + ShardId shardId = new ShardId(new Index(indexName, IndexMetaData.INDEX_UUID_NA_VALUE), randomIntBetween(0, 500)); + SnapshotIndexShardStage stage = randomFrom(SnapshotIndexShardStage.values()); + SnapshotStats stats = new SnapshotStatsTests().createTestInstance(); + String nodeId = randomAlphaOfLength(20); + String failure = null; + if (rarely()) { + failure = randomAlphaOfLength(200); + } + return new SnapshotIndexShardStatus(shardId, stage, stats, nodeId, failure); + } + + @Override + protected Predicate<String> getRandomFieldsExcludeFilter() { + // Do not place random fields in the root object since its fields correspond to shard names. + return String::isEmpty; + } + + @Override + protected SnapshotIndexShardStatus doParseInstance(XContentParser parser) throws IOException { + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation); + XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.nextToken(), parser::getTokenLocation); + SnapshotIndexShardStatus status = SnapshotIndexShardStatus.fromXContent(parser, parser.currentName()); + XContentParserUtils.ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.nextToken(), parser::getTokenLocation); + return status; + } + + @Override + protected boolean supportsUnknownFields() { + return true; + } +} diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexStatusTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexStatusTests.java new file mode 100644 index 0000000000000..92eb355f3a621 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotIndexStatusTests.java @@ -0,0 +1,64 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.admin.cluster.snapshots.status; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.function.Predicate; + +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentParserUtils; +import org.elasticsearch.test.AbstractXContentTestCase; + + +public class SnapshotIndexStatusTests extends AbstractXContentTestCase<SnapshotIndexStatus> { + + @Override + protected SnapshotIndexStatus createTestInstance() { + String index = randomAlphaOfLength(10); + List<SnapshotIndexShardStatus> shardStatuses = new ArrayList<>(); + SnapshotIndexShardStatusTests builder = new SnapshotIndexShardStatusTests(); + for (int idx = 0; idx < randomIntBetween(0, 10); idx++) { + shardStatuses.add(builder.createForIndex(index)); + } + return new SnapshotIndexStatus(index, shardStatuses); + } + + @Override + protected Predicate<String> getRandomFieldsExcludeFilter() { + // Do not place random fields in the root object or the shards field since their fields correspond to names.
+ return (s) -> s.isEmpty() || s.endsWith("shards"); + } + + @Override + protected SnapshotIndexStatus doParseInstance(XContentParser parser) throws IOException { + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser::getTokenLocation); + XContentParserUtils.ensureExpectedToken(XContentParser.Token.FIELD_NAME, parser.nextToken(), parser::getTokenLocation); + SnapshotIndexStatus status = SnapshotIndexStatus.fromXContent(parser); + XContentParserUtils.ensureExpectedToken(XContentParser.Token.END_OBJECT, parser.nextToken(), parser::getTokenLocation); + return status; + } + + @Override + protected boolean supportsUnknownFields() { + return true; + } +} diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotShardsStatsTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotShardsStatsTests.java new file mode 100644 index 0000000000000..ac00896983d14 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotShardsStatsTests.java @@ -0,0 +1,49 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.admin.cluster.snapshots.status; + +import java.io.IOException; + +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractXContentTestCase; + +public class SnapshotShardsStatsTests extends AbstractXContentTestCase<SnapshotShardsStats> { + + @Override + protected SnapshotShardsStats createTestInstance() { + int initializingShards = randomInt(); + int startedShards = randomInt(); + int finalizingShards = randomInt(); + int doneShards = randomInt(); + int failedShards = randomInt(); + int totalShards = randomInt(); + return new SnapshotShardsStats(initializingShards, startedShards, finalizingShards, doneShards, failedShards, totalShards); + } + + @Override + protected SnapshotShardsStats doParseInstance(XContentParser parser) throws IOException { + return SnapshotShardsStats.fromXContent(parser); + } + + @Override + protected boolean supportsUnknownFields() { + return true; + } +} diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatsTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatsTests.java new file mode 100644 index 0000000000000..2822a9661fd15 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatsTests.java @@ -0,0 +1,52 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership.
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.admin.cluster.snapshots.status; + +import java.io.IOException; + +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractXContentTestCase; + +public class SnapshotStatsTests extends AbstractXContentTestCase<SnapshotStats> { + + @Override + protected SnapshotStats createTestInstance() { + long startTime = randomNonNegativeLong(); + long time = randomNonNegativeLong(); + int incrementalFileCount = randomIntBetween(0, Integer.MAX_VALUE); + int totalFileCount = randomIntBetween(0, Integer.MAX_VALUE); + int processedFileCount = randomIntBetween(0, Integer.MAX_VALUE); + long incrementalSize = ((long)randomIntBetween(0, Integer.MAX_VALUE)) * 2; + long totalSize = ((long)randomIntBetween(0, Integer.MAX_VALUE)) * 2; + long processedSize = ((long)randomIntBetween(0, Integer.MAX_VALUE)) * 2; + return new SnapshotStats(startTime, time, incrementalFileCount, totalFileCount, + processedFileCount, incrementalSize, totalSize, processedSize); + } + + @Override + protected SnapshotStats doParseInstance(XContentParser parser) throws IOException { + return SnapshotStats.fromXContent(parser); + } + + @Override + protected boolean supportsUnknownFields() { + return true; + } +} diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatusTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatusTests.java index 3ece0f9f1072f..dbd45640c7b69 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatusTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotStatusTests.java @@ -21,16 +21,19 @@ import org.elasticsearch.cluster.SnapshotsInProgress; import org.elasticsearch.common.UUIDs; +import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.snapshots.Snapshot; import org.elasticsearch.snapshots.SnapshotId; -import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.test.AbstractXContentTestCase; +import java.io.IOException; import java.util.ArrayList; import java.util.List; +import java.util.function.Predicate; -public class SnapshotStatusTests extends ESTestCase { +public class SnapshotStatusTests extends AbstractXContentTestCase<SnapshotStatus> { public void testToString() throws Exception { @@ -146,4 +149,39 @@ public void testToString() throws Exception { "}"; assertEquals(expected, status.toString()); } + + @Override + protected SnapshotStatus createTestInstance() { + SnapshotsInProgress.State state = randomFrom(SnapshotsInProgress.State.values()); + String uuid = UUIDs.randomBase64UUID(); + SnapshotId id = new SnapshotId("test-snap", uuid); + Snapshot snapshot = new Snapshot("test-repo", id); + + SnapshotIndexShardStatusTests builder = new SnapshotIndexShardStatusTests(); + builder.createTestInstance(); + + List<SnapshotIndexShardStatus>
snapshotIndexShardStatuses = new ArrayList<>(); + for (int idx = 0; idx < randomIntBetween(0, 10); idx++) { + SnapshotIndexShardStatus snapshotIndexShardStatus = builder.createTestInstance(); + snapshotIndexShardStatuses.add(snapshotIndexShardStatus); + } + boolean includeGlobalState = randomBoolean(); + return new SnapshotStatus(snapshot, state, snapshotIndexShardStatuses, includeGlobalState); + } + + @Override + protected Predicate<String> getRandomFieldsExcludeFilter() { + // Do not place random fields in the indices field or shards field since their fields correspond to names. + return (s) -> s.endsWith("shards") || s.endsWith("indices"); + } + + @Override + protected SnapshotStatus doParseInstance(XContentParser parser) throws IOException { + return SnapshotStatus.fromXContent(parser); + } + + @Override + protected boolean supportsUnknownFields() { + return true; + } } diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusResponseTests.java new file mode 100644 index 0000000000000..d1ad028296ddb --- /dev/null +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/status/SnapshotsStatusResponseTests.java @@ -0,0 +1,57 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.action.admin.cluster.snapshots.status; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.function.Predicate; + +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.test.AbstractXContentTestCase; + +public class SnapshotsStatusResponseTests extends AbstractXContentTestCase<SnapshotsStatusResponse> { + + @Override + protected SnapshotsStatusResponse doParseInstance(XContentParser parser) throws IOException { + return SnapshotsStatusResponse.fromXContent(parser); + } + + @Override + protected Predicate<String> getRandomFieldsExcludeFilter() { + // Do not place random fields in the indices field or shards field since their fields correspond to names.
+ return (s) -> s.endsWith("shards") || s.endsWith("indices"); + } + + @Override + protected boolean supportsUnknownFields() { + return true; + } + + @Override + protected SnapshotsStatusResponse createTestInstance() { + SnapshotStatusTests statusBuilder = new SnapshotStatusTests(); + List<SnapshotStatus> snapshotStatuses = new ArrayList<>(); + for (int idx = 0; idx < randomIntBetween(0, 5); idx++) { + snapshotStatuses.add(statusBuilder.createTestInstance()); + } + return new SnapshotsStatusResponse(snapshotStatuses); + } +} diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java index 4d86dbbc51f33..5379769e819dc 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/rollover/RolloverIT.java @@ -25,16 +25,16 @@ import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.time.DateFormatters; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.plugins.Plugin; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.InternalSettingsPlugin; -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; -import org.joda.time.format.DateTimeFormat; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; import java.util.Collection; import java.util.Collections; import java.util.List; @@ -197,8 +197,8 @@ public void testRolloverOnExistingIndex() throws Exception { } public void testRolloverWithDateMath() { - DateTime now = new DateTime(DateTimeZone.UTC); - String index = "test-" + DateTimeFormat.forPattern("YYYY.MM.dd").print(now) + "-1"; + ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC); + String index = "test-" + DateFormatters.forPattern("YYYY.MM.dd").format(now) + "-1"; String dateMathExp = "<test-{now/d}-1>"; assertAcked(prepareCreate(dateMathExp).addAlias(new Alias("test_alias")).get()); ensureGreen(index); @@ -212,14 +212,14 @@ public void testRolloverWithDateMath() { ensureGreen(index); RolloverResponse response = client().admin().indices().prepareRolloverIndex("test_alias").get(); assertThat(response.getOldIndex(), equalTo(index)); - assertThat(response.getNewIndex(), equalTo("test-" + DateTimeFormat.forPattern("YYYY.MM").print(now) + "-000002")); + assertThat(response.getNewIndex(), equalTo("test-" + DateFormatters.forPattern("YYYY.MM").format(now) + "-000002")); assertThat(response.isDryRun(), equalTo(false)); assertThat(response.isRolledOver(), equalTo(true)); assertThat(response.getConditionStatus().size(), equalTo(0)); response = client().admin().indices().prepareRolloverIndex("test_alias").get(); - assertThat(response.getOldIndex(), equalTo("test-" + DateTimeFormat.forPattern("YYYY.MM").print(now) + "-000002")); - assertThat(response.getNewIndex(), equalTo("test-" + DateTimeFormat.forPattern("YYYY.MM").print(now) + "-000003")); + assertThat(response.getOldIndex(), equalTo("test-" + DateFormatters.forPattern("YYYY.MM").format(now) + "-000002")); + assertThat(response.getNewIndex(), equalTo("test-" + DateFormatters.forPattern("YYYY.MM").format(now) + "-000003")); assertThat(response.isDryRun(), equalTo(false)); assertThat(response.isRolledOver(), equalTo(true));
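// note on the date-math names in this test: "<test-{now/d}-1>" resolves against the current UTC
// time when the index is created or rolled over, which is why the expected names are rebuilt with
// the same date patterns via DateFormatters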
assertThat(response.getConditionStatus().size(), equalTo(0)); @@ -232,8 +232,8 @@ public void testRolloverWithDateMath() { IndexMetaData.SETTING_INDEX_PROVIDED_NAME)); response = client().admin().indices().prepareRolloverIndex("test_alias").setNewIndexName("<test-{now/d}-000004>").get(); - assertThat(response.getOldIndex(), equalTo("test-" + DateTimeFormat.forPattern("YYYY.MM").print(now) + "-000003")); - assertThat(response.getNewIndex(), equalTo("test-" + DateTimeFormat.forPattern("YYYY.MM.dd").print(now) + "-000004")); + assertThat(response.getOldIndex(), equalTo("test-" + DateFormatters.forPattern("YYYY.MM").format(now) + "-000003")); + assertThat(response.getNewIndex(), equalTo("test-" + DateFormatters.forPattern("YYYY.MM.dd").format(now) + "-000004")); assertThat(response.isDryRun(), equalTo(false)); assertThat(response.isRolledOver(), equalTo(true)); assertThat(response.getConditionStatus().size(), equalTo(0)); diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/HumanReadableIndexSettingsTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/HumanReadableIndexSettingsTests.java index 9be087e0e5dbc..83c615e48822a 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/HumanReadableIndexSettingsTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/HumanReadableIndexSettingsTests.java @@ -22,8 +22,10 @@ import org.elasticsearch.Version; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.ESTestCase; -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; + +import java.time.Instant; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; import static org.elasticsearch.test.VersionUtils.randomVersion; @@ -42,6 +44,7 @@ public void testHumanReadableSettings() { assertEquals(versionCreated.toString(), humanSettings.get(IndexMetaData.SETTING_VERSION_CREATED_STRING, null)); assertEquals(versionUpgraded.toString(), humanSettings.get(IndexMetaData.SETTING_VERSION_UPGRADED_STRING, null)); - assertEquals(new DateTime(created, DateTimeZone.UTC).toString(), humanSettings.get(IndexMetaData.SETTING_CREATION_DATE_STRING, null)); + ZonedDateTime creationDate = ZonedDateTime.ofInstant(Instant.ofEpochMilli(created), ZoneOffset.UTC); + assertEquals(creationDate.toString(), humanSettings.get(IndexMetaData.SETTING_CREATION_DATE_STRING, null)); } } diff --git a/server/src/test/java/org/elasticsearch/common/joda/JavaJodaTimeDuellingTests.java b/server/src/test/java/org/elasticsearch/common/joda/JavaJodaTimeDuellingTests.java new file mode 100644 index 0000000000000..d6f733d7c1cd4 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/common/joda/JavaJodaTimeDuellingTests.java @@ -0,0 +1,492 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License.
+ */ + +package org.elasticsearch.common.joda; + +import org.elasticsearch.common.time.CompoundDateTimeFormatter; +import org.elasticsearch.common.time.DateFormatters; +import org.elasticsearch.test.ESTestCase; +import org.joda.time.DateTime; +import org.joda.time.DateTimeZone; + +import java.time.ZoneOffset; +import java.time.ZonedDateTime; +import java.time.format.DateTimeFormatter; +import java.time.format.DateTimeParseException; +import java.time.temporal.TemporalAccessor; +import java.util.Locale; + +import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.startsWith; + +public class JavaJodaTimeDuellingTests extends ESTestCase { + + public void testTimeZoneFormatting() { + assertSameDate("2001-01-01T00:00:00Z", "date_time_no_millis"); + // the following fail under java 8 but work under java 10, needs investigation + assertSameDate("2001-01-01T00:00:00-0800", "date_time_no_millis"); + assertSameDate("2001-01-01T00:00:00+1030", "date_time_no_millis"); + assertSameDate("2001-01-01T00:00:00-08", "date_time_no_millis"); + assertSameDate("2001-01-01T00:00:00+10:30", "date_time_no_millis"); + + // different timezone parsing styles require a different number of letters + DateTimeFormatter formatter = DateTimeFormatter.ofPattern("yyyyMMdd'T'HHmmss.SSSXXX", Locale.ROOT); + formatter.parse("20181126T121212.123Z"); + formatter.parse("20181126T121212.123-08:30"); + + DateTimeFormatter formatter2 = DateTimeFormatter.ofPattern("yyyyMMdd'T'HHmmss.SSSXXXX", Locale.ROOT); + formatter2.parse("20181126T121212.123+1030"); + formatter2.parse("20181126T121212.123-0830"); + + // ... and can be combined, note that this is not an XOR, so one could append both timezones with this example + DateTimeFormatter formatter3 = DateTimeFormatter.ofPattern("yyyyMMdd'T'HHmmss.SSS[XXXX][XXX]", Locale.ROOT); + formatter3.parse("20181126T121212.123Z"); + formatter3.parse("20181126T121212.123-08:30"); + formatter3.parse("20181126T121212.123+1030"); + formatter3.parse("20181126T121212.123-0830"); + } + + public void testCustomTimeFormats() { + assertSameDate("2010 12 06 11:05:15", "yyyy dd MM HH:mm:ss"); + assertSameDate("12/06", "dd/MM"); + assertSameDate("Nov 24 01:29:01 -0800", "MMM dd HH:mm:ss Z"); + } + + public void testDuellingFormatsValidParsing() { + assertSameDate("1522332219", "epoch_second"); + assertSameDate("1522332219321", "epoch_millis"); + + assertSameDate("20181126", "basic_date"); + assertSameDate("20181126T121212.123Z", "basic_date_time"); + assertSameDate("20181126T121212.123+10:00", "basic_date_time"); + assertSameDate("20181126T121212.123-0800", "basic_date_time"); + + assertSameDate("20181126T121212Z", "basic_date_time_no_millis"); + assertSameDate("2018363", "basic_ordinal_date"); + assertSameDate("2018363T121212.123Z", "basic_ordinal_date_time"); + assertSameDate("2018363T121212Z", "basic_ordinal_date_time_no_millis"); + assertSameDate("121212.123Z", "basic_time"); + assertSameDate("121212Z", "basic_time_no_millis"); + assertSameDate("T121212.123Z", "basic_t_time"); + assertSameDate("T121212Z", "basic_t_time_no_millis"); + assertSameDate("2018W313", "basic_week_date"); + assertSameDate("1W313", "basic_week_date"); + assertSameDate("18W313", "basic_week_date"); + assertSameDate("2018W313T121212.123Z", "basic_week_date_time"); + assertSameDate("2018W313T121212Z", "basic_week_date_time_no_millis"); + + assertSameDate("2018-12-31", "date"); + assertSameDate("18-5-6", "date"); + + assertSameDate("2018-12-31T12", "date_hour"); + 
assertSameDate("2018-12-31T8", "date_hour"); + + assertSameDate("2018-12-31T12:12", "date_hour_minute"); + assertSameDate("2018-12-31T8:3", "date_hour_minute"); + + assertSameDate("2018-12-31T12:12:12", "date_hour_minute_second"); + assertSameDate("2018-12-31T12:12:1", "date_hour_minute_second"); + + assertSameDate("2018-12-31T12:12:12.123", "date_hour_minute_second_fraction"); + assertSameDate("2018-12-31T12:12:12.123", "date_hour_minute_second_millis"); + assertSameDate("2018-12-31T12:12:12.1", "date_hour_minute_second_millis"); + assertSameDate("2018-12-31T12:12:12.1", "date_hour_minute_second_fraction"); + + assertSameDate("2018-12-31", "date_optional_time"); + assertSameDate("2018-12-1", "date_optional_time"); + assertSameDate("2018-12-31T10:15:30", "date_optional_time"); + assertSameDate("2018-12-31T10:15:3", "date_optional_time"); + assertSameDate("2018-12-31T10:5:30", "date_optional_time"); + assertSameDate("2018-12-31T1:15:30", "date_optional_time"); + + assertSameDate("2018-12-31T10:15:30.123Z", "date_time"); + assertSameDate("2018-12-31T10:15:30.11Z", "date_time"); + assertSameDate("2018-12-31T10:15:3.123Z", "date_time"); + + assertSameDate("2018-12-31T10:15:30Z", "date_time_no_millis"); + assertSameDate("2018-12-31T10:5:30Z", "date_time_no_millis"); + assertSameDate("2018-12-31T10:15:3Z", "date_time_no_millis"); + assertSameDate("2018-12-31T1:15:30Z", "date_time_no_millis"); + + assertSameDate("12", "hour"); + assertSameDate("01", "hour"); + assertSameDate("1", "hour"); + + assertSameDate("12:12", "hour_minute"); + assertSameDate("12:01", "hour_minute"); + assertSameDate("12:1", "hour_minute"); + + assertSameDate("12:12:12", "hour_minute_second"); + assertSameDate("12:12:01", "hour_minute_second"); + assertSameDate("12:12:1", "hour_minute_second"); + + assertSameDate("12:12:12.123", "hour_minute_second_fraction"); + assertSameDate("12:12:12.1", "hour_minute_second_fraction"); + assertParseException("12:12:12", "hour_minute_second_fraction"); + assertSameDate("12:12:12.123", "hour_minute_second_millis"); + assertSameDate("12:12:12.1", "hour_minute_second_millis"); + assertParseException("12:12:12", "hour_minute_second_millis"); + + assertSameDate("2018-128", "ordinal_date"); + assertSameDate("2018-1", "ordinal_date"); + + assertSameDate("2018-128T10:15:30.123Z", "ordinal_date_time"); + assertSameDate("2018-1T10:15:30.123Z", "ordinal_date_time"); + + assertSameDate("2018-128T10:15:30Z", "ordinal_date_time_no_millis"); + assertSameDate("2018-1T10:15:30Z", "ordinal_date_time_no_millis"); + + assertSameDate("10:15:30.123Z", "time"); + assertSameDate("1:15:30.123Z", "time"); + assertSameDate("10:1:30.123Z", "time"); + assertSameDate("10:15:3.123Z", "time"); + assertParseException("10:15:3.1", "time"); + assertParseException("10:15:3Z", "time"); + + assertSameDate("10:15:30Z", "time_no_millis"); + assertSameDate("01:15:30Z", "time_no_millis"); + assertSameDate("1:15:30Z", "time_no_millis"); + assertSameDate("10:5:30Z", "time_no_millis"); + assertSameDate("10:15:3Z", "time_no_millis"); + assertParseException("10:15:3", "time_no_millis"); + + assertSameDate("T10:15:30.123Z", "t_time"); + assertSameDate("T1:15:30.123Z", "t_time"); + assertSameDate("T10:1:30.123Z", "t_time"); + assertSameDate("T10:15:3.123Z", "t_time"); + assertParseException("T10:15:3.1", "t_time"); + assertParseException("T10:15:3Z", "t_time"); + + assertSameDate("T10:15:30Z", "t_time_no_millis"); + assertSameDate("T1:15:30Z", "t_time_no_millis"); + assertSameDate("T10:1:30Z", "t_time_no_millis"); + 
assertSameDate("T10:15:3Z", "t_time_no_millis"); + assertParseException("T10:15:3", "t_time_no_millis"); + + assertSameDate("2012-W48-6", "week_date"); + assertSameDate("2012-W01-6", "week_date"); + assertSameDate("2012-W1-6", "week_date"); + // joda comes up with a different exception message here, so we have to adapt + assertJodaParseException("2012-W1-8", "week_date", + "Cannot parse \"2012-W1-8\": Value 8 for dayOfWeek must be in the range [1,7]"); + assertJavaTimeParseException("2012-W1-8", "week_date", "Text '2012-W1-8' could not be parsed"); + + assertSameDate("2012-W48-6T10:15:30.123Z", "week_date_time"); + assertSameDate("2012-W1-6T10:15:30.123Z", "week_date_time"); + + assertSameDate("2012-W48-6T10:15:30Z", "week_date_time_no_millis"); + assertSameDate("2012-W1-6T10:15:30Z", "week_date_time_no_millis"); + + assertSameDate("2012", "year"); + assertSameDate("1", "year"); + assertSameDate("-2000", "year"); + + assertSameDate("2012-12", "yearMonth"); + assertSameDate("1-1", "yearMonth"); + + assertSameDate("2012-12-31", "yearMonthDay"); + assertSameDate("1-12-31", "yearMonthDay"); + assertSameDate("2012-1-31", "yearMonthDay"); + assertSameDate("2012-12-1", "yearMonthDay"); + + assertSameDate("2018", "week_year"); + assertSameDate("1", "week_year"); + assertSameDate("2017", "week_year"); + + assertSameDate("2018-W29", "weekyear_week"); + assertSameDate("2018-W1", "weekyear_week"); + + assertSameDate("2012-W31-5", "weekyear_week_day"); + assertSameDate("2012-W1-1", "weekyear_week_day"); + } + + public void testDuelingStrictParsing() { + assertSameDate("2018W313", "strict_basic_week_date"); + assertParseException("18W313", "strict_basic_week_date"); + assertSameDate("2018W313T121212.123Z", "strict_basic_week_date_time"); + assertParseException("2018W313T12128.123Z", "strict_basic_week_date_time"); + assertParseException("2018W313T81212.123Z", "strict_basic_week_date_time"); + assertParseException("2018W313T12812.123Z", "strict_basic_week_date_time"); + assertParseException("2018W313T12812.1Z", "strict_basic_week_date_time"); + assertSameDate("2018W313T121212Z", "strict_basic_week_date_time_no_millis"); + assertParseException("2018W313T12128Z", "strict_basic_week_date_time_no_millis"); + assertParseException("2018W313T81212Z", "strict_basic_week_date_time_no_millis"); + assertParseException("2018W313T12812Z", "strict_basic_week_date_time_no_millis"); + assertSameDate("2018-12-31", "strict_date"); + assertParseException("2018-8-31", "strict_date"); + assertSameDate("2018-12-31T12", "strict_date_hour"); + assertParseException("2018-12-31T8", "strict_date_hour"); + assertSameDate("2018-12-31T12:12", "strict_date_hour_minute"); + assertParseException("2018-12-31T8:3", "strict_date_hour_minute"); + assertSameDate("2018-12-31T12:12:12", "strict_date_hour_minute_second"); + assertParseException("2018-12-31T12:12:1", "strict_date_hour_minute_second"); + assertSameDate("2018-12-31T12:12:12.123", "strict_date_hour_minute_second_fraction"); + assertSameDate("2018-12-31T12:12:12.123", "strict_date_hour_minute_second_millis"); + assertSameDate("2018-12-31T12:12:12.1", "strict_date_hour_minute_second_millis"); + assertSameDate("2018-12-31T12:12:12.1", "strict_date_hour_minute_second_fraction"); + assertParseException("2018-12-31T12:12:12", "strict_date_hour_minute_second_millis"); + assertParseException("2018-12-31T12:12:12", "strict_date_hour_minute_second_fraction"); + assertSameDate("2018-12-31", "strict_date_optional_time"); + assertParseException("2018-12-1", "strict_date_optional_time"); + 
assertParseException("2018-1-31", "strict_date_optional_time"); + assertSameDate("2018-12-31T10:15:30", "strict_date_optional_time"); + assertParseException("2018-12-31T10:15:3", "strict_date_optional_time"); + assertParseException("2018-12-31T10:5:30", "strict_date_optional_time"); + assertParseException("2018-12-31T9:15:30", "strict_date_optional_time"); + assertSameDate("2018-12-31T10:15:30.123Z", "strict_date_time"); + assertSameDate("2018-12-31T10:15:30.11Z", "strict_date_time"); + assertParseException("2018-12-31T10:15:3.123Z", "strict_date_time"); + assertParseException("2018-12-31T10:5:30.123Z", "strict_date_time"); + assertParseException("2018-12-31T1:15:30.123Z", "strict_date_time"); + assertSameDate("2018-12-31T10:15:30Z", "strict_date_time_no_millis"); + assertParseException("2018-12-31T10:5:30Z", "strict_date_time_no_millis"); + assertParseException("2018-12-31T10:15:3Z", "strict_date_time_no_millis"); + assertParseException("2018-12-31T1:15:30Z", "strict_date_time_no_millis"); + assertSameDate("12", "strict_hour"); + assertSameDate("01", "strict_hour"); + assertParseException("1", "strict_hour"); + assertSameDate("12:12", "strict_hour_minute"); + assertSameDate("12:01", "strict_hour_minute"); + assertParseException("12:1", "strict_hour_minute"); + assertSameDate("12:12:12", "strict_hour_minute_second"); + assertSameDate("12:12:01", "strict_hour_minute_second"); + assertParseException("12:12:1", "strict_hour_minute_second"); + assertSameDate("12:12:12.123", "strict_hour_minute_second_fraction"); + assertSameDate("12:12:12.1", "strict_hour_minute_second_fraction"); + assertParseException("12:12:12", "strict_hour_minute_second_fraction"); + assertSameDate("12:12:12.123", "strict_hour_minute_second_millis"); + assertSameDate("12:12:12.1", "strict_hour_minute_second_millis"); + assertParseException("12:12:12", "strict_hour_minute_second_millis"); + assertSameDate("2018-128", "strict_ordinal_date"); + assertParseException("2018-1", "strict_ordinal_date"); + + assertSameDate("2018-128T10:15:30.123Z", "strict_ordinal_date_time"); + assertParseException("2018-1T10:15:30.123Z", "strict_ordinal_date_time"); + + assertSameDate("2018-128T10:15:30Z", "strict_ordinal_date_time_no_millis"); + assertParseException("2018-1T10:15:30Z", "strict_ordinal_date_time_no_millis"); + + assertSameDate("10:15:30.123Z", "strict_time"); + assertParseException("1:15:30.123Z", "strict_time"); + assertParseException("10:1:30.123Z", "strict_time"); + assertParseException("10:15:3.123Z", "strict_time"); + assertParseException("10:15:3.1", "strict_time"); + assertParseException("10:15:3Z", "strict_time"); + + assertSameDate("10:15:30Z", "strict_time_no_millis"); + assertSameDate("01:15:30Z", "strict_time_no_millis"); + assertParseException("1:15:30Z", "strict_time_no_millis"); + assertParseException("10:5:30Z", "strict_time_no_millis"); + assertParseException("10:15:3Z", "strict_time_no_millis"); + assertParseException("10:15:3", "strict_time_no_millis"); + + assertSameDate("T10:15:30.123Z", "strict_t_time"); + assertParseException("T1:15:30.123Z", "strict_t_time"); + assertParseException("T10:1:30.123Z", "strict_t_time"); + assertParseException("T10:15:3.123Z", "strict_t_time"); + assertParseException("T10:15:3.1", "strict_t_time"); + assertParseException("T10:15:3Z", "strict_t_time"); + + assertSameDate("T10:15:30Z", "strict_t_time_no_millis"); + assertParseException("T1:15:30Z", "strict_t_time_no_millis"); + assertParseException("T10:1:30Z", "strict_t_time_no_millis"); + assertParseException("T10:15:3Z", 
"strict_t_time_no_millis"); + assertParseException("T10:15:3", "strict_t_time_no_millis"); + + assertSameDate("2012-W48-6", "strict_week_date"); + assertSameDate("2012-W01-6", "strict_week_date"); + assertParseException("2012-W1-6", "strict_week_date"); + assertParseException("2012-W1-8", "strict_week_date"); + + assertSameDate("2012-W48-6", "strict_week_date"); + assertSameDate("2012-W01-6", "strict_week_date"); + assertParseException("2012-W1-6", "strict_week_date"); + // joda comes up with a different exception message here, so we have to adapt + assertJodaParseException("2012-W01-8", "strict_week_date", + "Cannot parse \"2012-W01-8\": Value 8 for dayOfWeek must be in the range [1,7]"); + assertJavaTimeParseException("2012-W01-8", "strict_week_date", "Text '2012-W01-8' could not be parsed"); + + assertSameDate("2012-W48-6T10:15:30.123Z", "strict_week_date_time"); + assertParseException("2012-W1-6T10:15:30.123Z", "strict_week_date_time"); + + assertSameDate("2012-W48-6T10:15:30Z", "strict_week_date_time_no_millis"); + assertParseException("2012-W1-6T10:15:30Z", "strict_week_date_time_no_millis"); + + assertSameDate("2012", "strict_year"); + assertParseException("1", "strict_year"); + assertSameDate("-2000", "strict_year"); + + assertSameDate("2012-12", "strict_year_month"); + assertParseException("1-1", "strict_year_month"); + + assertSameDate("2012-12-31", "strict_year_month_day"); + assertParseException("1-12-31", "strict_year_month_day"); + assertParseException("2012-1-31", "strict_year_month_day"); + assertParseException("2012-12-1", "strict_year_month_day"); + + assertSameDate("2018", "strict_weekyear"); + assertParseException("1", "strict_weekyear"); + + assertSameDate("2018", "strict_weekyear"); + assertSameDate("2017", "strict_weekyear"); + assertParseException("1", "strict_weekyear"); + + assertSameDate("2018-W29", "strict_weekyear_week"); + assertSameDate("2018-W01", "strict_weekyear_week"); + assertParseException("2018-W1", "strict_weekyear_week"); + + assertSameDate("2012-W31-5", "strict_weekyear_week_day"); + assertParseException("2012-W1-1", "strict_weekyear_week_day"); + } + + public void testSamePrinterOutput() { + int year = randomIntBetween(1970, 2030); + int month = randomIntBetween(1, 12); + int day = randomIntBetween(1, 28); + int hour = randomIntBetween(0, 23); + int minute = randomIntBetween(0, 59); + int second = randomIntBetween(0, 59); + + ZonedDateTime javaDate = ZonedDateTime.of(year, month, day, hour, minute, second, 0, ZoneOffset.UTC); + DateTime jodaDate = new DateTime(year, month, day, hour, minute, second, DateTimeZone.UTC); + + assertSamePrinterOutput("basicDate", javaDate, jodaDate); + assertSamePrinterOutput("basicDateTime", javaDate, jodaDate); + assertSamePrinterOutput("basicDateTimeNoMillis", javaDate, jodaDate); + assertSamePrinterOutput("basicOrdinalDate", javaDate, jodaDate); + assertSamePrinterOutput("basicOrdinalDateTime", javaDate, jodaDate); + assertSamePrinterOutput("basicOrdinalDateTimeNoMillis", javaDate, jodaDate); + assertSamePrinterOutput("basicTime", javaDate, jodaDate); + assertSamePrinterOutput("basicTimeNoMillis", javaDate, jodaDate); + assertSamePrinterOutput("basicTTime", javaDate, jodaDate); + assertSamePrinterOutput("basicTTimeNoMillis", javaDate, jodaDate); + assertSamePrinterOutput("basicWeekDate", javaDate, jodaDate); + assertSamePrinterOutput("basicWeekDateTime", javaDate, jodaDate); + assertSamePrinterOutput("basicWeekDateTimeNoMillis", javaDate, jodaDate); + assertSamePrinterOutput("date", javaDate, jodaDate); + 
assertSamePrinterOutput("dateHour", javaDate, jodaDate); + assertSamePrinterOutput("dateHourMinute", javaDate, jodaDate); + assertSamePrinterOutput("dateHourMinuteSecond", javaDate, jodaDate); + assertSamePrinterOutput("dateHourMinuteSecondFraction", javaDate, jodaDate); + assertSamePrinterOutput("dateHourMinuteSecondMillis", javaDate, jodaDate); + assertSamePrinterOutput("dateOptionalTime", javaDate, jodaDate); + assertSamePrinterOutput("dateTime", javaDate, jodaDate); + assertSamePrinterOutput("dateTimeNoMillis", javaDate, jodaDate); + assertSamePrinterOutput("hour", javaDate, jodaDate); + assertSamePrinterOutput("hourMinute", javaDate, jodaDate); + assertSamePrinterOutput("hourMinuteSecond", javaDate, jodaDate); + assertSamePrinterOutput("hourMinuteSecondFraction", javaDate, jodaDate); + assertSamePrinterOutput("hourMinuteSecondMillis", javaDate, jodaDate); + assertSamePrinterOutput("ordinalDate", javaDate, jodaDate); + assertSamePrinterOutput("ordinalDateTime", javaDate, jodaDate); + assertSamePrinterOutput("ordinalDateTimeNoMillis", javaDate, jodaDate); + assertSamePrinterOutput("time", javaDate, jodaDate); + assertSamePrinterOutput("timeNoMillis", javaDate, jodaDate); + assertSamePrinterOutput("tTime", javaDate, jodaDate); + assertSamePrinterOutput("tTimeNoMillis", javaDate, jodaDate); + assertSamePrinterOutput("weekDate", javaDate, jodaDate); + assertSamePrinterOutput("weekDateTime", javaDate, jodaDate); + assertSamePrinterOutput("weekDateTimeNoMillis", javaDate, jodaDate); + assertSamePrinterOutput("weekyear", javaDate, jodaDate); + assertSamePrinterOutput("weekyearWeek", javaDate, jodaDate); + assertSamePrinterOutput("weekyearWeekDay", javaDate, jodaDate); + assertSamePrinterOutput("year", javaDate, jodaDate); + assertSamePrinterOutput("yearMonth", javaDate, jodaDate); + assertSamePrinterOutput("yearMonthDay", javaDate, jodaDate); + assertSamePrinterOutput("epoch_second", javaDate, jodaDate); + assertSamePrinterOutput("epoch_millis", javaDate, jodaDate); + assertSamePrinterOutput("strictBasicWeekDate", javaDate, jodaDate); + assertSamePrinterOutput("strictBasicWeekDateTime", javaDate, jodaDate); + assertSamePrinterOutput("strictBasicWeekDateTimeNoMillis", javaDate, jodaDate); + assertSamePrinterOutput("strictDate", javaDate, jodaDate); + assertSamePrinterOutput("strictDateHour", javaDate, jodaDate); + assertSamePrinterOutput("strictDateHourMinute", javaDate, jodaDate); + assertSamePrinterOutput("strictDateHourMinuteSecond", javaDate, jodaDate); + assertSamePrinterOutput("strictDateHourMinuteSecondFraction", javaDate, jodaDate); + assertSamePrinterOutput("strictDateHourMinuteSecondMillis", javaDate, jodaDate); + assertSamePrinterOutput("strictDateOptionalTime", javaDate, jodaDate); + assertSamePrinterOutput("strictDateTime", javaDate, jodaDate); + assertSamePrinterOutput("strictDateTimeNoMillis", javaDate, jodaDate); + assertSamePrinterOutput("strictHour", javaDate, jodaDate); + assertSamePrinterOutput("strictHourMinute", javaDate, jodaDate); + assertSamePrinterOutput("strictHourMinuteSecond", javaDate, jodaDate); + assertSamePrinterOutput("strictHourMinuteSecondFraction", javaDate, jodaDate); + assertSamePrinterOutput("strictHourMinuteSecondMillis", javaDate, jodaDate); + assertSamePrinterOutput("strictOrdinalDate", javaDate, jodaDate); + assertSamePrinterOutput("strictOrdinalDateTime", javaDate, jodaDate); + assertSamePrinterOutput("strictOrdinalDateTimeNoMillis", javaDate, jodaDate); + assertSamePrinterOutput("strictTime", javaDate, jodaDate); + 
assertSamePrinterOutput("strictTimeNoMillis", javaDate, jodaDate); + assertSamePrinterOutput("strictTTime", javaDate, jodaDate); + assertSamePrinterOutput("strictTTimeNoMillis", javaDate, jodaDate); + assertSamePrinterOutput("strictWeekDate", javaDate, jodaDate); + assertSamePrinterOutput("strictWeekDateTime", javaDate, jodaDate); + assertSamePrinterOutput("strictWeekDateTimeNoMillis", javaDate, jodaDate); + assertSamePrinterOutput("strictWeekyear", javaDate, jodaDate); + assertSamePrinterOutput("strictWeekyearWeek", javaDate, jodaDate); + assertSamePrinterOutput("strictWeekyearWeekDay", javaDate, jodaDate); + assertSamePrinterOutput("strictYear", javaDate, jodaDate); + assertSamePrinterOutput("strictYearMonth", javaDate, jodaDate); + assertSamePrinterOutput("strictYearMonthDay", javaDate, jodaDate); + } + + public void testSeveralTimeFormats() { + assertSameDate("2018-12-12", "year_month_day||ordinal_date"); + assertSameDate("2018-128", "year_month_day||ordinal_date"); + } + + private void assertSamePrinterOutput(String format, ZonedDateTime javaDate, DateTime jodaDate) { + assertThat(jodaDate.getMillis(), is(javaDate.toEpochSecond() * 1000)); + String javaTimeOut = DateFormatters.forPattern("dateOptionalTime").format(javaDate); + String jodaTimeOut = Joda.forPattern("dateOptionalTime").printer().print(jodaDate); + assertThat(javaTimeOut, is(jodaTimeOut)); + } + + private void assertSameDate(String input, String format) { + FormatDateTimeFormatter jodaFormatter = Joda.forPattern(format); + DateTime jodaDateTime = jodaFormatter.parser().parseDateTime(input); + + CompoundDateTimeFormatter javaTimeFormatter = DateFormatters.forPattern(format); + TemporalAccessor javaTimeAccessor = javaTimeFormatter.parse(input); + ZonedDateTime zonedDateTime = DateFormatters.toZonedDateTime(javaTimeAccessor); + + String msg = String.format(Locale.ROOT, "Input [%s] Format [%s] Joda [%s], Java [%s]", input, format, jodaDateTime, + DateTimeFormatter.ISO_INSTANT.format(zonedDateTime.toInstant())); + + assertThat(msg, jodaDateTime.getMillis(), is(zonedDateTime.toInstant().toEpochMilli())); + } + + private void assertParseException(String input, String format) { + assertJodaParseException(input, format, "Invalid format: \"" + input); + assertJavaTimeParseException(input, format, "Text '" + input + "' could not be parsed"); + } + + private void assertJodaParseException(String input, String format, String expectedMessage) { + FormatDateTimeFormatter jodaFormatter = Joda.forPattern(format); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> jodaFormatter.parser().parseDateTime(input)); + assertThat(e.getMessage(), containsString(expectedMessage)); + } + + private void assertJavaTimeParseException(String input, String format, String expectedMessage) { + CompoundDateTimeFormatter javaTimeFormatter = DateFormatters.forPattern(format); + DateTimeParseException dateTimeParseException = expectThrows(DateTimeParseException.class, () -> javaTimeFormatter.parse(input)); + assertThat(dateTimeParseException.getMessage(), startsWith(expectedMessage)); + } +} diff --git a/server/src/test/java/org/elasticsearch/explain/ExplainActionIT.java b/server/src/test/java/org/elasticsearch/explain/ExplainActionIT.java index 6d8e1a41c5b94..229cb99fbfbda 100644 --- a/server/src/test/java/org/elasticsearch/explain/ExplainActionIT.java +++ b/server/src/test/java/org/elasticsearch/explain/ExplainActionIT.java @@ -28,12 +28,12 @@ import org.elasticsearch.common.settings.Settings; import 
org.elasticsearch.index.query.QueryBuilders; import org.elasticsearch.test.ESIntegTestCase; -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; -import org.joda.time.format.ISODateTimeFormat; import java.io.ByteArrayInputStream; import java.io.ByteArrayOutputStream; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; +import java.time.format.DateTimeFormatter; import java.util.HashSet; import java.util.Map; import java.util.Set; @@ -249,8 +249,9 @@ public void testExplainWithFilteredAliasFetchSource() throws Exception { public void testExplainDateRangeInQueryString() { createIndex("test"); - String aMonthAgo = ISODateTimeFormat.yearMonthDay().print(new DateTime(DateTimeZone.UTC).minusMonths(1)); - String aMonthFromNow = ISODateTimeFormat.yearMonthDay().print(new DateTime(DateTimeZone.UTC).plusMonths(1)); + ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC); + String aMonthAgo = DateTimeFormatter.ISO_LOCAL_DATE.format(now.minusMonths(1)); + String aMonthFromNow = DateTimeFormatter.ISO_LOCAL_DATE.format(now.plusMonths(1)); client().prepareIndex("test", "type", "1").setSource("past", aMonthAgo, "future", aMonthFromNow).get(); diff --git a/server/src/test/java/org/elasticsearch/index/analysis/ESSolrSynonymParserTests.java b/server/src/test/java/org/elasticsearch/index/analysis/ESSolrSynonymParserTests.java new file mode 100644 index 0000000000000..31aa1a9be2512 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/analysis/ESSolrSynonymParserTests.java @@ -0,0 +1,78 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.index.analysis; + +import org.apache.lucene.analysis.CharArraySet; +import org.apache.lucene.analysis.StopFilter; +import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.Tokenizer; +import org.apache.lucene.analysis.standard.StandardAnalyzer; +import org.apache.lucene.analysis.standard.StandardTokenizer; +import org.apache.lucene.analysis.synonym.SynonymFilter; +import org.apache.lucene.analysis.synonym.SynonymMap; +import org.elasticsearch.test.ESTokenStreamTestCase; + +import java.io.IOException; +import java.io.StringReader; +import java.text.ParseException; + +import static org.hamcrest.Matchers.containsString; + +public class ESSolrSynonymParserTests extends ESTokenStreamTestCase { + + public void testLenientParser() throws IOException, ParseException { + ESSolrSynonymParser parser = new ESSolrSynonymParser(true, false, true, new StandardAnalyzer()); + String rules = + "&,and\n" + + "come,advance,approach\n"; + StringReader rulesReader = new StringReader(rules); + parser.parse(rulesReader); + SynonymMap synonymMap = parser.build(); + Tokenizer tokenizer = new StandardTokenizer(); + tokenizer.setReader(new StringReader("approach quietly then advance & destroy")); + TokenStream ts = new SynonymFilter(tokenizer, synonymMap, false); + assertTokenStreamContents(ts, new String[]{"come", "quietly", "then", "come", "destroy"}); + } + + public void testLenientParserWithSomeIncorrectLines() throws IOException, ParseException { + CharArraySet stopSet = new CharArraySet(1, true); + stopSet.add("bar"); + ESSolrSynonymParser parser = + new ESSolrSynonymParser(true, false, true, new StandardAnalyzer(stopSet)); + String rules = "foo,bar,baz"; + StringReader rulesReader = new StringReader(rules); + parser.parse(rulesReader); + SynonymMap synonymMap = parser.build(); + Tokenizer tokenizer = new StandardTokenizer(); + tokenizer.setReader(new StringReader("first word is foo, then bar and lastly baz")); + TokenStream ts = new SynonymFilter(new StopFilter(tokenizer, stopSet), synonymMap, false); + assertTokenStreamContents(ts, new String[]{"first", "word", "is", "foo", "then", "and", "lastly", "foo"}); + } + + public void testNonLenientParser() { + ESSolrSynonymParser parser = new ESSolrSynonymParser(true, false, false, new StandardAnalyzer()); + String rules = + "&,and=>and\n" + + "come,advance,approach\n"; + StringReader rulesReader = new StringReader(rules); + ParseException ex = expectThrows(ParseException.class, () -> parser.parse(rulesReader)); + assertThat(ex.getMessage(), containsString("Invalid synonym rule at line 1")); + } +} diff --git a/server/src/test/java/org/elasticsearch/index/analysis/ESWordnetSynonymParserTests.java b/server/src/test/java/org/elasticsearch/index/analysis/ESWordnetSynonymParserTests.java new file mode 100644 index 0000000000000..6d0fd8944d4c4 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/analysis/ESWordnetSynonymParserTests.java @@ -0,0 +1,88 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.analysis; + +import org.apache.lucene.analysis.CharArraySet; +import org.apache.lucene.analysis.StopFilter; +import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.Tokenizer; +import org.apache.lucene.analysis.standard.StandardAnalyzer; +import org.apache.lucene.analysis.standard.StandardTokenizer; +import org.apache.lucene.analysis.synonym.SynonymFilter; +import org.apache.lucene.analysis.synonym.SynonymMap; +import org.elasticsearch.test.ESTokenStreamTestCase; + +import java.io.IOException; +import java.io.StringReader; +import java.text.ParseException; + +import static org.hamcrest.Matchers.containsString; + +public class ESWordnetSynonymParserTests extends ESTokenStreamTestCase { + + public void testLenientParser() throws IOException, ParseException { + ESWordnetSynonymParser parser = new ESWordnetSynonymParser(true, false, true, new StandardAnalyzer()); + String rules = + "s(100000001,1,'&',a,1,0).\n" + + "s(100000001,2,'and',a,1,0).\n" + + "s(100000002,1,'come',v,1,0).\n" + + "s(100000002,2,'advance',v,1,0).\n" + + "s(100000002,3,'approach',v,1,0)."; + StringReader rulesReader = new StringReader(rules); + parser.parse(rulesReader); + SynonymMap synonymMap = parser.build(); + Tokenizer tokenizer = new StandardTokenizer(); + tokenizer.setReader(new StringReader("approach quietly then advance & destroy")); + TokenStream ts = new SynonymFilter(tokenizer, synonymMap, false); + assertTokenStreamContents(ts, new String[]{"come", "quietly", "then", "come", "destroy"}); + } + + public void testLenientParserWithSomeIncorrectLines() throws IOException, ParseException { + CharArraySet stopSet = new CharArraySet(1, true); + stopSet.add("bar"); + ESWordnetSynonymParser parser = + new ESWordnetSynonymParser(true, false, true, new StandardAnalyzer(stopSet)); + String rules = + "s(100000001,1,'foo',v,1,0).\n" + + "s(100000001,2,'bar',v,1,0).\n" + + "s(100000001,3,'baz',v,1,0)."; + StringReader rulesReader = new StringReader(rules); + parser.parse(rulesReader); + SynonymMap synonymMap = parser.build(); + Tokenizer tokenizer = new StandardTokenizer(); + tokenizer.setReader(new StringReader("first word is foo, then bar and lastly baz")); + TokenStream ts = new SynonymFilter(new StopFilter(tokenizer, stopSet), synonymMap, false); + assertTokenStreamContents(ts, new String[]{"first", "word", "is", "foo", "then", "and", "lastly", "foo"}); + } + + public void testNonLenientParser() { + ESWordnetSynonymParser parser = new ESWordnetSynonymParser(true, false, false, new StandardAnalyzer()); + String rules = + "s(100000001,1,'&',a,1,0).\n" + + "s(100000001,2,'and',a,1,0).\n" + + "s(100000002,1,'come',v,1,0).\n" + + "s(100000002,2,'advance',v,1,0).\n" + + "s(100000002,3,'approach',v,1,0)."; + StringReader rulesReader = new StringReader(rules); + ParseException ex = expectThrows(ParseException.class, () -> parser.parse(rulesReader)); + assertThat(ex.getMessage(), containsString("Invalid synonym rule at line 1")); + } + +} diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesDatesTests.java 
b/server/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesDatesTests.java index 604a11843fc2a..43b9a01560ca8 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesDatesTests.java +++ b/server/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesDatesTests.java @@ -44,7 +44,13 @@ public void test() throws IOException { for (int round = 0; round < 10; round++) { int d = between(0, values.length - 1); dates.setNextDocId(d); - assertEquals(expectedDates[d].length > 0 ? expectedDates[d][0] : new DateTime(0, DateTimeZone.UTC), dates.getValue()); + if (expectedDates[d].length > 0) { + assertEquals(expectedDates[d][0] , dates.getValue()); + } else { + Exception e = expectThrows(IllegalStateException.class, () -> dates.getValue()); + assertEquals("A document doesn't have a value for a field! " + + "Use doc[].size()==0 to check if a document is missing a field!", e.getMessage()); + } assertEquals(values[d].length, dates.size()); for (int i = 0; i < values[d].length; i++) { diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesLongsTests.java b/server/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesLongsTests.java index 1b948e02e0406..c22cb4919677a 100644 --- a/server/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesLongsTests.java +++ b/server/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesLongsTests.java @@ -24,7 +24,6 @@ import java.io.IOException; - public class ScriptDocValuesLongsTests extends ESTestCase { public void testLongs() throws IOException { long[][] values = new long[between(3, 10)][]; @@ -39,8 +38,13 @@ public void testLongs() throws IOException { for (int round = 0; round < 10; round++) { int d = between(0, values.length - 1); longs.setNextDocId(d); - assertEquals(values[d].length > 0 ? values[d][0] : 0, longs.getValue()); - + if (values[d].length > 0) { + assertEquals(values[d][0], longs.getValue()); + } else { + Exception e = expectThrows(IllegalStateException.class, () -> longs.getValue()); + assertEquals("A document doesn't have a value for a field! " + + "Use doc[].size()==0 to check if a document is missing a field!", e.getMessage()); + } assertEquals(values[d].length, longs.size()); assertEquals(values[d].length, longs.getValues().size()); for (int i = 0; i < values[d].length; i++) { diff --git a/server/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesMissingV6BehaviourTests.java b/server/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesMissingV6BehaviourTests.java new file mode 100644 index 0000000000000..1dc836874d847 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/index/fielddata/ScriptDocValuesMissingV6BehaviourTests.java @@ -0,0 +1,195 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.index.fielddata; + +import org.elasticsearch.common.geo.GeoPoint; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.fielddata.ScriptDocValues.Longs; +import org.elasticsearch.index.fielddata.ScriptDocValues.Dates; +import org.elasticsearch.index.fielddata.ScriptDocValues.Booleans; +import org.elasticsearch.plugins.ScriptPlugin; +import org.elasticsearch.script.MockScriptEngine; +import org.elasticsearch.script.ScriptContext; +import org.elasticsearch.script.ScriptEngine; +import org.elasticsearch.script.ScriptModule; +import org.elasticsearch.test.ESTestCase; + +import org.joda.time.DateTime; +import org.joda.time.DateTimeZone; +import org.joda.time.ReadableDateTime; +import java.io.IOException; +import java.util.Collection; +import java.util.Collections; + +import static java.util.Collections.singletonList; + +public class ScriptDocValuesMissingV6BehaviourTests extends ESTestCase { + + public void testScriptMissingValuesWarning() { + new ScriptModule(Settings.EMPTY, singletonList(new ScriptPlugin() { + @Override + public ScriptEngine getScriptEngine(Settings settings, Collection<ScriptContext<?>> contexts) { + return new MockScriptEngine(MockScriptEngine.NAME, Collections.singletonMap("1", script -> "1")); + } + })); + assertWarnings("Script: returning default values for missing document values is deprecated. " + + "Set system property '-Des.scripting.exception_for_missing_value=true' " + + "to make behaviour compatible with future major versions."); + } +
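+ // legacy 6.x default (es.scripting.exception_for_missing_value unset): missing values quietly degrade to 0, the epoch, false or null instead of throwing + public void testZeroForMissingValueLong() throws IOException { + long[][] values = new long[between(3, 10)][]; + for (int d = 0; d < values.length; d++) { + values[d] = new long[0]; + } + Longs longs = wrap(values); + for (int round = 0; round < 10; round++) { + int d = between(0, values.length - 1); + longs.setNextDocId(d); + assertEquals(0, longs.getValue()); + } + } + + public void testEpochForMissingValueDate() throws IOException { + final ReadableDateTime EPOCH = new DateTime(0, DateTimeZone.UTC); + long[][] values = new long[between(3, 10)][]; + for (int d = 0; d < values.length; d++) { + values[d] = new long[0]; + } + Dates dates = wrapDates(values); + for (int round = 0; round < 10; round++) { + int d = between(0, values.length - 1); + dates.setNextDocId(d); + assertEquals(EPOCH, dates.getValue()); + } + } + + public void testFalseForMissingValueBoolean() throws IOException { + long[][] values = new long[between(3, 10)][]; + for (int d = 0; d < values.length; d++) { + values[d] = new long[0]; + } + Booleans bools = wrapBooleans(values); + for (int round = 0; round < 10; round++) { + int d = between(0, values.length - 1); + bools.setNextDocId(d); + assertEquals(false, bools.getValue()); + } + } + + public void testNullForMissingValueGeo() throws IOException { + final MultiGeoPointValues values = wrap(new GeoPoint[0]); + final ScriptDocValues.GeoPoints script = new ScriptDocValues.GeoPoints(values); + script.setNextDocId(0); + assertEquals(null, script.getValue()); + } + + + private Longs wrap(long[][] values) { + return new Longs(new AbstractSortedNumericDocValues() { + long[] current; + int i; + @Override + public boolean advanceExact(int doc) { + i = 0; + current = values[doc]; + return current.length > 0; + } + @Override + public int docValueCount() { + return current.length; + } + @Override + public long nextValue() { + return current[i++]; + } + }); + } + + private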
Booleans wrapBooleans(long[][] values) { + return new Booleans(new AbstractSortedNumericDocValues() { + long[] current; + int i; + @Override + public boolean advanceExact(int doc) { + i = 0; + current = values[doc]; + return current.length > 0; + } + @Override + public int docValueCount() { + return current.length; + } + @Override + public long nextValue() { + return current[i++]; + } + }); + } + + private Dates wrapDates(long[][] values) { + return new Dates(new AbstractSortedNumericDocValues() { + long[] current; + int i; + @Override + public boolean advanceExact(int doc) { + current = values[doc]; + i = 0; + return current.length > 0; + } + @Override + public int docValueCount() { + return current.length; + } + @Override + public long nextValue() { + return current[i++]; + } + }); + } + + + private static MultiGeoPointValues wrap(final GeoPoint... points) { + return new MultiGeoPointValues() { + int docID = -1; + int i; + @Override + public GeoPoint nextValue() { + if (docID != 0) { + fail(); + } + return points[i++]; + } + @Override + public boolean advanceExact(int docId) { + docID = docId; + return points.length > 0; + } + @Override + public int docValueCount() { + if (docID != 0) { + return 0; + } + return points.length; + } + }; + } + +} diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java index d6d50b24d1f68..bda6de8aa7d61 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardIT.java @@ -73,6 +73,7 @@ import org.elasticsearch.test.ESSingleNodeTestCase; import org.elasticsearch.test.IndexSettingsModule; import org.elasticsearch.test.InternalSettingsPlugin; +import org.elasticsearch.test.junit.annotations.TestLogging; import java.io.IOException; import java.io.UncheckedIOException; @@ -84,6 +85,7 @@ import java.util.Collection; import java.util.Collections; import java.util.List; +import java.util.Locale; import java.util.concurrent.BrokenBarrierException; import java.util.concurrent.CountDownLatch; import java.util.concurrent.CyclicBarrier; @@ -404,6 +406,7 @@ public void testMaybeRollTranslogGeneration() throws Exception { } } + @TestLogging("_root:DEBUG,org.elasticsearch.index.shard:TRACE,org.elasticsearch.index.engine:TRACE") public void testStressMaybeFlushOrRollTranslogGeneration() throws Exception { createIndex("test"); ensureGreen(); @@ -446,13 +449,14 @@ public void testStressMaybeFlushOrRollTranslogGeneration() throws Exception { barrier.await(); final CheckedRunnable check; if (flush) { - final FlushStats flushStats = shard.flushStats(); - final long total = flushStats.getTotal(); - final long periodic = flushStats.getPeriodic(); + final FlushStats initialStats = shard.flushStats(); client().prepareIndex("test", "test", "1").setSource("{}", XContentType.JSON).get(); check = () -> { - assertThat(shard.flushStats().getTotal(), equalTo(total + 1)); - assertThat(shard.flushStats().getPeriodic(), equalTo(periodic + 1)); + final FlushStats currentStats = shard.flushStats(); + String msg = String.format(Locale.ROOT, "flush stats: total=[%d vs %d], periodic=[%d vs %d]", + initialStats.getTotal(), currentStats.getTotal(), initialStats.getPeriodic(), currentStats.getPeriodic()); + assertThat(msg, currentStats.getPeriodic(), equalTo(initialStats.getPeriodic() + 1)); + assertThat(msg, currentStats.getTotal(), equalTo(initialStats.getTotal() + 1)); }; } else { final long generation = 
getTranslog(shard).currentFileGeneration(); diff --git a/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheIT.java b/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheIT.java index 70a633f02f4dc..0f12305f23969 100644 --- a/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheIT.java +++ b/server/src/test/java/org/elasticsearch/indices/IndicesRequestCacheIT.java @@ -30,11 +30,11 @@ import org.elasticsearch.search.aggregations.bucket.histogram.Histogram; import org.elasticsearch.search.aggregations.bucket.histogram.Histogram.Bucket; import org.elasticsearch.test.ESIntegTestCase; -import org.joda.time.DateTime; import org.joda.time.DateTimeZone; -import org.joda.time.chrono.ISOChronology; -import org.joda.time.format.DateTimeFormat; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; +import java.time.format.DateTimeFormatter; import java.util.List; import static org.elasticsearch.search.aggregations.AggregationBuilders.dateHistogram; @@ -255,7 +255,7 @@ public void testQueryRewriteDatesWithNow() throws Exception { .setSettings(settings).get()); assertAcked(client.admin().indices().prepareCreate("index-3").addMapping("type", "d", "type=date") .setSettings(settings).get()); - DateTime now = new DateTime(ISOChronology.getInstanceUTC()); + ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC); indexRandom(true, client.prepareIndex("index-1", "type", "1").setSource("d", now), client.prepareIndex("index-1", "type", "2").setSource("d", now.minusDays(1)), client.prepareIndex("index-1", "type", "3").setSource("d", now.minusDays(2)), @@ -456,9 +456,9 @@ public void testCacheWithFilteredAlias() { .setSettings(settings) .addAlias(new Alias("last_week").filter(QueryBuilders.rangeQuery("created_at").gte("now-7d/d"))) .get()); - DateTime now = new DateTime(DateTimeZone.UTC); + ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC); client.prepareIndex("index", "type", "1").setRouting("1").setSource("created_at", - DateTimeFormat.forPattern("YYYY-MM-dd").print(now)).get(); + DateTimeFormatter.ISO_LOCAL_DATE.format(now)).get(); refresh(); assertThat(client.admin().indices().prepareStats("index").setRequestCache(true).get().getTotal().getRequestCache().getHitCount(), diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/MinDocCountIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/MinDocCountIT.java index af1104879e92e..4a85c2c145329 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/MinDocCountIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/MinDocCountIT.java @@ -22,10 +22,10 @@ import com.carrotsearch.hppc.LongHashSet; import com.carrotsearch.hppc.LongSet; import com.carrotsearch.randomizedtesting.generators.RandomStrings; - import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.search.SearchRequest; import org.elasticsearch.action.search.SearchResponse; +import org.elasticsearch.common.time.DateFormatters; import org.elasticsearch.index.fielddata.ScriptDocValues; import org.elasticsearch.index.query.QueryBuilder; import org.elasticsearch.index.query.QueryBuilders; @@ -40,10 +40,9 @@ import org.elasticsearch.search.aggregations.bucket.terms.Terms; import org.elasticsearch.search.aggregations.bucket.terms.TermsAggregationBuilder; import org.elasticsearch.test.ESIntegTestCase; -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; -import org.joda.time.format.DateTimeFormat; +import 
java.time.ZoneOffset; +import java.time.ZonedDateTime; import java.util.ArrayList; import java.util.Collection; import java.util.Collections; @@ -123,8 +122,9 @@ public void setupSuiteScopeCluster() throws Exception { longTerm = randomInt(cardinality * 2); } while (!longTerms.add(longTerm)); double doubleTerm = longTerm * Math.PI; - String dateTerm = DateTimeFormat.forPattern("yyyy-MM-dd") - .print(new DateTime(2014, 1, ((int) longTerm % 20) + 1, 0, 0, DateTimeZone.UTC)); + + ZonedDateTime time = ZonedDateTime.of(2014, 1, ((int) longTerm % 20) + 1, 0, 0, 0, 0, ZoneOffset.UTC); + String dateTerm = DateFormatters.forPattern("yyyy-MM-dd").format(time); final int frequency = randomBoolean() ? 1 : randomIntBetween(2, 20); for (int j = 0; j < frequency; ++j) { indexRequests.add(client().prepareIndex("idx", "type").setSource(jsonBuilder() diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnWhitelistedFunctionTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnWhitelistedFunctionTests.java index 0a0f9d6ae3759..6d0e388e64325 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnWhitelistedFunctionTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/pipeline/movfn/MovFnWhitelistedFunctionTests.java @@ -313,6 +313,10 @@ public void testEmptySimpleStdDev() { assertThat(actual, equalTo(Double.NaN)); } + public void testStdDevNaNAvg() { + assertThat(MovingFunctions.stdDev(new double[] { 1.0, 2.0, 3.0 }, Double.NaN), equalTo(Double.NaN)); + } + public void testLinearMovAvg() { int numValues = randomIntBetween(1, 100); diff --git a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java index 35c5a19cc2e8c..e5af22cd2ae65 100644 --- a/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java +++ b/server/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java @@ -63,10 +63,10 @@ import org.elasticsearch.test.MockKeywordPlugin; import org.hamcrest.Matcher; import org.hamcrest.Matchers; -import org.joda.time.DateTime; -import org.joda.time.chrono.ISOChronology; import java.io.IOException; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; import java.util.Arrays; import java.util.Collection; import java.util.HashMap; @@ -2865,7 +2865,7 @@ public void testHighlightQueryRewriteDatesWithNow() throws Exception { "field", "type=text,store=true,term_vector=with_positions_offsets") .setSettings(Settings.builder().put("index.number_of_replicas", 0).put("index.number_of_shards", 2)) .get()); - DateTime now = new DateTime(ISOChronology.getInstanceUTC()); + ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC); indexRandom(true, client().prepareIndex("index-1", "type", "1").setSource("d", now, "field", "hello world"), client().prepareIndex("index-1", "type", "2").setSource("d", now.minusDays(1), "field", "hello"), client().prepareIndex("index-1", "type", "3").setSource("d", now.minusDays(2), "field", "world")); diff --git a/server/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java b/server/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java index ab5387b6e3f48..2e9426bbe6879 100644 --- a/server/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java +++ 
b/server/src/test/java/org/elasticsearch/search/fields/SearchFieldsIT.java @@ -28,8 +28,8 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.MapBuilder; import org.elasticsearch.common.document.DocumentField; -import org.elasticsearch.common.joda.Joda; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.time.DateFormatters; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.support.XContentMapValues; @@ -46,10 +46,10 @@ import org.elasticsearch.search.sort.SortOrder; import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.InternalSettingsPlugin; -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; -import org.joda.time.ReadableDateTime; +import org.joda.time.base.BaseDateTime; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; import java.util.ArrayList; import java.util.Arrays; import java.util.Base64; @@ -546,6 +546,7 @@ public void testStoredFieldsWithoutSource() throws Exception { client().admin().indices().preparePutMapping().setType("type1").setSource(mapping, XContentType.JSON).execute().actionGet(); + ZonedDateTime date = ZonedDateTime.of(2012, 3, 22, 0, 0, 0, 0, ZoneOffset.UTC); client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject() .field("byte_field", (byte) 1) .field("short_field", (short) 2) @@ -553,7 +554,7 @@ public void testStoredFieldsWithoutSource() throws Exception { .field("long_field", 4L) .field("float_field", 5.0f) .field("double_field", 6.0d) - .field("date_field", Joda.forPattern("dateOptionalTime").printer().print(new DateTime(2012, 3, 22, 0, 0, DateTimeZone.UTC))) + .field("date_field", DateFormatters.forPattern("dateOptionalTime").format(date)) .field("boolean_field", true) .field("binary_field", Base64.getEncoder().encodeToString("testing text".getBytes("UTF-8"))) .endObject()).execute().actionGet(); @@ -578,7 +579,6 @@ public void testStoredFieldsWithoutSource() throws Exception { assertThat(fields, equalTo(newHashSet("byte_field", "short_field", "integer_field", "long_field", "float_field", "double_field", "date_field", "boolean_field", "binary_field"))); - SearchHit searchHit = searchResponse.getHits().getAt(0); assertThat(searchHit.getFields().get("byte_field").getValue().toString(), equalTo("1")); assertThat(searchHit.getFields().get("short_field").getValue().toString(), equalTo("2")); @@ -586,7 +586,7 @@ public void testStoredFieldsWithoutSource() throws Exception { assertThat(searchHit.getFields().get("long_field").getValue(), equalTo((Object) 4L)); assertThat(searchHit.getFields().get("float_field").getValue(), equalTo((Object) 5.0f)); assertThat(searchHit.getFields().get("double_field").getValue(), equalTo((Object) 6.0d)); - String dateTime = Joda.forPattern("dateOptionalTime").printer().print(new DateTime(2012, 3, 22, 0, 0, DateTimeZone.UTC)); + String dateTime = DateFormatters.forPattern("dateOptionalTime").format(date); assertThat(searchHit.getFields().get("date_field").getValue(), equalTo((Object) dateTime)); assertThat(searchHit.getFields().get("boolean_field").getValue(), equalTo((Object) Boolean.TRUE)); assertThat(searchHit.getFields().get("binary_field").getValue(), equalTo(new BytesArray("testing text" .getBytes("UTF8")))); @@ -756,7 +756,7 @@ public void testDocValueFields() throws Exception { client().admin().indices().preparePutMapping().setType("type1").setSource(mapping, 
XContentType.JSON).execute().actionGet(); - ReadableDateTime date = new DateTime(2012, 3, 22, 0, 0, DateTimeZone.UTC); + ZonedDateTime date = ZonedDateTime.of(2012, 3, 22, 0, 0, 0, 0, ZoneOffset.UTC); client().prepareIndex("test", "type1", "1").setSource(jsonBuilder().startObject() .field("text_field", "foo") .field("keyword_field", "foo") @@ -766,7 +766,7 @@ public void testDocValueFields() throws Exception { .field("long_field", 4L) .field("float_field", 5.0f) .field("double_field", 6.0d) - .field("date_field", Joda.forPattern("dateOptionalTime").printer().print(date)) + .field("date_field", DateFormatters.forPattern("dateOptionalTime").format(date)) .field("boolean_field", true) .field("binary_field", new byte[] {42, 100}) .field("ip_field", "::1") @@ -802,7 +802,8 @@ public void testDocValueFields() throws Exception { assertThat(searchResponse.getHits().getAt(0).getFields().get("long_field").getValue(), equalTo((Object) 4L)); assertThat(searchResponse.getHits().getAt(0).getFields().get("float_field").getValue(), equalTo((Object) 5.0)); assertThat(searchResponse.getHits().getAt(0).getFields().get("double_field").getValue(), equalTo((Object) 6.0d)); - assertThat(searchResponse.getHits().getAt(0).getFields().get("date_field").getValue(), equalTo(date)); + BaseDateTime dateField = searchResponse.getHits().getAt(0).getFields().get("date_field").getValue(); + assertThat(dateField.getMillis(), equalTo(date.toInstant().toEpochMilli())); assertThat(searchResponse.getHits().getAt(0).getFields().get("boolean_field").getValue(), equalTo((Object) true)); assertThat(searchResponse.getHits().getAt(0).getFields().get("text_field").getValue(), equalTo("foo")); assertThat(searchResponse.getHits().getAt(0).getFields().get("keyword_field").getValue(), equalTo("foo")); @@ -839,7 +840,7 @@ public void testDocValueFields() throws Exception { assertThat(searchResponse.getHits().getAt(0).getFields().get("float_field").getValue(), equalTo((Object) 5.0)); assertThat(searchResponse.getHits().getAt(0).getFields().get("double_field").getValue(), equalTo((Object) 6.0d)); assertThat(searchResponse.getHits().getAt(0).getFields().get("date_field").getValue(), - equalTo(Joda.forPattern("dateOptionalTime").printer().print(date))); + equalTo(DateFormatters.forPattern("dateOptionalTime").format(date))); assertThat(searchResponse.getHits().getAt(0).getFields().get("boolean_field").getValue(), equalTo((Object) true)); assertThat(searchResponse.getHits().getAt(0).getFields().get("text_field").getValue(), equalTo("foo")); assertThat(searchResponse.getHits().getAt(0).getFields().get("keyword_field").getValue(), equalTo("foo")); @@ -869,7 +870,7 @@ public void testDocValueFields() throws Exception { assertThat(searchResponse.getHits().getAt(0).getFields().get("float_field").getValue(), equalTo("5.0")); assertThat(searchResponse.getHits().getAt(0).getFields().get("double_field").getValue(), equalTo("6.0")); assertThat(searchResponse.getHits().getAt(0).getFields().get("date_field").getValue(), - equalTo(Joda.forPattern("epoch_millis").printer().print(date))); + equalTo(DateFormatters.forPattern("epoch_millis").format(date))); } public void testScriptFields() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java b/server/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java index d6acdf11cb2ab..a21893db3920f 100644 --- a/server/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java +++ 
b/server/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java @@ -43,9 +43,9 @@ import org.elasticsearch.test.ESIntegTestCase; import org.elasticsearch.test.InternalSettingsPlugin; import org.elasticsearch.test.VersionUtils; -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -562,27 +562,27 @@ public void testValueMissingLin() throws Exception { } public void testDateWithoutOrigin() throws Exception { - DateTime dt = new DateTime(DateTimeZone.UTC); + ZonedDateTime dt = ZonedDateTime.now(ZoneOffset.UTC); assertAcked(prepareCreate("test").addMapping( "type1", jsonBuilder().startObject().startObject("type1").startObject("properties").startObject("test").field("type", "text") .endObject().startObject("num1").field("type", "date").endObject().endObject().endObject().endObject())); - DateTime docDate = dt.minusDays(1); - String docDateString = docDate.getYear() + "-" + String.format(Locale.ROOT, "%02d", docDate.getMonthOfYear()) + "-" + ZonedDateTime docDate = dt.minusDays(1); + String docDateString = docDate.getYear() + "-" + String.format(Locale.ROOT, "%02d", docDate.getMonthValue()) + "-" + String.format(Locale.ROOT, "%02d", docDate.getDayOfMonth()); client().index( indexRequest("test").type("type1").id("1") .source(jsonBuilder().startObject().field("test", "value").field("num1", docDateString).endObject())).actionGet(); docDate = dt.minusDays(2); - docDateString = docDate.getYear() + "-" + String.format(Locale.ROOT, "%02d", docDate.getMonthOfYear()) + "-" + docDateString = docDate.getYear() + "-" + String.format(Locale.ROOT, "%02d", docDate.getMonthValue()) + "-" + String.format(Locale.ROOT, "%02d", docDate.getDayOfMonth()); client().index( indexRequest("test").type("type1").id("2") .source(jsonBuilder().startObject().field("test", "value").field("num1", docDateString).endObject())).actionGet(); docDate = dt.minusDays(3); - docDateString = docDate.getYear() + "-" + String.format(Locale.ROOT, "%02d", docDate.getMonthOfYear()) + "-" + docDateString = docDate.getYear() + "-" + String.format(Locale.ROOT, "%02d", docDate.getMonthValue()) + "-" + String.format(Locale.ROOT, "%02d", docDate.getDayOfMonth()); client().index( indexRequest("test").type("type1").id("3") diff --git a/server/src/test/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java b/server/src/test/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java index 0e92aba2a8552..12e48a3ae4f0a 100644 --- a/server/src/test/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java +++ b/server/src/test/java/org/elasticsearch/search/functionscore/FunctionScoreIT.java @@ -134,15 +134,12 @@ public void testScriptScoresWithAgg() throws IOException { } public void testMinScoreFunctionScoreBasic() throws IOException { - index(INDEX, TYPE, jsonBuilder().startObject().field("num", 2).endObject()); - refresh(); float score = randomFloat(); float minScore = randomFloat(); - index(INDEX, TYPE, jsonBuilder().startObject() - .field("num", 2) - .field("random_score", score) // Pass the random score as a document field so that it can be extracted in the script - .endObject()); + .field("num", 2) + .field("random_score", score) // Pass the random score as a document field so that it can be extracted in the script + .endObject()); refresh(); ensureYellow(); diff --git a/server/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java 
b/server/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java index eab3a6e9b4824..be71867edd2a0 100644 --- a/server/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java +++ b/server/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java @@ -52,6 +52,9 @@ import org.joda.time.format.ISODateTimeFormat; import java.io.IOException; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; +import java.time.format.DateTimeFormatter; import java.util.Collection; import java.util.Collections; import java.util.Random; @@ -480,8 +483,9 @@ public void testDateRangeInQueryString() { "type", "past", "type=date", "future", "type=date" )); - String aMonthAgo = ISODateTimeFormat.yearMonthDay().print(new DateTime(DateTimeZone.UTC).minusMonths(1)); - String aMonthFromNow = ISODateTimeFormat.yearMonthDay().print(new DateTime(DateTimeZone.UTC).plusMonths(1)); + ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC); + String aMonthAgo = DateTimeFormatter.ISO_LOCAL_DATE.format(now.minusMonths(1)); + String aMonthFromNow = DateTimeFormatter.ISO_LOCAL_DATE.format(now.plusMonths(1)); client().prepareIndex("test", "type", "1").setSource("past", aMonthAgo, "future", aMonthFromNow).get(); refresh(); diff --git a/server/src/test/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java b/server/src/test/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java index 00defee8daaf4..d95db778a6a3f 100644 --- a/server/src/test/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java +++ b/server/src/test/java/org/elasticsearch/search/suggest/ContextCompletionSuggestSearchIT.java @@ -19,12 +19,10 @@ package org.elasticsearch.search.suggest; import com.carrotsearch.randomizedtesting.generators.RandomStrings; - import org.apache.lucene.util.LuceneTestCase.SuppressCodecs; import org.elasticsearch.action.index.IndexRequestBuilder; import org.elasticsearch.action.index.IndexResponse; import org.elasticsearch.action.search.SearchResponse; -import org.elasticsearch.common.geo.GeoHashUtils; import org.elasticsearch.common.geo.GeoPoint; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.Fuzziness; @@ -95,7 +93,9 @@ public void testContextPrefix() throws Exception { .setSource(source)); } indexRandom(true, indexRequestBuilders); - CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg"); + CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg") + .contexts(Collections.singletonMap("cat", + Collections.singletonList(CategoryQueryContext.builder().setCategory("cat").setPrefix(true).build()))); assertSuggestions("foo", prefix, "suggestion9", "suggestion8", "suggestion7", "suggestion6", "suggestion5"); } @@ -126,7 +126,9 @@ public void testContextRegex() throws Exception { .setSource(source)); } indexRandom(true, indexRequestBuilders); - CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).regex("sugg.*es"); + CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).regex("sugg.*es") + .contexts(Collections.singletonMap("cat", + Collections.singletonList(CategoryQueryContext.builder().setCategory("cat").setPrefix(true).build()))); assertSuggestions("foo", prefix, "sugg9estion", "sugg8estion", "sugg7estion", "sugg6estion", "sugg5estion"); } @@ -157,7 +159,9 @@ public void testContextFuzzy() throws Exception { .setSource(source)); } indexRandom(true, indexRequestBuilders); - 
CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg", Fuzziness.ONE); + CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg", Fuzziness.ONE) + .contexts(Collections.singletonMap("cat", + Collections.singletonList(CategoryQueryContext.builder().setCategory("cat").setPrefix(true).build()))); assertSuggestions("foo", prefix, "sugxgestion9", "sugxgestion8", "sugxgestion7", "sugxgestion6", "sugxgestion5"); } @@ -236,32 +240,6 @@ public void testSingleContextBoosting() throws Exception { assertSuggestions("foo", prefix, "suggestion8", "suggestion6", "suggestion4", "suggestion9", "suggestion2"); } - public void testSingleContextMultipleContexts() throws Exception { - CategoryContextMapping contextMapping = ContextBuilder.category("cat").field("cat").build(); - LinkedHashMap> map = new LinkedHashMap<>(Collections.singletonMap("cat", contextMapping)); - final CompletionMappingBuilder mapping = new CompletionMappingBuilder().context(map); - createIndexAndMapping(mapping); - int numDocs = 10; - List contexts = Arrays.asList("type1", "type2", "type3", "type4"); - List indexRequestBuilders = new ArrayList<>(); - for (int i = 0; i < numDocs; i++) { - XContentBuilder source = jsonBuilder() - .startObject() - .startObject(FIELD) - .field("input", "suggestion" + i) - .field("weight", i + 1) - .endObject() - .field("cat", contexts) - .endObject(); - indexRequestBuilders.add(client().prepareIndex(INDEX, TYPE, "" + i) - .setSource(source)); - } - indexRandom(true, indexRequestBuilders); - CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg"); - - assertSuggestions("foo", prefix, "suggestion9", "suggestion8", "suggestion7", "suggestion6", "suggestion5"); - } - public void testMultiContextFiltering() throws Exception { LinkedHashMap> map = new LinkedHashMap<>(); map.put("cat", ContextBuilder.category("cat").field("cat").build()); @@ -295,14 +273,6 @@ public void testMultiContextFiltering() throws Exception { typeFilterSuggest.contexts(Collections.singletonMap("type", Arrays.asList(CategoryQueryContext.builder().setCategory("type2").build(), CategoryQueryContext.builder().setCategory("type1").build()))); assertSuggestions("foo", typeFilterSuggest, "suggestion9", "suggestion6", "suggestion5", "suggestion2", "suggestion1"); - - CompletionSuggestionBuilder multiContextFilterSuggest = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg"); - // query context order should never matter - Map> contextMap = new HashMap<>(); - contextMap.put("type", Collections.singletonList(CategoryQueryContext.builder().setCategory("type2").build())); - contextMap.put("cat", Collections.singletonList(CategoryQueryContext.builder().setCategory("cat2").build())); - multiContextFilterSuggest.contexts(contextMap); - assertSuggestions("foo", multiContextFilterSuggest, "suggestion6", "suggestion2"); } @AwaitsFix(bugUrl = "multiple context boosting is broken, as a suggestion, contexts pair is treated as (num(context) entries)") @@ -361,36 +331,6 @@ public void testMultiContextBoosting() throws Exception { assertSuggestions("foo", multiContextBoostSuggest, "suggestion9", "suggestion6", "suggestion5", "suggestion2", "suggestion1"); } - public void testMissingContextValue() throws Exception { - LinkedHashMap> map = new LinkedHashMap<>(); - map.put("cat", ContextBuilder.category("cat").field("cat").build()); - map.put("type", ContextBuilder.category("type").field("type").build()); - final CompletionMappingBuilder 
mapping = new CompletionMappingBuilder().context(map); - createIndexAndMapping(mapping); - int numDocs = 10; - List indexRequestBuilders = new ArrayList<>(); - for (int i = 0; i < numDocs; i++) { - XContentBuilder source = jsonBuilder() - .startObject() - .startObject(FIELD) - .field("input", "suggestion" + i) - .field("weight", i + 1) - .endObject(); - if (randomBoolean()) { - source.field("cat", "cat" + i % 2); - } - if (randomBoolean()) { - source.field("type", "type" + i % 4); - } - source.endObject(); - indexRequestBuilders.add(client().prepareIndex(INDEX, TYPE, "" + i) - .setSource(source)); - } - indexRandom(true, indexRequestBuilders); - CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg"); - assertSuggestions("foo", prefix, "suggestion9", "suggestion8", "suggestion7", "suggestion6", "suggestion5"); - } - public void testSeveralContexts() throws Exception { LinkedHashMap> map = new LinkedHashMap<>(); final int numContexts = randomIntBetween(2, 5); @@ -417,35 +357,12 @@ public void testSeveralContexts() throws Exception { } indexRandom(true, indexRequestBuilders); - CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg"); + CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg") + .contexts(Collections.singletonMap("type0", + Collections.singletonList(CategoryQueryContext.builder().setCategory("type").setPrefix(true).build()))); assertSuggestions("foo", prefix, "suggestion0", "suggestion1", "suggestion2", "suggestion3", "suggestion4"); } - public void testSimpleGeoPrefix() throws Exception { - LinkedHashMap> map = new LinkedHashMap<>(); - map.put("geo", ContextBuilder.geo("geo").build()); - final CompletionMappingBuilder mapping = new CompletionMappingBuilder().context(map); - createIndexAndMapping(mapping); - int numDocs = 10; - List indexRequestBuilders = new ArrayList<>(); - for (int i = 0; i < numDocs; i++) { - XContentBuilder source = jsonBuilder() - .startObject() - .startObject(FIELD) - .field("input", "suggestion" + i) - .field("weight", i + 1) - .startObject("contexts") - .field("geo", GeoHashUtils.stringEncode(1.2, 1.3)) - .endObject() - .endObject().endObject(); - indexRequestBuilders.add(client().prepareIndex(INDEX, TYPE, "" + i) - .setSource(source)); - } - indexRandom(true, indexRequestBuilders); - CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg"); - assertSuggestions("foo", prefix, "suggestion9", "suggestion8", "suggestion7", "suggestion6", "suggestion5"); - } - public void testGeoFiltering() throws Exception { LinkedHashMap> map = new LinkedHashMap<>(); map.put("geo", ContextBuilder.geo("geo").build()); @@ -468,8 +385,6 @@ public void testGeoFiltering() throws Exception { .setSource(source)); } indexRandom(true, indexRequestBuilders); - CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg"); - assertSuggestions("foo", prefix, "suggestion9", "suggestion8", "suggestion7", "suggestion6", "suggestion5"); CompletionSuggestionBuilder geoFilteringPrefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg") .contexts(Collections.singletonMap("geo", Collections.singletonList( @@ -500,8 +415,6 @@ public void testGeoBoosting() throws Exception { .setSource(source)); } indexRandom(true, indexRequestBuilders); - CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg"); - assertSuggestions("foo", prefix, "suggestion9", 
"suggestion8", "suggestion7", "suggestion6", "suggestion5"); GeoQueryContext context1 = GeoQueryContext.builder().setGeoPoint(geoPoints[0]).setBoost(11).build(); GeoQueryContext context2 = GeoQueryContext.builder().setGeoPoint(geoPoints[1]).build(); @@ -572,8 +485,6 @@ public void testGeoNeighbours() throws Exception { .setSource(source)); } indexRandom(true, indexRequestBuilders); - CompletionSuggestionBuilder prefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg"); - assertSuggestions("foo", prefix, "suggestion9", "suggestion8", "suggestion7", "suggestion6", "suggestion5"); CompletionSuggestionBuilder geoNeighbourPrefix = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg") .contexts(Collections.singletonMap("geo", Collections.singletonList(GeoQueryContext.builder().setGeoPoint(GeoPoint.fromGeohash(geohash)).build()))); @@ -668,14 +579,9 @@ public void testSkipDuplicatesWithContexts() throws Exception { expected[i] = "suggestion" + (numUnique-1-i); } indexRandom(true, indexRequestBuilders); - CompletionSuggestionBuilder completionSuggestionBuilder = - SuggestBuilders.completionSuggestion(FIELD).prefix("sugg").skipDuplicates(true).size(numUnique); - - assertSuggestions("suggestions", completionSuggestionBuilder, expected); - Map> contextMap = new HashMap<>(); contextMap.put("cat", Arrays.asList(CategoryQueryContext.builder().setCategory("cat0").build())); - completionSuggestionBuilder = + CompletionSuggestionBuilder completionSuggestionBuilder = SuggestBuilders.completionSuggestion(FIELD).prefix("sugg").contexts(contextMap).skipDuplicates(true).size(numUnique); String[] expectedModulo = Arrays.stream(expected) diff --git a/server/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java b/server/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java index 8b3aff90e8d4c..cdbc2c702d8b1 100644 --- a/server/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java +++ b/server/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java @@ -35,12 +35,13 @@ import org.elasticsearch.test.ESIntegTestCase.ClusterScope; import org.elasticsearch.test.ESIntegTestCase.Scope; import org.hamcrest.Matcher; -import org.joda.time.DateTime; -import org.joda.time.DateTimeZone; -import org.joda.time.format.ISODateTimeFormat; import java.io.IOException; import java.nio.charset.StandardCharsets; +import java.time.ZoneOffset; +import java.time.ZonedDateTime; +import java.time.format.DateTimeFormatter; +import java.time.temporal.ChronoUnit; import java.util.Arrays; import java.util.List; @@ -124,8 +125,9 @@ public void testExplainDateRangeInQueryString() { .put(indexSettings()) .put("index.number_of_shards", 1))); - String aMonthAgo = ISODateTimeFormat.yearMonthDay().print(new DateTime(DateTimeZone.UTC).minusMonths(1)); - String aMonthFromNow = ISODateTimeFormat.yearMonthDay().print(new DateTime(DateTimeZone.UTC).plusMonths(1)); + ZonedDateTime now = ZonedDateTime.now(ZoneOffset.UTC); + String aMonthAgo = DateTimeFormatter.ISO_LOCAL_DATE.format(now.plus(1, ChronoUnit.MONTHS)); + String aMonthFromNow = DateTimeFormatter.ISO_LOCAL_DATE.format(now.minus(1, ChronoUnit.MONTHS)); client().prepareIndex("test", "type", "1").setSource("past", aMonthAgo, "future", aMonthFromNow).get(); @@ -137,10 +139,10 @@ public void testExplainDateRangeInQueryString() { assertNoFailures(response); assertThat(response.getQueryExplanation().size(), equalTo(1)); assertThat(response.getQueryExplanation().get(0).getError(), nullValue()); - DateTime twoMonthsAgo = new 
DateTime(DateTimeZone.UTC).minusMonths(2).withTimeAtStartOfDay(); - DateTime now = new DateTime(DateTimeZone.UTC).plusDays(1).withTimeAtStartOfDay().minusMillis(1); - assertThat(response.getQueryExplanation().get(0).getExplanation(), - equalTo("past:[" + twoMonthsAgo.getMillis() + " TO " + now.getMillis() + "]")); + + long twoMonthsAgo = now.minus(2, ChronoUnit.MONTHS).truncatedTo(ChronoUnit.DAYS).toEpochSecond() * 1000; + long rangeEnd = (now.plus(1, ChronoUnit.DAYS).truncatedTo(ChronoUnit.DAYS).toEpochSecond() * 1000) - 1; + assertThat(response.getQueryExplanation().get(0).getExplanation(), equalTo("past:[" + twoMonthsAgo + " TO " + rangeEnd + "]")); assertThat(response.isValid(), equalTo(true)); } diff --git a/test/framework/src/main/java/org/elasticsearch/ingest/IngestDocumentMatcher.java b/test/framework/src/main/java/org/elasticsearch/ingest/IngestDocumentMatcher.java index bb058c5cfdbb5..a7a8a3f11b1be 100644 --- a/test/framework/src/main/java/org/elasticsearch/ingest/IngestDocumentMatcher.java +++ b/test/framework/src/main/java/org/elasticsearch/ingest/IngestDocumentMatcher.java @@ -20,48 +20,61 @@ package org.elasticsearch.ingest; import java.util.List; -import java.util.Locale; import java.util.Map; - -import static org.hamcrest.Matchers.equalTo; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertThat; +import java.util.Objects; public class IngestDocumentMatcher { /** * Helper method to assert the equivalence between two IngestDocuments. * - * @param a first object to compare - * @param b second object to compare + * @param docA first document to compare + * @param docB second document to compare */ - public static void assertIngestDocument(Object a, Object b) { + public static void assertIngestDocument(IngestDocument docA, IngestDocument docB) { + if ((deepEquals(docA.getIngestMetadata(), docB.getIngestMetadata(), true) && + deepEquals(docA.getSourceAndMetadata(), docB.getSourceAndMetadata(), false)) == false) { + throw new AssertionError("Expected [" + docA + "] but received [" + docB + "]."); + } + } + + private static boolean deepEquals(Object a, Object b, boolean isIngestMeta) { if (a instanceof Map) { Map mapA = (Map) a; + if (b instanceof Map == false) { + return false; + } Map mapB = (Map) b; + if (mapA.size() != mapB.size()) { + return false; + } for (Map.Entry entry : mapA.entrySet()) { - if (entry.getValue() instanceof List || entry.getValue() instanceof Map) { - assertIngestDocument(entry.getValue(), mapB.get(entry.getKey())); + Object key = entry.getKey(); + // Don't compare the timestamp of ingest metadata since it will differ between executions + if ((isIngestMeta && "timestamp".equals(key)) == false + && deepEquals(entry.getValue(), mapB.get(key), false) == false) { + return false; } } + return true; } else if (a instanceof List) { List listA = (List) a; + if (b instanceof List == false) { + return false; + } List listB = (List) b; - for (int i = 0; i < listA.size(); i++) { + int countA = listA.size(); + if (countA != listB.size()) { + return false; + } + for (int i = 0; i < countA; i++) { Object value = listA.get(i); - if (value instanceof List || value instanceof Map) { - assertIngestDocument(value, listB.get(i)); + if (deepEquals(value, listB.get(i), false) == false) { + return false; } } - } else if (a instanceof byte[]) { - assertArrayEquals((byte[]) a, (byte[])b); - } else if (a instanceof IngestDocument) { - IngestDocument docA = (IngestDocument) a; - IngestDocument docB = (IngestDocument) b; - 
assertIngestDocument(docA.getSourceAndMetadata(), docB.getSourceAndMetadata()); - assertIngestDocument(docA.getIngestMetadata(), docB.getIngestMetadata()); + return true; } else { - String msg = String.format(Locale.ROOT, "Expected %s class to be equal to %s", a.getClass().getName(), b.getClass().getName()); - assertThat(msg, a, equalTo(b)); + return Objects.deepEquals(a, b); } } } diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractXContentTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractXContentTestCase.java index fd5700c68a981..c740a65d28a6e 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractXContentTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractXContentTestCase.java @@ -54,21 +54,16 @@ public static void testFromXContent(int numberOfTestRuns, for (int runs = 0; runs < numberOfTestRuns; runs++) { T testInstance = instanceSupplier.get(); XContentType xContentType = randomFrom(XContentType.values()); - BytesReference shuffled = toShuffledXContent(testInstance, xContentType, toXContentParams,false, - createParserFunction, shuffleFieldsExceptions); - BytesReference withRandomFields; - if (supportsUnknownFields) { - // we add a few random fields to check that parser is lenient on new fields - withRandomFields = XContentTestUtils.insertRandomFields(xContentType, shuffled, randomFieldsExcludeFilter, random()); - } else { - withRandomFields = shuffled; - } - XContentParser parser = createParserFunction.apply(XContentFactory.xContent(xContentType), withRandomFields); + BytesReference shuffledContent = insertRandomFieldsAndShuffle(testInstance, xContentType, supportsUnknownFields, + shuffleFieldsExceptions, randomFieldsExcludeFilter, createParserFunction, toXContentParams); + XContentParser parser = createParserFunction.apply(XContentFactory.xContent(xContentType), shuffledContent); T parsed = parseFunction.apply(parser); assertEqualsConsumer.accept(testInstance, parsed); if (assertToXContentEquivalence) { - assertToXContentEquivalent(shuffled, XContentHelper.toXContent(parsed, xContentType, toXContentParams, false), - xContentType); + assertToXContentEquivalent( + XContentHelper.toXContent(testInstance, xContentType, toXContentParams, false), + XContentHelper.toXContent(parsed, xContentType, toXContentParams, false), + xContentType); } } } @@ -132,9 +127,26 @@ protected String[] getShuffleFieldsExceptions() { } /** - * Params that have to be provided when calling calling {@link ToXContent#toXContent(XContentBuilder, ToXContent.Params)} + * Params that have to be provided when calling {@link ToXContent#toXContent(XContentBuilder, ToXContent.Params)} */ protected ToXContent.Params getToXContentParams() { return ToXContent.EMPTY_PARAMS; } + + static BytesReference insertRandomFieldsAndShuffle(ToXContent testInstance, XContentType xContentType, + boolean supportsUnknownFields, String[] shuffleFieldsExceptions, Predicate randomFieldsExcludeFilter, + CheckedBiFunction createParserFunction, + ToXContent.Params toXContentParams) throws IOException { + BytesReference xContent = XContentHelper.toXContent(testInstance, xContentType, toXContentParams, false); + BytesReference withRandomFields; + if (supportsUnknownFields) { + // add a few random fields to check that the parser is lenient on new fields + withRandomFields = XContentTestUtils.insertRandomFields(xContentType, xContent, randomFieldsExcludeFilter, random()); + } else { + withRandomFields = xContent; + } + XContentParser parserWithRandomFields = 
createParserFunction.apply(XContentFactory.xContent(xContentType), withRandomFields); + return BytesReference.bytes(ESTestCase.shuffleXContent(parserWithRandomFields, false, shuffleFieldsExceptions)); + } + } diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index 7d44b3230a15f..9cdfc6776f883 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -124,6 +124,7 @@ import java.nio.file.DirectoryStream; import java.nio.file.Files; import java.nio.file.Path; +import java.time.ZoneId; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -176,6 +177,7 @@ public abstract class ESTestCase extends LuceneTestCase { private static final List JODA_TIMEZONE_IDS; private static final List JAVA_TIMEZONE_IDS; + private static final List JAVA_ZONE_IDS; private static final AtomicInteger portGenerator = new AtomicInteger(); @@ -203,6 +205,10 @@ public static void resetPortCounter() { List javaTZIds = Arrays.asList(TimeZone.getAvailableIDs()); Collections.sort(javaTZIds); JAVA_TIMEZONE_IDS = Collections.unmodifiableList(javaTZIds); + + List javaZoneIds = new ArrayList<>(ZoneId.getAvailableZoneIds()); + Collections.sort(javaZoneIds); + JAVA_ZONE_IDS = Collections.unmodifiableList(javaZoneIds); } protected final Logger logger = Loggers.getLogger(getClass()); @@ -701,12 +707,19 @@ public static DateTimeZone randomDateTimeZone() { } /** - * generate a random TimeZone from the ones available in java.time + * generate a random TimeZone from the ones available in java.util */ public static TimeZone randomTimeZone() { return TimeZone.getTimeZone(randomFrom(JAVA_TIMEZONE_IDS)); } + /** + * generate a random TimeZone from the ones available in java.time + */ + public static ZoneId randomZone() { + return ZoneId.of(randomFrom(JAVA_ZONE_IDS)); + } + /** * helper to randomly perform on consumer with value */ diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java index 81a9598496bf5..937adddf3a43d 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/ESRestTestCase.java @@ -21,12 +21,6 @@ import org.apache.http.Header; import org.apache.http.HttpHost; -import org.apache.http.client.methods.HttpGet; -import org.apache.http.client.methods.HttpHead; -import org.apache.http.client.methods.HttpPost; -import org.apache.http.client.methods.HttpPut; -import org.apache.http.entity.ContentType; -import org.apache.http.entity.StringEntity; import org.apache.http.message.BasicHeader; import org.apache.http.nio.conn.ssl.SSLIOSessionStrategy; import org.apache.http.ssl.SSLContexts; @@ -68,16 +62,12 @@ import java.security.NoSuchAlgorithmException; import java.security.cert.CertificateException; import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.TimeUnit; -import static java.util.Collections.emptyMap; -import static java.util.Collections.singletonMap; import static java.util.Collections.sort; import static java.util.Collections.unmodifiableList; import static org.hamcrest.Matchers.anyOf; @@ -307,25 +297,25 @@ private void wipeCluster() throws
IOException { * the snapshots intact in the repository. */ private void wipeSnapshots() throws IOException { - for (Map.Entry repo : entityAsMap(adminClient.performRequest("GET", "_snapshot/_all")).entrySet()) { + for (Map.Entry repo : entityAsMap(adminClient.performRequest(new Request("GET", "/_snapshot/_all"))).entrySet()) { String repoName = repo.getKey(); Map repoSpec = (Map) repo.getValue(); String repoType = (String) repoSpec.get("type"); if (false == preserveSnapshotsUponCompletion() && repoType.equals("fs")) { // All other repo types we really don't have a chance of being able to iterate properly, sadly. - String url = "_snapshot/" + repoName + "/_all"; - Map params = singletonMap("ignore_unavailable", "true"); - List snapshots = (List) entityAsMap(adminClient.performRequest("GET", url, params)).get("snapshots"); + Request listRequest = new Request("GET", "/_snapshot/" + repoName + "/_all"); + listRequest.addParameter("ignore_unavailable", "true"); + List snapshots = (List) entityAsMap(adminClient.performRequest(listRequest)).get("snapshots"); for (Object snapshot : snapshots) { Map snapshotInfo = (Map) snapshot; String name = (String) snapshotInfo.get("snapshot"); logger.debug("wiping snapshot [{}/{}]", repoName, name); - adminClient().performRequest("DELETE", "_snapshot/" + repoName + "/" + name); + adminClient().performRequest(new Request("DELETE", "/_snapshot/" + repoName + "/" + name)); } } if (preserveReposUponCompletion() == false) { logger.debug("wiping snapshot repository [{}]", repoName); - adminClient().performRequest("DELETE", "_snapshot/" + repoName); + adminClient().performRequest(new Request("DELETE", "_snapshot/" + repoName)); } } } @@ -334,7 +324,7 @@ private void wipeSnapshots() throws IOException { * Remove any cluster settings. */ private void wipeClusterSettings() throws IOException { - Map getResponse = entityAsMap(adminClient().performRequest("GET", "/_cluster/settings")); + Map getResponse = entityAsMap(adminClient().performRequest(new Request("GET", "/_cluster/settings"))); boolean mustClear = false; XContentBuilder clearCommand = JsonXContent.contentBuilder(); @@ -355,8 +345,9 @@ private void wipeClusterSettings() throws IOException { clearCommand.endObject(); if (mustClear) { - adminClient().performRequest("PUT", "/_cluster/settings", emptyMap(), new StringEntity( - Strings.toString(clearCommand), ContentType.APPLICATION_JSON)); + Request request = new Request("PUT", "/_cluster/settings"); + request.setJsonEntity(Strings.toString(clearCommand)); + adminClient().performRequest(request); } } @@ -365,7 +356,7 @@ private void wipeClusterSettings() throws IOException { * other tests. 
*/ private void logIfThereAreRunningTasks() throws InterruptedException, IOException { - Set runningTasks = runningTasks(adminClient().performRequest("GET", "_tasks")); + Set runningTasks = runningTasks(adminClient().performRequest(new Request("GET", "/_tasks"))); // Ignore the task list API - it doesn't count against us runningTasks.remove(ListTasksAction.NAME); runningTasks.remove(ListTasksAction.NAME + "[n]"); @@ -389,7 +380,7 @@ private void logIfThereAreRunningTasks() throws InterruptedException, IOExceptio private void waitForClusterStateUpdatesToFinish() throws Exception { assertBusy(() -> { try { - Response response = adminClient().performRequest("GET", "_cluster/pending_tasks"); + Response response = adminClient().performRequest(new Request("GET", "/_cluster/pending_tasks")); List tasks = (List) entityAsMap(response).get("tasks"); if (false == tasks.isEmpty()) { StringBuilder message = new StringBuilder("there are still running tasks:"); @@ -514,12 +505,12 @@ protected static void assertOK(Response response) { * @param index index to test for **/ protected static void ensureGreen(String index) throws IOException { - Map params = new HashMap<>(); - params.put("wait_for_status", "green"); - params.put("wait_for_no_relocating_shards", "true"); - params.put("timeout", "70s"); - params.put("level", "shards"); - assertOK(client().performRequest("GET", "_cluster/health/" + index, params)); + Request request = new Request("GET", "/_cluster/health/" + index); + request.addParameter("wait_for_status", "green"); + request.addParameter("wait_for_no_relocating_shards", "true"); + request.addParameter("timeout", "70s"); + request.addParameter("level", "shards"); + client().performRequest(request); } /** @@ -527,11 +518,11 @@ protected static void ensureGreen(String index) throws IOException { * in the cluster and doesn't require to know how many nodes/replica there are. 
*/ protected static void ensureNoInitializingShards() throws IOException { - Map params = new HashMap<>(); - params.put("wait_for_no_initializing_shards", "true"); - params.put("timeout", "70s"); - params.put("level", "shards"); - assertOK(client().performRequest("GET", "_cluster/health/", params)); + Request request = new Request("GET", "/_cluster/health"); + request.addParameter("wait_for_no_initializing_shards", "true"); + request.addParameter("timeout", "70s"); + request.addParameter("level", "shards"); + client().performRequest(request); } protected static void createIndex(String name, Settings settings) throws IOException { @@ -539,9 +530,10 @@ protected static void createIndex(String name, Settings settings) throws IOExcep } protected static void createIndex(String name, Settings settings, String mapping) throws IOException { - assertOK(client().performRequest(HttpPut.METHOD_NAME, name, Collections.emptyMap(), - new StringEntity("{ \"settings\": " + Strings.toString(settings) - + ", \"mappings\" : {" + mapping + "} }", ContentType.APPLICATION_JSON))); + Request request = new Request("PUT", "/" + name); + request.setJsonEntity("{\n \"settings\": " + Strings.toString(settings) + + ", \"mappings\" : {" + mapping + "} }"); + client().performRequest(request); } protected static void updateIndexSettings(String index, Settings.Builder settings) throws IOException { @@ -549,42 +541,42 @@ protected static void updateIndexSettings(String index, Settings.Builder setting } private static void updateIndexSettings(String index, Settings settings) throws IOException { - assertOK(client().performRequest("PUT", index + "/_settings", Collections.emptyMap(), - new StringEntity(Strings.toString(settings), ContentType.APPLICATION_JSON))); + Request request = new Request("PUT", "/" + index + "/_settings"); + request.setJsonEntity(Strings.toString(settings)); + client().performRequest(request); } protected static Map getIndexSettings(String index) throws IOException { - Map params = new HashMap<>(); - params.put("flat_settings", "true"); - Response response = client().performRequest(HttpGet.METHOD_NAME, index + "/_settings", params); - assertOK(response); + Request request = new Request("GET", "/" + index + "/_settings"); + request.addParameter("flat_settings", "true"); + Response response = client().performRequest(request); try (InputStream is = response.getEntity().getContent()) { return XContentHelper.convertToMap(XContentType.JSON.xContent(), is, true); } } protected static boolean indexExists(String index) throws IOException { - Response response = client().performRequest(HttpHead.METHOD_NAME, index); + Response response = client().performRequest(new Request("HEAD", "/" + index)); return RestStatus.OK.getStatus() == response.getStatusLine().getStatusCode(); } protected static void closeIndex(String index) throws IOException { - Response response = client().performRequest(HttpPost.METHOD_NAME, index + "/_close"); + Response response = client().performRequest(new Request("POST", "/" + index + "/_close")); assertThat(response.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus())); } protected static void openIndex(String index) throws IOException { - Response response = client().performRequest(HttpPost.METHOD_NAME, index + "/_open"); + Response response = client().performRequest(new Request("POST", "/" + index + "/_open")); assertThat(response.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus())); } protected static boolean aliasExists(String alias) throws IOException { - 
Response response = client().performRequest(HttpHead.METHOD_NAME, "/_alias/" + alias); + Response response = client().performRequest(new Request("HEAD", "/_alias/" + alias)); return RestStatus.OK.getStatus() == response.getStatusLine().getStatusCode(); } protected static boolean aliasExists(String index, String alias) throws IOException { - Response response = client().performRequest(HttpHead.METHOD_NAME, "/" + index + "/_alias/" + alias); + Response response = client().performRequest(new Request("HEAD", "/" + index + "/_alias/" + alias)); return RestStatus.OK.getStatus() == response.getStatusLine().getStatusCode(); } @@ -602,7 +594,7 @@ protected static Map getAlias(final String index, final String a } protected static Map getAsMap(final String endpoint) throws IOException { - Response response = client().performRequest(HttpGet.METHOD_NAME, endpoint); + Response response = client().performRequest(new Request("GET", endpoint)); XContentType entityContentType = XContentType.fromMediaTypeOrFormat(response.getEntity().getContentType().getValue()); Map responseEntity = XContentHelper.convertToMap(entityContentType.xContent(), response.getEntity().getContent(), false); diff --git a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java index 69f4e0666eaa3..b97b4e8f6dace 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/rest/yaml/ESClientYamlSuiteTestCase.java @@ -47,7 +47,6 @@ import java.nio.file.Files; import java.nio.file.Path; import java.util.ArrayList; -import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; @@ -282,7 +281,9 @@ private static void validateSpec(ClientYamlSuiteRestSpec restSpec) { private static Tuple readVersionsFromCatNodes(RestClient restClient) throws IOException { // we simply go to the _cat/nodes API and parse all versions in the cluster - Response response = restClient.performRequest("GET", "/_cat/nodes", Collections.singletonMap("h", "version,master")); + Request request = new Request("GET", "/_cat/nodes"); + request.addParameter("h", "version,master"); + Response response = restClient.performRequest(request); ClientYamlTestResponse restTestResponse = new ClientYamlTestResponse(response); String nodesCatResponse = restTestResponse.getBodyAsString(); String[] split = nodesCatResponse.split("\n"); @@ -310,7 +311,7 @@ private static Version readVersionsFromInfo(RestClient restClient, int numHosts) Version version = null; for (int i = 0; i < numHosts; i++) { //we don't really use the urls here, we rely on the client doing round-robin to touch all the nodes in the cluster - Response response = restClient.performRequest("GET", "/"); + Response response = restClient.performRequest(new Request("GET", "/")); ClientYamlTestResponse restTestResponse = new ClientYamlTestResponse(response); Object latestVersion = restTestResponse.evaluate("version.number"); if (latestVersion == null) { diff --git a/test/framework/src/test/java/org/elasticsearch/ingest/IngestDocumentMatcherTests.java b/test/framework/src/test/java/org/elasticsearch/ingest/IngestDocumentMatcherTests.java new file mode 100644 index 0000000000000..bff9a923b9f75 --- /dev/null +++ b/test/framework/src/test/java/org/elasticsearch/ingest/IngestDocumentMatcherTests.java @@ -0,0 +1,82 @@ +/* + * Licensed to Elasticsearch 
under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.ingest; + +import org.elasticsearch.test.ESTestCase; + +import java.util.Arrays; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +import static org.elasticsearch.ingest.IngestDocumentMatcher.assertIngestDocument; + +public class IngestDocumentMatcherTests extends ESTestCase { + + public void testDifferentMapData() { + Map sourceAndMetadata1 = new HashMap<>(); + sourceAndMetadata1.put("foo", "bar"); + IngestDocument document1 = new IngestDocument(sourceAndMetadata1, new HashMap<>()); + IngestDocument document2 = new IngestDocument(new HashMap<>(), new HashMap<>()); + assertThrowsOnComparison(document1, document2); + } + + public void testDifferentLengthListData() { + String rootKey = "foo"; + IngestDocument document1 = + new IngestDocument(Collections.singletonMap(rootKey, Arrays.asList("bar", "baz")), new HashMap<>()); + IngestDocument document2 = + new IngestDocument(Collections.singletonMap(rootKey, Collections.emptyList()), new HashMap<>()); + assertThrowsOnComparison(document1, document2); + } + + public void testDifferentNestedListFieldData() { + String rootKey = "foo"; + IngestDocument document1 = + new IngestDocument(Collections.singletonMap(rootKey, Arrays.asList("bar", "baz")), new HashMap<>()); + IngestDocument document2 = + new IngestDocument(Collections.singletonMap(rootKey, Arrays.asList("bar", "blub")), new HashMap<>()); + assertThrowsOnComparison(document1, document2); + } + + public void testDifferentNestedMapFieldData() { + String rootKey = "foo"; + IngestDocument document1 = + new IngestDocument(Collections.singletonMap(rootKey, Collections.singletonMap("bar", "baz")), new HashMap<>()); + IngestDocument document2 = + new IngestDocument(Collections.singletonMap(rootKey, Collections.singletonMap("bar", "blub")), new HashMap<>()); + assertThrowsOnComparison(document1, document2); + } + + public void testOnTypeConflict() { + String rootKey = "foo"; + IngestDocument document1 = + new IngestDocument(Collections.singletonMap(rootKey, Collections.singletonList("baz")), new HashMap<>()); + IngestDocument document2 = new IngestDocument( + Collections.singletonMap(rootKey, Collections.singletonMap("blub", "blab")), new HashMap<>() + ); + assertThrowsOnComparison(document1, document2); + } + + private static void assertThrowsOnComparison(IngestDocument document1, IngestDocument document2) { + expectThrows(AssertionError.class, () -> assertIngestDocument(document1, document2)); + expectThrows(AssertionError.class, () -> assertIngestDocument(document2, document1)); + } +} diff --git a/test/framework/src/test/java/org/elasticsearch/test/AbstractXContentTestCaseTests.java b/test/framework/src/test/java/org/elasticsearch/test/AbstractXContentTestCaseTests.java new 
file mode 100644 index 0000000000000..165df584c030c --- /dev/null +++ b/test/framework/src/test/java/org/elasticsearch/test/AbstractXContentTestCaseTests.java @@ -0,0 +1,65 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.test; + +import com.carrotsearch.randomizedtesting.RandomizedContext; + +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; + +import java.io.IOException; +import java.util.Map; + +import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.not; + +public class AbstractXContentTestCaseTests extends ESTestCase { + + public void testInsertRandomFieldsAndShuffle() throws Exception { + TestInstance t = new TestInstance(); + BytesReference insertRandomFieldsAndShuffle = RandomizedContext.current().runWithPrivateRandomness(1, + () -> AbstractXContentTestCase.insertRandomFieldsAndShuffle(t, XContentType.JSON, true, new String[] {}, null, + this::createParser, ToXContent.EMPTY_PARAMS)); + try (XContentParser parser = createParser(XContentType.JSON.xContent(), insertRandomFieldsAndShuffle)) { + Map mapOrdered = parser.mapOrdered(); + assertThat(mapOrdered.size(), equalTo(2)); + assertThat(mapOrdered.keySet().iterator().next(), not(equalTo("field"))); + } + } + + private class TestInstance implements ToXContentObject { + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + builder.field("field", 1); + } + builder.endObject(); + return builder; + } + + } + +} \ No newline at end of file diff --git a/x-pack/build.gradle b/x-pack/build.gradle index 78e3f4ffb34a6..c2226b5463dec 100644 --- a/x-pack/build.gradle +++ b/x-pack/build.gradle @@ -12,16 +12,23 @@ subprojects { // helper method to find the path to a module ext.xpackModule = { String moduleName -> xpackProject("plugin:${moduleName}").path } - ext.licenseName = 'Elastic License' - ext.licenseUrl = ext.elasticLicenseUrl - - project.ext.licenseFile = rootProject.file('licenses/ELASTIC-LICENSE.txt') - project.ext.noticeFile = xpackRootProject.file('NOTICE.txt') - plugins.withType(PluginBuildPlugin).whenPluginAdded { project.esplugin.licenseFile = rootProject.file('licenses/ELASTIC-LICENSE.txt') project.esplugin.noticeFile = xpackRootProject.file('NOTICE.txt') } + + if (project.name != 'protocol') { + tasks.withType(LicenseHeadersTask.class) { + approvedLicenses = ['Elastic License', 'Generated'] + additionalLicense 'ELAST', 'Elastic License', 'Licensed under the Elastic 
License' + } + + ext.licenseName = 'Elastic License' + ext.licenseUrl = ext.elasticLicenseUrl + + project.ext.licenseFile = rootProject.file('licenses/ELASTIC-LICENSE.txt') + project.ext.noticeFile = xpackRootProject.file('NOTICE.txt') + } } File checkstyleSuppressions = file('dev-tools/checkstyle_suppressions.xml') @@ -34,10 +41,6 @@ subprojects { ] } - tasks.withType(LicenseHeadersTask.class) { - approvedLicenses = ['Elastic License', 'Generated'] - additionalLicense 'ELAST', 'Elastic License', 'Licensed under the Elastic License' - } ext.projectSubstitutions += [ "org.elasticsearch.plugin:x-pack-core:${version}": xpackModule('core')] ext.projectSubstitutions += [ "org.elasticsearch.plugin:x-pack-deprecation:${version}": xpackModule('deprecation')] ext.projectSubstitutions += [ "org.elasticsearch.plugin:x-pack-graph:${version}": xpackModule('graph')] diff --git a/x-pack/docs/en/rest-api/security/roles.asciidoc b/x-pack/docs/en/rest-api/security/roles.asciidoc index d82c260006237..b7b2260a0e426 100644 --- a/x-pack/docs/en/rest-api/security/roles.asciidoc +++ b/x-pack/docs/en/rest-api/security/roles.asciidoc @@ -11,6 +11,8 @@ realm. `GET /_xpack/security/role/` + +`DELETE /_xpack/security/role/` + + `POST /_xpack/security/role//_clear_cache` + `POST /_xpack/security/role/` + diff --git a/x-pack/docs/en/rollup/api-quickref.asciidoc b/x-pack/docs/en/rollup/api-quickref.asciidoc index 10aed1b572d38..937c6a84e5e14 100644 --- a/x-pack/docs/en/rollup/api-quickref.asciidoc +++ b/x-pack/docs/en/rollup/api-quickref.asciidoc @@ -19,7 +19,7 @@ Most {rollup} endpoints have the following base: * {ref}/rollup-get-job.html[GET /job]: List jobs * {ref}/rollup-get-job.html[GET /job/+++]: Get job details * {ref}/rollup-start-job.html[POST /job//_start]: Start a job -* {ref}/rollup-stop-job.html[POST /job/+++]: Stop a job +* {ref}/rollup-stop-job.html[POST /job//_stop]: Stop a job * {ref}/rollup-delete-job.html[DELETE /job/+++]: Delete a job [float] diff --git a/x-pack/plugin/core/build.gradle b/x-pack/plugin/core/build.gradle index 4bb0e0ffc031b..5db149bc6774e 100644 --- a/x-pack/plugin/core/build.gradle +++ b/x-pack/plugin/core/build.gradle @@ -25,6 +25,7 @@ dependencyLicenses { dependencies { compileOnly "org.elasticsearch:elasticsearch:${version}" + compile project(':x-pack:protocol') compile "org.apache.httpcomponents:httpclient:${versions.httpclient}" compile "org.apache.httpcomponents:httpcore:${versions.httpcore}" compile "org.apache.httpcomponents:httpcore-nio:${versions.httpcore}" diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/License.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/License.java index 144eec96858c6..b2130ac9f4b81 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/License.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/License.java @@ -29,6 +29,7 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.protocol.xpack.license.LicenseStatus; /** * Data structure for license. Use {@link Builder} to build a license. 
@@ -267,14 +268,14 @@ public synchronized void removeOperationModeFileWatcher() { /** * @return the current license's status */ - public Status status() { + public LicenseStatus status() { long now = System.currentTimeMillis(); if (issueDate > now) { - return Status.INVALID; + return LicenseStatus.INVALID; } else if (expiryDate < now) { - return Status.EXPIRED; + return LicenseStatus.EXPIRED; } - return Status.ACTIVE; + return LicenseStatus.ACTIVE; } private void validate() { @@ -767,41 +768,6 @@ public Builder validate() { } } - public enum Status { - - ACTIVE("active"), - INVALID("invalid"), - EXPIRED("expired"); - - private final String label; - - Status(String label) { - this.label = label; - } - - public String label() { - return label; - } - - public void writeTo(StreamOutput out) throws IOException { - out.writeString(label); - } - - public static Status readFrom(StreamInput in) throws IOException { - String value = in.readString(); - switch (value) { - case "active": - return ACTIVE; - case "invalid": - return INVALID; - case "expired": - return EXPIRED; - default: - throw new IllegalArgumentException("unknown license status [" + value + "]"); - } - } - } - /** * Returns true iff the license is a production license */ diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicenseService.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicenseService.java index 40c694cedb764..a39e9f412d767 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicenseService.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/LicenseService.java @@ -27,6 +27,7 @@ import org.elasticsearch.discovery.DiscoveryModule; import org.elasticsearch.env.Environment; import org.elasticsearch.gateway.GatewayService; +import org.elasticsearch.protocol.xpack.XPackInfoResponse; import org.elasticsearch.watcher.ResourceWatcherService; import org.elasticsearch.xpack.core.XPackPlugin; import org.elasticsearch.xpack.core.XPackSettings; @@ -72,7 +73,8 @@ public class LicenseService extends AbstractLifecycleComponent implements Cluste */ static final TimeValue GRACE_PERIOD_DURATION = days(7); - public static final long BASIC_SELF_GENERATED_LICENSE_EXPIRATION_MILLIS = Long.MAX_VALUE - days(365).millis(); + public static final long BASIC_SELF_GENERATED_LICENSE_EXPIRATION_MILLIS = + XPackInfoResponse.BASIC_SELF_GENERATED_LICENSE_EXPIRATION_MILLIS; private final ClusterService clusterService; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackInfoResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackInfoResponse.java deleted file mode 100644 index 4d5c90ada4960..0000000000000 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackInfoResponse.java +++ /dev/null @@ -1,298 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ -package org.elasticsearch.license; - -import org.elasticsearch.Version; -import org.elasticsearch.action.ActionResponse; -import org.elasticsearch.common.Nullable; -import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.io.stream.StreamOutput; -import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.xcontent.ToXContentObject; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.xpack.core.XPackBuild; - -import java.io.IOException; -import java.util.ArrayList; -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Locale; -import java.util.Map; -import java.util.Set; -import java.util.stream.Collectors; - -public class XPackInfoResponse extends ActionResponse { - - @Nullable private BuildInfo buildInfo; - @Nullable private LicenseInfo licenseInfo; - @Nullable private FeatureSetsInfo featureSetsInfo; - - public XPackInfoResponse() {} - - public XPackInfoResponse(@Nullable BuildInfo buildInfo, @Nullable LicenseInfo licenseInfo, @Nullable FeatureSetsInfo featureSetsInfo) { - this.buildInfo = buildInfo; - this.licenseInfo = licenseInfo; - this.featureSetsInfo = featureSetsInfo; - } - - /** - * @return The build info (incl. build hash and timestamp) - */ - public BuildInfo getBuildInfo() { - return buildInfo; - } - - /** - * @return The current license info (incl. UID, type/mode. status and expiry date). May return {@code null} when no - * license is currently installed. - */ - public LicenseInfo getLicenseInfo() { - return licenseInfo; - } - - /** - * @return The current status of the feature sets in X-Pack. Feature sets describe the features available/enabled in X-Pack. - */ - public FeatureSetsInfo getFeatureSetsInfo() { - return featureSetsInfo; - } - - @Override - public void writeTo(StreamOutput out) throws IOException { - super.writeTo(out); - out.writeOptionalWriteable(buildInfo); - out.writeOptionalWriteable(licenseInfo); - out.writeOptionalWriteable(featureSetsInfo); - } - - @Override - public void readFrom(StreamInput in) throws IOException { - this.buildInfo = in.readOptionalWriteable(BuildInfo::new); - this.licenseInfo = in.readOptionalWriteable(LicenseInfo::new); - this.featureSetsInfo = in.readOptionalWriteable(FeatureSetsInfo::new); - } - - public static class LicenseInfo implements ToXContentObject, Writeable { - - private final String uid; - private final String type; - private final String mode; - private final long expiryDate; - private final License.Status status; - - public LicenseInfo(License license) { - this(license.uid(), license.type(), license.operationMode().name().toLowerCase(Locale.ROOT), - license.status(), license.expiryDate()); - } - - public LicenseInfo(StreamInput in) throws IOException { - this(in.readString(), in.readString(), in.readString(), License.Status.readFrom(in), in.readLong()); - } - - public LicenseInfo(String uid, String type, String mode, License.Status status, long expiryDate) { - this.uid = uid; - this.type = type; - this.mode = mode; - this.status = status; - this.expiryDate = expiryDate; - } - - public String getUid() { - return uid; - } - - public String getType() { - return type; - } - - public String getMode() { - return mode; - } - - public long getExpiryDate() { - return expiryDate; - } - - public License.Status getStatus() { - return status; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject() - 
.field("uid", uid) - .field("type", type) - .field("mode", mode) - .field("status", status.label()); - if (expiryDate != LicenseService.BASIC_SELF_GENERATED_LICENSE_EXPIRATION_MILLIS) { - builder.timeField("expiry_date_in_millis", "expiry_date", expiryDate); - } - return builder.endObject(); - } - - public void writeTo(StreamOutput out) throws IOException { - out.writeString(uid); - out.writeString(type); - out.writeString(mode); - status.writeTo(out); - out.writeLong(expiryDate); - } - } - - public static class BuildInfo implements ToXContentObject, Writeable { - - private final String hash; - private final String timestamp; - - public BuildInfo(XPackBuild build) { - this(build.shortHash(), build.date()); - } - - public BuildInfo(StreamInput input) throws IOException { - this(input.readString(), input.readString()); - } - - public BuildInfo(String hash, String timestamp) { - this.hash = hash; - this.timestamp = timestamp; - } - - public String getHash() { - return hash; - } - - public String getTimestamp() { - return timestamp; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - return builder.startObject() - .field("hash", hash) - .field("date", timestamp) - .endObject(); - } - - public void writeTo(StreamOutput output) throws IOException { - output.writeString(hash); - output.writeString(timestamp); - } - } - - public static class FeatureSetsInfo implements ToXContentObject, Writeable { - - private final Map featureSets; - - public FeatureSetsInfo(StreamInput in) throws IOException { - int size = in.readVInt(); - Map featureSets = new HashMap<>(size); - for (int i = 0; i < size; i++) { - FeatureSet featureSet = new FeatureSet(in); - featureSets.put(featureSet.name, featureSet); - } - this.featureSets = Collections.unmodifiableMap(featureSets); - } - - public FeatureSetsInfo(Set featureSets) { - Map map = new HashMap<>(featureSets.size()); - for (FeatureSet featureSet : featureSets) { - map.put(featureSet.name, featureSet); - } - this.featureSets = Collections.unmodifiableMap(map); - } - - public Map getFeatureSets() { - return featureSets; - } - - @Override - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - List names = new ArrayList<>(this.featureSets.keySet()).stream().sorted().collect(Collectors.toList()); - for (String name : names) { - builder.field(name, featureSets.get(name), params); - } - return builder.endObject(); - } - - public void writeTo(StreamOutput out) throws IOException { - out.writeVInt(featureSets.size()); - for (FeatureSet featureSet : featureSets.values()) { - featureSet.writeTo(out); - } - } - - public static class FeatureSet implements ToXContentObject, Writeable { - - private final String name; - @Nullable private final String description; - private final boolean available; - private final boolean enabled; - @Nullable private final Map nativeCodeInfo; - - public FeatureSet(StreamInput in) throws IOException { - this(in.readString(), in.readOptionalString(), in.readBoolean(), in.readBoolean(), - in.getVersion().onOrAfter(Version.V_5_4_0) ? 
in.readMap() : null); - } - - public FeatureSet(String name, @Nullable String description, boolean available, boolean enabled, - @Nullable Map nativeCodeInfo) { - this.name = name; - this.description = description; - this.available = available; - this.enabled = enabled; - this.nativeCodeInfo = nativeCodeInfo; - } - - public String name() { - return name; - } - - @Nullable - public String description() { - return description; - } - - public boolean available() { - return available; - } - - public boolean enabled() { - return enabled; - } - - @Nullable - public Map nativeCodeInfo() { - return nativeCodeInfo; - } - - public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { - builder.startObject(); - if (description != null) { - builder.field("description", description); - } - builder.field("available", available); - builder.field("enabled", enabled); - if (nativeCodeInfo != null) { - builder.field("native_code_info", nativeCodeInfo); - } - return builder.endObject(); - } - - public void writeTo(StreamOutput out) throws IOException { - out.writeString(name); - out.writeOptionalString(description); - out.writeBoolean(available); - out.writeBoolean(enabled); - if (out.getVersion().onOrAfter(Version.V_5_4_0)) { - out.writeMap(nativeCodeInfo); - } - } - } - - } -} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java index e58c5eda06316..ea30e30ae3cd9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/license/XPackLicenseState.java @@ -52,6 +52,9 @@ public class XPackLicenseState { messages.put(XPackField.LOGSTASH, new String[] { "Logstash will continue to poll centrally-managed pipelines" }); + messages.put(XPackField.BEATS, new String[] { + "Beats will continue to poll centrally-managed configuration" + }); messages.put(XPackField.DEPRECATION, new String[] { "Deprecation APIs are disabled" }); @@ -81,6 +84,7 @@ public class XPackLicenseState { messages.put(XPackField.GRAPH, XPackLicenseState::graphAcknowledgementMessages); messages.put(XPackField.MACHINE_LEARNING, XPackLicenseState::machineLearningAcknowledgementMessages); messages.put(XPackField.LOGSTASH, XPackLicenseState::logstashAcknowledgementMessages); + messages.put(XPackField.BEATS, XPackLicenseState::beatsAcknowledgementMessages); messages.put(XPackField.SQL, XPackLicenseState::sqlAcknowledgementMessages); ACKNOWLEDGMENT_MESSAGES = Collections.unmodifiableMap(messages); } @@ -205,12 +209,19 @@ private static String[] machineLearningAcknowledgementMessages(OperationMode cur private static String[] logstashAcknowledgementMessages(OperationMode currentMode, OperationMode newMode) { switch (newMode) { case BASIC: - switch (currentMode) { - case TRIAL: - case STANDARD: - case GOLD: - case PLATINUM: - return new String[] { "Logstash will no longer poll for centrally-managed pipelines" }; + if (isBasic(currentMode) == false) { + return new String[] { "Logstash will no longer poll for centrally-managed pipelines" }; + } + break; + } + return Strings.EMPTY_ARRAY; + } + + private static String[] beatsAcknowledgementMessages(OperationMode currentMode, OperationMode newMode) { + switch (newMode) { + case BASIC: + if (isBasic(currentMode) == false) { + return new String[] { "Beats will no longer be able to use centrally-managed configuration" }; } break; } @@ -232,6 +243,10 @@ private 
static String[] sqlAcknowledgementMessages(OperationMode currentMode, Op return Strings.EMPTY_ARRAY; } + private static boolean isBasic(OperationMode mode) { + return mode == OperationMode.BASIC; + } + /** A wrapper for the license mode and state, to allow atomically swapping. */ private static class Status { @@ -500,20 +515,17 @@ public boolean isRollupAllowed() { */ public boolean isLogstashAllowed() { Status localStatus = status; + return localStatus.active && (isBasic(localStatus.mode) == false); + } - if (localStatus.active == false) { - return false; - } + /** + * Beats is allowed as long as there is an active license of type TRIAL, STANDARD, GOLD or PLATINUM + * @return {@code true} as long as there is a valid license + */ + public boolean isBeatsAllowed() { + Status localStatus = status; + return localStatus.active && (isBasic(localStatus.mode) == false); - switch (localStatus.mode) { - case TRIAL: - case GOLD: - case PLATINUM: - case STANDARD: - return true; - default: - return false; - } } /** diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClient.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClient.java index 588a9c0543379..77f511ba4d0ef 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClient.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClient.java @@ -9,9 +9,9 @@ import org.elasticsearch.client.Client; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.license.LicensingClient; -import org.elasticsearch.license.XPackInfoResponse; +import org.elasticsearch.protocol.xpack.XPackInfoRequest; +import org.elasticsearch.protocol.xpack.XPackInfoResponse; import org.elasticsearch.xpack.core.action.XPackInfoAction; -import org.elasticsearch.xpack.core.action.XPackInfoRequest; import org.elasticsearch.xpack.core.action.XPackInfoRequestBuilder; import org.elasticsearch.xpack.core.ml.client.MachineLearningClient; import org.elasticsearch.xpack.core.monitoring.client.MonitoringClient; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java index d3ddac3289999..aa60456d8052a 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackClientPlugin.java @@ -42,6 +42,7 @@ import org.elasticsearch.xpack.core.graph.GraphFeatureSetUsage; import org.elasticsearch.xpack.core.graph.action.GraphExploreAction; import org.elasticsearch.xpack.core.logstash.LogstashFeatureSetUsage; +import org.elasticsearch.xpack.core.beats.BeatsFeatureSetUsage; import org.elasticsearch.xpack.core.ml.MachineLearningFeatureSetUsage; import org.elasticsearch.xpack.core.ml.MlMetadata; import org.elasticsearch.xpack.core.ml.action.CloseJobAction; @@ -320,6 +321,8 @@ public List getNamedWriteables() { new NamedWriteableRegistry.Entry(XPackFeatureSet.Usage.class, XPackField.GRAPH, GraphFeatureSetUsage::new), // logstash new NamedWriteableRegistry.Entry(XPackFeatureSet.Usage.class, XPackField.LOGSTASH, LogstashFeatureSetUsage::new), + // beats + new NamedWriteableRegistry.Entry(XPackFeatureSet.Usage.class, XPackField.BEATS, BeatsFeatureSetUsage::new), // ML - Custom metadata new NamedWriteableRegistry.Entry(MetaData.Custom.class, "ml", MlMetadata::new), new NamedWriteableRegistry.Entry(NamedDiff.class, "ml", MlMetadata.MlMetadataDiff::new), diff 
--git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackField.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackField.java index dd482c4e22d78..70eb047c8edef 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackField.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackField.java @@ -19,6 +19,8 @@ public final class XPackField { public static final String MACHINE_LEARNING = "ml"; /** Name constant for the Logstash feature. */ public static final String LOGSTASH = "logstash"; + /** Name constant for the Beats feature. */ + public static final String BEATS = "beats"; /** Name constant for the Deprecation API feature. */ public static final String DEPRECATION = "deprecation"; /** Name constant for the upgrade feature. */ diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackSettings.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackSettings.java index b0d0c4f2c2eea..8c4c5e2c7600b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackSettings.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/XPackSettings.java @@ -67,6 +67,10 @@ private XPackSettings() { public static final Setting LOGSTASH_ENABLED = Setting.boolSetting("xpack.logstash.enabled", true, Setting.Property.NodeScope); + /** Setting for enabling or disabling Beats extensions. Defaults to true. */ + public static final Setting BEATS_ENABLED = Setting.boolSetting("xpack.beats.enabled", true, + Setting.Property.NodeScope); + /** Setting for enabling or disabling TLS. Defaults to false. */ public static final Setting TRANSPORT_SSL_ENABLED = Setting.boolSetting("xpack.security.transport.ssl.enabled", false, Property.NodeScope); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackInfoAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackInfoAction.java index b149fa300832b..24ebf0530deab 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackInfoAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackInfoAction.java @@ -12,14 +12,16 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.license.License; import org.elasticsearch.license.LicenseService; -import org.elasticsearch.license.XPackInfoResponse; -import org.elasticsearch.license.XPackInfoResponse.FeatureSetsInfo.FeatureSet; -import org.elasticsearch.license.XPackInfoResponse.LicenseInfo; +import org.elasticsearch.protocol.xpack.XPackInfoRequest; +import org.elasticsearch.protocol.xpack.XPackInfoResponse; +import org.elasticsearch.protocol.xpack.XPackInfoResponse.FeatureSetsInfo.FeatureSet; +import org.elasticsearch.protocol.xpack.XPackInfoResponse.LicenseInfo; import org.elasticsearch.tasks.Task; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.XPackBuild; import org.elasticsearch.xpack.core.XPackFeatureSet; +import java.util.Locale; import java.util.Set; import java.util.stream.Collectors; @@ -43,14 +45,15 @@ protected void doExecute(Task task, XPackInfoRequest request, ActionListener { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackInfoRequestBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackInfoRequestBuilder.java index 1d888325c3f2d..a21899d7362b4 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackInfoRequestBuilder.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackInfoRequestBuilder.java @@ -7,7 +7,8 @@ import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.client.ElasticsearchClient; -import org.elasticsearch.license.XPackInfoResponse; +import org.elasticsearch.protocol.xpack.XPackInfoRequest; +import org.elasticsearch.protocol.xpack.XPackInfoResponse; import java.util.EnumSet; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/beats/BeatsFeatureSetUsage.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/beats/BeatsFeatureSetUsage.java new file mode 100644 index 0000000000000..1702bf3869df6 --- /dev/null +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/beats/BeatsFeatureSetUsage.java @@ -0,0 +1,24 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.core.beats; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.xpack.core.XPackFeatureSet; +import org.elasticsearch.xpack.core.XPackField; + +import java.io.IOException; + +public final class BeatsFeatureSetUsage extends XPackFeatureSet.Usage { + + public BeatsFeatureSetUsage(StreamInput in) throws IOException { + super(in); + } + + public BeatsFeatureSetUsage(boolean available, boolean enabled) { + super(XPackField.BEATS, available, enabled); + } + +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfig.java index d06c911e13cfe..e9ac704171bef 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfig.java @@ -156,14 +156,14 @@ private DatafeedConfig(String id, String jobId, TimeValue queryDelay, TimeValue this.jobId = jobId; this.queryDelay = queryDelay; this.frequency = frequency; - this.indices = indices; - this.types = types; + this.indices = indices == null ? null : Collections.unmodifiableList(indices); + this.types = types == null ? null : Collections.unmodifiableList(types); this.query = query; this.aggregations = aggregations; - this.scriptFields = scriptFields; + this.scriptFields = scriptFields == null ? 
null : Collections.unmodifiableList(scriptFields); this.scrollSize = scrollSize; this.chunkingConfig = chunkingConfig; - this.headers = Objects.requireNonNull(headers); + this.headers = Collections.unmodifiableMap(headers); } public DatafeedConfig(StreamInput in) throws IOException { @@ -172,19 +172,19 @@ public DatafeedConfig(StreamInput in) throws IOException { this.queryDelay = in.readOptionalTimeValue(); this.frequency = in.readOptionalTimeValue(); if (in.readBoolean()) { - this.indices = in.readList(StreamInput::readString); + this.indices = Collections.unmodifiableList(in.readList(StreamInput::readString)); } else { this.indices = null; } if (in.readBoolean()) { - this.types = in.readList(StreamInput::readString); + this.types = Collections.unmodifiableList(in.readList(StreamInput::readString)); } else { this.types = null; } this.query = in.readNamedWriteable(QueryBuilder.class); this.aggregations = in.readOptionalWriteable(AggregatorFactories.Builder::new); if (in.readBoolean()) { - this.scriptFields = in.readList(SearchSourceBuilder.ScriptField::new); + this.scriptFields = Collections.unmodifiableList(in.readList(SearchSourceBuilder.ScriptField::new)); } else { this.scriptFields = null; } @@ -195,7 +195,7 @@ public DatafeedConfig(StreamInput in) throws IOException { } this.chunkingConfig = in.readOptionalWriteable(ChunkingConfig::new); if (in.getVersion().onOrAfter(Version.V_6_2_0)) { - this.headers = in.readMap(StreamInput::readString, StreamInput::readString); + this.headers = Collections.unmodifiableMap(in.readMap(StreamInput::readString, StreamInput::readString)); } else { this.headers = Collections.emptyMap(); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdate.java index 27498bd1549ee..5d8fd3ffc71e9 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdate.java @@ -352,6 +352,18 @@ public String toString() { return Strings.toString(this); } + boolean isNoop(DatafeedConfig datafeed) { + return (frequency == null || Objects.equals(frequency, datafeed.getFrequency())) + && (queryDelay == null || Objects.equals(queryDelay, datafeed.getQueryDelay())) + && (indices == null || Objects.equals(indices, datafeed.getIndices())) + && (types == null || Objects.equals(types, datafeed.getTypes())) + && (query == null || Objects.equals(query, datafeed.getQuery())) + && (scrollSize == null || Objects.equals(scrollSize, datafeed.getScrollSize())) + && (aggregations == null || Objects.equals(aggregations, datafeed.getAggregations())) + && (scriptFields == null || Objects.equals(scriptFields, datafeed.getScriptFields())) + && (chunkingConfig == null || Objects.equals(chunkingConfig, datafeed.getChunkingConfig())); + } + public static class Builder { private String id; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/AnalysisConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/AnalysisConfig.java index 02d8b6f529327..0c702e5afb0bb 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/AnalysisConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/AnalysisConfig.java @@ -144,20 +144,20 @@ private AnalysisConfig(TimeValue bucketSpan, String categorizationFieldName, 
Lis this.latency = latency; this.categorizationFieldName = categorizationFieldName; this.categorizationAnalyzerConfig = categorizationAnalyzerConfig; - this.categorizationFilters = categorizationFilters; + this.categorizationFilters = categorizationFilters == null ? null : Collections.unmodifiableList(categorizationFilters); this.summaryCountFieldName = summaryCountFieldName; - this.influencers = influencers; + this.influencers = Collections.unmodifiableList(influencers); this.overlappingBuckets = overlappingBuckets; this.resultFinalizationWindow = resultFinalizationWindow; this.multivariateByFields = multivariateByFields; - this.multipleBucketSpans = multipleBucketSpans; + this.multipleBucketSpans = multipleBucketSpans == null ? null : Collections.unmodifiableList(multipleBucketSpans); this.usePerPartitionNormalization = usePerPartitionNormalization; } public AnalysisConfig(StreamInput in) throws IOException { bucketSpan = in.readTimeValue(); categorizationFieldName = in.readOptionalString(); - categorizationFilters = in.readBoolean() ? in.readList(StreamInput::readString) : null; + categorizationFilters = in.readBoolean() ? Collections.unmodifiableList(in.readList(StreamInput::readString)) : null; if (in.getVersion().onOrAfter(Version.V_6_2_0)) { categorizationAnalyzerConfig = in.readOptionalWriteable(CategorizationAnalyzerConfig::new); } else { @@ -165,8 +165,8 @@ public AnalysisConfig(StreamInput in) throws IOException { } latency = in.readOptionalTimeValue(); summaryCountFieldName = in.readOptionalString(); - detectors = in.readList(Detector::new); - influencers = in.readList(StreamInput::readString); + detectors = Collections.unmodifiableList(in.readList(Detector::new)); + influencers = Collections.unmodifiableList(in.readList(StreamInput::readString)); overlappingBuckets = in.readOptionalBoolean(); resultFinalizationWindow = in.readOptionalLong(); multivariateByFields = in.readOptionalBoolean(); @@ -176,7 +176,7 @@ public AnalysisConfig(StreamInput in) throws IOException { for (int i = 0; i < arraySize; i++) { spans.add(in.readTimeValue()); } - multipleBucketSpans = spans; + multipleBucketSpans = Collections.unmodifiableList(spans); } else { multipleBucketSpans = null; } @@ -487,18 +487,20 @@ public Builder(List detectors) { } public Builder(AnalysisConfig analysisConfig) { - this.detectors = analysisConfig.detectors; + this.detectors = new ArrayList<>(analysisConfig.detectors); this.bucketSpan = analysisConfig.bucketSpan; this.latency = analysisConfig.latency; this.categorizationFieldName = analysisConfig.categorizationFieldName; - this.categorizationFilters = analysisConfig.categorizationFilters; + this.categorizationFilters = analysisConfig.categorizationFilters == null ? null + : new ArrayList<>(analysisConfig.categorizationFilters); this.categorizationAnalyzerConfig = analysisConfig.categorizationAnalyzerConfig; this.summaryCountFieldName = analysisConfig.summaryCountFieldName; - this.influencers = analysisConfig.influencers; + this.influencers = new ArrayList<>(analysisConfig.influencers); this.overlappingBuckets = analysisConfig.overlappingBuckets; this.resultFinalizationWindow = analysisConfig.resultFinalizationWindow; this.multivariateByFields = analysisConfig.multivariateByFields; - this.multipleBucketSpans = analysisConfig.multipleBucketSpans; + this.multipleBucketSpans = analysisConfig.multipleBucketSpans == null ? 
null + : new ArrayList<>(analysisConfig.multipleBucketSpans); this.usePerPartitionNormalization = analysisConfig.usePerPartitionNormalization; } @@ -518,6 +520,10 @@ public void setDetectors(List detectors) { this.detectors = sequentialIndexDetectors; } + public void setDetector(int detectorIndex, Detector detector) { + detectors.set(detectorIndex, detector); + } + public void setBucketSpan(TimeValue bucketSpan) { this.bucketSpan = bucketSpan; } @@ -543,7 +549,7 @@ public void setSummaryCountFieldName(String summaryCountFieldName) { } public void setInfluencers(List influencers) { - this.influencers = influencers; + this.influencers = ExceptionsHelper.requireNonNull(influencers, INFLUENCERS.getPreferredName()); } public void setOverlappingBuckets(Boolean overlappingBuckets) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/DataDescription.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/DataDescription.java index 9ff578be50b85..6e9652bdfa263 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/DataDescription.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/DataDescription.java @@ -353,7 +353,8 @@ public void setTimeFormat(String format) { try { DateTimeFormatterTimestampConverter.ofPattern(format, ZoneOffset.UTC); } catch (IllegalArgumentException e) { - throw ExceptionsHelper.badRequestException(Messages.getMessage(Messages.JOB_CONFIG_INVALID_TIMEFORMAT, format)); + throw ExceptionsHelper.badRequestException( + Messages.getMessage(Messages.JOB_CONFIG_INVALID_TIMEFORMAT, format), e.getCause()); } } timeFormat = format; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Detector.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Detector.java index bae5e654ba4fa..dc4b55d73a59c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Detector.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Detector.java @@ -252,7 +252,7 @@ public Detector(StreamInput in) throws IOException { partitionFieldName = in.readOptionalString(); useNull = in.readBoolean(); excludeFrequent = in.readBoolean() ? 
ExcludeFrequent.readFromStream(in) : null; - rules = in.readList(DetectionRule::new); + rules = Collections.unmodifiableList(in.readList(DetectionRule::new)); if (in.getVersion().onOrAfter(Version.V_5_5_0)) { detectorIndex = in.readInt(); } else { @@ -508,7 +508,7 @@ public Builder(Detector detector) { partitionFieldName = detector.partitionFieldName; useNull = detector.useNull; excludeFrequent = detector.excludeFrequent; - rules = new ArrayList<>(detector.getRules()); + rules = new ArrayList<>(detector.rules); detectorIndex = detector.detectorIndex; } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Job.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Job.java index dc109ba084a53..c8290521f98e4 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Job.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Job.java @@ -193,7 +193,7 @@ private Job(String jobId, String jobType, Version jobVersion, List group this.jobId = jobId; this.jobType = jobType; this.jobVersion = jobVersion; - this.groups = groups; + this.groups = Collections.unmodifiableList(groups); this.description = description; this.createTime = createTime; this.finishedTime = finishedTime; @@ -207,7 +207,7 @@ private Job(String jobId, String jobType, Version jobVersion, List group this.backgroundPersistInterval = backgroundPersistInterval; this.modelSnapshotRetentionDays = modelSnapshotRetentionDays; this.resultsRetentionDays = resultsRetentionDays; - this.customSettings = customSettings; + this.customSettings = customSettings == null ? null : Collections.unmodifiableMap(customSettings); this.modelSnapshotId = modelSnapshotId; this.modelSnapshotMinVersion = modelSnapshotMinVersion; this.resultsIndexName = resultsIndexName; @@ -223,7 +223,7 @@ public Job(StreamInput in) throws IOException { jobVersion = null; } if (in.getVersion().onOrAfter(Version.V_6_1_0)) { - groups = in.readList(StreamInput::readString); + groups = Collections.unmodifiableList(in.readList(StreamInput::readString)); } else { groups = Collections.emptyList(); } @@ -244,7 +244,8 @@ public Job(StreamInput in) throws IOException { backgroundPersistInterval = in.readOptionalTimeValue(); modelSnapshotRetentionDays = in.readOptionalLong(); resultsRetentionDays = in.readOptionalLong(); - customSettings = in.readMap(); + Map readCustomSettings = in.readMap(); + customSettings = readCustomSettings == null ? 
null : Collections.unmodifiableMap(readCustomSettings); modelSnapshotId = in.readOptionalString(); if (in.getVersion().onOrAfter(Version.V_7_0_0_alpha1) && in.readBoolean()) { modelSnapshotMinVersion = Version.readVersion(in); @@ -627,7 +628,8 @@ public boolean equals(Object other) { && Objects.equals(this.lastDataTime, that.lastDataTime) && Objects.equals(this.establishedModelMemory, that.establishedModelMemory) && Objects.equals(this.analysisConfig, that.analysisConfig) - && Objects.equals(this.analysisLimits, that.analysisLimits) && Objects.equals(this.dataDescription, that.dataDescription) + && Objects.equals(this.analysisLimits, that.analysisLimits) + && Objects.equals(this.dataDescription, that.dataDescription) && Objects.equals(this.modelPlotConfig, that.modelPlotConfig) && Objects.equals(this.renormalizationWindowDays, that.renormalizationWindowDays) && Objects.equals(this.backgroundPersistInterval, that.backgroundPersistInterval) @@ -1055,6 +1057,7 @@ public boolean equals(Object o) { return Objects.equals(this.id, that.id) && Objects.equals(this.jobType, that.jobType) && Objects.equals(this.jobVersion, that.jobVersion) + && Objects.equals(this.groups, that.groups) && Objects.equals(this.description, that.description) && Objects.equals(this.analysisConfig, that.analysisConfig) && Objects.equals(this.analysisLimits, that.analysisLimits) @@ -1077,7 +1080,7 @@ public boolean equals(Object o) { @Override public int hashCode() { - return Objects.hash(id, jobType, jobVersion, description, analysisConfig, analysisLimits, dataDescription, createTime, + return Objects.hash(id, jobType, jobVersion, groups, description, analysisConfig, analysisLimits, dataDescription, createTime, finishedTime, lastDataTime, establishedModelMemory, modelPlotConfig, renormalizationWindowDays, backgroundPersistInterval, modelSnapshotRetentionDays, resultsRetentionDays, customSettings, modelSnapshotId, modelSnapshotMinVersion, resultsIndexName, deleted); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/JobUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/JobUpdate.java index 16243ed16edd4..7b6843a2415d2 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/JobUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/JobUpdate.java @@ -373,6 +373,9 @@ public Set getUpdateFields() { */ public Job mergeWithJob(Job source, ByteSizeValue maxModelMemoryLimit) { Job.Builder builder = new Job.Builder(source); + AnalysisConfig currentAnalysisConfig = source.getAnalysisConfig(); + AnalysisConfig.Builder newAnalysisConfig = new AnalysisConfig.Builder(currentAnalysisConfig); + if (groups != null) { builder.setGroups(groups); } @@ -380,26 +383,23 @@ public Job mergeWithJob(Job source, ByteSizeValue maxModelMemoryLimit) { builder.setDescription(description); } if (detectorUpdates != null && detectorUpdates.isEmpty() == false) { - AnalysisConfig ac = source.getAnalysisConfig(); - int numDetectors = ac.getDetectors().size(); + int numDetectors = currentAnalysisConfig.getDetectors().size(); for (DetectorUpdate dd : detectorUpdates) { if (dd.getDetectorIndex() >= numDetectors) { throw ExceptionsHelper.badRequestException("Supplied detector_index [{}] is >= the number of detectors [{}]", dd.getDetectorIndex(), numDetectors); } - Detector.Builder detectorbuilder = new Detector.Builder(ac.getDetectors().get(dd.getDetectorIndex())); + Detector.Builder detectorBuilder = new 
Detector.Builder(currentAnalysisConfig.getDetectors().get(dd.getDetectorIndex())); if (dd.getDescription() != null) { - detectorbuilder.setDetectorDescription(dd.getDescription()); + detectorBuilder.setDetectorDescription(dd.getDescription()); } if (dd.getRules() != null) { - detectorbuilder.setRules(dd.getRules()); + detectorBuilder.setRules(dd.getRules()); } - ac.getDetectors().set(dd.getDetectorIndex(), detectorbuilder.build()); - } - AnalysisConfig.Builder acBuilder = new AnalysisConfig.Builder(ac); - builder.setAnalysisConfig(acBuilder); + newAnalysisConfig.setDetector(dd.getDetectorIndex(), detectorBuilder.build()); + } } if (modelPlotConfig != null) { builder.setModelPlotConfig(modelPlotConfig); @@ -422,9 +422,7 @@ public Job mergeWithJob(Job source, ByteSizeValue maxModelMemoryLimit) { builder.setResultsRetentionDays(resultsRetentionDays); } if (categorizationFilters != null) { - AnalysisConfig.Builder analysisConfigBuilder = new AnalysisConfig.Builder(source.getAnalysisConfig()); - analysisConfigBuilder.setCategorizationFilters(categorizationFilters); - builder.setAnalysisConfig(analysisConfigBuilder); + newAnalysisConfig.setCategorizationFilters(categorizationFilters); } if (customSettings != null) { builder.setCustomSettings(customSettings); @@ -446,9 +444,48 @@ public Job mergeWithJob(Job source, ByteSizeValue maxModelMemoryLimit) { if (jobVersion != null) { builder.setJobVersion(jobVersion); } + + builder.setAnalysisConfig(newAnalysisConfig); return builder.build(); } + boolean isNoop(Job job) { + return (groups == null || Objects.equals(groups, job.getGroups())) + && (description == null || Objects.equals(description, job.getDescription())) + && (modelPlotConfig == null || Objects.equals(modelPlotConfig, job.getModelPlotConfig())) + && (analysisLimits == null || Objects.equals(analysisLimits, job.getAnalysisLimits())) + && updatesDetectors(job) == false + && (renormalizationWindowDays == null || Objects.equals(renormalizationWindowDays, job.getRenormalizationWindowDays())) + && (backgroundPersistInterval == null || Objects.equals(backgroundPersistInterval, job.getBackgroundPersistInterval())) + && (modelSnapshotRetentionDays == null || Objects.equals(modelSnapshotRetentionDays, job.getModelSnapshotRetentionDays())) + && (resultsRetentionDays == null || Objects.equals(resultsRetentionDays, job.getResultsRetentionDays())) + && (categorizationFilters == null + || Objects.equals(categorizationFilters, job.getAnalysisConfig().getCategorizationFilters())) + && (customSettings == null || Objects.equals(customSettings, job.getCustomSettings())) + && (modelSnapshotId == null || Objects.equals(modelSnapshotId, job.getModelSnapshotId())) + && (modelSnapshotMinVersion == null || Objects.equals(modelSnapshotMinVersion, job.getModelSnapshotMinVersion())) + && (establishedModelMemory == null || Objects.equals(establishedModelMemory, job.getEstablishedModelMemory())) + && (jobVersion == null || Objects.equals(jobVersion, job.getJobVersion())); + } + + boolean updatesDetectors(Job job) { + AnalysisConfig analysisConfig = job.getAnalysisConfig(); + if (detectorUpdates == null) { + return false; + } + for (DetectorUpdate detectorUpdate : detectorUpdates) { + if (detectorUpdate.description == null && detectorUpdate.rules == null) { + continue; + } + Detector detector = analysisConfig.getDetectors().get(detectorUpdate.detectorIndex); + if (Objects.equals(detectorUpdate.description, detector.getDetectorDescription()) == false + || Objects.equals(detectorUpdate.rules, detector.getRules()) == false) 
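
The merge logic now funnels every analysis-config change through a single AnalysisConfig.Builder that is applied once at the end, and isNoop()/updatesDetectors() let callers skip a write entirely when an update would change nothing. A sketch of the underlying "null means unchanged" partial-update idiom, using invented JobSettings/JobSettingsUpdate stand-ins rather than the real classes:

import java.util.Objects;

// Invented stand-ins for the "null means unchanged" partial-update idiom
// used by JobUpdate and DatafeedUpdate above.
final class JobSettings {
    final String description;
    final Long retentionDays;

    JobSettings(String description, Long retentionDays) {
        this.description = description;
        this.retentionDays = retentionDays;
    }
}

final class JobSettingsUpdate {
    final String description; // null = leave as-is
    final Long retentionDays; // null = leave as-is

    JobSettingsUpdate(String description, Long retentionDays) {
        this.description = description;
        this.retentionDays = retentionDays;
    }

    // A field only counts as a change when it is set and differs from the
    // current value; pairing each field with its own getter is essential here.
    boolean isNoop(JobSettings current) {
        return (description == null || Objects.equals(description, current.description))
                && (retentionDays == null || Objects.equals(retentionDays, current.retentionDays));
    }

    JobSettings apply(JobSettings current) {
        return new JobSettings(
                description != null ? description : current.description,
                retentionDays != null ? retentionDays : current.retentionDays);
    }
}

Callers can then test isNoop() before apply() and skip the persistence round-trip when nothing would change.
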
{ + return true; + } + } + return false; + } + @Override public boolean equals(Object other) { if (this == other) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/RuleScope.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/RuleScope.java index b6b3b4e061bdd..0b11fa0e15b3d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/RuleScope.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/RuleScope.java @@ -60,7 +60,7 @@ public RuleScope() { } public RuleScope(Map scope) { - this.scope = Objects.requireNonNull(scope); + this.scope = Collections.unmodifiableMap(scope); } public RuleScope(StreamInput in) throws IOException { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/time/DateTimeFormatterTimestampConverter.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/time/DateTimeFormatterTimestampConverter.java index 556c2f37b485d..0efb5feb38b91 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/time/DateTimeFormatterTimestampConverter.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/utils/time/DateTimeFormatterTimestampConverter.java @@ -54,9 +54,9 @@ public static TimestampConverter ofPattern(String pattern, ZoneId defaultTimezon .parseDefaulting(ChronoField.YEAR_OF_ERA, LocalDate.now(defaultTimezone).getYear()) .toFormatter(); - String now = formatter.format(ZonedDateTime.ofInstant(Instant.ofEpochSecond(0), ZoneOffset.UTC)); + String formattedTime = formatter.format(ZonedDateTime.ofInstant(Instant.ofEpochSecond(0), ZoneOffset.UTC)); try { - TemporalAccessor parsed = formatter.parse(now); + TemporalAccessor parsed = formatter.parse(formattedTime); boolean hasTimeZone = parsed.isSupported(ChronoField.INSTANT_SECONDS); if (hasTimeZone) { Instant.from(parsed); @@ -67,7 +67,7 @@ public static TimestampConverter ofPattern(String pattern, ZoneId defaultTimezon return new DateTimeFormatterTimestampConverter(formatter, hasTimeZone, defaultTimezone); } catch (DateTimeException e) { - throw new IllegalArgumentException("Timestamp cannot be derived from pattern: " + pattern); + throw new IllegalArgumentException("Timestamp cannot be derived from pattern: " + pattern, e); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rest/action/RestXPackInfoAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rest/action/RestXPackInfoAction.java index e1c694742cba4..c057c04cc637d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rest/action/RestXPackInfoAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rest/action/RestXPackInfoAction.java @@ -6,15 +6,11 @@ package org.elasticsearch.xpack.core.rest.action; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.license.XPackInfoResponse; -import org.elasticsearch.rest.BytesRestResponse; +import org.elasticsearch.protocol.xpack.XPackInfoRequest; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; -import org.elasticsearch.rest.RestResponse; -import org.elasticsearch.rest.action.RestBuilderListener; +import org.elasticsearch.rest.action.RestToXContentListener; import org.elasticsearch.xpack.core.XPackClient; -import org.elasticsearch.xpack.core.action.XPackInfoRequest; import 
org.elasticsearch.xpack.core.rest.XPackRestHandler; import java.io.IOException; @@ -22,7 +18,6 @@ import static org.elasticsearch.rest.RestRequest.Method.GET; import static org.elasticsearch.rest.RestRequest.Method.HEAD; -import static org.elasticsearch.rest.RestStatus.OK; public class RestXPackInfoAction extends XPackRestHandler { public RestXPackInfoAction(Settings settings, RestController controller) { @@ -48,36 +43,6 @@ public RestChannelConsumer doPrepareRequest(RestRequest request, XPackClient cli client.prepareInfo() .setVerbose(verbose) .setCategories(categories) - .execute(new RestBuilderListener(channel) { - @Override - public RestResponse buildResponse(XPackInfoResponse infoResponse, XContentBuilder builder) throws Exception { - - builder.startObject(); - - if (infoResponse.getBuildInfo() != null) { - builder.field("build", infoResponse.getBuildInfo(), request); - } - - if (infoResponse.getLicenseInfo() != null) { - builder.field("license", infoResponse.getLicenseInfo(), request); - } else if (categories.contains(XPackInfoRequest.Category.LICENSE)) { - // if the user requested the license info, and there is no license, we should send - // back an explicit null value (indicating there is no license). This is different - // than not adding the license info at all - builder.nullField("license"); - } - - if (infoResponse.getFeatureSetsInfo() != null) { - builder.field("features", infoResponse.getFeatureSetsInfo(), request); - } - - if (verbose) { - builder.field("tagline", "You know, for X"); - } - - builder.endObject(); - return new BytesRestResponse(OK, builder); - } - }); + .execute(new RestToXContentListener<>(channel)); } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/SecurityField.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/SecurityField.java index 8d813925e33dc..610876e1c54ec 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/SecurityField.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/SecurityField.java @@ -13,6 +13,7 @@ public final class SecurityField { public static final String NAME4 = XPackField.SECURITY + "4"; + public static final String NIO = XPackField.SECURITY + "-nio"; public static final Setting> USER_SETTING = new Setting<>(setting("user"), (String) null, Optional::ofNullable, Setting.Property.NodeScope); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/SecuritySettings.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/SecuritySettings.java index c48245c054fb8..bceb1de29491d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/SecuritySettings.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/SecuritySettings.java @@ -19,9 +19,10 @@ public static Settings addTransportSettings(final Settings settings) { final Settings.Builder builder = Settings.builder(); if (NetworkModule.TRANSPORT_TYPE_SETTING.exists(settings)) { final String transportType = NetworkModule.TRANSPORT_TYPE_SETTING.get(settings); - if (SecurityField.NAME4.equals(transportType) == false) { + if (SecurityField.NAME4.equals(transportType) == false && SecurityField.NIO.equals(transportType) == false) { throw new IllegalArgumentException("transport type setting [" + NetworkModule.TRANSPORT_TYPE_KEY - + "] must be [" + SecurityField.NAME4 + "] but is [" + transportType + "]"); + + "] must be [" + SecurityField.NAME4 + "] or [" + 
SecurityField.NIO + "]" + " but is [" + + transportType + "]"); } } else { // default to security4 @@ -39,7 +40,7 @@ public static Settings addUserSettings(final Settings settings) { final int i = userSetting.indexOf(":"); if (i < 0 || i == userSetting.length() - 1) { throw new IllegalArgumentException("invalid [" + SecurityField.USER_SETTING.getKey() - + "] setting. must be in the form of \":\""); + + "] setting. must be in the form of \":\""); } String username = userSetting.substring(0, i); String password = userSetting.substring(i + 1); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/Role.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/Role.java index 8fed501ece2c9..a850a4a16f65b 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/Role.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/permission/Role.java @@ -55,11 +55,7 @@ public RunAsPermission runAs() { } public static Builder builder(String... names) { - return new Builder(names, null); - } - - public static Builder builder(String[] names, FieldPermissionsCache fieldPermissionsCache) { - return new Builder(names, fieldPermissionsCache); + return new Builder(names); } public static Builder builder(RoleDescriptor rd, FieldPermissionsCache fieldPermissionsCache) { @@ -94,16 +90,13 @@ public static class Builder { private ClusterPermission cluster = ClusterPermission.NONE; private RunAsPermission runAs = RunAsPermission.NONE; private List groups = new ArrayList<>(); - private FieldPermissionsCache fieldPermissionsCache = null; - private Builder(String[] names, FieldPermissionsCache fieldPermissionsCache) { + private Builder(String[] names) { this.names = names; - this.fieldPermissionsCache = fieldPermissionsCache; } private Builder(RoleDescriptor rd, @Nullable FieldPermissionsCache fieldPermissionsCache) { this.names = new String[] { rd.getName() }; - this.fieldPermissionsCache = fieldPermissionsCache; if (rd.getClusterPrivileges().length == 0) { cluster = ClusterPermission.NONE; } else { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java index 059c4dfbb6547..aeb448faa9c1c 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStore.java @@ -80,11 +80,19 @@ private static Map initializeReservedRoles() { new RoleDescriptor.IndicesPrivileges[] { RoleDescriptor.IndicesPrivileges.builder().indices(".kibana*", ".reporting-*").privileges("all").build(), RoleDescriptor.IndicesPrivileges.builder() - .indices(".monitoring-*").privileges("read", "read_cross_cluster").build() + .indices(".monitoring-*").privileges("read", "read_cross_cluster").build(), + RoleDescriptor.IndicesPrivileges.builder() + .indices(".management-beats").privileges("create_index", "read", "write").build() }, null, MetadataUtils.DEFAULT_RESERVED_METADATA)) .put("logstash_system", new RoleDescriptor("logstash_system", new String[] { "monitor", MonitoringBulkAction.NAME}, null, null, MetadataUtils.DEFAULT_RESERVED_METADATA)) + .put("beats_admin", new RoleDescriptor("beats_admin", + null, + new RoleDescriptor.IndicesPrivileges[] { + 
RoleDescriptor.IndicesPrivileges.builder().indices(".management-beats").privileges("all").build() + }, + null, MetadataUtils.DEFAULT_RESERVED_METADATA)) .put(UsernamesField.BEATS_ROLE, new RoleDescriptor(UsernamesField.BEATS_ROLE, new String[] { "monitor", MonitoringBulkAction.NAME}, null, null, MetadataUtils.DEFAULT_RESERVED_METADATA)) .put("machine_learning_user", new RoleDescriptor("machine_learning_user", new String[] { "monitor_ml" }, diff --git a/x-pack/plugin/core/src/main/resources/monitoring-beats.json b/x-pack/plugin/core/src/main/resources/monitoring-beats.json index 68e6c06ad09f6..ed027387a49cc 100644 --- a/x-pack/plugin/core/src/main/resources/monitoring-beats.json +++ b/x-pack/plugin/core/src/main/resources/monitoring-beats.json @@ -14,6 +14,47 @@ "doc": { "dynamic": false, "properties": { + "beats_state": { + "properties": { + "beat": { + "properties": { + "host": { + "type": "keyword" + }, + "name": { + "type": "keyword" + }, + "type": { + "type": "keyword" + }, + "uuid": { + "type": "keyword" + }, + "version": { + "type": "keyword" + } + } + }, + "state": { + "properties": { + "module": { + "properties": { + "count": { + "type": "long" + }, + "names": { + "type": "keyword" + } + } + } + } + }, + "timestamp": { + "format": "date_time", + "type": "date" + } + } + }, "beats_stats": { "properties": { "beat": { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/action/TransportXPackInfoActionTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/action/TransportXPackInfoActionTests.java index 2ee6716262f03..60050bd93114a 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/action/TransportXPackInfoActionTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/action/TransportXPackInfoActionTests.java @@ -10,8 +10,10 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.license.License; import org.elasticsearch.license.LicenseService; -import org.elasticsearch.license.XPackInfoResponse; -import org.elasticsearch.license.XPackInfoResponse.FeatureSetsInfo.FeatureSet; +import org.elasticsearch.protocol.xpack.XPackInfoRequest; +import org.elasticsearch.protocol.xpack.XPackInfoResponse; +import org.elasticsearch.protocol.xpack.XPackInfoResponse.FeatureSetsInfo.FeatureSet; +import org.elasticsearch.protocol.xpack.license.LicenseStatus; import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.transport.Transport; @@ -61,7 +63,7 @@ public void testDoExecute() throws Exception { License license = mock(License.class); long expiryDate = randomLong(); when(license.expiryDate()).thenReturn(expiryDate); - License.Status status = randomFrom(License.Status.values()); + LicenseStatus status = randomFrom(LicenseStatus.values()); when(license.status()).thenReturn(status); String type = randomAlphaOfLength(10); when(license.type()).thenReturn(type); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdateTests.java index 358f9d1c97bd7..7e0615e85f8c9 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdateTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdateTests.java @@ -5,6 +5,7 @@ */ package org.elasticsearch.xpack.core.ml.datafeed; +import org.elasticsearch.common.Nullable; import 
org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; @@ -24,6 +25,7 @@ import org.elasticsearch.search.builder.SearchSourceBuilder.ScriptField; import org.elasticsearch.test.AbstractSerializingTestCase; import org.elasticsearch.xpack.core.ml.datafeed.ChunkingConfig.Mode; +import org.elasticsearch.xpack.core.ml.job.config.JobTests; import java.util.ArrayList; import java.util.Collections; @@ -31,6 +33,7 @@ import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.not; public class DatafeedUpdateTests extends AbstractSerializingTestCase { @@ -40,8 +43,12 @@ protected DatafeedUpdate createTestInstance() { } public static DatafeedUpdate createRandomized(String datafeedId) { + return createRandomized(datafeedId, null); + } + + public static DatafeedUpdate createRandomized(String datafeedId, @Nullable DatafeedConfig datafeed) { DatafeedUpdate.Builder builder = new DatafeedUpdate.Builder(datafeedId); - if (randomBoolean()) { + if (randomBoolean() && datafeed == null) { builder.setJobId(randomAlphaOfLength(10)); } if (randomBoolean()) { @@ -68,7 +75,7 @@ public static DatafeedUpdate createRandomized(String datafeedId) { } builder.setScriptFields(scriptFields); } - if (randomBoolean()) { + if (randomBoolean() && datafeed == null) { // can only test with a single agg as the xcontent order gets randomized by test base class and then // the actual xcontent isn't the same and the test fails. // Testing with a single agg is ok as we don't have special list writeable / xcontent logic @@ -184,6 +191,25 @@ public void testApply_givenAggregations() { AggregationBuilders.histogram("a").interval(300000).field("time").subAggregation(maxTime)))); } + public void testApply_GivenRandomUpdates_AssertImmutability() { + for (int i = 0; i < 100; ++i) { + DatafeedConfig datafeed = DatafeedConfigTests.createRandomizedDatafeedConfig(JobTests.randomValidJobId()); + if (datafeed.getAggregations() != null) { + DatafeedConfig.Builder withoutAggs = new DatafeedConfig.Builder(datafeed); + withoutAggs.setAggregations(null); + datafeed = withoutAggs.build(); + } + DatafeedUpdate update = createRandomized(datafeed.getId(), datafeed); + while (update.isNoop(datafeed)) { + update = createRandomized(datafeed.getId(), datafeed); + } + + DatafeedConfig updatedDatafeed = update.apply(datafeed, Collections.emptyMap()); + + assertThat(datafeed, not(equalTo(updatedDatafeed))); + } + } + @Override protected DatafeedUpdate mutateInstance(DatafeedUpdate instance) { DatafeedUpdate.Builder builder = new DatafeedUpdate.Builder(instance); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/DataDescriptionTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/DataDescriptionTests.java index 3ca4bac47cb29..bb7c329cf4508 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/DataDescriptionTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/DataDescriptionTests.java @@ -17,6 +17,8 @@ import org.elasticsearch.xpack.core.ml.job.config.DataDescription.DataFormat; import org.elasticsearch.xpack.core.ml.job.messages.Messages; +import java.time.DateTimeException; + import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.instanceOf; @@ -51,8 +53,12 @@
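
The new immutability tests above are property-style: draw a random update constrained to the existing config, redraw while it is a no-op, apply it, and assert the result differs from the original. The generic shape of that loop, with invented names:

import java.util.Objects;
import java.util.function.Function;
import java.util.function.Predicate;
import java.util.function.Supplier;

// Generic shape of the randomized immutability check used by
// testApply_GivenRandomUpdates_AssertImmutability; all names are invented.
final class ImmutabilityCheck {
    static <C, U> void assertApplyChanges(C config,
                                          Supplier<U> randomUpdate,
                                          Predicate<U> isNoop,
                                          Function<U, C> apply) {
        for (int i = 0; i < 100; ++i) {
            U update = randomUpdate.get();
            // Redraw until the update actually changes something; otherwise
            // "result equals original" would be expected, not a failure.
            while (isNoop.test(update)) {
                update = randomUpdate.get();
            }
            C updated = apply.apply(update);
            if (Objects.equals(config, updated)) {
                throw new AssertionError("update was applied but config did not change");
            }
        }
    }
}
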
public void testVerify_GivenValidFormat() { description.setTimeFormat("epoch"); description.setTimeFormat("epoch_ms"); description.setTimeFormat("yyyy-MM-dd HH"); - String goodFormat = "yyyy.MM.dd G 'at' HH:mm:ss z"; - description.setTimeFormat(goodFormat); + } + + @AwaitsFix(bugUrl = "https://bugs.java.com/bugdatabase/view_bug.do?bug_id=JDK-8206980") + public void testVerify_GivenValidFormat_Java11Bug() { + DataDescription.Builder description = new DataDescription.Builder(); + description.setTimeFormat("yyyy.MM.dd G 'at' HH:mm:ss z"); } public void testVerify_GivenInValidFormat() { @@ -68,6 +74,10 @@ public void testVerify_GivenInValidFormat() { e = expectThrows(ElasticsearchException.class, () -> description.setTimeFormat("y-M-dd")); assertEquals(Messages.getMessage(Messages.JOB_CONFIG_INVALID_TIMEFORMAT, "y-M-dd"), e.getMessage()); expectThrows(ElasticsearchException.class, () -> description.setTimeFormat("YYY-mm-UU hh:mm:ssY")); + + Throwable cause = e.getCause(); + assertNotNull(cause); + assertThat(cause, instanceOf(DateTimeException.class)); } public void testTransform_GivenDelimitedAndEpoch() { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/JobUpdateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/JobUpdateTests.java index c529d6ebfb368..c1f25bead224e 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/JobUpdateTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/JobUpdateTests.java @@ -7,6 +7,7 @@ import org.elasticsearch.ElasticsearchStatusException; import org.elasticsearch.Version; +import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; @@ -18,10 +19,13 @@ import java.util.Arrays; import java.util.Collections; import java.util.Date; +import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Set; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.not; import static org.mockito.Mockito.mock; public class JobUpdateTests extends AbstractSerializingTestCase { @@ -30,7 +34,15 @@ public class JobUpdateTests extends AbstractSerializingTestCase { @Override protected JobUpdate createTestInstance() { - JobUpdate.Builder update = new JobUpdate.Builder(randomAlphaOfLength(4)); + return createRandom(randomAlphaOfLength(4), null); + } + + /** + * Creates a completely random update when the job is null + * or a random update that is valid for the given job + */ + public JobUpdate createRandom(String jobId, @Nullable Job job) { + JobUpdate.Builder update = new JobUpdate.Builder(jobId); if (randomBoolean()) { int groupsNum = randomIntBetween(0, 10); List groups = new ArrayList<>(groupsNum); @@ -43,28 +55,16 @@ protected JobUpdate createTestInstance() { update.setDescription(randomAlphaOfLength(20)); } if (randomBoolean()) { - int size = randomInt(10); - List detectorUpdates = new ArrayList<>(size); - for (int i = 0; i < size; i++) { - String detectorDescription = null; - if (randomBoolean()) { - detectorDescription = randomAlphaOfLength(12); - } - List detectionRules = null; - if (randomBoolean()) { - detectionRules = new ArrayList<>(); - detectionRules.add(new DetectionRule.Builder( - Collections.singletonList(new RuleCondition(RuleCondition.AppliesTo.ACTUAL, Operator.GT, 5))).build()); - } - detectorUpdates.add(new JobUpdate.DetectorUpdate(i,
detectorDescription, detectionRules)); - } + List detectorUpdates = job == null ? createRandomDetectorUpdates() + : createRandomDetectorUpdatesForJob(job); update.setDetectorUpdates(detectorUpdates); } if (randomBoolean()) { update.setModelPlotConfig(new ModelPlotConfig(randomBoolean(), randomAlphaOfLength(10))); } if (randomBoolean()) { - update.setAnalysisLimits(AnalysisLimitsTests.createRandomized()); + update.setAnalysisLimits(AnalysisLimits.validateAndSetDefaults(AnalysisLimitsTests.createRandomized(), null, + AnalysisLimits.DEFAULT_MODEL_MEMORY_LIMIT_MB)); } if (randomBoolean()) { update.setRenormalizationWindowDays(randomNonNegativeLong()); @@ -78,7 +78,7 @@ protected JobUpdate createTestInstance() { if (randomBoolean()) { update.setResultsRetentionDays(randomNonNegativeLong()); } - if (randomBoolean()) { + if (randomBoolean() && jobSupportsCategorizationFilters(job)) { update.setCategorizationFilters(Arrays.asList(generateRandomStringArray(10, 10, false))); } if (randomBoolean()) { @@ -100,6 +100,77 @@ protected JobUpdate createTestInstance() { return update.build(); } + private static boolean jobSupportsCategorizationFilters(@Nullable Job job) { + if (job == null) { + return true; + } + if (job.getAnalysisConfig().getCategorizationFieldName() == null) { + return false; + } + if (job.getAnalysisConfig().getCategorizationAnalyzerConfig() != null) { + return false; + } + return true; + } + + private static List createRandomDetectorUpdates() { + int size = randomInt(10); + List detectorUpdates = new ArrayList<>(size); + for (int i = 0; i < size; i++) { + String detectorDescription = null; + if (randomBoolean()) { + detectorDescription = randomAlphaOfLength(12); + } + List detectionRules = null; + if (randomBoolean()) { + detectionRules = new ArrayList<>(); + detectionRules.add(new DetectionRule.Builder( + Collections.singletonList(new RuleCondition(RuleCondition.AppliesTo.ACTUAL, Operator.GT, 5))).build()); + } + detectorUpdates.add(new JobUpdate.DetectorUpdate(i, detectorDescription, detectionRules)); + } + return detectorUpdates; + } + + private static List createRandomDetectorUpdatesForJob(Job job) { + AnalysisConfig analysisConfig = job.getAnalysisConfig(); + int size = randomInt(analysisConfig.getDetectors().size()); + List detectorUpdates = new ArrayList<>(size); + for (int i = 0; i < size; i++) { + String detectorDescription = null; + if (randomBoolean()) { + detectorDescription = randomAlphaOfLength(12); + } + int rulesSize = randomBoolean() ? randomIntBetween(1, 5) : 0; + List detectionRules = rulesSize == 0 ? 
null : new ArrayList<>(rulesSize); + for (int ruleIndex = 0; ruleIndex < rulesSize; ++ruleIndex) { + int detectorIndex = randomInt(analysisConfig.getDetectors().size() - 1); + Detector detector = analysisConfig.getDetectors().get(detectorIndex); + List analysisFields = detector.extractAnalysisFields(); + if (randomBoolean() || analysisFields.isEmpty()) { + detectionRules.add(new DetectionRule.Builder(Collections.singletonList(new RuleCondition( + randomFrom(RuleCondition.AppliesTo.values()), randomFrom(Operator.values()), randomDouble()))).build()); + } else { + RuleScope.Builder ruleScope = RuleScope.builder(); + int scopeSize = randomIntBetween(1, analysisFields.size()); + Set analysisFieldsPickPot = new HashSet<>(analysisFields); + for (int scopeIndex = 0; scopeIndex < scopeSize; ++scopeIndex) { + String scopedField = randomFrom(analysisFieldsPickPot); + analysisFieldsPickPot.remove(scopedField); + if (randomBoolean()) { + ruleScope.include(scopedField, MlFilterTests.randomValidFilterId()); + } else { + ruleScope.exclude(scopedField, MlFilterTests.randomValidFilterId()); + } + } + detectionRules.add(new DetectionRule.Builder(ruleScope).build()); + } + } + detectorUpdates.add(new JobUpdate.DetectorUpdate(i, detectorDescription, detectionRules)); + } + return detectorUpdates; + } + @Override protected Writeable.Reader instanceReader() { return JobUpdate::new; @@ -156,8 +227,9 @@ public void testMergeWithJob() { jobBuilder.setAnalysisConfig(ac); jobBuilder.setDataDescription(new DataDescription.Builder()); jobBuilder.setCreateTime(new Date()); + Job job = jobBuilder.build(); - Job updatedJob = update.mergeWithJob(jobBuilder.build(), new ByteSizeValue(0L)); + Job updatedJob = update.mergeWithJob(job, new ByteSizeValue(0L)); assertEquals(update.getGroups(), updatedJob.getGroups()); assertEquals(update.getDescription(), updatedJob.getDescription()); @@ -172,12 +244,26 @@ public void testMergeWithJob() { assertEquals(update.getModelSnapshotId(), updatedJob.getModelSnapshotId()); assertEquals(update.getJobVersion(), updatedJob.getJobVersion()); for (JobUpdate.DetectorUpdate detectorUpdate : update.getDetectorUpdates()) { - assertNotNull(updatedJob.getAnalysisConfig().getDetectors().get(detectorUpdate.getDetectorIndex()).getDetectorDescription()); - assertEquals(detectorUpdate.getDescription(), - updatedJob.getAnalysisConfig().getDetectors().get(detectorUpdate.getDetectorIndex()).getDetectorDescription()); - assertNotNull(updatedJob.getAnalysisConfig().getDetectors().get(detectorUpdate.getDetectorIndex()).getDetectorDescription()); - assertEquals(detectorUpdate.getRules(), - updatedJob.getAnalysisConfig().getDetectors().get(detectorUpdate.getDetectorIndex()).getRules()); + Detector updatedDetector = updatedJob.getAnalysisConfig().getDetectors().get(detectorUpdate.getDetectorIndex()); + assertNotNull(updatedDetector); + assertEquals(detectorUpdate.getDescription(), updatedDetector.getDetectorDescription()); + assertEquals(detectorUpdate.getRules(), updatedDetector.getRules()); + } + + assertThat(job, not(equalTo(updatedJob))); + } + + public void testMergeWithJob_GivenRandomUpdates_AssertImmutability() { + for (int i = 0; i < 100; ++i) { + Job job = JobTests.createRandomizedJob(); + JobUpdate update = createRandom(job.getId(), job); + while (update.isNoop(job)) { + update = createRandom(job.getId(), job); + } + + Job updatedJob = update.mergeWithJob(job, new ByteSizeValue(0L)); + + assertThat(job, not(equalTo(updatedJob))); } } diff --git 
a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java index b25f3f374b389..85d2bc16dd06a 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/authz/store/ReservedRolesStoreTests.java @@ -132,6 +132,7 @@ public void testIsReserved() { assertThat(ReservedRolesStore.isReserved("watcher_user"), is(true)); assertThat(ReservedRolesStore.isReserved("watcher_admin"), is(true)); assertThat(ReservedRolesStore.isReserved("kibana_dashboard_only_user"), is(true)); + assertThat(ReservedRolesStore.isReserved("beats_admin"), is(true)); assertThat(ReservedRolesStore.isReserved(XPackUser.ROLE_NAME), is(true)); assertThat(ReservedRolesStore.isReserved(LogstashSystemUser.ROLE_NAME), is(true)); assertThat(ReservedRolesStore.isReserved(BeatsSystemUser.ROLE_NAME), is(true)); @@ -220,6 +221,20 @@ public void testKibanaSystemRole() { assertThat(kibanaRole.indices().allowedIndicesMatcher(GetAction.NAME).test(index), is(true)); assertThat(kibanaRole.indices().allowedIndicesMatcher(READ_CROSS_CLUSTER_NAME).test(index), is(true)); }); + + // Beats management index + final String index = ".management-beats"; + assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:foo").test(index), is(false)); + assertThat(kibanaRole.indices().allowedIndicesMatcher("indices:bar").test(index), is(false)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(index), is(false)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(index), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(IndexAction.NAME).test(index), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(DeleteAction.NAME).test(index), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(UpdateSettingsAction.NAME).test(index), is(false)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(SearchAction.NAME).test(index), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(MultiSearchAction.NAME).test(index), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(GetAction.NAME).test(index), is(true)); + assertThat(kibanaRole.indices().allowedIndicesMatcher(READ_CROSS_CLUSTER_NAME).test(index), is(false)); } public void testKibanaUserRole() { @@ -478,6 +493,39 @@ public void testLogstashSystemRole() { is(false)); } + public void testBeatsAdminRole() { + final RoleDescriptor roleDescriptor = new ReservedRolesStore().roleDescriptor("beats_admin"); + assertNotNull(roleDescriptor); + assertThat(roleDescriptor.getMetadata(), hasEntry("_reserved", true)); + + final Role beatsAdminRole = Role.builder(roleDescriptor, null).build(); + assertThat(beatsAdminRole.cluster().check(ClusterHealthAction.NAME), is(false)); + assertThat(beatsAdminRole.cluster().check(ClusterStateAction.NAME), is(false)); + assertThat(beatsAdminRole.cluster().check(ClusterStatsAction.NAME), is(false)); + assertThat(beatsAdminRole.cluster().check(PutIndexTemplateAction.NAME), is(false)); + assertThat(beatsAdminRole.cluster().check(ClusterRerouteAction.NAME), is(false)); + assertThat(beatsAdminRole.cluster().check(ClusterUpdateSettingsAction.NAME), is(false)); + assertThat(beatsAdminRole.cluster().check(MonitoringBulkAction.NAME), is(false)); + + 
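
The beats_admin assertions here exercise a role as a function from (action, index) to allowed/denied. A toy model of that resolution, with an invented privilege representation that is far simpler than the real automaton-backed matcher:

import java.util.Map;
import java.util.Set;

// Toy model of "does this role allow action X on index Y", in the spirit of
// the allowedIndicesMatcher assertions above; the grant representation and
// prefix matching are invented for illustration.
final class ToyRole {
    // index pattern -> privileged action-name prefixes
    private final Map<String, Set<String>> grants;

    ToyRole(Map<String, Set<String>> grants) {
        this.grants = grants;
    }

    boolean allows(String action, String index) {
        return grants.entrySet().stream()
                .filter(e -> matches(e.getKey(), index))
                .anyMatch(e -> e.getValue().stream().anyMatch(action::startsWith));
    }

    private static boolean matches(String pattern, String index) {
        return pattern.endsWith("*")
                ? index.startsWith(pattern.substring(0, pattern.length() - 1))
                : pattern.equals(index);
    }

    public static void main(String[] args) {
        ToyRole beatsAdmin = new ToyRole(Map.of(".management-beats", Set.of("indices:")));
        System.out.println(beatsAdmin.allows("indices:data/write/index", ".management-beats")); // true
        System.out.println(beatsAdmin.allows("indices:data/write/index", "logs-2018"));         // false
    }
}
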
assertThat(beatsAdminRole.runAs().check(randomAlphaOfLengthBetween(1, 30)), is(false)); + + assertThat(beatsAdminRole.indices().allowedIndicesMatcher("indices:foo").test(randomAlphaOfLengthBetween(8, 24)), + is(false)); + + final String index = ".management-beats"; + logger.info("index name [{}]", index); + assertThat(beatsAdminRole.indices().allowedIndicesMatcher("indices:foo").test(index), is(true)); + assertThat(beatsAdminRole.indices().allowedIndicesMatcher("indices:bar").test(index), is(true)); + assertThat(beatsAdminRole.indices().allowedIndicesMatcher(DeleteIndexAction.NAME).test(index), is(true)); + assertThat(beatsAdminRole.indices().allowedIndicesMatcher(CreateIndexAction.NAME).test(index), is(true)); + assertThat(beatsAdminRole.indices().allowedIndicesMatcher(IndexAction.NAME).test(index), is(true)); + assertThat(beatsAdminRole.indices().allowedIndicesMatcher(DeleteAction.NAME).test(index), is(true)); + assertThat(beatsAdminRole.indices().allowedIndicesMatcher(UpdateSettingsAction.NAME).test(index), is(true)); + assertThat(beatsAdminRole.indices().allowedIndicesMatcher(SearchAction.NAME).test(index), is(true)); + assertThat(beatsAdminRole.indices().allowedIndicesMatcher(MultiSearchAction.NAME).test(index), is(true)); + assertThat(beatsAdminRole.indices().allowedIndicesMatcher(GetAction.NAME).test(index), is(true)); + } + public void testBeatsSystemRole() { RoleDescriptor roleDescriptor = new ReservedRolesStore().roleDescriptor(BeatsSystemUser.ROLE_NAME); assertNotNull(roleDescriptor); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/MlRemoteLicenseChecker.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/MlRemoteLicenseChecker.java index b55713f6d0ab7..b0eeed2c800ec 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/MlRemoteLicenseChecker.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/datafeed/MlRemoteLicenseChecker.java @@ -13,11 +13,12 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.license.License; -import org.elasticsearch.license.XPackInfoResponse; +import org.elasticsearch.protocol.xpack.XPackInfoRequest; +import org.elasticsearch.protocol.xpack.XPackInfoResponse; +import org.elasticsearch.protocol.xpack.license.LicenseStatus; import org.elasticsearch.transport.ActionNotFoundTransportException; import org.elasticsearch.transport.RemoteClusterAware; import org.elasticsearch.xpack.core.action.XPackInfoAction; -import org.elasticsearch.xpack.core.action.XPackInfoRequest; import java.util.EnumSet; import java.util.Iterator; @@ -136,7 +137,7 @@ private void remoteClusterLicense(String clusterName, ActionListener remoteClusterNames(List indices) { public static String buildErrorMessage(RemoteClusterLicenseInfo clusterLicenseInfo) { StringBuilder error = new StringBuilder(); - if (clusterLicenseInfo.licenseInfo.getStatus() != License.Status.ACTIVE) { + if (clusterLicenseInfo.licenseInfo.getStatus() != LicenseStatus.ACTIVE) { error.append("The license on cluster [").append(clusterLicenseInfo.clusterName) .append("] is not active. 
"); } else { diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/MlRemoteLicenseCheckerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/MlRemoteLicenseCheckerTests.java index dfd7c886ebf42..81e4c75cfad7c 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/MlRemoteLicenseCheckerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/MlRemoteLicenseCheckerTests.java @@ -11,8 +11,8 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.concurrent.ThreadContext; -import org.elasticsearch.license.License; -import org.elasticsearch.license.XPackInfoResponse; +import org.elasticsearch.protocol.xpack.XPackInfoResponse; +import org.elasticsearch.protocol.xpack.license.LicenseStatus; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.xpack.core.action.XPackInfoAction; @@ -66,16 +66,16 @@ public void testRemoteClusterNames() { public void testLicenseSupportsML() { XPackInfoResponse.LicenseInfo licenseInfo = new XPackInfoResponse.LicenseInfo("uid", "trial", "trial", - License.Status.ACTIVE, randomNonNegativeLong()); + LicenseStatus.ACTIVE, randomNonNegativeLong()); assertTrue(MlRemoteLicenseChecker.licenseSupportsML(licenseInfo)); - licenseInfo = new XPackInfoResponse.LicenseInfo("uid", "trial", "trial", License.Status.EXPIRED, randomNonNegativeLong()); + licenseInfo = new XPackInfoResponse.LicenseInfo("uid", "trial", "trial", LicenseStatus.EXPIRED, randomNonNegativeLong()); assertFalse(MlRemoteLicenseChecker.licenseSupportsML(licenseInfo)); - licenseInfo = new XPackInfoResponse.LicenseInfo("uid", "GOLD", "GOLD", License.Status.ACTIVE, randomNonNegativeLong()); + licenseInfo = new XPackInfoResponse.LicenseInfo("uid", "GOLD", "GOLD", LicenseStatus.ACTIVE, randomNonNegativeLong()); assertFalse(MlRemoteLicenseChecker.licenseSupportsML(licenseInfo)); - licenseInfo = new XPackInfoResponse.LicenseInfo("uid", "PLATINUM", "PLATINUM", License.Status.ACTIVE, randomNonNegativeLong()); + licenseInfo = new XPackInfoResponse.LicenseInfo("uid", "PLATINUM", "PLATINUM", LicenseStatus.ACTIVE, randomNonNegativeLong()); assertTrue(MlRemoteLicenseChecker.licenseSupportsML(licenseInfo)); } @@ -186,14 +186,14 @@ private Client createMockClient() { } private XPackInfoResponse.LicenseInfo createPlatinumLicenseResponse() { - return new XPackInfoResponse.LicenseInfo("uid", "PLATINUM", "PLATINUM", License.Status.ACTIVE, randomNonNegativeLong()); + return new XPackInfoResponse.LicenseInfo("uid", "PLATINUM", "PLATINUM", LicenseStatus.ACTIVE, randomNonNegativeLong()); } private XPackInfoResponse.LicenseInfo createBasicLicenseResponse() { - return new XPackInfoResponse.LicenseInfo("uid", "BASIC", "BASIC", License.Status.ACTIVE, randomNonNegativeLong()); + return new XPackInfoResponse.LicenseInfo("uid", "BASIC", "BASIC", LicenseStatus.ACTIVE, randomNonNegativeLong()); } private XPackInfoResponse.LicenseInfo createExpiredLicenseResponse() { - return new XPackInfoResponse.LicenseInfo("uid", "PLATINUM", "PLATINUM", License.Status.EXPIRED, randomNonNegativeLong()); + return new XPackInfoResponse.LicenseInfo("uid", "PLATINUM", "PLATINUM", LicenseStatus.EXPIRED, randomNonNegativeLong()); } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java index 
5b4f8cbbdef68..596acaeeac6a0 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java @@ -203,6 +203,7 @@ import org.elasticsearch.xpack.security.transport.netty4.SecurityNetty4HttpServerTransport; import org.elasticsearch.xpack.security.transport.netty4.SecurityNetty4ServerTransport; import org.elasticsearch.xpack.core.template.TemplateUtils; +import org.elasticsearch.xpack.security.transport.nio.SecurityNioTransport; import org.joda.time.DateTime; import org.joda.time.DateTimeZone; @@ -846,8 +847,14 @@ public Map> getTransports(Settings settings, ThreadP if (transportClientMode || enabled == false) { // don't register anything if we are not enabled, or in transport client mode return Collections.emptyMap(); } - return Collections.singletonMap(SecurityField.NAME4, () -> new SecurityNetty4ServerTransport(settings, threadPool, - networkService, bigArrays, namedWriteableRegistry, circuitBreakerService, ipFilter.get(), getSslService())); + + Map> transports = new HashMap<>(); + transports.put(SecurityField.NAME4, () -> new SecurityNetty4ServerTransport(settings, threadPool, + networkService, bigArrays, namedWriteableRegistry, circuitBreakerService, ipFilter.get(), getSslService())); + transports.put(SecurityField.NIO, () -> new SecurityNioTransport(settings, threadPool, + networkService, bigArrays, pageCacheRecycler, namedWriteableRegistry, circuitBreakerService, ipFilter.get(), getSslService())); + + return Collections.unmodifiableMap(transports); } @Override diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrail.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrail.java index 5706f79011ac5..5da6a9eb77cdd 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrail.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrail.java @@ -23,6 +23,7 @@ import org.elasticsearch.common.util.concurrent.ThreadContext; import org.elasticsearch.node.Node; import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.tasks.Task; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportMessage; import org.elasticsearch.xpack.core.security.authc.Authentication; @@ -180,11 +181,11 @@ public void authenticationSuccess(String realm, User user, RestRequest request) if (events.contains(AUTHENTICATION_SUCCESS) && (eventFilterPolicyRegistry.ignorePredicate() .test(new AuditEventMetaInfo(Optional.of(user), Optional.of(realm), Optional.empty(), Optional.empty())) == false)) { if (includeRequestBody) { - logger.info("{}[rest] [authentication_success]\t{}, realm=[{}], uri=[{}], params=[{}], request_body=[{}]", - localNodeInfo.prefix, principal(user), realm, request.uri(), request.params(), restRequestContent(request)); + logger.info("{}[rest] [authentication_success]\t{}, realm=[{}], uri=[{}], params=[{}]{}, request_body=[{}]", + localNodeInfo.prefix, principal(user), realm, request.uri(), request.params(), opaqueId(), restRequestContent(request)); } else { - logger.info("{}[rest] [authentication_success]\t{}, realm=[{}], uri=[{}], params=[{}]", localNodeInfo.prefix, - principal(user), realm, request.uri(), request.params()); + logger.info("{}[rest] [authentication_success]\t{}, realm=[{}], uri=[{}], params=[{}]{}", + 
localNodeInfo.prefix, principal(user), realm, request.uri(), request.params(), opaqueId()); } } } @@ -197,13 +198,13 @@ public void authenticationSuccess(String realm, User user, String action, Transp .test(new AuditEventMetaInfo(Optional.of(user), Optional.of(realm), Optional.empty(), indices)) == false) { final LocalNodeInfo localNodeInfo = this.localNodeInfo; if (indices.isPresent()) { - logger.info("{}[transport] [authentication_success]\t{}, {}, realm=[{}], action=[{}], indices=[{}], request=[{}]", + logger.info("{}[transport] [authentication_success]\t{}, {}, realm=[{}], action=[{}], indices=[{}], request=[{}]{}", localNodeInfo.prefix, originAttributes(threadContext, message, localNodeInfo), principal(user), realm, action, - arrayToCommaDelimitedString(indices.get()), message.getClass().getSimpleName()); + arrayToCommaDelimitedString(indices.get()), message.getClass().getSimpleName(), opaqueId()); } else { - logger.info("{}[transport] [authentication_success]\t{}, {}, realm=[{}], action=[{}], request=[{}]", + logger.info("{}[transport] [authentication_success]\t{}, {}, realm=[{}], action=[{}], request=[{}]{}", localNodeInfo.prefix, originAttributes(threadContext, message, localNodeInfo), principal(user), realm, action, - message.getClass().getSimpleName()); + message.getClass().getSimpleName(), opaqueId()); } } } @@ -217,12 +218,13 @@ public void anonymousAccessDenied(String action, TransportMessage message) { .test(new AuditEventMetaInfo(Optional.empty(), Optional.empty(), indices)) == false) { final LocalNodeInfo localNodeInfo = this.localNodeInfo; if (indices.isPresent()) { - logger.info("{}[transport] [anonymous_access_denied]\t{}, action=[{}], indices=[{}], request=[{}]", + logger.info("{}[transport] [anonymous_access_denied]\t{}, action=[{}], indices=[{}], request=[{}]{}", localNodeInfo.prefix, originAttributes(threadContext, message, localNodeInfo), action, - arrayToCommaDelimitedString(indices.get()), message.getClass().getSimpleName()); + arrayToCommaDelimitedString(indices.get()), message.getClass().getSimpleName(), opaqueId()); } else { - logger.info("{}[transport] [anonymous_access_denied]\t{}, action=[{}], request=[{}]", localNodeInfo.prefix, - originAttributes(threadContext, message, localNodeInfo), action, message.getClass().getSimpleName()); + logger.info("{}[transport] [anonymous_access_denied]\t{}, action=[{}], request=[{}]{}", + localNodeInfo.prefix, originAttributes(threadContext, message, localNodeInfo), action, + message.getClass().getSimpleName(), opaqueId()); } } } @@ -233,11 +235,11 @@ public void anonymousAccessDenied(RestRequest request) { if (events.contains(ANONYMOUS_ACCESS_DENIED) && (eventFilterPolicyRegistry.ignorePredicate().test(AuditEventMetaInfo.EMPTY) == false)) { if (includeRequestBody) { - logger.info("{}[rest] [anonymous_access_denied]\t{}, uri=[{}], request_body=[{}]", localNodeInfo.prefix, - hostAttributes(request), request.uri(), restRequestContent(request)); + logger.info("{}[rest] [anonymous_access_denied]\t{}, uri=[{}]{}, request_body=[{}]", localNodeInfo.prefix, + hostAttributes(request), request.uri(), opaqueId(), restRequestContent(request)); } else { - logger.info("{}[rest] [anonymous_access_denied]\t{}, uri=[{}]", localNodeInfo.prefix, hostAttributes(request), - request.uri()); + logger.info("{}[rest] [anonymous_access_denied]\t{}, uri=[{}]{}", localNodeInfo.prefix, + hostAttributes(request), request.uri(), opaqueId()); } } } @@ -250,13 +252,13 @@ public void authenticationFailed(AuthenticationToken token, String action, Trans 
.test(new AuditEventMetaInfo(Optional.of(token), Optional.empty(), indices)) == false) { final LocalNodeInfo localNodeInfo = this.localNodeInfo; if (indices.isPresent()) { - logger.info("{}[transport] [authentication_failed]\t{}, principal=[{}], action=[{}], indices=[{}], request=[{}]", + logger.info("{}[transport] [authentication_failed]\t{}, principal=[{}], action=[{}], indices=[{}], request=[{}]{}", localNodeInfo.prefix, originAttributes(threadContext, message, localNodeInfo), token.principal(), action, - arrayToCommaDelimitedString(indices.get()), message.getClass().getSimpleName()); + arrayToCommaDelimitedString(indices.get()), message.getClass().getSimpleName(), opaqueId()); } else { - logger.info("{}[transport] [authentication_failed]\t{}, principal=[{}], action=[{}], request=[{}]", + logger.info("{}[transport] [authentication_failed]\t{}, principal=[{}], action=[{}], request=[{}]{}", localNodeInfo.prefix, originAttributes(threadContext, message, localNodeInfo), token.principal(), action, - message.getClass().getSimpleName()); + message.getClass().getSimpleName(), opaqueId()); } } } @@ -267,10 +269,11 @@ public void authenticationFailed(RestRequest request) { if (events.contains(AUTHENTICATION_FAILED) && (eventFilterPolicyRegistry.ignorePredicate().test(AuditEventMetaInfo.EMPTY) == false)) { if (includeRequestBody) { - logger.info("{}[rest] [authentication_failed]\t{}, uri=[{}], request_body=[{}]", localNodeInfo.prefix, - hostAttributes(request), request.uri(), restRequestContent(request)); + logger.info("{}[rest] [authentication_failed]\t{}, uri=[{}]{}, request_body=[{}]", localNodeInfo.prefix, + hostAttributes(request), request.uri(), opaqueId(), restRequestContent(request)); } else { - logger.info("{}[rest] [authentication_failed]\t{}, uri=[{}]", localNodeInfo.prefix, hostAttributes(request), request.uri()); + logger.info("{}[rest] [authentication_failed]\t{}, uri=[{}]{}", localNodeInfo.prefix, + hostAttributes(request), request.uri(), opaqueId()); } } } @@ -283,12 +286,13 @@ public void authenticationFailed(String action, TransportMessage message) { .test(new AuditEventMetaInfo(Optional.empty(), Optional.empty(), indices)) == false) { final LocalNodeInfo localNodeInfo = this.localNodeInfo; if (indices.isPresent()) { - logger.info("{}[transport] [authentication_failed]\t{}, action=[{}], indices=[{}], request=[{}]", localNodeInfo.prefix, - originAttributes(threadContext, message, localNodeInfo), action, arrayToCommaDelimitedString(indices.get()), - message.getClass().getSimpleName()); + logger.info("{}[transport] [authentication_failed]\t{}, action=[{}], indices=[{}], request=[{}]{}", + localNodeInfo.prefix, originAttributes(threadContext, message, localNodeInfo), action, + arrayToCommaDelimitedString(indices.get()), message.getClass().getSimpleName(), opaqueId()); } else { - logger.info("{}[transport] [authentication_failed]\t{}, action=[{}], request=[{}]", localNodeInfo.prefix, - originAttributes(threadContext, message, localNodeInfo), action, message.getClass().getSimpleName()); + logger.info("{}[transport] [authentication_failed]\t{}, action=[{}], request=[{}]{}", + localNodeInfo.prefix, originAttributes(threadContext, message, localNodeInfo), action, + message.getClass().getSimpleName(), opaqueId()); } } } @@ -300,11 +304,12 @@ public void authenticationFailed(AuthenticationToken token, RestRequest request) && (eventFilterPolicyRegistry.ignorePredicate() .test(new AuditEventMetaInfo(Optional.of(token), Optional.empty(), Optional.empty())) == false)) { if (includeRequestBody) 
{ - logger.info("{}[rest] [authentication_failed]\t{}, principal=[{}], uri=[{}], request_body=[{}]", localNodeInfo.prefix, - hostAttributes(request), token.principal(), request.uri(), restRequestContent(request)); + logger.info("{}[rest] [authentication_failed]\t{}, principal=[{}], uri=[{}]{}, request_body=[{}]", + localNodeInfo.prefix, hostAttributes(request), token.principal(), request.uri(), opaqueId(), + restRequestContent(request)); } else { - logger.info("{}[rest] [authentication_failed]\t{}, principal=[{}], uri=[{}]", localNodeInfo.prefix, hostAttributes(request), - token.principal(), request.uri()); + logger.info("{}[rest] [authentication_failed]\t{}, principal=[{}], uri=[{}]{}", + localNodeInfo.prefix, hostAttributes(request), token.principal(), request.uri(), opaqueId()); } } } @@ -319,13 +324,13 @@ public void authenticationFailed(String realm, AuthenticationToken token, String if (indices.isPresent()) { logger.info( "{}[transport] [realm_authentication_failed]\trealm=[{}], {}, principal=[{}], action=[{}], indices=[{}], " - + "request=[{}]", + + "request=[{}]{}", localNodeInfo.prefix, realm, originAttributes(threadContext, message, localNodeInfo), token.principal(), action, - arrayToCommaDelimitedString(indices.get()), message.getClass().getSimpleName()); + arrayToCommaDelimitedString(indices.get()), message.getClass().getSimpleName(), opaqueId()); } else { - logger.info("{}[transport] [realm_authentication_failed]\trealm=[{}], {}, principal=[{}], action=[{}], request=[{}]", + logger.info("{}[transport] [realm_authentication_failed]\trealm=[{}], {}, principal=[{}], action=[{}], request=[{}]{}", localNodeInfo.prefix, realm, originAttributes(threadContext, message, localNodeInfo), token.principal(), action, - message.getClass().getSimpleName()); + message.getClass().getSimpleName(), opaqueId()); } } } @@ -337,12 +342,13 @@ public void authenticationFailed(String realm, AuthenticationToken token, RestRe && (eventFilterPolicyRegistry.ignorePredicate() .test(new AuditEventMetaInfo(Optional.of(token), Optional.of(realm), Optional.empty())) == false)) { if (includeRequestBody) { - logger.info("{}[rest] [realm_authentication_failed]\trealm=[{}], {}, principal=[{}], uri=[{}], request_body=[{}]", - localNodeInfo.prefix, realm, hostAttributes(request), token.principal(), request.uri(), + logger.info("{}[rest] [realm_authentication_failed]\trealm=[{}], {}, principal=[{}], uri=[{}]{}, " + + "request_body=[{}]", + localNodeInfo.prefix, realm, hostAttributes(request), token.principal(), request.uri(), opaqueId(), restRequestContent(request)); } else { - logger.info("{}[rest] [realm_authentication_failed]\trealm=[{}], {}, principal=[{}], uri=[{}]", localNodeInfo.prefix, realm, - hostAttributes(request), token.principal(), request.uri()); + logger.info("{}[rest] [realm_authentication_failed]\trealm=[{}], {}, principal=[{}], uri=[{}]{}", + localNodeInfo.prefix, realm, hostAttributes(request), token.principal(), request.uri(), opaqueId()); } } } @@ -357,14 +363,14 @@ public void accessGranted(Authentication authentication, String action, Transpor Optional.of(effectiveRealmName(authentication)), Optional.of(roleNames), indices)) == false) { final LocalNodeInfo localNodeInfo = this.localNodeInfo; if (indices.isPresent()) { - logger.info("{}[transport] [access_granted]\t{}, {}, roles=[{}], action=[{}], indices=[{}], request=[{}]", + logger.info("{}[transport] [access_granted]\t{}, {}, roles=[{}], action=[{}], indices=[{}], request=[{}]{}", localNodeInfo.prefix, originAttributes(threadContext, 
message, localNodeInfo), subject(authentication), arrayToCommaDelimitedString(roleNames), action, arrayToCommaDelimitedString(indices.get()), - message.getClass().getSimpleName()); + message.getClass().getSimpleName(), opaqueId()); } else { - logger.info("{}[transport] [access_granted]\t{}, {}, roles=[{}], action=[{}], request=[{}]", localNodeInfo.prefix, - originAttributes(threadContext, message, localNodeInfo), subject(authentication), - arrayToCommaDelimitedString(roleNames), action, message.getClass().getSimpleName()); + logger.info("{}[transport] [access_granted]\t{}, {}, roles=[{}], action=[{}], request=[{}]{}", + localNodeInfo.prefix, originAttributes(threadContext, message, localNodeInfo), subject(authentication), + arrayToCommaDelimitedString(roleNames), action, message.getClass().getSimpleName(), opaqueId()); } } } @@ -378,14 +384,14 @@ public void accessDenied(Authentication authentication, String action, Transport Optional.of(effectiveRealmName(authentication)), Optional.of(roleNames), indices)) == false) { final LocalNodeInfo localNodeInfo = this.localNodeInfo; if (indices.isPresent()) { - logger.info("{}[transport] [access_denied]\t{}, {}, roles=[{}], action=[{}], indices=[{}], request=[{}]", + logger.info("{}[transport] [access_denied]\t{}, {}, roles=[{}], action=[{}], indices=[{}], request=[{}]{}", localNodeInfo.prefix, originAttributes(threadContext, message, localNodeInfo), subject(authentication), arrayToCommaDelimitedString(roleNames), action, arrayToCommaDelimitedString(indices.get()), - message.getClass().getSimpleName()); + message.getClass().getSimpleName(), opaqueId()); } else { - logger.info("{}[transport] [access_denied]\t{}, {}, roles=[{}], action=[{}], request=[{}]", localNodeInfo.prefix, - originAttributes(threadContext, message, localNodeInfo), subject(authentication), - arrayToCommaDelimitedString(roleNames), action, message.getClass().getSimpleName()); + logger.info("{}[transport] [access_denied]\t{}, {}, roles=[{}], action=[{}], request=[{}]{}", + localNodeInfo.prefix, originAttributes(threadContext, message, localNodeInfo), subject(authentication), + arrayToCommaDelimitedString(roleNames), action, message.getClass().getSimpleName(), opaqueId()); } } } @@ -395,10 +401,11 @@ localNodeInfo.prefix, originAttributes(threadContext, message, localNodeInfo), s public void tamperedRequest(RestRequest request) { if (events.contains(TAMPERED_REQUEST) && (eventFilterPolicyRegistry.ignorePredicate().test(AuditEventMetaInfo.EMPTY) == false)) { if (includeRequestBody) { - logger.info("{}[rest] [tampered_request]\t{}, uri=[{}], request_body=[{}]", localNodeInfo.prefix, hostAttributes(request), - request.uri(), restRequestContent(request)); + logger.info("{}[rest] [tampered_request]\t{}, uri=[{}]{}, request_body=[{}]", localNodeInfo.prefix, + hostAttributes(request), request.uri(), opaqueId(), restRequestContent(request)); } else { - logger.info("{}[rest] [tampered_request]\t{}, uri=[{}]", localNodeInfo.prefix, hostAttributes(request), request.uri()); + logger.info("{}[rest] [tampered_request]\t{}, uri=[{}]{}", localNodeInfo.prefix, hostAttributes(request), + request.uri(), opaqueId()); } } } @@ -411,12 +418,13 @@ public void tamperedRequest(String action, TransportMessage message) { .test(new AuditEventMetaInfo(Optional.empty(), Optional.empty(), indices)) == false) { final LocalNodeInfo localNodeInfo = this.localNodeInfo; if (indices.isPresent()) { - logger.info("{}[transport] [tampered_request]\t{}, action=[{}], indices=[{}], request=[{}]", localNodeInfo.prefix, - 
originAttributes(threadContext, message, localNodeInfo), action, arrayToCommaDelimitedString(indices.get()), - message.getClass().getSimpleName()); + logger.info("{}[transport] [tampered_request]\t{}, action=[{}], indices=[{}], request=[{}]{}", + localNodeInfo.prefix, originAttributes(threadContext, message, localNodeInfo), action, + arrayToCommaDelimitedString(indices.get()), message.getClass().getSimpleName(), opaqueId()); } else { - logger.info("{}[transport] [tampered_request]\t{}, action=[{}], request=[{}]", localNodeInfo.prefix, - originAttributes(threadContext, message, localNodeInfo), action, message.getClass().getSimpleName()); + logger.info("{}[transport] [tampered_request]\t{}, action=[{}], request=[{}]{}", localNodeInfo.prefix, + originAttributes(threadContext, message, localNodeInfo), action, message.getClass().getSimpleName(), + opaqueId()); } } } @@ -430,13 +438,13 @@ public void tamperedRequest(User user, String action, TransportMessage request) .test(new AuditEventMetaInfo(Optional.of(user), Optional.empty(), Optional.empty(), indices)) == false) { final LocalNodeInfo localNodeInfo = this.localNodeInfo; if (indices.isPresent()) { - logger.info("{}[transport] [tampered_request]\t{}, {}, action=[{}], indices=[{}], request=[{}]", localNodeInfo.prefix, - originAttributes(threadContext, request, localNodeInfo), principal(user), action, - arrayToCommaDelimitedString(indices.get()), request.getClass().getSimpleName()); + logger.info("{}[transport] [tampered_request]\t{}, {}, action=[{}], indices=[{}], request=[{}]{}", + localNodeInfo.prefix, originAttributes(threadContext, request, localNodeInfo), principal(user), action, + arrayToCommaDelimitedString(indices.get()), request.getClass().getSimpleName(), opaqueId()); } else { - logger.info("{}[transport] [tampered_request]\t{}, {}, action=[{}], request=[{}]", localNodeInfo.prefix, + logger.info("{}[transport] [tampered_request]\t{}, {}, action=[{}], request=[{}]{}", localNodeInfo.prefix, originAttributes(threadContext, request, localNodeInfo), principal(user), action, - request.getClass().getSimpleName()); + request.getClass().getSimpleName(), opaqueId()); } } } @@ -445,16 +453,16 @@ public void tamperedRequest(User user, String action, TransportMessage request) @Override public void connectionGranted(InetAddress inetAddress, String profile, SecurityIpFilterRule rule) { if (events.contains(CONNECTION_GRANTED) && (eventFilterPolicyRegistry.ignorePredicate().test(AuditEventMetaInfo.EMPTY) == false)) { - logger.info("{}[ip_filter] [connection_granted]\torigin_address=[{}], transport_profile=[{}], rule=[{}]", localNodeInfo.prefix, - NetworkAddress.format(inetAddress), profile, rule); + logger.info("{}[ip_filter] [connection_granted]\torigin_address=[{}], transport_profile=[{}], rule=[{}]{}", + localNodeInfo.prefix, NetworkAddress.format(inetAddress), profile, rule, opaqueId()); } } @Override public void connectionDenied(InetAddress inetAddress, String profile, SecurityIpFilterRule rule) { if (events.contains(CONNECTION_DENIED) && (eventFilterPolicyRegistry.ignorePredicate().test(AuditEventMetaInfo.EMPTY) == false)) { - logger.info("{}[ip_filter] [connection_denied]\torigin_address=[{}], transport_profile=[{}], rule=[{}]", localNodeInfo.prefix, - NetworkAddress.format(inetAddress), profile, rule); + logger.info("{}[ip_filter] [connection_denied]\torigin_address=[{}], transport_profile=[{}], rule=[{}]{}", + localNodeInfo.prefix, NetworkAddress.format(inetAddress), profile, rule, opaqueId()); } } @@ -466,15 +474,14 @@ public void 
runAsGranted(Authentication authentication, String action, Transport Optional.of(effectiveRealmName(authentication)), Optional.of(roleNames), indices)) == false) { final LocalNodeInfo localNodeInfo = this.localNodeInfo; if (indices.isPresent()) { - logger.info("{}[transport] [run_as_granted]\t{}, {}, roles=[{}], action=[{}], indices=[{}], request=[{}]", + logger.info("{}[transport] [run_as_granted]\t{}, {}, roles=[{}], action=[{}], indices=[{}], request=[{}]{}", localNodeInfo.prefix, originAttributes(threadContext, message, localNodeInfo), runAsSubject(authentication), arrayToCommaDelimitedString(roleNames), action, arrayToCommaDelimitedString(indices.get()), - message.getClass().getSimpleName()); + message.getClass().getSimpleName(), opaqueId()); } else { - logger.info("{}[transport] [run_as_granted]\t{}, {}, roles=[{}], action=[{}], request=[{}]", localNodeInfo.prefix, - originAttributes(threadContext, message, localNodeInfo), runAsSubject(authentication), - arrayToCommaDelimitedString(roleNames), action, - message.getClass().getSimpleName()); + logger.info("{}[transport] [run_as_granted]\t{}, {}, roles=[{}], action=[{}], request=[{}]{}", + localNodeInfo.prefix, originAttributes(threadContext, message, localNodeInfo), runAsSubject(authentication), + arrayToCommaDelimitedString(roleNames), action, message.getClass().getSimpleName(), opaqueId()); } } } @@ -488,14 +495,14 @@ public void runAsDenied(Authentication authentication, String action, TransportM Optional.of(effectiveRealmName(authentication)), Optional.of(roleNames), indices)) == false) { final LocalNodeInfo localNodeInfo = this.localNodeInfo; if (indices.isPresent()) { - logger.info("{}[transport] [run_as_denied]\t{}, {}, roles=[{}], action=[{}], indices=[{}], request=[{}]", + logger.info("{}[transport] [run_as_denied]\t{}, {}, roles=[{}], action=[{}], indices=[{}], request=[{}]{}", localNodeInfo.prefix, originAttributes(threadContext, message, localNodeInfo), runAsSubject(authentication), arrayToCommaDelimitedString(roleNames), action, arrayToCommaDelimitedString(indices.get()), - message.getClass().getSimpleName()); + message.getClass().getSimpleName(), opaqueId()); } else { - logger.info("{}[transport] [run_as_denied]\t{}, {}, roles=[{}], action=[{}], request=[{}]", localNodeInfo.prefix, - originAttributes(threadContext, message, localNodeInfo), runAsSubject(authentication), - arrayToCommaDelimitedString(roleNames), action, message.getClass().getSimpleName()); + logger.info("{}[transport] [run_as_denied]\t{}, {}, roles=[{}], action=[{}], request=[{}]{}", + localNodeInfo.prefix, originAttributes(threadContext, message, localNodeInfo), runAsSubject(authentication), + arrayToCommaDelimitedString(roleNames), action, message.getClass().getSimpleName(), opaqueId()); } } } @@ -507,12 +514,13 @@ public void runAsDenied(Authentication authentication, RestRequest request, Stri && (eventFilterPolicyRegistry.ignorePredicate().test(new AuditEventMetaInfo(Optional.of(authentication.getUser()), Optional.of(effectiveRealmName(authentication)), Optional.of(roleNames), Optional.empty())) == false)) { if (includeRequestBody) { - logger.info("{}[rest] [run_as_denied]\t{}, {}, roles=[{}], uri=[{}], request_body=[{}]", localNodeInfo.prefix, - hostAttributes(request), runAsSubject(authentication), arrayToCommaDelimitedString(roleNames), request.uri(), - restRequestContent(request)); + logger.info("{}[rest] [run_as_denied]\t{}, {}, roles=[{}], uri=[{}], request_body=[{}]{}", + localNodeInfo.prefix, hostAttributes(request), 
runAsSubject(authentication), + arrayToCommaDelimitedString(roleNames), request.uri(), restRequestContent(request), opaqueId()); } else { - logger.info("{}[rest] [run_as_denied]\t{}, {}, roles=[{}], uri=[{}]", localNodeInfo.prefix, hostAttributes(request), - runAsSubject(authentication), arrayToCommaDelimitedString(roleNames), request.uri()); + logger.info("{}[rest] [run_as_denied]\t{}, {}, roles=[{}], uri=[{}]{}", localNodeInfo.prefix, + hostAttributes(request), runAsSubject(authentication), arrayToCommaDelimitedString(roleNames), request.uri(), + opaqueId()); } } } @@ -552,6 +560,15 @@ protected static String originAttributes(ThreadContext threadContext, TransportM return restOriginTag(threadContext).orElse(transportOriginTag(message).orElse(localNodeInfo.localOriginTag)); } + private String opaqueId() { + String opaqueId = threadContext.getHeader(Task.X_OPAQUE_ID); + if (opaqueId != null) { + return ", opaque_id=[" + opaqueId + "]"; + } else { + return ""; + } + } + private static Optional<String> restOriginTag(ThreadContext threadContext) { final InetSocketAddress restAddress = RemoteHostHeader.restRemoteAddress(threadContext); if (restAddress == null) { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactory.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactory.java index 295e00e19a8a9..d175e1b229312 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactory.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactory.java @@ -11,6 +11,7 @@ import com.unboundid.ldap.sdk.LDAPException; import com.unboundid.ldap.sdk.LDAPInterface; import com.unboundid.ldap.sdk.SearchResultEntry; +import com.unboundid.ldap.sdk.ServerSet; import com.unboundid.ldap.sdk.SimpleBindRequest; import com.unboundid.ldap.sdk.controls.AuthorizationIdentityRequestControl; import org.apache.logging.log4j.Logger; @@ -62,8 +63,6 @@ class ActiveDirectorySessionFactory extends PoolingSessionFactory { final DownLevelADAuthenticator downLevelADAuthenticator; final UpnADAuthenticator upnADAuthenticator; - private final int ldapPort; - ActiveDirectorySessionFactory(RealmConfig config, SSLService sslService, ThreadPool threadPool) throws LDAPException { super(config, sslService, new ActiveDirectoryGroupsResolver(config.settings()), ActiveDirectorySessionFactorySettings.POOL_ENABLED, @@ -85,7 +84,7 @@ class ActiveDirectorySessionFactory extends PoolingSessionFactory { + "] setting for active directory"); } String domainDN = buildDnFromDomain(domainName); - ldapPort = ActiveDirectorySessionFactorySettings.AD_LDAP_PORT_SETTING.get(settings); + final int ldapPort = ActiveDirectorySessionFactorySettings.AD_LDAP_PORT_SETTING.get(settings); final int ldapsPort = ActiveDirectorySessionFactorySettings.AD_LDAPS_PORT_SETTING.get(settings); final int gcLdapPort = ActiveDirectorySessionFactorySettings.AD_GC_LDAP_PORT_SETTING.get(settings); final int gcLdapsPort = ActiveDirectorySessionFactorySettings.AD_GC_LDAPS_PORT_SETTING.get(settings); @@ -102,7 +101,7 @@ class ActiveDirectorySessionFactory extends PoolingSessionFactory { @Override protected List<String> getDefaultLdapUrls(Settings settings) { return Collections.singletonList("ldap://" + settings.get(ActiveDirectorySessionFactorySettings.AD_DOMAIN_NAME_SETTING) + - ":" + ldapPort); + ":" +
ActiveDirectorySessionFactorySettings.AD_LDAP_PORT_SETTING.get(settings)); } @Override @@ -197,6 +196,11 @@ static String getBindDN(Settings settings) { return bindDN; } + // Exposed for testing + ServerSet getServerSet() { + return super.serverSet; + } + ADAuthenticator getADAuthenticator(String username) { if (username.indexOf('\\') > 0) { return downLevelADAuthenticator; diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java index b5a20af8d30b9..1018ceeda093c 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authz/store/CompositeRolesStore.java @@ -278,7 +278,7 @@ public static Role buildRoleFromDescriptors(Set<RoleDescriptor> roleDescriptors, final Set<String> clusterPrivs = clusterPrivileges.isEmpty() ? null : clusterPrivileges; final Privilege runAsPrivilege = runAs.isEmpty() ? Privilege.NONE : new Privilege(runAs, runAs.toArray(Strings.EMPTY_ARRAY)); - Role.Builder builder = Role.builder(roleNames.toArray(new String[roleNames.size()]), fieldPermissionsCache) + Role.Builder builder = Role.builder(roleNames.toArray(new String[roleNames.size()])) .cluster(ClusterPrivilege.get(clusterPrivs)) .runAs(runAsPrivilege); indicesPrivilegesMap.entrySet().forEach((entry) -> { diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SSLChannelContext.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SSLChannelContext.java index 2170c55ee0192..da348ea1f78e1 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SSLChannelContext.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SSLChannelContext.java @@ -17,6 +17,7 @@ import java.io.IOException; import java.util.function.BiConsumer; import java.util.function.Consumer; +import java.util.function.Predicate; /** * Provides a TLS/SSL read/write layer over a channel. This context will use a {@link SSLDriver} to handshake @@ -30,7 +31,13 @@ public final class SSLChannelContext extends SocketChannelContext { SSLChannelContext(NioSocketChannel channel, NioSelector selector, Consumer<Exception> exceptionHandler, SSLDriver sslDriver, ReadWriteHandler readWriteHandler, InboundChannelBuffer channelBuffer) { - super(channel, selector, exceptionHandler, readWriteHandler, channelBuffer); + this(channel, selector, exceptionHandler, sslDriver, readWriteHandler, channelBuffer, ALWAYS_ALLOW_CHANNEL); + } + + SSLChannelContext(NioSocketChannel channel, NioSelector selector, Consumer<Exception> exceptionHandler, SSLDriver sslDriver, + ReadWriteHandler readWriteHandler, InboundChannelBuffer channelBuffer, + Predicate<NioSocketChannel> allowChannelPredicate) { + super(channel, selector, exceptionHandler, readWriteHandler, channelBuffer, allowChannelPredicate); this.sslDriver = sslDriver; } @@ -52,7 +59,7 @@ public void queueWriteOperation(WriteOperation writeOperation) { @Override public void flushChannel() throws IOException { - if (hasIOException()) { + if (closeNow()) { return; } // If there is currently data in the outbound write buffer, flush the buffer.
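The second `SSLChannelContext` constructor above threads a `Predicate<NioSocketChannel>` down to the base `SocketChannelContext`, and `SecurityNioTransport.validateChannel` (later in this patch) supplies it from the `IPFilter`. As a rough illustration of the pattern, using hypothetical stand-in types rather than the real org.elasticsearch.nio API:

```java
import java.util.function.Predicate;

// Hypothetical stand-in for NioSocketChannel; the real class has a richer lifecycle.
interface Channel {
    void close();
}

final class GatedChannelContext<C extends Channel> {
    private final C channel;
    private final Predicate<C> allowChannel;

    GatedChannelContext(C channel, Predicate<C> allowChannel) {
        this.channel = channel;
        this.allowChannel = allowChannel;
    }

    // Invoked once the channel is registered with its selector: a channel the
    // predicate rejects (e.g. one denied by the IP filter) is closed before any
    // bytes are read, so filtering happens ahead of the TLS handshake.
    void register() {
        if (allowChannel.test(channel) == false) {
            channel.close();
        }
    }
}
```

Passing `ALWAYS_ALLOW_CHANNEL` from the shorter constructor preserves the previous accept-everything behavior for callers that do not filter.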
@@ -116,7 +123,7 @@ public boolean readyForFlush() { @Override public int read() throws IOException { int bytesRead = 0; - if (hasIOException()) { + if (closeNow()) { return bytesRead; } bytesRead = readFromChannel(sslDriver.getNetworkReadBuffer()); @@ -133,7 +140,7 @@ public int read() throws IOException { @Override public boolean selectorShouldClose() { - return isPeerClosed() || hasIOException() || sslDriver.isClosed(); + return closeNow() || sslDriver.isClosed(); } @Override diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SecurityNioTransport.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SecurityNioTransport.java index 874dc36a31cce..1e00019793025 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SecurityNioTransport.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SecurityNioTransport.java @@ -5,30 +5,39 @@ */ package org.elasticsearch.xpack.security.transport.nio; +import org.apache.logging.log4j.message.ParameterizedMessage; +import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.network.CloseableChannel; import org.elasticsearch.common.network.NetworkService; import org.elasticsearch.common.recycler.Recycler; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.util.BigArrays; import org.elasticsearch.common.util.PageCacheRecycler; import org.elasticsearch.indices.breaker.CircuitBreakerService; +import org.elasticsearch.nio.BytesChannelContext; import org.elasticsearch.nio.InboundChannelBuffer; import org.elasticsearch.nio.NioSelector; import org.elasticsearch.nio.NioSocketChannel; import org.elasticsearch.nio.ServerChannelContext; +import org.elasticsearch.nio.SocketChannelContext; import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.transport.TcpChannel; import org.elasticsearch.transport.TcpTransport; import org.elasticsearch.transport.nio.NioTcpChannel; import org.elasticsearch.transport.nio.NioTcpServerChannel; import org.elasticsearch.transport.nio.NioTransport; import org.elasticsearch.transport.nio.TcpReadWriteHandler; import org.elasticsearch.xpack.core.XPackSettings; +import org.elasticsearch.xpack.core.security.transport.SSLExceptionHelper; import org.elasticsearch.xpack.core.security.transport.netty4.SecurityNetty4Transport; import org.elasticsearch.xpack.core.ssl.SSLConfiguration; import org.elasticsearch.xpack.core.ssl.SSLService; +import org.elasticsearch.xpack.security.transport.filter.IPFilter; import javax.net.ssl.SSLEngine; import java.io.IOException; +import java.net.InetSocketAddress; import java.nio.ByteBuffer; import java.nio.channels.ServerSocketChannel; import java.nio.channels.SocketChannel; @@ -36,6 +45,7 @@ import java.util.HashMap; import java.util.Map; import java.util.function.Consumer; +import java.util.function.Predicate; import java.util.function.Supplier; import static org.elasticsearch.xpack.core.security.SecurityField.setting; @@ -45,42 +55,83 @@ * protocol that allows two channels to go through a handshake process prior to application data being * exchanged. The handshake process enables the channels to exchange parameters that will allow them to * encrypt the application data they exchange. - * + *

    * The specific SSL/TLS parameters and configurations are setup in the {@link SSLService} class. The actual * implementation of the SSL/TLS layer is in the {@link SSLChannelContext} and {@link SSLDriver} classes. */ public class SecurityNioTransport extends NioTransport { - private final SSLConfiguration sslConfiguration; + private final IPFilter authenticator; private final SSLService sslService; private final Map<String, SSLConfiguration> profileConfiguration; private final boolean sslEnabled; - SecurityNioTransport(Settings settings, ThreadPool threadPool, NetworkService networkService, BigArrays bigArrays, - PageCacheRecycler pageCacheRecycler, NamedWriteableRegistry namedWriteableRegistry, - CircuitBreakerService circuitBreakerService, SSLService sslService) { + public SecurityNioTransport(Settings settings, ThreadPool threadPool, NetworkService networkService, BigArrays bigArrays, + PageCacheRecycler pageCacheRecycler, NamedWriteableRegistry namedWriteableRegistry, + CircuitBreakerService circuitBreakerService, @Nullable final IPFilter authenticator, + SSLService sslService) { super(settings, threadPool, networkService, bigArrays, pageCacheRecycler, namedWriteableRegistry, circuitBreakerService); + this.authenticator = authenticator; this.sslService = sslService; this.sslEnabled = XPackSettings.TRANSPORT_SSL_ENABLED.get(settings); final Settings transportSSLSettings = settings.getByPrefix(setting("transport.ssl.")); if (sslEnabled) { - this.sslConfiguration = sslService.sslConfiguration(transportSSLSettings, Settings.EMPTY); Map<String, Settings> profileSettingsMap = settings.getGroups("transport.profiles.", true); Map<String, SSLConfiguration> profileConfiguration = new HashMap<>(profileSettingsMap.size() + 1); for (Map.Entry<String, Settings> entry : profileSettingsMap.entrySet()) { Settings profileSettings = entry.getValue(); final Settings profileSslSettings = SecurityNetty4Transport.profileSslSettings(profileSettings); - SSLConfiguration configuration = sslService.sslConfiguration(profileSslSettings, transportSSLSettings); + SSLConfiguration configuration = sslService.sslConfiguration(profileSslSettings, transportSSLSettings); profileConfiguration.put(entry.getKey(), configuration); } if (profileConfiguration.containsKey(TcpTransport.DEFAULT_PROFILE) == false) { - profileConfiguration.put(TcpTransport.DEFAULT_PROFILE, sslConfiguration); + profileConfiguration.put(TcpTransport.DEFAULT_PROFILE, sslService.sslConfiguration(transportSSLSettings, Settings.EMPTY)); } this.profileConfiguration = Collections.unmodifiableMap(profileConfiguration); } else { - throw new IllegalArgumentException("Currently only support SSL enabled."); + profileConfiguration = Collections.emptyMap(); + } + } + + @Override + protected void doStart() { + super.doStart(); + if (authenticator != null) { + authenticator.setBoundTransportAddress(boundAddress(), profileBoundAddresses()); + } + } + + @Override + public void onException(TcpChannel channel, Exception e) { + if (!lifecycle.started()) { + // just close and ignore - we are already stopped and just need to make sure we release all resources + CloseableChannel.closeChannel(channel); + } else if (SSLExceptionHelper.isNotSslRecordException(e)) { + if (logger.isTraceEnabled()) { + logger.trace( + new ParameterizedMessage("received plaintext traffic on an encrypted channel, closing connection {}", channel), e); + } else { + logger.warn("received plaintext traffic on an encrypted channel, closing connection {}", channel); + } + CloseableChannel.closeChannel(channel); + } else if (SSLExceptionHelper.isCloseDuringHandshakeException(e)) { + if
(logger.isTraceEnabled()) { + logger.trace(new ParameterizedMessage("connection {} closed during ssl handshake", channel), e); + } else { + logger.warn("connection {} closed during handshake", channel); + } + CloseableChannel.closeChannel(channel); + } else if (SSLExceptionHelper.isReceivedCertificateUnknownException(e)) { + if (logger.isTraceEnabled()) { + logger.trace(new ParameterizedMessage("client did not trust server's certificate, closing connection {}", channel), e); + } else { + logger.warn("client did not trust this server's certificate, closing connection {}", channel); + } + CloseableChannel.closeChannel(channel); + } else { + super.onException(channel, e); } } @@ -89,9 +140,13 @@ protected TcpChannelFactory channelFactory(ProfileSettings profileSettings, bool return new SecurityTcpChannelFactory(profileSettings, isClient); } - @Override - protected void acceptChannel(NioSocketChannel channel) { - super.acceptChannel(channel); + private boolean validateChannel(NioSocketChannel channel) { + if (authenticator != null) { + NioTcpChannel nioTcpChannel = (NioTcpChannel) channel; + return authenticator.accept(nioTcpChannel.getProfile(), nioTcpChannel.getRemoteAddress()); + } else { + return true; + } } private class SecurityTcpChannelFactory extends TcpChannelFactory { @@ -101,30 +156,46 @@ private class SecurityTcpChannelFactory extends TcpChannelFactory { private SecurityTcpChannelFactory(ProfileSettings profileSettings, boolean isClient) { super(new RawChannelFactory(profileSettings.tcpNoDelay, - profileSettings.tcpKeepAlive, - profileSettings.reuseAddress, - Math.toIntExact(profileSettings.sendBufferSize.getBytes()), - Math.toIntExact(profileSettings.receiveBufferSize.getBytes()))); + profileSettings.tcpKeepAlive, + profileSettings.reuseAddress, + Math.toIntExact(profileSettings.sendBufferSize.getBytes()), + Math.toIntExact(profileSettings.receiveBufferSize.getBytes()))); this.profileName = profileSettings.profileName; this.isClient = isClient; } @Override public NioTcpChannel createChannel(NioSelector selector, SocketChannel channel) throws IOException { - SSLConfiguration defaultConfig = profileConfiguration.get(TcpTransport.DEFAULT_PROFILE); - SSLEngine sslEngine = sslService.createSSLEngine(profileConfiguration.getOrDefault(profileName, defaultConfig), null, -1); - SSLDriver sslDriver = new SSLDriver(sslEngine, isClient); NioTcpChannel nioChannel = new NioTcpChannel(profileName, channel); + SocketChannelContext context; Supplier<InboundChannelBuffer.Page> pageSupplier = () -> { Recycler.V<byte[]> bytes = pageCacheRecycler.bytePage(false); return new InboundChannelBuffer.Page(ByteBuffer.wrap(bytes.v()), bytes::close); }; - TcpReadWriteHandler readWriteHandler = new TcpReadWriteHandler(nioChannel, SecurityNioTransport.this); InboundChannelBuffer buffer = new InboundChannelBuffer(pageSupplier); Consumer<Exception> exceptionHandler = (e) -> onException(nioChannel, e); - SSLChannelContext context = new SSLChannelContext(nioChannel, selector, exceptionHandler, sslDriver, readWriteHandler, buffer); + Predicate<NioSocketChannel> filter = SecurityNioTransport.this::validateChannel; + + if (sslEnabled) { + SSLEngine sslEngine; + SSLConfiguration defaultConfig = profileConfiguration.get(TcpTransport.DEFAULT_PROFILE); + SSLConfiguration sslConfig = profileConfiguration.getOrDefault(profileName, defaultConfig); + boolean hostnameVerificationEnabled = sslConfig.verificationMode().isHostnameVerificationEnabled(); + if (hostnameVerificationEnabled) { + InetSocketAddress inetSocketAddress = (InetSocketAddress) channel.getRemoteAddress(); + // we create
the socket based on the name given. don't reverse DNS + sslEngine = sslService.createSSLEngine(sslConfig, inetSocketAddress.getHostString(), inetSocketAddress.getPort()); + } else { + sslEngine = sslService.createSSLEngine(sslConfig, null, -1); + } + SSLDriver sslDriver = new SSLDriver(sslEngine, isClient); + context = new SSLChannelContext(nioChannel, selector, exceptionHandler, sslDriver, readWriteHandler, buffer, filter); + } else { + context = new BytesChannelContext(nioChannel, selector, exceptionHandler, readWriteHandler, buffer, filter); + } nioChannel.setContext(context); + return nioChannel; } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecurityIntegTestCase.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecurityIntegTestCase.java index 97dd7866dc006..e6db3407496eb 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecurityIntegTestCase.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecurityIntegTestCase.java @@ -25,6 +25,7 @@ import org.elasticsearch.cluster.routing.IndexRoutingTable; import org.elasticsearch.common.Strings; import org.elasticsearch.common.network.NetworkAddress; +import org.elasticsearch.common.network.NetworkModule; import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Settings; @@ -242,6 +243,7 @@ protected Settings nodeSettings(int nodeOrdinal) { Settings customSettings = customSecuritySettingsSource.nodeSettings(nodeOrdinal); builder.put(customSettings, false); // handle secure settings separately builder.put(LicenseService.SELF_GENERATED_LICENSE_TYPE.getKey(), "trial"); + builder.put(NetworkModule.TRANSPORT_TYPE_KEY, randomBoolean() ? 
SecurityField.NAME4 : SecurityField.NIO); Settings.Builder customBuilder = Settings.builder().put(customSettings); if (customBuilder.getSecureSettings() != null) { SecuritySettingsSource.addSecureSettings(builder, secureSettings -> @@ -262,6 +264,7 @@ protected Path nodeConfigPath(int nodeOrdinal) { @Override protected Settings transportClientSettings() { return Settings.builder().put(super.transportClientSettings()) + .put(NetworkModule.TRANSPORT_TYPE_KEY, SecurityField.NIO) .put(customSecuritySettingsSource.transportClientSettings()) .build(); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java index cc8c61a5c32e4..2e0662264a248 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java @@ -21,12 +21,12 @@ import org.elasticsearch.transport.Netty4Plugin; import org.elasticsearch.xpack.core.XPackClientPlugin; import org.elasticsearch.xpack.core.XPackSettings; -import org.elasticsearch.xpack.security.LocalStateSecurity; import org.elasticsearch.xpack.core.security.SecurityField; -import org.elasticsearch.xpack.security.audit.logfile.LoggingAuditTrail; import org.elasticsearch.xpack.core.security.authc.esnative.NativeRealmSettings; import org.elasticsearch.xpack.core.security.authc.file.FileRealmSettings; import org.elasticsearch.xpack.core.security.authc.support.Hasher; +import org.elasticsearch.xpack.security.LocalStateSecurity; +import org.elasticsearch.xpack.security.audit.logfile.LoggingAuditTrail; import java.io.IOException; import java.io.InputStream; @@ -125,6 +125,7 @@ public Settings nodeSettings(int nodeOrdinal) { Settings.Builder builder = Settings.builder().put(super.nodeSettings(nodeOrdinal)) .put(XPackSettings.SECURITY_ENABLED.getKey(), true) + .put(NetworkModule.TRANSPORT_TYPE_KEY, randomBoolean() ? 
SecurityField.NAME4 : SecurityField.NIO) //TODO: for now isolate security tests from watcher & monitoring (randomize this later) .put(XPackSettings.WATCHER_ENABLED.getKey(), false) .put(XPackSettings.MONITORING_ENABLED.getKey(), false) diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrailTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrailTests.java index 2e2a931f78f87..cb1b69708bdf2 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrailTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/index/IndexAuditTrailTests.java @@ -49,6 +49,7 @@ import org.elasticsearch.transport.TransportMessage; import org.elasticsearch.transport.TransportRequest; import org.elasticsearch.xpack.core.XPackSettings; +import org.elasticsearch.xpack.core.security.SecurityField; import org.elasticsearch.xpack.core.security.authc.Authentication; import org.elasticsearch.xpack.core.security.authc.Authentication.RealmRef; import org.elasticsearch.xpack.core.security.authc.AuthenticationToken; @@ -187,7 +188,9 @@ public Settings nodeSettings(int nodeOrdinal) { // Disable native ML autodetect_process as the c++ controller won't be available // .put(MachineLearningField.AUTODETECT_PROCESS.getKey(), false) .put(XPackSettings.SECURITY_ENABLED.getKey(), useSecurity); - if (useSecurity == false && builder.get(NetworkModule.TRANSPORT_TYPE_KEY) == null) { + String transport = builder.get(NetworkModule.TRANSPORT_TYPE_KEY); + if (useSecurity == false && (transport == null || SecurityField.NAME4.equals(transport) + || SecurityField.NIO.equals(transport))) { builder.put(NetworkModule.TRANSPORT_TYPE_KEY, getTestTransportType()); } return builder.build(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailTests.java index 408d3e797e5a3..1059e22abd663 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/audit/logfile/LoggingAuditTrailTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.mock.orig.Mockito; import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.tasks.Task; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.rest.FakeRestRequest; import org.elasticsearch.test.rest.FakeRestRequest.Builder; @@ -122,6 +123,7 @@ protected String expectedMessage() { private ClusterService clusterService; private ThreadContext threadContext; private boolean includeRequestBody; + private String opaqueId; @Before public void init() throws Exception { @@ -145,6 +147,13 @@ public void init() throws Exception { when(clusterService.getClusterSettings()).thenReturn(clusterSettings); prefix = LoggingAuditTrail.LocalNodeInfo.resolvePrefix(settings, localNode); threadContext = new ThreadContext(Settings.EMPTY); + if (randomBoolean()) { + String id = randomAlphaOfLength(10); + threadContext.putHeader(Task.X_OPAQUE_ID, id); + opaqueId = ", opaque_id=[" + id + "]"; + } else { + opaqueId = ""; + } } public void testAnonymousAccessDeniedTransport() throws Exception { @@ -155,10 +164,10 @@ public void testAnonymousAccessDeniedTransport() throws 
Exception { auditTrail.anonymousAccessDenied("_action", message); if (message instanceof IndicesRequest) { assertMsg(logger, Level.INFO, prefix + "[transport] [anonymous_access_denied]\t" + origins + - ", action=[_action], indices=[" + indices(message) + "], request=[MockIndicesRequest]"); + ", action=[_action], indices=[" + indices(message) + "], request=[MockIndicesRequest]" + opaqueId); } else { assertMsg(logger, Level.INFO, prefix + "[transport] [anonymous_access_denied]\t" + origins + - ", action=[_action], request=[MockMessage]"); + ", action=[_action], request=[MockMessage]" + opaqueId); } // test disabled @@ -179,10 +188,10 @@ public void testAnonymousAccessDeniedRest() throws Exception { auditTrail.anonymousAccessDenied(request); if (includeRequestBody) { assertMsg(logger, Level.INFO, prefix + "[rest] [anonymous_access_denied]\torigin_address=[" + - NetworkAddress.format(address) + "], uri=[_uri], request_body=[" + expectedMessage + "]"); + NetworkAddress.format(address) + "], uri=[_uri]" + opaqueId + ", request_body=[" + expectedMessage + "]"); } else { assertMsg(logger, Level.INFO, prefix + "[rest] [anonymous_access_denied]\torigin_address=[" + - NetworkAddress.format(address) + "], uri=[_uri]"); + NetworkAddress.format(address) + "], uri=[_uri]" + opaqueId); } // test disabled @@ -202,10 +211,10 @@ public void testAuthenticationFailed() throws Exception { if (message instanceof IndicesRequest) { assertMsg(logger, Level.INFO, prefix + "[transport] [authentication_failed]\t" + origins + ", principal=[_principal], action=[_action], indices=[" + indices(message) + - "], request=[MockIndicesRequest]"); + "], request=[MockIndicesRequest]" + opaqueId); } else { assertMsg(logger, Level.INFO, prefix + "[transport] [authentication_failed]\t" + origins + - ", principal=[_principal], action=[_action], request=[MockMessage]"); + ", principal=[_principal], action=[_action], request=[MockMessage]" + opaqueId); } // test disabled @@ -224,10 +233,10 @@ public void testAuthenticationFailedNoToken() throws Exception { auditTrail.authenticationFailed("_action", message); if (message instanceof IndicesRequest) { assertMsg(logger, Level.INFO, prefix + "[transport] [authentication_failed]\t" + origins + - ", action=[_action], indices=[" + indices(message) + "], request=[MockIndicesRequest]"); + ", action=[_action], indices=[" + indices(message) + "], request=[MockIndicesRequest]" + opaqueId); } else { assertMsg(logger, Level.INFO, prefix + "[transport] [authentication_failed]\t" + origins + - ", action=[_action], request=[MockMessage]"); + ", action=[_action], request=[MockMessage]" + opaqueId); } // test disabled @@ -248,11 +257,11 @@ public void testAuthenticationFailedRest() throws Exception { auditTrail.authenticationFailed(new MockToken(), request); if (includeRequestBody) { assertMsg(logger, Level.INFO, prefix + "[rest] [authentication_failed]\torigin_address=[" + - NetworkAddress.format(address) + "], principal=[_principal], uri=[_uri], request_body=[" + + NetworkAddress.format(address) + "], principal=[_principal], uri=[_uri]" + opaqueId + ", request_body=[" + expectedMessage + "]"); } else { assertMsg(logger, Level.INFO, prefix + "[rest] [authentication_failed]\torigin_address=[" + - NetworkAddress.format(address) + "], principal=[_principal], uri=[_uri]"); + NetworkAddress.format(address) + "], principal=[_principal], uri=[_uri]" + opaqueId); } // test disabled @@ -273,10 +282,10 @@ public void testAuthenticationFailedRestNoToken() throws Exception { 
auditTrail.authenticationFailed(request); if (includeRequestBody) { assertMsg(logger, Level.INFO, prefix + "[rest] [authentication_failed]\torigin_address=[" + - NetworkAddress.format(address) + "], uri=[_uri], request_body=[" + expectedMessage + "]"); + NetworkAddress.format(address) + "], uri=[_uri]" + opaqueId + ", request_body=[" + expectedMessage + "]"); } else { assertMsg(logger, Level.INFO, prefix + "[rest] [authentication_failed]\torigin_address=[" + - NetworkAddress.format(address) + "], uri=[_uri]"); + NetworkAddress.format(address) + "], uri=[_uri]" + opaqueId); } // test disabled @@ -303,10 +312,10 @@ public void testAuthenticationFailedRealm() throws Exception { if (message instanceof IndicesRequest) { assertMsg(logger, Level.INFO, prefix + "[transport] [realm_authentication_failed]\trealm=[_realm], " + origins + ", principal=[_principal], action=[_action], indices=[" + indices(message) + "], " + - "request=[MockIndicesRequest]"); + "request=[MockIndicesRequest]" + opaqueId); } else { assertMsg(logger, Level.INFO, prefix + "[transport] [realm_authentication_failed]\trealm=[_realm], " + origins + - ", principal=[_principal], action=[_action], request=[MockMessage]"); + ", principal=[_principal], action=[_action], request=[MockMessage]" + opaqueId); } } @@ -327,11 +336,11 @@ public void testAuthenticationFailedRealmRest() throws Exception { auditTrail.authenticationFailed("_realm", new MockToken(), request); if (includeRequestBody) { assertMsg(logger, Level.INFO, prefix + "[rest] [realm_authentication_failed]\trealm=[_realm], origin_address=[" + - NetworkAddress.format(address) + "], principal=[_principal], uri=[_uri], request_body=[" + + NetworkAddress.format(address) + "], principal=[_principal], uri=[_uri]" + opaqueId + ", request_body=[" + expectedMessage + "]"); } else { assertMsg(logger, Level.INFO, prefix + "[rest] [realm_authentication_failed]\trealm=[_realm], origin_address=[" + - NetworkAddress.format(address) + "], principal=[_principal], uri=[_uri]"); + NetworkAddress.format(address) + "], principal=[_principal], uri=[_uri]" + opaqueId); } } @@ -353,10 +362,10 @@ public void testAccessGranted() throws Exception { : "principal=[_username], realm=[authRealm]") + ", roles=[" + role + "]"; if (message instanceof IndicesRequest) { assertMsg(logger, Level.INFO, prefix + "[transport] [access_granted]\t" + origins + ", " + userInfo + - ", action=[_action], indices=[" + indices(message) + "], request=[MockIndicesRequest]"); + ", action=[_action], indices=[" + indices(message) + "], request=[MockIndicesRequest]" + opaqueId); } else { assertMsg(logger, Level.INFO, prefix + "[transport] [access_granted]\t" + origins + ", " + userInfo + - ", action=[_action], request=[MockMessage]"); + ", action=[_action], request=[MockMessage]" + opaqueId); } // test disabled @@ -384,11 +393,11 @@ public void testAccessGrantedInternalSystemAction() throws Exception { assertMsg(logger, Level.INFO, prefix + "[transport] [access_granted]\t" + origins + ", principal=[" + SystemUser.INSTANCE.principal() + "], realm=[authRealm], roles=[" + role + "], action=[internal:_action], indices=[" + indices(message) - + "], request=[MockIndicesRequest]"); + + "], request=[MockIndicesRequest]" + opaqueId); } else { assertMsg(logger, Level.INFO, prefix + "[transport] [access_granted]\t" + origins + ", principal=[" + SystemUser.INSTANCE.principal() + "], realm=[authRealm], roles=[" + role - + "], action=[internal:_action], request=[MockMessage]"); + + "], action=[internal:_action], request=[MockMessage]" + 
opaqueId); } } @@ -410,10 +419,10 @@ public void testAccessGrantedInternalSystemActionNonSystemUser() throws Exceptio : "principal=[_username], realm=[authRealm]") + ", roles=[" + role + "]"; if (message instanceof IndicesRequest) { assertMsg(logger, Level.INFO, prefix + "[transport] [access_granted]\t" + origins + ", " + userInfo + - ", action=[internal:_action], indices=[" + indices(message) + "], request=[MockIndicesRequest]"); + ", action=[internal:_action], indices=[" + indices(message) + "], request=[MockIndicesRequest]" + opaqueId); } else { assertMsg(logger, Level.INFO, prefix + "[transport] [access_granted]\t" + origins + ", " + userInfo + - ", action=[internal:_action], request=[MockMessage]"); + ", action=[internal:_action], request=[MockMessage]" + opaqueId); } // test disabled @@ -442,10 +451,10 @@ public void testAccessDenied() throws Exception { : "principal=[_username], realm=[authRealm]") + ", roles=[" + role + "]"; if (message instanceof IndicesRequest) { assertMsg(logger, Level.INFO, prefix + "[transport] [access_denied]\t" + origins + ", " + userInfo + - ", action=[_action], indices=[" + indices(message) + "], request=[MockIndicesRequest]"); + ", action=[_action], indices=[" + indices(message) + "], request=[MockIndicesRequest]" + opaqueId); } else { assertMsg(logger, Level.INFO, prefix + "[transport] [access_denied]\t" + origins + ", " + userInfo + - ", action=[_action], request=[MockMessage]"); + ", action=[_action], request=[MockMessage]" + opaqueId); } // test disabled @@ -466,10 +475,10 @@ public void testTamperedRequestRest() throws Exception { auditTrail.tamperedRequest(request); if (includeRequestBody) { assertMsg(logger, Level.INFO, prefix + "[rest] [tampered_request]\torigin_address=[" + - NetworkAddress.format(address) + "], uri=[_uri], request_body=[" + expectedMessage + "]"); + NetworkAddress.format(address) + "], uri=[_uri]" + opaqueId + ", request_body=[" + expectedMessage + "]"); } else { assertMsg(logger, Level.INFO, prefix + "[rest] [tampered_request]\torigin_address=[" + - NetworkAddress.format(address) + "], uri=[_uri]"); + NetworkAddress.format(address) + "], uri=[_uri]" + opaqueId); } // test disabled @@ -489,10 +498,10 @@ public void testTamperedRequest() throws Exception { auditTrail.tamperedRequest(action, message); if (message instanceof IndicesRequest) { assertMsg(logger, Level.INFO, prefix + "[transport] [tampered_request]\t" + origins + - ", action=[_action], indices=[" + indices(message) + "], request=[MockIndicesRequest]"); + ", action=[_action], indices=[" + indices(message) + "], request=[MockIndicesRequest]" + opaqueId); } else { assertMsg(logger, Level.INFO, prefix + "[transport] [tampered_request]\t" + origins + - ", action=[_action], request=[MockMessage]"); + ", action=[_action], request=[MockMessage]" + opaqueId); } // test disabled @@ -516,10 +525,10 @@ public void testTamperedRequestWithUser() throws Exception { auditTrail.tamperedRequest(user, action, message); if (message instanceof IndicesRequest) { assertMsg(logger, Level.INFO, prefix + "[transport] [tampered_request]\t" + origins + ", " + userInfo + - ", action=[_action], indices=[" + indices(message) + "], request=[MockIndicesRequest]"); + ", action=[_action], indices=[" + indices(message) + "], request=[MockIndicesRequest]" + opaqueId); } else { assertMsg(logger, Level.INFO, prefix + "[transport] [tampered_request]\t" + origins + ", " + userInfo + - ", action=[_action], request=[MockMessage]"); + ", action=[_action], request=[MockMessage]" + opaqueId); } // test disabled 
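The expectations above all append `opaqueId` because the test's `init()` randomly seeds the `ThreadContext` with an `X-Opaque-ID` header. The helper added to `LoggingAuditTrail` returns either a `, opaque_id=[...]` suffix or the empty string, so every format string can end with an unconditional `{}`. A self-contained sketch of that contract, with a plain `Map` standing in for `ThreadContext`:

```java
import java.util.Map;

final class OpaqueIdSuffixDemo {
    // Same header name as org.elasticsearch.tasks.Task.X_OPAQUE_ID.
    static final String X_OPAQUE_ID = "X-Opaque-Id";

    // Mirrors LoggingAuditTrail#opaqueId(): a ", opaque_id=[...]" suffix when the
    // header is present, otherwise an empty string that leaves the log line unchanged.
    static String opaqueId(Map<String, String> headers) {
        String opaqueId = headers.get(X_OPAQUE_ID);
        return opaqueId != null ? ", opaque_id=[" + opaqueId + "]" : "";
    }

    public static void main(String[] args) {
        // prints ", opaque_id=[my-request]"
        System.out.println(opaqueId(Map.of(X_OPAQUE_ID, "my-request")));
        // prints an empty line: no suffix when the client sent no header
        System.out.println(opaqueId(Map.of()));
    }
}
```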
@@ -537,7 +546,7 @@ public void testConnectionDenied() throws Exception { final SecurityIpFilterRule rule = new SecurityIpFilterRule(false, "_all"); auditTrail.connectionDenied(inetAddress, "default", rule); assertMsg(logger, Level.INFO, String.format(Locale.ROOT, prefix + - "[ip_filter] [connection_denied]\torigin_address=[%s], transport_profile=[%s], rule=[deny %s]", + "[ip_filter] [connection_denied]\torigin_address=[%s], transport_profile=[%s], rule=[deny %s]" + opaqueId, NetworkAddress.format(inetAddress), "default", "_all")); // test disabled @@ -562,7 +571,8 @@ public void testConnectionGranted() throws Exception { auditTrail = new LoggingAuditTrail(settings, clusterService, logger, threadContext); auditTrail.connectionGranted(inetAddress, "default", rule); assertMsg(logger, Level.INFO, String.format(Locale.ROOT, prefix + "[ip_filter] [connection_granted]\torigin_address=[%s], " + - "transport_profile=[default], rule=[allow default:accept_all]", NetworkAddress.format(inetAddress))); + "transport_profile=[default], rule=[allow default:accept_all]" + opaqueId, + NetworkAddress.format(inetAddress))); } public void testRunAsGranted() throws Exception { @@ -577,12 +587,12 @@ public void testRunAsGranted() throws Exception { assertMsg(logger, Level.INFO, prefix + "[transport] [run_as_granted]\t" + origins + ", principal=[_username], realm=[authRealm], run_as_principal=[running as], run_as_realm=[lookRealm], roles=[" - + role + "], action=[_action], indices=[" + indices(message) + "], request=[MockIndicesRequest]"); + + role + "], action=[_action], indices=[" + indices(message) + "], request=[MockIndicesRequest]" + opaqueId); } else { assertMsg(logger, Level.INFO, prefix + "[transport] [run_as_granted]\t" + origins + ", principal=[_username], realm=[authRealm], run_as_principal=[running as], run_as_realm=[lookRealm], roles=[" - + role + "], action=[_action], request=[MockMessage]"); + + role + "], action=[_action], request=[MockMessage]" + opaqueId); } // test disabled @@ -605,12 +615,12 @@ public void testRunAsDenied() throws Exception { assertMsg(logger, Level.INFO, prefix + "[transport] [run_as_denied]\t" + origins + ", principal=[_username], realm=[authRealm], run_as_principal=[running as], run_as_realm=[lookRealm], roles=[" - + role + "], action=[_action], indices=[" + indices(message) + "], request=[MockIndicesRequest]"); + + role + "], action=[_action], indices=[" + indices(message) + "], request=[MockIndicesRequest]" + opaqueId); } else { assertMsg(logger, Level.INFO, prefix + "[transport] [run_as_denied]\t" + origins + ", principal=[_username], realm=[authRealm], run_as_principal=[running as], run_as_realm=[lookRealm], roles=[" - + role + "], action=[_action], request=[MockMessage]"); + + role + "], action=[_action], request=[MockMessage]" + opaqueId); } // test disabled @@ -667,10 +677,11 @@ public void testAuthenticationSuccessRest() throws Exception { if (includeRequestBody) { assertMsg(logger, Level.INFO, prefix + "[rest] [authentication_success]\t" + userInfo + ", realm=[_realm], uri=[_uri], params=[" + params - + "], request_body=[" + expectedMessage + "]"); + + "]" + opaqueId + ", request_body=[" + expectedMessage + "]"); } else { assertMsg(logger, Level.INFO, - prefix + "[rest] [authentication_success]\t" + userInfo + ", realm=[_realm], uri=[_uri], params=[" + params + "]"); + prefix + "[rest] [authentication_success]\t" + userInfo + ", realm=[_realm], uri=[_uri], params=[" + params + + "]" + opaqueId); } // test disabled @@ -701,10 +712,10 @@ public void 
testAuthenticationSuccessTransport() throws Exception { auditTrail.authenticationSuccess(realm, user, "_action", message); if (message instanceof IndicesRequest) { assertMsg(logger, Level.INFO, prefix + "[transport] [authentication_success]\t" + origins + ", " + userInfo - + ", realm=[_realm], action=[_action], indices=[" + indices(message) + "], request=[MockIndicesRequest]"); + + ", realm=[_realm], action=[_action], indices=[" + indices(message) + "], request=[MockIndicesRequest]" + opaqueId); } else { assertMsg(logger, Level.INFO, prefix + "[transport] [authentication_success]\t" + origins + ", " + userInfo - + ", realm=[_realm], action=[_action], request=[MockMessage]"); + + ", realm=[_realm], action=[_action], request=[MockMessage]" + opaqueId); } // test disabled diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/tool/SetupPasswordToolTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/tool/SetupPasswordToolTests.java index d614afc0aeb81..a359c1ba6ecc1 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/tool/SetupPasswordToolTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/tool/SetupPasswordToolTests.java @@ -27,9 +27,9 @@ import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.env.Environment; import org.elasticsearch.env.TestEnvironment; -import org.elasticsearch.license.XPackInfoResponse; -import org.elasticsearch.license.XPackInfoResponse.FeatureSetsInfo; -import org.elasticsearch.license.XPackInfoResponse.FeatureSetsInfo.FeatureSet; +import org.elasticsearch.protocol.xpack.XPackInfoResponse; +import org.elasticsearch.protocol.xpack.XPackInfoResponse.FeatureSetsInfo; +import org.elasticsearch.protocol.xpack.XPackInfoResponse.FeatureSetsInfo.FeatureSet; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.xpack.core.security.support.Validation; import org.elasticsearch.xpack.core.security.user.ElasticUser; diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectoryRealmTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectoryRealmTests.java index bcd7996e32a8c..739523795e7c5 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectoryRealmTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectoryRealmTests.java @@ -8,8 +8,10 @@ import com.unboundid.ldap.listener.InMemoryDirectoryServer; import com.unboundid.ldap.listener.InMemoryDirectoryServerConfig; import com.unboundid.ldap.sdk.Attribute; +import com.unboundid.ldap.sdk.FailoverServerSet; import com.unboundid.ldap.sdk.LDAPException; import com.unboundid.ldap.sdk.LDAPURL; +import com.unboundid.ldap.sdk.SingleServerSet; import com.unboundid.ldap.sdk.schema.Schema; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.PlainActionFuture; @@ -28,6 +30,7 @@ import org.elasticsearch.xpack.core.security.authc.ldap.ActiveDirectorySessionFactorySettings; import org.elasticsearch.xpack.core.security.authc.ldap.LdapRealmSettings; import org.elasticsearch.xpack.core.security.authc.ldap.PoolingSessionFactorySettings; +import org.elasticsearch.xpack.core.security.authc.ldap.support.SessionFactorySettings; import 
org.elasticsearch.xpack.core.security.authc.support.CachingUsernamePasswordRealmSettings; import org.elasticsearch.xpack.core.security.authc.support.DnRoleMapperSettings; import org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken; @@ -51,9 +54,11 @@ import static org.elasticsearch.xpack.core.security.authc.ldap.support.SessionFactorySettings.URLS_SETTING; import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.arrayContainingInAnyOrder; +import static org.hamcrest.Matchers.arrayWithSize; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasEntry; +import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.is; import static org.hamcrest.Matchers.notNullValue; import static org.mockito.Matchers.any; @@ -355,6 +360,48 @@ public void testCustomSearchFilters() throws Exception { assertEquals("(objectClass=down level)", sessionFactory.downLevelADAuthenticator.getUserSearchFilter()); } + public void testBuildUrlFromDomainNameAndDefaultPort() throws Exception { + Settings settings = Settings.builder() + .put(ActiveDirectorySessionFactorySettings.AD_DOMAIN_NAME_SETTING, "ad.test.elasticsearch.com") + .build(); + RealmConfig config = new RealmConfig("testBuildUrlFromDomainNameAndDefaultPort", settings, globalSettings, + TestEnvironment.newEnvironment(globalSettings), new ThreadContext(globalSettings)); + ActiveDirectorySessionFactory sessionFactory = new ActiveDirectorySessionFactory(config, sslService, threadPool); + assertSingleLdapServer(sessionFactory, "ad.test.elasticsearch.com", 389); + } + + public void testBuildUrlFromDomainNameAndCustomPort() throws Exception { + Settings settings = Settings.builder() + .put(ActiveDirectorySessionFactorySettings.AD_DOMAIN_NAME_SETTING, "ad.test.elasticsearch.com") + .put(ActiveDirectorySessionFactorySettings.AD_LDAP_PORT_SETTING.getKey(), 10389) + .build(); + RealmConfig config = new RealmConfig("testBuildUrlFromDomainNameAndCustomPort", settings, globalSettings, + TestEnvironment.newEnvironment(globalSettings), new ThreadContext(globalSettings)); + ActiveDirectorySessionFactory sessionFactory = new ActiveDirectorySessionFactory(config, sslService, threadPool); + assertSingleLdapServer(sessionFactory, "ad.test.elasticsearch.com", 10389); + } + + public void testUrlConfiguredInSettings() throws Exception { + Settings settings = Settings.builder() + .put(ActiveDirectorySessionFactorySettings.AD_DOMAIN_NAME_SETTING, "ad.test.elasticsearch.com") + .put(SessionFactorySettings.URLS_SETTING, "ldap://ad01.testing.elastic.co:20389/") + .build(); + RealmConfig config = new RealmConfig("testBuildUrlFromDomainNameAndCustomPort", settings, globalSettings, + TestEnvironment.newEnvironment(globalSettings), new ThreadContext(globalSettings)); + ActiveDirectorySessionFactory sessionFactory = new ActiveDirectorySessionFactory(config, sslService, threadPool); + assertSingleLdapServer(sessionFactory, "ad01.testing.elastic.co", 20389); + } + + private void assertSingleLdapServer(ActiveDirectorySessionFactory sessionFactory, String hostname, int port) { + assertThat(sessionFactory.getServerSet(), instanceOf(FailoverServerSet.class)); + FailoverServerSet fss = (FailoverServerSet) sessionFactory.getServerSet(); + assertThat(fss.getServerSets(), arrayWithSize(1)); + assertThat(fss.getServerSets()[0], instanceOf(SingleServerSet.class)); + SingleServerSet sss = (SingleServerSet) fss.getServerSets()[0]; + 
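// (A hedged aside: the shape asserted by this helper can be reproduced standalone
// with the UnboundID SDK -- one address/port pair yields a FailoverServerSet whose
// single delegate is a SingleServerSet. The values are illustrative, not the
// session factory's actual wiring:
//
//     FailoverServerSet fss = new FailoverServerSet(
//             new String[] { "ad.test.elasticsearch.com" }, new int[] { 389 });
//     SingleServerSet single = (SingleServerSet) fss.getServerSets()[0];
//     assert single.getAddress().equals("ad.test.elasticsearch.com");
//     assert single.getPort() == 389;
// )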
assertThat(sss.getAddress(), equalTo(hostname)); + assertThat(sss.getPort(), equalTo(port)); + } + private Settings settings() throws Exception { return settings(Settings.EMPTY); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SimpleSecurityNioTransportTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SimpleSecurityNioTransportTests.java index c5a6a525d4e10..835fcf302c727 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SimpleSecurityNioTransportTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SimpleSecurityNioTransportTests.java @@ -80,7 +80,7 @@ public MockTransportService nioFromThreadPool(Settings settings, ThreadPool thre .put("xpack.security.transport.ssl.enabled", true).build(); Transport transport = new SecurityNioTransport(settings1, threadPool, networkService, BigArrays.NON_RECYCLING_INSTANCE, new MockPageCacheRecycler(settings), namedWriteableRegistry, - new NoneCircuitBreakerService(), createSSLService()) { + new NoneCircuitBreakerService(), null, createSSLService()) { @Override protected Version executeHandshake(DiscoveryNode node, TcpChannel channel, TimeValue timeout) throws IOException, diff --git a/x-pack/plugin/sql/src/main/antlr/SqlBase.g4 b/x-pack/plugin/sql/src/main/antlr/SqlBase.g4 index ea0b7da161c1e..2c3288babd6d8 100644 --- a/x-pack/plugin/sql/src/main/antlr/SqlBase.g4 +++ b/x-pack/plugin/sql/src/main/antlr/SqlBase.g4 @@ -74,9 +74,14 @@ queryNoWith : queryTerm /** we could add sort by - sort per partition */ (ORDER BY orderBy (',' orderBy)*)? - (LIMIT limit=(INTEGER_VALUE | ALL))? + limitClause? ; +limitClause + : LIMIT limit=(INTEGER_VALUE | ALL) + | LIMIT_ESC limit=(INTEGER_VALUE | ALL) ESC_END + ; + queryTerm : querySpecification #queryPrimaryDefault | '(' queryNoWith ')' #subquery @@ -185,7 +190,12 @@ predicate ; pattern - : value=string (ESCAPE escape=string)? + : value=string patternEscape? + ; + +patternEscape + : ESCAPE escape=string + | ESCAPE_ESC escape=string '}' ; valueExpression @@ -197,18 +207,44 @@ valueExpression ; primaryExpression - : CAST '(' expression AS dataType ')' #cast - | EXTRACT '(' field=identifier FROM valueExpression ')' #extract + : castExpression #cast + | extractExpression #extract | constant #constantDefault | ASTERISK #star | (qualifiedName DOT)? ASTERISK #star - | identifier '(' (setQuantifier? expression (',' expression)*)? ')' #functionCall + | functionExpression #function | '(' query ')' #subqueryExpression | identifier #columnReference | qualifiedName #dereference | '(' expression ')' #parenthesizedExpression ; +castExpression + : castTemplate + | FUNCTION_ESC castTemplate ESC_END + ; + +castTemplate + : CAST '(' expression AS dataType ')' + ; + +extractExpression + : extractTemplate + | FUNCTION_ESC extractTemplate ESC_END + ; + +extractTemplate + : EXTRACT '(' field=identifier FROM valueExpression ')' + ; + +functionExpression + : functionTemplate + | FUNCTION_ESC functionTemplate '}' + ; + +functionTemplate + : identifier '(' (setQuantifier? expression (',' expression)*)? 
')' + ; constant : NULL #nullLiteral @@ -216,6 +252,10 @@ constant | booleanValue #booleanLiteral | STRING+ #stringLiteral | PARAM #paramLiteral + | DATE_ESC string ESC_END #dateEscapedLiteral + | TIME_ESC string ESC_END #timeEscapedLiteral + | TIMESTAMP_ESC string ESC_END #timestampEscapedLiteral + | GUID_ESC string ESC_END #guidEscapedLiteral ; comparisonOperator @@ -351,6 +391,18 @@ VERIFY: 'VERIFY'; WHERE: 'WHERE'; WITH: 'WITH'; +// Escaped Sequence +ESCAPE_ESC: '{ESCAPE'; +FUNCTION_ESC: '{FN'; +LIMIT_ESC:'{LIMIT'; +DATE_ESC: '{D'; +TIME_ESC: '{T'; +TIMESTAMP_ESC: '{TS'; +// mapped to string literal +GUID_ESC: '{GUID'; + +ESC_END: '}'; + EQ : '='; NEQ : '<>' | '!=' | '<=>'; LT : '<'; diff --git a/x-pack/plugin/sql/src/main/antlr/SqlBase.tokens b/x-pack/plugin/sql/src/main/antlr/SqlBase.tokens index 87cf9a4809d4a..527cc676e1d80 100644 --- a/x-pack/plugin/sql/src/main/antlr/SqlBase.tokens +++ b/x-pack/plugin/sql/src/main/antlr/SqlBase.tokens @@ -69,33 +69,41 @@ USING=68 VERIFY=69 WHERE=70 WITH=71 -EQ=72 -NEQ=73 -LT=74 -LTE=75 -GT=76 -GTE=77 -PLUS=78 -MINUS=79 -ASTERISK=80 -SLASH=81 -PERCENT=82 -CONCAT=83 -DOT=84 -PARAM=85 -STRING=86 -INTEGER_VALUE=87 -DECIMAL_VALUE=88 -IDENTIFIER=89 -DIGIT_IDENTIFIER=90 -TABLE_IDENTIFIER=91 -QUOTED_IDENTIFIER=92 -BACKQUOTED_IDENTIFIER=93 -SIMPLE_COMMENT=94 -BRACKETED_COMMENT=95 -WS=96 -UNRECOGNIZED=97 -DELIMITER=98 +ESCAPE_ESC=72 +FUNCTION_ESC=73 +LIMIT_ESC=74 +DATE_ESC=75 +TIME_ESC=76 +TIMESTAMP_ESC=77 +GUID_ESC=78 +ESC_END=79 +EQ=80 +NEQ=81 +LT=82 +LTE=83 +GT=84 +GTE=85 +PLUS=86 +MINUS=87 +ASTERISK=88 +SLASH=89 +PERCENT=90 +CONCAT=91 +DOT=92 +PARAM=93 +STRING=94 +INTEGER_VALUE=95 +DECIMAL_VALUE=96 +IDENTIFIER=97 +DIGIT_IDENTIFIER=98 +TABLE_IDENTIFIER=99 +QUOTED_IDENTIFIER=100 +BACKQUOTED_IDENTIFIER=101 +SIMPLE_COMMENT=102 +BRACKETED_COMMENT=103 +WS=104 +UNRECOGNIZED=105 +DELIMITER=106 '('=1 ')'=2 ','=3 @@ -167,16 +175,24 @@ DELIMITER=98 'VERIFY'=69 'WHERE'=70 'WITH'=71 -'='=72 -'<'=74 -'<='=75 -'>'=76 -'>='=77 -'+'=78 -'-'=79 -'*'=80 -'/'=81 -'%'=82 -'||'=83 -'.'=84 -'?'=85 +'{ESCAPE'=72 +'{FN'=73 +'{LIMIT'=74 +'{D'=75 +'{T'=76 +'{TS'=77 +'{GUID'=78 +'}'=79 +'='=80 +'<'=82 +'<='=83 +'>'=84 +'>='=85 +'+'=86 +'-'=87 +'*'=88 +'/'=89 +'%'=90 +'||'=91 +'.'=92 +'?'=93 diff --git a/x-pack/plugin/sql/src/main/antlr/SqlBaseLexer.tokens b/x-pack/plugin/sql/src/main/antlr/SqlBaseLexer.tokens index a687a9215ecb5..155d4860e0ea0 100644 --- a/x-pack/plugin/sql/src/main/antlr/SqlBaseLexer.tokens +++ b/x-pack/plugin/sql/src/main/antlr/SqlBaseLexer.tokens @@ -69,32 +69,40 @@ USING=68 VERIFY=69 WHERE=70 WITH=71 -EQ=72 -NEQ=73 -LT=74 -LTE=75 -GT=76 -GTE=77 -PLUS=78 -MINUS=79 -ASTERISK=80 -SLASH=81 -PERCENT=82 -CONCAT=83 -DOT=84 -PARAM=85 -STRING=86 -INTEGER_VALUE=87 -DECIMAL_VALUE=88 -IDENTIFIER=89 -DIGIT_IDENTIFIER=90 -TABLE_IDENTIFIER=91 -QUOTED_IDENTIFIER=92 -BACKQUOTED_IDENTIFIER=93 -SIMPLE_COMMENT=94 -BRACKETED_COMMENT=95 -WS=96 -UNRECOGNIZED=97 +ESCAPE_ESC=72 +FUNCTION_ESC=73 +LIMIT_ESC=74 +DATE_ESC=75 +TIME_ESC=76 +TIMESTAMP_ESC=77 +GUID_ESC=78 +ESC_END=79 +EQ=80 +NEQ=81 +LT=82 +LTE=83 +GT=84 +GTE=85 +PLUS=86 +MINUS=87 +ASTERISK=88 +SLASH=89 +PERCENT=90 +CONCAT=91 +DOT=92 +PARAM=93 +STRING=94 +INTEGER_VALUE=95 +DECIMAL_VALUE=96 +IDENTIFIER=97 +DIGIT_IDENTIFIER=98 +TABLE_IDENTIFIER=99 +QUOTED_IDENTIFIER=100 +BACKQUOTED_IDENTIFIER=101 +SIMPLE_COMMENT=102 +BRACKETED_COMMENT=103 +WS=104 +UNRECOGNIZED=105 '('=1 ')'=2 ','=3 @@ -166,16 +174,24 @@ UNRECOGNIZED=97 'VERIFY'=69 'WHERE'=70 'WITH'=71 -'='=72 -'<'=74 -'<='=75 -'>'=76 -'>='=77 -'+'=78 -'-'=79 -'*'=80 -'/'=81 
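// To make the new escape-sequence tokens concrete, hypothetical queries each new
// token family is meant to accept (JDBC-style escape syntax; values illustrative):
//
//     SELECT {fn UCASE(first_name)} FROM emp                           -- FUNCTION_ESC
//     SELECT * FROM emp WHERE hired = {d '1999-05-12'}                 -- DATE_ESC
//     SELECT * FROM emp WHERE hired = {ts '1999-05-12 11:22:33.456'}   -- TIMESTAMP_ESC
//     SELECT * FROM emp WHERE id = {guid '12345678-90ab-cdef-0123-456789abcdef'}  -- GUID_ESC
//     SELECT * FROM emp {limit 10}                                     -- LIMIT_ESC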
-'%'=82 -'||'=83 -'.'=84 -'?'=85 +'{ESCAPE'=72 +'{FN'=73 +'{LIMIT'=74 +'{D'=75 +'{T'=76 +'{TS'=77 +'{GUID'=78 +'}'=79 +'='=80 +'<'=82 +'<='=83 +'>'=84 +'>='=85 +'+'=86 +'-'=87 +'*'=88 +'/'=89 +'%'=90 +'||'=91 +'.'=92 +'?'=93 diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java index 6f8be61b463fd..4915a25a55bc7 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/analysis/analyzer/Verifier.java @@ -213,10 +213,11 @@ static Collection verify(LogicalPlan plan) { * Check validity of Aggregate/GroupBy. * This rule is needed for multiple reasons: * 1. a user might specify an invalid aggregate (SELECT foo GROUP BY bar) - * 2. the order/having might contain a non-grouped attribute. This is typically + * 2. the ORDER BY/HAVING might contain a non-grouped attribute. This is typically * caught by the Analyzer however if wrapped in a function (ABS()) it gets resolved * (because the expression gets resolved little by little without being pushed down, * without the Analyzer modifying anything. + * 2a. HAVING also requires an Aggregate function * 3. composite agg (used for GROUP BY) allows ordering only on the group keys */ private static boolean checkGroupBy(LogicalPlan p, Set localFailures, @@ -244,7 +245,7 @@ private static boolean checkGroupByOrder(LogicalPlan p, Set localFailur } // make sure to compare attributes directly - if (Expressions.anyMatch(a.groupings(), + if (Expressions.anyMatch(a.groupings(), g -> e.semanticEquals(e instanceof Attribute ? Expressions.attribute(g) : g))) { return; } @@ -278,13 +279,14 @@ private static boolean checkGroupByHaving(LogicalPlan p, Set localFailu Map> missing = new LinkedHashMap<>(); Expression condition = f.condition(); - condition.collectFirstChildren(c -> checkGroupMatch(c, condition, a.groupings(), missing, functions)); + // variation of checkGroupMatch customized for HAVING, which requires just aggregations + condition.collectFirstChildren(c -> checkGroupByHavingHasOnlyAggs(c, condition, missing, functions)); if (!missing.isEmpty()) { String plural = missing.size() > 1 ? 
"s" : StringUtils.EMPTY; - localFailures.add(fail(condition, "Cannot filter by non-grouped column" + plural + " %s, expected %s", - Expressions.names(missing.keySet()), - Expressions.names(a.groupings()))); + localFailures.add( + fail(condition, "Cannot filter HAVING on non-aggregate" + plural + " %s; consider using WHERE instead", + Expressions.names(missing.keySet()))); groupingFailures.add(a); return false; } @@ -294,6 +296,57 @@ private static boolean checkGroupByHaving(LogicalPlan p, Set localFailu } + private static boolean checkGroupByHavingHasOnlyAggs(Expression e, Node source, + Map> missing, Map functions) { + + // resolve FunctionAttribute to backing functions + if (e instanceof FunctionAttribute) { + FunctionAttribute fa = (FunctionAttribute) e; + Function function = functions.get(fa.functionId()); + // TODO: this should be handled by a different rule + if (function == null) { + return false; + } + e = function; + } + + // scalar functions can be a binary tree + // first test the function against the grouping + // and if that fails, start unpacking hoping to find matches + if (e instanceof ScalarFunction) { + ScalarFunction sf = (ScalarFunction) e; + + // unwrap function to find the base + for (Expression arg : sf.arguments()) { + arg.collectFirstChildren(c -> checkGroupByHavingHasOnlyAggs(c, source, missing, functions)); + } + return true; + + } else if (e instanceof Score) { + // Score can't be used for having + missing.put(e, source); + return true; + } + + // skip literals / foldable + if (e.foldable()) { + return true; + } + // skip aggs (allowed to refer to non-group columns) + if (Functions.isAggregate(e)) { + return true; + } + + // left without leaves which have to match; that's a failure since everything should be based on an agg + if (e instanceof Attribute) { + missing.put(e, source); + return true; + } + + return false; + } + + // check whether plain columns specified in an agg are mentioned in the group-by private static boolean checkGroupByAgg(LogicalPlan p, Set localFailures, Set groupingFailures, Map functions) { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistry.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistry.java index 22141497a5c25..212149683ffd9 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistry.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/FunctionRegistry.java @@ -58,6 +58,16 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.math.Sinh; import org.elasticsearch.xpack.sql.expression.function.scalar.math.Sqrt; import org.elasticsearch.xpack.sql.expression.function.scalar.math.Tan; +import org.elasticsearch.xpack.sql.expression.function.scalar.string.Ascii; +import org.elasticsearch.xpack.sql.expression.function.scalar.string.BitLength; +import org.elasticsearch.xpack.sql.expression.function.scalar.string.Char; +import org.elasticsearch.xpack.sql.expression.function.scalar.string.CharLength; +import org.elasticsearch.xpack.sql.expression.function.scalar.string.LCase; +import org.elasticsearch.xpack.sql.expression.function.scalar.string.LTrim; +import org.elasticsearch.xpack.sql.expression.function.scalar.string.Length; +import org.elasticsearch.xpack.sql.expression.function.scalar.string.RTrim; +import org.elasticsearch.xpack.sql.expression.function.scalar.string.Space; +import 
org.elasticsearch.xpack.sql.expression.function.scalar.string.UCase; import org.elasticsearch.xpack.sql.parser.ParsingException; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.util.StringUtils; @@ -134,6 +144,17 @@ public class FunctionRegistry { def(Sinh.class, Sinh::new), def(Sqrt.class, Sqrt::new), def(Tan.class, Tan::new), + // String + def(Ascii.class, Ascii::new), + def(Char.class, Char::new), + def(BitLength.class, BitLength::new), + def(CharLength.class, CharLength::new), + def(LCase.class, LCase::new), + def(Length.class, Length::new), + def(LTrim.class, LTrim::new), + def(RTrim.class, RTrim::new), + def(Space.class, Space::new), + def(UCase.class, UCase::new), // Special def(Score.class, Score::new))); @@ -299,6 +320,7 @@ interface BinaryFunctionBuilder { T build(Location location, Expression lhs, Expression rhs); } + @SuppressWarnings("overloads") private static FunctionDefinition def(Class function, FunctionBuilder builder, boolean datetime, String... aliases) { String primaryName = normalize(function.getSimpleName()); diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/Processors.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/Processors.java index 2084ad684df67..3c3f629cc1caf 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/Processors.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/Processors.java @@ -17,6 +17,7 @@ import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.ConstantProcessor; import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.HitExtractorProcessor; import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.Processor; +import org.elasticsearch.xpack.sql.expression.function.scalar.string.StringProcessor; import java.util.ArrayList; import java.util.List; @@ -46,6 +47,8 @@ public static List getNamedWriteables() { entries.add(new Entry(Processor.class, DateTimeProcessor.NAME, DateTimeProcessor::new)); // math entries.add(new Entry(Processor.class, MathProcessor.NAME, MathProcessor::new)); + // string + entries.add(new Entry(Processor.class, StringProcessor.NAME, StringProcessor::new)); return entries; } } \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/ATan2.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/ATan2.java index 1a86a44d32b13..24bbebd64c2db 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/ATan2.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/ATan2.java @@ -30,7 +30,7 @@ protected BiFunction operation() { } @Override - protected NodeInfo info() { + protected NodeInfo info() { return NodeInfo.create(this, ATan2::new, left(), right()); } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Power.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Power.java index d38d7067cafb5..4e362dbb8e5eb 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Power.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/math/Power.java @@ 
-26,7 +26,7 @@ protected BiFunction operation() {
 }
 
     @Override
-    protected NodeInfo info() {
+    protected NodeInfo<Power> info() {
         return NodeInfo.create(this, Power::new, left(), right());
     }
 
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Ascii.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Ascii.java
new file mode 100644
index 0000000000000..7f74a22cd80af
--- /dev/null
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Ascii.java
@@ -0,0 +1,42 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.sql.expression.function.scalar.string;
+
+import org.elasticsearch.xpack.sql.expression.Expression;
+import org.elasticsearch.xpack.sql.expression.function.scalar.string.StringProcessor.StringOperation;
+import org.elasticsearch.xpack.sql.tree.Location;
+import org.elasticsearch.xpack.sql.tree.NodeInfo;
+import org.elasticsearch.xpack.sql.type.DataType;
+
+/**
+ * Returns the ASCII code of the leftmost character of the given (char) expression.
+ */
+public class Ascii extends UnaryStringFunction {
+
+    public Ascii(Location location, Expression field) {
+        super(location, field);
+    }
+
+    @Override
+    protected NodeInfo<Ascii> info() {
+        return NodeInfo.create(this, Ascii::new, field());
+    }
+
+    @Override
+    protected Ascii replaceChild(Expression newChild) {
+        return new Ascii(location(), newChild);
+    }
+
+    @Override
+    protected StringOperation operation() {
+        return StringOperation.ASCII;
+    }
+
+    @Override
+    public DataType dataType() {
+        return DataType.INTEGER;
+    }
+}
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BitLength.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BitLength.java
new file mode 100644
index 0000000000000..3254e0538f06e
--- /dev/null
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/BitLength.java
@@ -0,0 +1,43 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.sql.expression.function.scalar.string;
+
+import org.elasticsearch.xpack.sql.expression.Expression;
+import org.elasticsearch.xpack.sql.expression.function.scalar.string.StringProcessor.StringOperation;
+import org.elasticsearch.xpack.sql.tree.Location;
+import org.elasticsearch.xpack.sql.tree.NodeInfo;
+import org.elasticsearch.xpack.sql.type.DataType;
+
+/**
+ * Returns the number of bits contained within the value expression.
+ */ +public class BitLength extends UnaryStringFunction { + + public BitLength(Location location, Expression field) { + super(location, field); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, BitLength::new, field()); + } + + @Override + protected BitLength replaceChild(Expression newChild) { + return new BitLength(location(), newChild); + } + + @Override + protected StringOperation operation() { + return StringOperation.BIT_LENGTH; + } + + @Override + public DataType dataType() { + //TODO investigate if a data type Long (BIGINT) wouldn't be more appropriate here + return DataType.INTEGER; + } +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Char.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Char.java new file mode 100644 index 0000000000000..06d1c3d81cc42 --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Char.java @@ -0,0 +1,42 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.sql.expression.function.scalar.string; + +import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.function.scalar.string.StringProcessor.StringOperation; +import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.NodeInfo; +import org.elasticsearch.xpack.sql.type.DataType; + +/** + * Converts an int ASCII code to a character value. + */ +public class Char extends UnaryStringIntFunction { + + public Char(Location location, Expression field) { + super(location, field); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, Char::new, field()); + } + + @Override + protected Char replaceChild(Expression newChild) { + return new Char(location(), newChild); + } + + @Override + protected StringOperation operation() { + return StringOperation.CHAR; + } + + @Override + public DataType dataType() { + return DataType.KEYWORD; + } +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/CharLength.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/CharLength.java new file mode 100644 index 0000000000000..bdf43fbeb4ee9 --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/CharLength.java @@ -0,0 +1,42 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.sql.expression.function.scalar.string; + +import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.function.scalar.string.StringProcessor.StringOperation; +import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.NodeInfo; +import org.elasticsearch.xpack.sql.type.DataType; + +/** + * Returns the length (in characters) of the string expression. 
+ */ +public class CharLength extends UnaryStringFunction { + + public CharLength(Location location, Expression field) { + super(location, field); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, CharLength::new, field()); + } + + @Override + protected CharLength replaceChild(Expression newChild) { + return new CharLength(location(), newChild); + } + + @Override + protected StringOperation operation() { + return StringOperation.CHAR_LENGTH; + } + + @Override + public DataType dataType() { + return DataType.INTEGER; + } +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LCase.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LCase.java new file mode 100644 index 0000000000000..a074fcb3b98b6 --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LCase.java @@ -0,0 +1,42 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.sql.expression.function.scalar.string; + +import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.function.scalar.string.StringProcessor.StringOperation; +import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.NodeInfo; +import org.elasticsearch.xpack.sql.type.DataType; + +/** + * Lowercases all uppercase letters in a string. + */ +public class LCase extends UnaryStringFunction { + + public LCase(Location location, Expression field) { + super(location, field); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, LCase::new, field()); + } + + @Override + protected LCase replaceChild(Expression newChild) { + return new LCase(location(), newChild); + } + + @Override + protected StringOperation operation() { + return StringOperation.LCASE; + } + + @Override + public DataType dataType() { + return DataType.KEYWORD; + } +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LTrim.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LTrim.java new file mode 100644 index 0000000000000..616f8ccdfedfc --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/LTrim.java @@ -0,0 +1,43 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.sql.expression.function.scalar.string; + +import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.function.scalar.string.StringProcessor.StringOperation; +import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.NodeInfo; +import org.elasticsearch.xpack.sql.type.DataType; + +/** + * Trims the leading whitespaces. 
+ */ +public class LTrim extends UnaryStringFunction { + + public LTrim(Location location, Expression field) { + super(location, field); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, LTrim::new, field()); + } + + @Override + protected LTrim replaceChild(Expression newChild) { + return new LTrim(location(), newChild); + } + + @Override + protected StringOperation operation() { + return StringOperation.LTRIM; + } + + @Override + public DataType dataType() { + return DataType.KEYWORD; + } + +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Length.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Length.java new file mode 100644 index 0000000000000..8e3efbfceec22 --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Length.java @@ -0,0 +1,43 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.sql.expression.function.scalar.string; + +import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.function.scalar.string.StringProcessor.StringOperation; +import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.NodeInfo; +import org.elasticsearch.xpack.sql.type.DataType; + +/** + * Returns the length (number of characters) in a string, excluding the trailing blanks. + */ +public class Length extends UnaryStringFunction { + + public Length(Location location, Expression field) { + super(location, field); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, Length::new, field()); + } + + @Override + protected Length replaceChild(Expression newChild) { + return new Length(location(), newChild); + } + + @Override + protected StringOperation operation() { + return StringOperation.LENGTH; + } + + @Override + public DataType dataType() { + return DataType.INTEGER; + } + +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/RTrim.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/RTrim.java new file mode 100644 index 0000000000000..433668420d35f --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/RTrim.java @@ -0,0 +1,43 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.sql.expression.function.scalar.string; + +import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.function.scalar.string.StringProcessor.StringOperation; +import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.NodeInfo; +import org.elasticsearch.xpack.sql.type.DataType; + +/** + * Trims the trailing whitespaces. 
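// With the registry entries added above (see the FunctionRegistry hunk), queries
// such as the following hypothetical ones exercise the new functions end to end
// (table and column names are illustrative):
//
//     SELECT ASCII(first_name), CHAR_LENGTH(last_name) FROM emp
//     SELECT CHAR(69), SPACE(3), LTRIM('  x'), RTRIM('x  '), UCASE(gender) FROM emp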
+ */
+public class RTrim extends UnaryStringFunction {
+
+    public RTrim(Location location, Expression field) {
+        super(location, field);
+    }
+
+    @Override
+    protected NodeInfo<RTrim> info() {
+        return NodeInfo.create(this, RTrim::new, field());
+    }
+
+    @Override
+    protected RTrim replaceChild(Expression newChild) {
+        return new RTrim(location(), newChild);
+    }
+
+    @Override
+    protected StringOperation operation() {
+        return StringOperation.RTRIM;
+    }
+
+    @Override
+    public DataType dataType() {
+        return DataType.KEYWORD;
+    }
+
+}
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Space.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Space.java
new file mode 100644
index 0000000000000..37809482c219b
--- /dev/null
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/Space.java
@@ -0,0 +1,42 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.sql.expression.function.scalar.string;
+
+import org.elasticsearch.xpack.sql.expression.Expression;
+import org.elasticsearch.xpack.sql.expression.function.scalar.string.StringProcessor.StringOperation;
+import org.elasticsearch.xpack.sql.tree.Location;
+import org.elasticsearch.xpack.sql.tree.NodeInfo;
+import org.elasticsearch.xpack.sql.type.DataType;
+
+/**
+ * Generates a string consisting of count spaces.
+ */
+public class Space extends UnaryStringIntFunction {
+
+    public Space(Location location, Expression field) {
+        super(location, field);
+    }
+
+    @Override
+    protected NodeInfo<Space> info() {
+        return NodeInfo.create(this, Space::new, field());
+    }
+
+    @Override
+    protected Space replaceChild(Expression newChild) {
+        return new Space(location(), newChild);
+    }
+
+    @Override
+    protected StringOperation operation() {
+        return StringOperation.SPACE;
+    }
+
+    @Override
+    public DataType dataType() {
+        return DataType.KEYWORD;
+    }
+}
\ No newline at end of file
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/StringFunctionUtils.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/StringFunctionUtils.java
new file mode 100644
index 0000000000000..059ad66583662
--- /dev/null
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/StringFunctionUtils.java
@@ -0,0 +1,51 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.sql.expression.function.scalar.string;
+
+abstract class StringFunctionUtils {
+
+    /**
+     * Trims the trailing whitespace characters from the given String. Uses {@link java.lang.Character#isWhitespace(char)}
+     * to determine if a character is whitespace or not.
+     *
+     * @param s the original String
+     * @return the resulting String
+     */
+    static String trimTrailingWhitespaces(String s) {
+        if (!hasLength(s)) {
+            return s;
+        }
+
+        StringBuilder sb = new StringBuilder(s);
+        while (sb.length() > 0 && Character.isWhitespace(sb.charAt(sb.length() - 1))) {
+            sb.deleteCharAt(sb.length() - 1);
+        }
+        return sb.toString();
+    }
+
+    /**
+     * Trims the leading whitespace characters from the given String. Uses {@link java.lang.Character#isWhitespace(char)}
+     * to determine if a character is whitespace or not.
+     *
+     * @param s the original String
+     * @return the resulting String
+     */
+    static String trimLeadingWhitespaces(String s) {
+        if (!hasLength(s)) {
+            return s;
+        }
+
+        StringBuilder sb = new StringBuilder(s);
+        while (sb.length() > 0 && Character.isWhitespace(sb.charAt(0))) {
+            sb.deleteCharAt(0);
+        }
+        return sb.toString();
+    }
+
+    private static boolean hasLength(String s) {
+        return (s != null && s.length() > 0);
+    }
+}
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/StringProcessor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/StringProcessor.java
new file mode 100644
index 0000000000000..2a1ba3a10cf88
--- /dev/null
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/StringProcessor.java
@@ -0,0 +1,160 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.sql.expression.function.scalar.string;
+
+import org.apache.lucene.util.UnicodeUtil;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
+import org.elasticsearch.xpack.sql.expression.function.scalar.processor.runtime.Processor;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.Locale;
+import java.util.function.Function;
+
+public class StringProcessor implements Processor {
+
+    private interface StringFunction<R> {
+        default R apply(Object o) {
+            if (!(o instanceof String || o instanceof Character)) {
+                throw new SqlIllegalArgumentException("A string/char is required; received [{}]", o);
+            }
+
+            return doApply(o.toString());
+        }
+
+        R doApply(String s);
+    }
+
+    private interface NumericFunction<R> {
+        default R apply(Object o) {
+            if (!(o instanceof Number)) {
+                throw new SqlIllegalArgumentException("A number is required; received [{}]", o);
+            }
+
+            return doApply((Number) o);
+        }
+
+        R doApply(Number s);
+    }
+
+    public enum StringOperation {
+        ASCII((String s) -> s.length() == 0 ? null : Integer.valueOf(s.charAt(0))),
+        CHAR((Number n) -> {
+            int i = n.intValue();
+            return i < 0 || i > 255 ? null : String.valueOf((char) i);
+        }),
+        LCASE((String s) -> s.toLowerCase(Locale.ROOT)),
+        UCASE((String s) -> s.toUpperCase(Locale.ROOT)),
+        LENGTH((String s) -> StringFunctionUtils.trimTrailingWhitespaces(s).length()),
+        RTRIM((String s) -> StringFunctionUtils.trimTrailingWhitespaces(s)),
+        LTRIM((String s) -> StringFunctionUtils.trimLeadingWhitespaces(s)),
+        SPACE((Number n) -> {
+            int i = n.intValue();
+            if (i < 0) {
+                return null;
+            }
+            char[] spaces = new char[i];
+            char whitespace = ' ';
+            Arrays.fill(spaces, whitespace);
+
+            return new String(spaces);
+        }),
+        BIT_LENGTH((String s) -> UnicodeUtil.calcUTF16toUTF8Length(s, 0, s.length()) * 8),
+        CHAR_LENGTH(String::length);
+
+        private final Function<Object, Object> apply;
+
+        StringOperation(StringFunction<Object> apply) {
+            this.apply = l -> l == null ? null : apply.apply(l);
+        }
+
+        StringOperation(NumericFunction<Object> apply) {
+            this.apply = l -> l == null ? null : apply.apply(l);
+        }
+
+        StringOperation(Function<Object, Object> apply) {
+            this(apply, false);
+        }
+
+        /**
+         * Wrapper for nulls around the given function.
+         * If true, nulls are passed through, otherwise the function is short-circuited
+         * and null returned.
+         */
+        StringOperation(Function<Object, Object> apply, boolean nullAware) {
+            if (nullAware) {
+                this.apply = apply;
+            } else {
+                this.apply = l -> l == null ? null : apply.apply(l);
+            }
+        }
+
+        public final Object apply(Object l) {
+            return apply.apply(l);
+        }
+
+        /**
+         * "translate" the function name ("char") into a function name that is not a reserved keyword in java.
+         * Used in {@code InternalSqlScriptUtils#character(Number)}.
+         */
+        @Override
+        public String toString() {
+            return this == CHAR ? "character" : super.toString();
+        }
+    }
+
+    public static final String NAME = "s";
+
+    private final StringOperation processor;
+
+    public StringProcessor(StringOperation processor) {
+        this.processor = processor;
+    }
+
+    public StringProcessor(StreamInput in) throws IOException {
+        processor = in.readEnum(StringOperation.class);
+    }
+
+    @Override
+    public void writeTo(StreamOutput out) throws IOException {
+        out.writeEnum(processor);
+    }
+
+    @Override
+    public String getWriteableName() {
+        return NAME;
+    }
+
+    @Override
+    public Object process(Object input) {
+        return processor.apply(input);
+    }
+
+    StringOperation processor() {
+        return processor;
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+        if (obj == null || obj.getClass() != getClass()) {
+            return false;
+        }
+        StringProcessor other = (StringProcessor) obj;
+        return processor == other.processor;
+    }
+
+    @Override
+    public int hashCode() {
+        return processor.hashCode();
+    }
+
+    @Override
+    public String toString() {
+        return processor.toString();
+    }
+}
\ No newline at end of file
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/UCase.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/UCase.java
new file mode 100644
index 0000000000000..a030eeee7b97c
--- /dev/null
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/UCase.java
@@ -0,0 +1,43 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
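// A few worked examples of the StringOperation enum above (inputs chosen for
// illustration; results follow directly from the lambdas):
//
//     StringOperation.ASCII.apply("Elastic")  -> 69       ('E')
//     StringOperation.CHAR.apply(256)         -> null     (outside 0..255)
//     StringOperation.SPACE.apply(3)          -> "   "
//     StringOperation.BIT_LENGTH.apply("a")   -> 8        (1 UTF-8 byte * 8)
//     StringOperation.LCASE.apply(null)       -> null     (nulls short-circuit)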
+ */ +package org.elasticsearch.xpack.sql.expression.function.scalar.string; + +import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.function.scalar.string.StringProcessor.StringOperation; +import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.tree.NodeInfo; +import org.elasticsearch.xpack.sql.type.DataType; + +/** + * Uppercases all lowercase letters in a string. + */ +public class UCase extends UnaryStringFunction { + + public UCase(Location location, Expression field) { + super(location, field); + } + + @Override + protected NodeInfo info() { + return NodeInfo.create(this, UCase::new, field()); + } + + @Override + protected UCase replaceChild(Expression newChild) { + return new UCase(location(), newChild); + } + + @Override + protected StringOperation operation() { + return StringOperation.UCASE; + } + + @Override + public DataType dataType() { + return DataType.KEYWORD; + } + +} diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/UnaryStringFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/UnaryStringFunction.java new file mode 100644 index 0000000000000..a0cfd50422cc0 --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/UnaryStringFunction.java @@ -0,0 +1,89 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. + */ +package org.elasticsearch.xpack.sql.expression.function.scalar.string; + +import org.elasticsearch.xpack.sql.expression.Expression; +import org.elasticsearch.xpack.sql.expression.FieldAttribute; +import org.elasticsearch.xpack.sql.expression.function.scalar.UnaryScalarFunction; +import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition; +import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinitions; +import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.UnaryProcessorDefinition; +import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate; +import org.elasticsearch.xpack.sql.expression.function.scalar.string.StringProcessor.StringOperation; +import org.elasticsearch.xpack.sql.tree.Location; +import org.elasticsearch.xpack.sql.util.StringUtils; + +import java.util.Locale; +import java.util.Objects; + +import static java.lang.String.format; +import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ParamsBuilder.paramsBuilder; + +public abstract class UnaryStringFunction extends UnaryScalarFunction { + + protected UnaryStringFunction(Location location, Expression field) { + super(location, field); + } + + @Override + public boolean foldable() { + return field().foldable(); + } + + @Override + public Object fold() { + return operation().apply(field().fold()); + } + + @Override + protected TypeResolution resolveType() { + if (!childrenResolved()) { + return new TypeResolution("Unresolved children"); + } + + return field().dataType().isString() ? 
TypeResolution.TYPE_RESOLVED : new TypeResolution( + "'%s' requires a string type, received %s", operation(), field().dataType().esType); + } + + @Override + protected final ProcessorDefinition makeProcessorDefinition() { + return new UnaryProcessorDefinition(location(), this, ProcessorDefinitions.toProcessorDefinition(field()), + new StringProcessor(operation())); + } + + protected abstract StringOperation operation(); + + @Override + protected ScriptTemplate asScriptFrom(FieldAttribute field) { + //TODO change this to use _source instead of the exact form (aka field.keyword for text fields) + return new ScriptTemplate(formatScript("doc[{}].value"), + paramsBuilder().variable(field.isInexact() ? field.exactAttribute().name() : field.name()).build(), + dataType()); + } + + @Override + protected String formatScript(String template) { + // basically, transform the script to InternalSqlScriptUtils.[function_name](other_function_or_field_name) + return super.formatScript( + format(Locale.ROOT, "{sql}.%s(%s)", + StringUtils.underscoreToLowerCamelCase(operation().toString()), + template)); + } + + @Override + public boolean equals(Object obj) { + if (obj == null || obj.getClass() != getClass()) { + return false; + } + UnaryStringFunction other = (UnaryStringFunction) obj; + return Objects.equals(other.field(), field()); + } + + @Override + public int hashCode() { + return Objects.hash(field()); + } +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/UnaryStringIntFunction.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/UnaryStringIntFunction.java new file mode 100644 index 0000000000000..7e963eb9db78b --- /dev/null +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/UnaryStringIntFunction.java @@ -0,0 +1,90 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
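// For orientation: both unary string base classes funnel script generation through
// formatScript, which wraps the lower-camel-cased operation name into the {sql}
// whitelist namespace. For LCASE on a keyword field the resulting template should
// look roughly like
//
//     {sql}.lcase(doc[{}].value)
//
// with the (possibly exact-mapped) field name bound as a parameter. CHAR maps to
// "character" via StringOperation#toString because "char" is reserved in Java.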
+ */
+package org.elasticsearch.xpack.sql.expression.function.scalar.string;
+
+import org.elasticsearch.xpack.sql.expression.Expression;
+import org.elasticsearch.xpack.sql.expression.FieldAttribute;
+import org.elasticsearch.xpack.sql.expression.function.scalar.UnaryScalarFunction;
+import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinition;
+import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.ProcessorDefinitions;
+import org.elasticsearch.xpack.sql.expression.function.scalar.processor.definition.UnaryProcessorDefinition;
+import org.elasticsearch.xpack.sql.expression.function.scalar.script.ScriptTemplate;
+import org.elasticsearch.xpack.sql.expression.function.scalar.string.StringProcessor.StringOperation;
+import org.elasticsearch.xpack.sql.tree.Location;
+import org.elasticsearch.xpack.sql.util.StringUtils;
+
+import java.util.Locale;
+import java.util.Objects;
+
+import static java.lang.String.format;
+import static org.elasticsearch.xpack.sql.expression.function.scalar.script.ParamsBuilder.paramsBuilder;
+
+/**
+ * Base unary function for text manipulating SQL functions that receive a number as parameter.
+ */
+public abstract class UnaryStringIntFunction extends UnaryScalarFunction {
+
+    protected UnaryStringIntFunction(Location location, Expression field) {
+        super(location, field);
+    }
+
+    @Override
+    public boolean foldable() {
+        return field().foldable();
+    }
+
+    @Override
+    public Object fold() {
+        return operation().apply(field().fold());
+    }
+
+    @Override
+    protected TypeResolution resolveType() {
+        if (!childrenResolved()) {
+            return new TypeResolution("Unresolved children");
+        }
+
+        return field().dataType().isInteger ? TypeResolution.TYPE_RESOLVED : new TypeResolution(
+                "'%s' requires an integer type, received %s", operation(), field().dataType().esType);
+    }
+
+    @Override
+    protected final ProcessorDefinition makeProcessorDefinition() {
+        return new UnaryProcessorDefinition(location(), this, ProcessorDefinitions.toProcessorDefinition(field()),
+                new StringProcessor(operation()));
+    }
+
+    protected abstract StringOperation operation();
+
+    @Override
+    protected ScriptTemplate asScriptFrom(FieldAttribute field) {
+        return new ScriptTemplate(formatScript("doc[{}].value"),
+                paramsBuilder().variable(field.name()).build(),
+                dataType());
+    }
+
+    @Override
+    protected String formatScript(String template) {
+        return super.formatScript(
+                format(Locale.ROOT, "{sql}.%s(%s)",
+                        StringUtils.underscoreToLowerCamelCase(operation().toString()),
+                        template));
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+        if (obj == null || obj.getClass() != getClass()) {
+            return false;
+        }
+        UnaryStringIntFunction other = (UnaryStringIntFunction) obj;
+        return Objects.equals(other.field(), field());
+    }
+
+    @Override
+    public int hashCode() {
+        return Objects.hash(field());
+    }
+}
\ No newline at end of file
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/whitelist/InternalSqlScriptUtils.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/whitelist/InternalSqlScriptUtils.java
index 802aa4a7c09bb..ccd5c24c6412d 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/whitelist/InternalSqlScriptUtils.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/whitelist/InternalSqlScriptUtils.java
@@ -6,6 +6,7 @@ package
org.elasticsearch.xpack.sql.expression.function.scalar.whitelist; import org.elasticsearch.xpack.sql.expression.function.scalar.datetime.DateTimeFunction; +import org.elasticsearch.xpack.sql.expression.function.scalar.string.StringProcessor.StringOperation; /** * Whitelisted class for SQL scripts. @@ -19,4 +20,44 @@ private InternalSqlScriptUtils() {} public static Integer dateTimeChrono(long millis, String tzId, String chronoName) { return DateTimeFunction.dateTimeChrono(millis, tzId, chronoName); } + + public static Integer ascii(String s) { + return (Integer) StringOperation.ASCII.apply(s); + } + + public static Integer bitLength(String s) { + return (Integer) StringOperation.BIT_LENGTH.apply(s); + } + + public static String character(Number n) { + return (String) StringOperation.CHAR.apply(n); + } + + public static Integer charLength(String s) { + return (Integer) StringOperation.CHAR_LENGTH.apply(s); + } + + public static String lcase(String s) { + return (String) StringOperation.LCASE.apply(s); + } + + public static String ucase(String s) { + return (String) StringOperation.UCASE.apply(s); + } + + public static Integer length(String s) { + return (Integer) StringOperation.LENGTH.apply(s); + } + + public static String rtrim(String s) { + return (String) StringOperation.RTRIM.apply(s); + } + + public static String ltrim(String s) { + return (String) StringOperation.LTRIM.apply(s); + } + + public static String space(Number n) { + return (String) StringOperation.SPACE.apply(n); + } } diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java index 35eb76af67c7b..66ec98ea53c82 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java @@ -20,6 +20,7 @@ import org.elasticsearch.xpack.sql.expression.ScalarSubquery; import org.elasticsearch.xpack.sql.expression.UnresolvedAttribute; import org.elasticsearch.xpack.sql.expression.UnresolvedStar; +import org.elasticsearch.xpack.sql.expression.function.Function; import org.elasticsearch.xpack.sql.expression.function.UnresolvedFunction; import org.elasticsearch.xpack.sql.expression.function.scalar.Cast; import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.Add; @@ -48,14 +49,19 @@ import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ArithmeticBinaryContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ArithmeticUnaryContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.BooleanLiteralContext; -import org.elasticsearch.xpack.sql.parser.SqlBaseParser.CastContext; +import org.elasticsearch.xpack.sql.parser.SqlBaseParser.CastExpressionContext; +import org.elasticsearch.xpack.sql.parser.SqlBaseParser.CastTemplateContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ColumnReferenceContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ComparisonContext; +import org.elasticsearch.xpack.sql.parser.SqlBaseParser.DateEscapedLiteralContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.DecimalLiteralContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.DereferenceContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ExistsContext; -import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ExtractContext; -import org.elasticsearch.xpack.sql.parser.SqlBaseParser.FunctionCallContext; +import 
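// A quick sanity check of how the new whitelisted helpers behave; they simply
// delegate to StringOperation, so per the processor's semantics one would expect:
//
//     InternalSqlScriptUtils.charLength("foo")  -> 3
//     InternalSqlScriptUtils.ucase("foo")       -> "FOO"
//     InternalSqlScriptUtils.character(69)      -> "E"   (CHAR of ASCII 69)
//     InternalSqlScriptUtils.space(2)           -> "  "
//
// Painless reaches these through the {sql} namespace emitted by formatScript.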
org.elasticsearch.xpack.sql.parser.SqlBaseParser.ExtractExpressionContext; +import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ExtractTemplateContext; +import org.elasticsearch.xpack.sql.parser.SqlBaseParser.FunctionExpressionContext; +import org.elasticsearch.xpack.sql.parser.SqlBaseParser.FunctionTemplateContext; +import org.elasticsearch.xpack.sql.parser.SqlBaseParser.GuidEscapedLiteralContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.IntegerLiteralContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.LogicalBinaryContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.LogicalNotContext; @@ -66,6 +72,7 @@ import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ParamLiteralContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.ParenthesizedExpressionContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.PatternContext; +import org.elasticsearch.xpack.sql.parser.SqlBaseParser.PatternEscapeContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.PredicateContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.PredicatedContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.PrimitiveDataTypeContext; @@ -76,10 +83,16 @@ import org.elasticsearch.xpack.sql.parser.SqlBaseParser.StringLiteralContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.StringQueryContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.SubqueryExpressionContext; +import org.elasticsearch.xpack.sql.parser.SqlBaseParser.TimeEscapedLiteralContext; +import org.elasticsearch.xpack.sql.parser.SqlBaseParser.TimestampEscapedLiteralContext; import org.elasticsearch.xpack.sql.proto.SqlTypedParamValue; import org.elasticsearch.xpack.sql.tree.Location; import org.elasticsearch.xpack.sql.type.DataType; import org.elasticsearch.xpack.sql.type.DataTypes; +import org.joda.time.DateTime; +import org.joda.time.format.DateTimeFormatter; +import org.joda.time.format.DateTimeFormatterBuilder; +import org.joda.time.format.ISODateTimeFormat; import java.math.BigDecimal; import java.util.List; @@ -222,17 +235,18 @@ public LikePattern visitPattern(PatternContext ctx) { } char escape = 0; - String escapeString = string(ctx.escape); + PatternEscapeContext escapeCtx = ctx.patternEscape(); + String escapeString = escapeCtx == null ? null : string(escapeCtx.escape); if (Strings.hasText(escapeString)) { // shouldn't happen but adding validation in case the string parsing gets wonky if (escapeString.length() > 1) { - throw new ParsingException(source(ctx.escape), "A character not a string required for escaping; found [{}]", escapeString); + throw new ParsingException(source(escapeCtx), "A character not a string required for escaping; found [{}]", escapeString); } else if (escapeString.length() == 1) { escape = escapeString.charAt(0); // these chars already have a meaning if (escape == '*' || escape == '%' || escape == '_') { - throw new ParsingException(source(ctx.escape), "Char [{}] cannot be used for escaping", escape); + throw new ParsingException(source(escapeCtx.escape), "Char [{}] cannot be used for escaping", escape); } // lastly validate that escape chars (if present) are followed by special chars for (int i = 0; i < pattern.length(); i++) { @@ -324,11 +338,6 @@ public Order visitOrderBy(OrderByContext ctx) { ctx.DESC() != null ? 
Order.OrderDirection.DESC : Order.OrderDirection.ASC); } - @Override - public Object visitCast(CastContext ctx) { - return new Cast(source(ctx), expression(ctx.expression()), typedParsing(ctx.dataType(), DataType.class)); - } - @Override public DataType visitPrimitiveDataType(PrimitiveDataTypeContext ctx) { String type = visitIdentifier(ctx.identifier()).toLowerCase(Locale.ROOT); @@ -367,20 +376,32 @@ public DataType visitPrimitiveDataType(PrimitiveDataTypeContext ctx) { } } + // + // Functions template + // @Override - public Object visitFunctionCall(FunctionCallContext ctx) { - String name = visitIdentifier(ctx.identifier()); - boolean isDistinct = ctx.setQuantifier() != null && ctx.setQuantifier().DISTINCT() != null; - UnresolvedFunction.ResolutionType resolutionType = - isDistinct ? UnresolvedFunction.ResolutionType.DISTINCT : UnresolvedFunction.ResolutionType.STANDARD; - return new UnresolvedFunction(source(ctx), name, resolutionType, expressions(ctx.expression())); + public Cast visitCastExpression(CastExpressionContext ctx) { + CastTemplateContext ctc = ctx.castTemplate(); + return new Cast(source(ctc), expression(ctc.expression()), typedParsing(ctc.dataType(), DataType.class)); } @Override - public Object visitExtract(ExtractContext ctx) { - String fieldString = visitIdentifier(ctx.field); - return new UnresolvedFunction(source(ctx), fieldString, - UnresolvedFunction.ResolutionType.EXTRACT, singletonList(expression(ctx.valueExpression()))); + public Function visitExtractExpression(ExtractExpressionContext ctx) { + ExtractTemplateContext template = ctx.extractTemplate(); + String fieldString = visitIdentifier(template.field); + return new UnresolvedFunction(source(template), fieldString, + UnresolvedFunction.ResolutionType.EXTRACT, singletonList(expression(template.valueExpression()))); + } + + @Override + public Function visitFunctionExpression(FunctionExpressionContext ctx) { + FunctionTemplateContext template = ctx.functionTemplate(); + + String name = visitIdentifier(template.identifier()); + boolean isDistinct = template.setQuantifier() != null && template.setQuantifier().DISTINCT() != null; + UnresolvedFunction.ResolutionType resolutionType = + isDistinct ? 
UnresolvedFunction.ResolutionType.DISTINCT : UnresolvedFunction.ResolutionType.STANDARD; + return new UnresolvedFunction(source(ctx), name, resolutionType, expressions(template.expression())); } @Override @@ -445,12 +466,12 @@ public Expression visitStringLiteral(StringLiteralContext ctx) { } @Override - public Object visitDecimalLiteral(DecimalLiteralContext ctx) { + public Literal visitDecimalLiteral(DecimalLiteralContext ctx) { return new Literal(source(ctx), new BigDecimal(ctx.getText()).doubleValue(), DataType.DOUBLE); } @Override - public Object visitIntegerLiteral(IntegerLiteralContext ctx) { + public Literal visitIntegerLiteral(IntegerLiteralContext ctx) { BigDecimal bigD = new BigDecimal(ctx.getText()); long value = bigD.longValueExact(); @@ -463,7 +484,7 @@ public Object visitIntegerLiteral(IntegerLiteralContext ctx) { } @Override - public Object visitParamLiteral(ParamLiteralContext ctx) { + public Literal visitParamLiteral(ParamLiteralContext ctx) { SqlTypedParamValue param = param(ctx.PARAM()); Location loc = source(ctx); if (param.value == null) { @@ -522,4 +543,100 @@ private SqlTypedParamValue param(TerminalNode node) { return params.get(token); } -} + + @Override + public Literal visitDateEscapedLiteral(DateEscapedLiteralContext ctx) { + String string = string(ctx.string()); + Location loc = source(ctx); + // parse yyyy-MM-dd + DateTime dt = null; + try { + dt = ISODateTimeFormat.date().parseDateTime(string); + } catch(IllegalArgumentException ex) { + throw new ParsingException(loc, "Invalid date received; {}", ex.getMessage()); + } + return new Literal(loc, dt, DataType.DATE); + } + + @Override + public Literal visitTimeEscapedLiteral(TimeEscapedLiteralContext ctx) { + String string = string(ctx.string()); + Location loc = source(ctx); + + // parse HH:mm:ss + DateTime dt = null; + try { + dt = ISODateTimeFormat.hourMinuteSecond().parseDateTime(string); + } catch (IllegalArgumentException ex) { + throw new ParsingException(loc, "Invalid time received; {}", ex.getMessage()); + } + + throw new SqlIllegalArgumentException("Time (only) literals are not supported; a date component is required as well"); + } + + @Override + public Literal visitTimestampEscapedLiteral(TimestampEscapedLiteralContext ctx) { + String string = string(ctx.string()); + + Location loc = source(ctx); + // parse yyyy-mm-dd hh:mm:ss(.f...) + DateTime dt = null; + try { + DateTimeFormatter formatter = new DateTimeFormatterBuilder() + .append(ISODateTimeFormat.date()) + .appendLiteral(" ") + .append(ISODateTimeFormat.hourMinuteSecondFraction()) + .toFormatter(); + dt = formatter.parseDateTime(string); + } catch (IllegalArgumentException ex) { + throw new ParsingException(loc, "Invalid timestamp received; {}", ex.getMessage()); + } + return new Literal(loc, dt, DataType.DATE); + } + + @Override + public Literal visitGuidEscapedLiteral(GuidEscapedLiteralContext ctx) { + String string = string(ctx.string()); + + Location loc = source(ctx.string()); + // basic validation + String lowerCase = string.toLowerCase(Locale.ROOT); + // needs to be format nnnnnnnn-nnnn-nnnn-nnnn-nnnnnnnnnnnn + // since the length is fixed, the validation happens on absolute values + // not pretty but it's fast and doesn't create any extra objects + + String errorPrefix = "Invalid GUID, "; + + if (lowerCase.length() != 36) { + throw new ParsingException(loc, "{}too {}", errorPrefix, lowerCase.length() > 36 ? 
"long" : "short"); + } + + int[] separatorPos = { 8, 13, 18, 23 }; + for (int pos : separatorPos) { + if (lowerCase.charAt(pos) != '-') { + throw new ParsingException(loc, "{}expected group separator at offset [{}], found [{}]", + errorPrefix, pos, string.charAt(pos)); + } + } + + String HEXA = "0123456789abcdef"; + + for (int i = 0; i < lowerCase.length(); i++) { + // skip separators + boolean inspect = true; + for (int pos : separatorPos) { + if (i == pos) { + inspect = false; + break; + } else if (pos > i) { + break; + } + } + if (inspect && HEXA.indexOf(lowerCase.charAt(i)) < 0) { + throw new ParsingException(loc, "{}expected hexadecimal at offset[{}], found [{}]", errorPrefix, i, string.charAt(i)); + } + } + + return new Literal(source(ctx), string, DataType.KEYWORD); + } +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/LogicalPlanBuilder.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/LogicalPlanBuilder.java index 3435994a0fc42..58d858c42415a 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/LogicalPlanBuilder.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/LogicalPlanBuilder.java @@ -19,6 +19,7 @@ import org.elasticsearch.xpack.sql.parser.SqlBaseParser.JoinCriteriaContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.JoinRelationContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.JoinTypeContext; +import org.elasticsearch.xpack.sql.parser.SqlBaseParser.LimitClauseContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.NamedQueryContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.QueryContext; import org.elasticsearch.xpack.sql.parser.SqlBaseParser.QueryNoWithContext; @@ -89,9 +90,13 @@ public LogicalPlan visitQueryNoWith(QueryNoWithContext ctx) { plan = new OrderBy(source(ctx.ORDER()), plan, visitList(ctx.orderBy(), Order.class)); } - if (ctx.limit != null && ctx.INTEGER_VALUE() != null) { - plan = new Limit(source(ctx.limit), new Literal(source(ctx), - Integer.parseInt(ctx.limit.getText()), DataType.INTEGER), plan); + LimitClauseContext limitClause = ctx.limitClause(); + if (limitClause != null) { + Token limit = limitClause.limit; + if (limit != null && limitClause.INTEGER_VALUE() != null) { + plan = new Limit(source(limitClause), new Literal(source(limitClause), + Integer.parseInt(limit.getText()), DataType.INTEGER), plan); + } } return plan; diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseListener.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseListener.java index 4e80e8db9bb52..b353bcf6521f9 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseListener.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseListener.java @@ -1,8 +1,3 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ // ANTLR GENERATED CODE: DO NOT EDIT package org.elasticsearch.xpack.sql.parser; @@ -208,6 +203,18 @@ class SqlBaseBaseListener implements SqlBaseListener { *

<p>The default implementation does nothing.</p>
    */ @Override public void exitQueryNoWith(SqlBaseParser.QueryNoWithContext ctx) { } + /** + * {@inheritDoc} + * + *

<p>The default implementation does nothing.</p>
    + */ + @Override public void enterLimitClause(SqlBaseParser.LimitClauseContext ctx) { } + /** + * {@inheritDoc} + * + *

<p>The default implementation does nothing.</p>
    + */ + @Override public void exitLimitClause(SqlBaseParser.LimitClauseContext ctx) { } /** * {@inheritDoc} * @@ -556,6 +563,18 @@ class SqlBaseBaseListener implements SqlBaseListener { *

<p>The default implementation does nothing.</p>
    */ @Override public void exitPattern(SqlBaseParser.PatternContext ctx) { } + /** + * {@inheritDoc} + * + *

<p>The default implementation does nothing.</p>
    + */ + @Override public void enterPatternEscape(SqlBaseParser.PatternEscapeContext ctx) { } + /** + * {@inheritDoc} + * + *

<p>The default implementation does nothing.</p>
    + */ + @Override public void exitPatternEscape(SqlBaseParser.PatternEscapeContext ctx) { } /** * {@inheritDoc} * @@ -657,13 +676,13 @@ class SqlBaseBaseListener implements SqlBaseListener { * *

<p>The default implementation does nothing.</p>
    */ - @Override public void enterFunctionCall(SqlBaseParser.FunctionCallContext ctx) { } + @Override public void enterFunction(SqlBaseParser.FunctionContext ctx) { } /** * {@inheritDoc} * *

<p>The default implementation does nothing.</p>
    */ - @Override public void exitFunctionCall(SqlBaseParser.FunctionCallContext ctx) { } + @Override public void exitFunction(SqlBaseParser.FunctionContext ctx) { } /** * {@inheritDoc} * @@ -712,6 +731,78 @@ class SqlBaseBaseListener implements SqlBaseListener { *

<p>The default implementation does nothing.</p>
    */ @Override public void exitParenthesizedExpression(SqlBaseParser.ParenthesizedExpressionContext ctx) { } + /** + * {@inheritDoc} + * + *

<p>The default implementation does nothing.</p>
    + */ + @Override public void enterCastExpression(SqlBaseParser.CastExpressionContext ctx) { } + /** + * {@inheritDoc} + * + *

<p>The default implementation does nothing.</p>
    + */ + @Override public void exitCastExpression(SqlBaseParser.CastExpressionContext ctx) { } + /** + * {@inheritDoc} + * + *

<p>The default implementation does nothing.</p>
    + */ + @Override public void enterCastTemplate(SqlBaseParser.CastTemplateContext ctx) { } + /** + * {@inheritDoc} + * + *

<p>The default implementation does nothing.</p>
    + */ + @Override public void exitCastTemplate(SqlBaseParser.CastTemplateContext ctx) { } + /** + * {@inheritDoc} + * + *

<p>The default implementation does nothing.</p>
    + */ + @Override public void enterExtractExpression(SqlBaseParser.ExtractExpressionContext ctx) { } + /** + * {@inheritDoc} + * + *

<p>The default implementation does nothing.</p>
    + */ + @Override public void exitExtractExpression(SqlBaseParser.ExtractExpressionContext ctx) { } + /** + * {@inheritDoc} + * + *

<p>The default implementation does nothing.</p>
    + */ + @Override public void enterExtractTemplate(SqlBaseParser.ExtractTemplateContext ctx) { } + /** + * {@inheritDoc} + * + *

<p>The default implementation does nothing.</p>
    + */ + @Override public void exitExtractTemplate(SqlBaseParser.ExtractTemplateContext ctx) { } + /** + * {@inheritDoc} + * + *

<p>The default implementation does nothing.</p>
    + */ + @Override public void enterFunctionExpression(SqlBaseParser.FunctionExpressionContext ctx) { } + /** + * {@inheritDoc} + * + *

<p>The default implementation does nothing.</p>
    + */ + @Override public void exitFunctionExpression(SqlBaseParser.FunctionExpressionContext ctx) { } + /** + * {@inheritDoc} + * + *

<p>The default implementation does nothing.</p>
    + */ + @Override public void enterFunctionTemplate(SqlBaseParser.FunctionTemplateContext ctx) { } + /** + * {@inheritDoc} + * + *

<p>The default implementation does nothing.</p>
    + */ + @Override public void exitFunctionTemplate(SqlBaseParser.FunctionTemplateContext ctx) { } /** * {@inheritDoc} * @@ -772,6 +863,54 @@ class SqlBaseBaseListener implements SqlBaseListener { *

<p>The default implementation does nothing.</p>
    */ @Override public void exitParamLiteral(SqlBaseParser.ParamLiteralContext ctx) { } + /** + * {@inheritDoc} + * + *

<p>The default implementation does nothing.</p>
    + */ + @Override public void enterDateEscapedLiteral(SqlBaseParser.DateEscapedLiteralContext ctx) { } + /** + * {@inheritDoc} + * + *

<p>The default implementation does nothing.</p>
    + */ + @Override public void exitDateEscapedLiteral(SqlBaseParser.DateEscapedLiteralContext ctx) { } + /** + * {@inheritDoc} + * + *

<p>The default implementation does nothing.</p>
    + */ + @Override public void enterTimeEscapedLiteral(SqlBaseParser.TimeEscapedLiteralContext ctx) { } + /** + * {@inheritDoc} + * + *

<p>The default implementation does nothing.</p>
    + */ + @Override public void exitTimeEscapedLiteral(SqlBaseParser.TimeEscapedLiteralContext ctx) { } + /** + * {@inheritDoc} + * + *

<p>The default implementation does nothing.</p>
    + */ + @Override public void enterTimestampEscapedLiteral(SqlBaseParser.TimestampEscapedLiteralContext ctx) { } + /** + * {@inheritDoc} + * + *

<p>The default implementation does nothing.</p>
    + */ + @Override public void exitTimestampEscapedLiteral(SqlBaseParser.TimestampEscapedLiteralContext ctx) { } + /** + * {@inheritDoc} + * + *

<p>The default implementation does nothing.</p>
    + */ + @Override public void enterGuidEscapedLiteral(SqlBaseParser.GuidEscapedLiteralContext ctx) { } + /** + * {@inheritDoc} + * + *

<p>The default implementation does nothing.</p>
    + */ + @Override public void exitGuidEscapedLiteral(SqlBaseParser.GuidEscapedLiteralContext ctx) { } /** * {@inheritDoc} * diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseVisitor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseVisitor.java index 1adb0a423c7f2..d40ae6daa6e34 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseVisitor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseVisitor.java @@ -1,8 +1,3 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ // ANTLR GENERATED CODE: DO NOT EDIT package org.elasticsearch.xpack.sql.parser; import org.antlr.v4.runtime.tree.AbstractParseTreeVisitor; @@ -128,6 +123,13 @@ class SqlBaseBaseVisitor extends AbstractParseTreeVisitor implements SqlBa * {@link #visitChildren} on {@code ctx}.
    */ @Override public T visitQueryNoWith(SqlBaseParser.QueryNoWithContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

<p>The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.</p>
    + */ + @Override public T visitLimitClause(SqlBaseParser.LimitClauseContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * @@ -331,6 +333,13 @@ class SqlBaseBaseVisitor extends AbstractParseTreeVisitor implements SqlBa * {@link #visitChildren} on {@code ctx}.
    */ @Override public T visitPattern(SqlBaseParser.PatternContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

<p>The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.</p>
    + */ + @Override public T visitPatternEscape(SqlBaseParser.PatternEscapeContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * @@ -393,7 +402,7 @@ class SqlBaseBaseVisitor extends AbstractParseTreeVisitor implements SqlBa *

<p>The default implementation returns the result of calling * {@link #visitChildren} on {@code ctx}.</p>
    */ - @Override public T visitFunctionCall(SqlBaseParser.FunctionCallContext ctx) { return visitChildren(ctx); } + @Override public T visitFunction(SqlBaseParser.FunctionContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * @@ -422,6 +431,48 @@ class SqlBaseBaseVisitor extends AbstractParseTreeVisitor implements SqlBa * {@link #visitChildren} on {@code ctx}.
    */ @Override public T visitParenthesizedExpression(SqlBaseParser.ParenthesizedExpressionContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

<p>The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.</p>
    + */ + @Override public T visitCastExpression(SqlBaseParser.CastExpressionContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

<p>The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.</p>
    + */ + @Override public T visitCastTemplate(SqlBaseParser.CastTemplateContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

<p>The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.</p>
    + */ + @Override public T visitExtractExpression(SqlBaseParser.ExtractExpressionContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

<p>The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.</p>
    + */ + @Override public T visitExtractTemplate(SqlBaseParser.ExtractTemplateContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

<p>The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.</p>
    + */ + @Override public T visitFunctionExpression(SqlBaseParser.FunctionExpressionContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

<p>The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.</p>
    + */ + @Override public T visitFunctionTemplate(SqlBaseParser.FunctionTemplateContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * @@ -457,6 +508,34 @@ class SqlBaseBaseVisitor extends AbstractParseTreeVisitor implements SqlBa * {@link #visitChildren} on {@code ctx}.
    */ @Override public T visitParamLiteral(SqlBaseParser.ParamLiteralContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

<p>The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.</p>
    + */ + @Override public T visitDateEscapedLiteral(SqlBaseParser.DateEscapedLiteralContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

<p>The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.</p>
    + */ + @Override public T visitTimeEscapedLiteral(SqlBaseParser.TimeEscapedLiteralContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

<p>The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.</p>
    + */ + @Override public T visitTimestampEscapedLiteral(SqlBaseParser.TimestampEscapedLiteralContext ctx) { return visitChildren(ctx); } + /** + * {@inheritDoc} + * + *

<p>The default implementation returns the result of calling + * {@link #visitChildren} on {@code ctx}.</p>
    + */ + @Override public T visitGuidEscapedLiteral(SqlBaseParser.GuidEscapedLiteralContext ctx) { return visitChildren(ctx); } /** * {@inheritDoc} * diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseLexer.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseLexer.java index c54c5e3810c8f..588f3ef028d6f 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseLexer.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseLexer.java @@ -1,15 +1,13 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ // ANTLR GENERATED CODE: DO NOT EDIT package org.elasticsearch.xpack.sql.parser; import org.antlr.v4.runtime.Lexer; import org.antlr.v4.runtime.CharStream; +import org.antlr.v4.runtime.Token; +import org.antlr.v4.runtime.TokenStream; import org.antlr.v4.runtime.*; import org.antlr.v4.runtime.atn.*; import org.antlr.v4.runtime.dfa.DFA; +import org.antlr.v4.runtime.misc.*; @SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"}) class SqlBaseLexer extends Lexer { @@ -28,11 +26,13 @@ class SqlBaseLexer extends Lexer { NOT=45, NULL=46, ON=47, OPTIMIZED=48, OR=49, ORDER=50, OUTER=51, PARSED=52, PHYSICAL=53, PLAN=54, RIGHT=55, RLIKE=56, QUERY=57, SCHEMAS=58, SELECT=59, SHOW=60, SYS=61, TABLE=62, TABLES=63, TEXT=64, TRUE=65, TYPE=66, TYPES=67, - USING=68, VERIFY=69, WHERE=70, WITH=71, EQ=72, NEQ=73, LT=74, LTE=75, - GT=76, GTE=77, PLUS=78, MINUS=79, ASTERISK=80, SLASH=81, PERCENT=82, CONCAT=83, - DOT=84, PARAM=85, STRING=86, INTEGER_VALUE=87, DECIMAL_VALUE=88, IDENTIFIER=89, - DIGIT_IDENTIFIER=90, TABLE_IDENTIFIER=91, QUOTED_IDENTIFIER=92, BACKQUOTED_IDENTIFIER=93, - SIMPLE_COMMENT=94, BRACKETED_COMMENT=95, WS=96, UNRECOGNIZED=97; + USING=68, VERIFY=69, WHERE=70, WITH=71, ESCAPE_ESC=72, FUNCTION_ESC=73, + LIMIT_ESC=74, DATE_ESC=75, TIME_ESC=76, TIMESTAMP_ESC=77, GUID_ESC=78, + ESC_END=79, EQ=80, NEQ=81, LT=82, LTE=83, GT=84, GTE=85, PLUS=86, MINUS=87, + ASTERISK=88, SLASH=89, PERCENT=90, CONCAT=91, DOT=92, PARAM=93, STRING=94, + INTEGER_VALUE=95, DECIMAL_VALUE=96, IDENTIFIER=97, DIGIT_IDENTIFIER=98, + TABLE_IDENTIFIER=99, QUOTED_IDENTIFIER=100, BACKQUOTED_IDENTIFIER=101, + SIMPLE_COMMENT=102, BRACKETED_COMMENT=103, WS=104, UNRECOGNIZED=105; public static String[] modeNames = { "DEFAULT_MODE" }; @@ -46,12 +46,13 @@ class SqlBaseLexer extends Lexer { "LIMIT", "MAPPED", "MATCH", "NATURAL", "NOT", "NULL", "ON", "OPTIMIZED", "OR", "ORDER", "OUTER", "PARSED", "PHYSICAL", "PLAN", "RIGHT", "RLIKE", "QUERY", "SCHEMAS", "SELECT", "SHOW", "SYS", "TABLE", "TABLES", "TEXT", - "TRUE", "TYPE", "TYPES", "USING", "VERIFY", "WHERE", "WITH", "EQ", "NEQ", - "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", - "CONCAT", "DOT", "PARAM", "STRING", "INTEGER_VALUE", "DECIMAL_VALUE", - "IDENTIFIER", "DIGIT_IDENTIFIER", "TABLE_IDENTIFIER", "QUOTED_IDENTIFIER", - "BACKQUOTED_IDENTIFIER", "EXPONENT", "DIGIT", "LETTER", "SIMPLE_COMMENT", - "BRACKETED_COMMENT", "WS", "UNRECOGNIZED" + "TRUE", "TYPE", "TYPES", "USING", "VERIFY", "WHERE", "WITH", "ESCAPE_ESC", + "FUNCTION_ESC", "LIMIT_ESC", "DATE_ESC", "TIME_ESC", "TIMESTAMP_ESC", + "GUID_ESC", "ESC_END", "EQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", + "MINUS", "ASTERISK", "SLASH", "PERCENT", "CONCAT", "DOT", "PARAM", "STRING", + 
"INTEGER_VALUE", "DECIMAL_VALUE", "IDENTIFIER", "DIGIT_IDENTIFIER", "TABLE_IDENTIFIER", + "QUOTED_IDENTIFIER", "BACKQUOTED_IDENTIFIER", "EXPONENT", "DIGIT", "LETTER", + "SIMPLE_COMMENT", "BRACKETED_COMMENT", "WS", "UNRECOGNIZED" }; private static final String[] _LITERAL_NAMES = { @@ -65,8 +66,9 @@ class SqlBaseLexer extends Lexer { "'OR'", "'ORDER'", "'OUTER'", "'PARSED'", "'PHYSICAL'", "'PLAN'", "'RIGHT'", "'RLIKE'", "'QUERY'", "'SCHEMAS'", "'SELECT'", "'SHOW'", "'SYS'", "'TABLE'", "'TABLES'", "'TEXT'", "'TRUE'", "'TYPE'", "'TYPES'", "'USING'", "'VERIFY'", - "'WHERE'", "'WITH'", "'='", null, "'<'", "'<='", "'>'", "'>='", "'+'", - "'-'", "'*'", "'/'", "'%'", "'||'", "'.'", "'?'" + "'WHERE'", "'WITH'", "'{ESCAPE'", "'{FN'", "'{LIMIT'", "'{D'", "'{T'", + "'{TS'", "'{GUID'", "'}'", "'='", null, "'<'", "'<='", "'>'", "'>='", + "'+'", "'-'", "'*'", "'/'", "'%'", "'||'", "'.'", "'?'" }; private static final String[] _SYMBOLIC_NAMES = { null, null, null, null, null, "ALL", "ANALYZE", "ANALYZED", "AND", "ANY", @@ -77,12 +79,13 @@ class SqlBaseLexer extends Lexer { "LIMIT", "MAPPED", "MATCH", "NATURAL", "NOT", "NULL", "ON", "OPTIMIZED", "OR", "ORDER", "OUTER", "PARSED", "PHYSICAL", "PLAN", "RIGHT", "RLIKE", "QUERY", "SCHEMAS", "SELECT", "SHOW", "SYS", "TABLE", "TABLES", "TEXT", - "TRUE", "TYPE", "TYPES", "USING", "VERIFY", "WHERE", "WITH", "EQ", "NEQ", - "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", - "CONCAT", "DOT", "PARAM", "STRING", "INTEGER_VALUE", "DECIMAL_VALUE", - "IDENTIFIER", "DIGIT_IDENTIFIER", "TABLE_IDENTIFIER", "QUOTED_IDENTIFIER", - "BACKQUOTED_IDENTIFIER", "SIMPLE_COMMENT", "BRACKETED_COMMENT", "WS", - "UNRECOGNIZED" + "TRUE", "TYPE", "TYPES", "USING", "VERIFY", "WHERE", "WITH", "ESCAPE_ESC", + "FUNCTION_ESC", "LIMIT_ESC", "DATE_ESC", "TIME_ESC", "TIMESTAMP_ESC", + "GUID_ESC", "ESC_END", "EQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", + "MINUS", "ASTERISK", "SLASH", "PERCENT", "CONCAT", "DOT", "PARAM", "STRING", + "INTEGER_VALUE", "DECIMAL_VALUE", "IDENTIFIER", "DIGIT_IDENTIFIER", "TABLE_IDENTIFIER", + "QUOTED_IDENTIFIER", "BACKQUOTED_IDENTIFIER", "SIMPLE_COMMENT", "BRACKETED_COMMENT", + "WS", "UNRECOGNIZED" }; public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES); @@ -139,7 +142,7 @@ public SqlBaseLexer(CharStream input) { public ATN getATN() { return _ATN; } public static final String _serializedATN = - "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\2c\u033b\b\1\4\2\t"+ + "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\2k\u0370\b\1\4\2\t"+ "\2\4\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13"+ "\t\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22"+ "\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31\t\31"+ @@ -150,276 +153,293 @@ public SqlBaseLexer(CharStream input) { "\4>\t>\4?\t?\4@\t@\4A\tA\4B\tB\4C\tC\4D\tD\4E\tE\4F\tF\4G\tG\4H\tH\4I"+ "\tI\4J\tJ\4K\tK\4L\tL\4M\tM\4N\tN\4O\tO\4P\tP\4Q\tQ\4R\tR\4S\tS\4T\tT"+ "\4U\tU\4V\tV\4W\tW\4X\tX\4Y\tY\4Z\tZ\4[\t[\4\\\t\\\4]\t]\4^\t^\4_\t_\4"+ - "`\t`\4a\ta\4b\tb\4c\tc\4d\td\4e\te\3\2\3\2\3\3\3\3\3\4\3\4\3\5\3\5\3\6"+ - "\3\6\3\6\3\6\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\7\3\b\3\b\3\b\3\b\3\b\3\b\3"+ - "\b\3\b\3\b\3\t\3\t\3\t\3\t\3\n\3\n\3\n\3\n\3\13\3\13\3\13\3\f\3\f\3\f"+ - "\3\f\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\r\3\16\3\16\3\16\3\17\3\17\3\17\3\17"+ - "\3\17\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\20\3\21\3\21\3\21\3\21\3\21"+ - "\3\21\3\21\3\21\3\21\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\22\3\23\3\23"+ - 
"\3\23\3\23\3\23\3\23\3\24\3\24\3\24\3\24\3\24\3\25\3\25\3\25\3\25\3\25"+ - "\3\25\3\25\3\25\3\25\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\26\3\27"+ - "\3\27\3\27\3\27\3\27\3\27\3\27\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30"+ - "\3\30\3\30\3\30\3\31\3\31\3\31\3\31\3\31\3\31\3\31\3\32\3\32\3\32\3\32"+ - "\3\32\3\32\3\32\3\32\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\34\3\34"+ - "\3\34\3\34\3\34\3\34\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\36\3\36\3\36"+ - "\3\36\3\36\3\37\3\37\3\37\3\37\3\37\3 \3 \3 \3 \3 \3 \3 \3 \3 \3 \3!\3"+ - "!\3!\3!\3!\3!\3!\3!\3!\3\"\3\"\3\"\3\"\3\"\3\"\3#\3#\3#\3#\3#\3#\3#\3"+ - "$\3$\3$\3%\3%\3%\3%\3%\3%\3&\3&\3&\3\'\3\'\3\'\3\'\3\'\3(\3(\3(\3(\3("+ - "\3)\3)\3)\3)\3)\3*\3*\3*\3*\3*\3*\3+\3+\3+\3+\3+\3+\3+\3,\3,\3,\3,\3,"+ - "\3,\3-\3-\3-\3-\3-\3-\3-\3-\3.\3.\3.\3.\3/\3/\3/\3/\3/\3\60\3\60\3\60"+ - "\3\61\3\61\3\61\3\61\3\61\3\61\3\61\3\61\3\61\3\61\3\62\3\62\3\62\3\63"+ - "\3\63\3\63\3\63\3\63\3\63\3\64\3\64\3\64\3\64\3\64\3\64\3\65\3\65\3\65"+ - "\3\65\3\65\3\65\3\65\3\66\3\66\3\66\3\66\3\66\3\66\3\66\3\66\3\66\3\67"+ - "\3\67\3\67\3\67\3\67\38\38\38\38\38\38\39\39\39\39\39\39\3:\3:\3:\3:\3"+ - ":\3:\3;\3;\3;\3;\3;\3;\3;\3;\3<\3<\3<\3<\3<\3<\3<\3=\3=\3=\3=\3=\3>\3"+ - ">\3>\3>\3?\3?\3?\3?\3?\3?\3@\3@\3@\3@\3@\3@\3@\3A\3A\3A\3A\3A\3B\3B\3"+ - "B\3B\3B\3C\3C\3C\3C\3C\3D\3D\3D\3D\3D\3D\3E\3E\3E\3E\3E\3E\3F\3F\3F\3"+ - "F\3F\3F\3F\3G\3G\3G\3G\3G\3G\3H\3H\3H\3H\3H\3I\3I\3J\3J\3J\3J\3J\3J\3"+ - "J\5J\u027b\nJ\3K\3K\3L\3L\3L\3M\3M\3N\3N\3N\3O\3O\3P\3P\3Q\3Q\3R\3R\3"+ - "S\3S\3T\3T\3T\3U\3U\3V\3V\3W\3W\3W\3W\7W\u029c\nW\fW\16W\u029f\13W\3W"+ - "\3W\3X\6X\u02a4\nX\rX\16X\u02a5\3Y\6Y\u02a9\nY\rY\16Y\u02aa\3Y\3Y\7Y\u02af"+ - "\nY\fY\16Y\u02b2\13Y\3Y\3Y\6Y\u02b6\nY\rY\16Y\u02b7\3Y\6Y\u02bb\nY\rY"+ - "\16Y\u02bc\3Y\3Y\7Y\u02c1\nY\fY\16Y\u02c4\13Y\5Y\u02c6\nY\3Y\3Y\3Y\3Y"+ - "\6Y\u02cc\nY\rY\16Y\u02cd\3Y\3Y\5Y\u02d2\nY\3Z\3Z\5Z\u02d6\nZ\3Z\3Z\3"+ - "Z\7Z\u02db\nZ\fZ\16Z\u02de\13Z\3[\3[\3[\3[\6[\u02e4\n[\r[\16[\u02e5\3"+ - "\\\3\\\3\\\3\\\6\\\u02ec\n\\\r\\\16\\\u02ed\3]\3]\3]\3]\7]\u02f4\n]\f"+ - "]\16]\u02f7\13]\3]\3]\3^\3^\3^\3^\7^\u02ff\n^\f^\16^\u0302\13^\3^\3^\3"+ - "_\3_\5_\u0308\n_\3_\6_\u030b\n_\r_\16_\u030c\3`\3`\3a\3a\3b\3b\3b\3b\7"+ - "b\u0317\nb\fb\16b\u031a\13b\3b\5b\u031d\nb\3b\5b\u0320\nb\3b\3b\3c\3c"+ - "\3c\3c\3c\7c\u0329\nc\fc\16c\u032c\13c\3c\3c\3c\3c\3c\3d\6d\u0334\nd\r"+ - "d\16d\u0335\3d\3d\3e\3e\3\u032a\2f\3\3\5\4\7\5\t\6\13\7\r\b\17\t\21\n"+ - "\23\13\25\f\27\r\31\16\33\17\35\20\37\21!\22#\23%\24\'\25)\26+\27-\30"+ - "/\31\61\32\63\33\65\34\67\359\36;\37= ?!A\"C#E$G%I&K\'M(O)Q*S+U,W-Y.["+ - "/]\60_\61a\62c\63e\64g\65i\66k\67m8o9q:s;u{?}@\177A\u0081B\u0083"+ - "C\u0085D\u0087E\u0089F\u008bG\u008dH\u008fI\u0091J\u0093K\u0095L\u0097"+ - "M\u0099N\u009bO\u009dP\u009fQ\u00a1R\u00a3S\u00a5T\u00a7U\u00a9V\u00ab"+ - "W\u00adX\u00afY\u00b1Z\u00b3[\u00b5\\\u00b7]\u00b9^\u00bb_\u00bd\2\u00bf"+ - "\2\u00c1\2\u00c3`\u00c5a\u00c7b\u00c9c\3\2\f\3\2))\4\2BBaa\5\2<\3>\3>\3>\3?\3?\3?\3?"+ + "\3?\3?\3@\3@\3@\3@\3@\3@\3@\3A\3A\3A\3A\3A\3B\3B\3B\3B\3B\3C\3C\3C\3C"+ + "\3C\3D\3D\3D\3D\3D\3D\3E\3E\3E\3E\3E\3E\3F\3F\3F\3F\3F\3F\3F\3G\3G\3G"+ + "\3G\3G\3G\3H\3H\3H\3H\3H\3I\3I\3I\3I\3I\3I\3I\3I\3J\3J\3J\3J\3K\3K\3K"+ + "\3K\3K\3K\3K\3L\3L\3L\3M\3M\3M\3N\3N\3N\3N\3O\3O\3O\3O\3O\3O\3P\3P\3Q"+ + "\3Q\3R\3R\3R\3R\3R\3R\3R\5R\u02b0\nR\3S\3S\3T\3T\3T\3U\3U\3V\3V\3V\3W"+ + "\3W\3X\3X\3Y\3Y\3Z\3Z\3[\3[\3\\\3\\\3\\\3]\3]\3^\3^\3_\3_\3_\3_\7_\u02d1"+ + "\n_\f_\16_\u02d4\13_\3_\3_\3`\6`\u02d9\n`\r`\16`\u02da\3a\6a\u02de\na"+ + 
"\ra\16a\u02df\3a\3a\7a\u02e4\na\fa\16a\u02e7\13a\3a\3a\6a\u02eb\na\ra"+ + "\16a\u02ec\3a\6a\u02f0\na\ra\16a\u02f1\3a\3a\7a\u02f6\na\fa\16a\u02f9"+ + "\13a\5a\u02fb\na\3a\3a\3a\3a\6a\u0301\na\ra\16a\u0302\3a\3a\5a\u0307\n"+ + "a\3b\3b\5b\u030b\nb\3b\3b\3b\7b\u0310\nb\fb\16b\u0313\13b\3c\3c\3c\3c"+ + "\6c\u0319\nc\rc\16c\u031a\3d\3d\3d\3d\6d\u0321\nd\rd\16d\u0322\3e\3e\3"+ + "e\3e\7e\u0329\ne\fe\16e\u032c\13e\3e\3e\3f\3f\3f\3f\7f\u0334\nf\ff\16"+ + "f\u0337\13f\3f\3f\3g\3g\5g\u033d\ng\3g\6g\u0340\ng\rg\16g\u0341\3h\3h"+ + "\3i\3i\3j\3j\3j\3j\7j\u034c\nj\fj\16j\u034f\13j\3j\5j\u0352\nj\3j\5j\u0355"+ + "\nj\3j\3j\3k\3k\3k\3k\3k\7k\u035e\nk\fk\16k\u0361\13k\3k\3k\3k\3k\3k\3"+ + "l\6l\u0369\nl\rl\16l\u036a\3l\3l\3m\3m\3\u035f\2n\3\3\5\4\7\5\t\6\13\7"+ + "\r\b\17\t\21\n\23\13\25\f\27\r\31\16\33\17\35\20\37\21!\22#\23%\24\'\25"+ + ")\26+\27-\30/\31\61\32\63\33\65\34\67\359\36;\37= ?!A\"C#E$G%I&K\'M(O"+ + ")Q*S+U,W-Y.[/]\60_\61a\62c\63e\64g\65i\66k\67m8o9q:s;u{?}@\177A\u0081"+ + "B\u0083C\u0085D\u0087E\u0089F\u008bG\u008dH\u008fI\u0091J\u0093K\u0095"+ + "L\u0097M\u0099N\u009bO\u009dP\u009fQ\u00a1R\u00a3S\u00a5T\u00a7U\u00a9"+ + "V\u00abW\u00adX\u00afY\u00b1Z\u00b3[\u00b5\\\u00b7]\u00b9^\u00bb_\u00bd"+ + "`\u00bfa\u00c1b\u00c3c\u00c5d\u00c7e\u00c9f\u00cbg\u00cd\2\u00cf\2\u00d1"+ + "\2\u00d3h\u00d5i\u00d7j\u00d9k\3\2\f\3\2))\4\2BBaa\5\2<\3\2\2\2\u017d\u017e"+ - "\7H\2\2\u017e\u017f\7W\2\2\u017f\u0180\7P\2\2\u0180\u0181\7E\2\2\u0181"+ - "\u0182\7V\2\2\u0182\u0183\7K\2\2\u0183\u0184\7Q\2\2\u0184\u0185\7P\2\2"+ - "\u0185\u0186\7U\2\2\u0186@\3\2\2\2\u0187\u0188\7I\2\2\u0188\u0189\7T\2"+ - "\2\u0189\u018a\7C\2\2\u018a\u018b\7R\2\2\u018b\u018c\7J\2\2\u018c\u018d"+ - "\7X\2\2\u018d\u018e\7K\2\2\u018e\u018f\7\\\2\2\u018fB\3\2\2\2\u0190\u0191"+ - "\7I\2\2\u0191\u0192\7T\2\2\u0192\u0193\7Q\2\2\u0193\u0194\7W\2\2\u0194"+ - "\u0195\7R\2\2\u0195D\3\2\2\2\u0196\u0197\7J\2\2\u0197\u0198\7C\2\2\u0198"+ - "\u0199\7X\2\2\u0199\u019a\7K\2\2\u019a\u019b\7P\2\2\u019b\u019c\7I\2\2"+ - "\u019cF\3\2\2\2\u019d\u019e\7K\2\2\u019e\u019f\7P\2\2\u019fH\3\2\2\2\u01a0"+ - "\u01a1\7K\2\2\u01a1\u01a2\7P\2\2\u01a2\u01a3\7P\2\2\u01a3\u01a4\7G\2\2"+ - "\u01a4\u01a5\7T\2\2\u01a5J\3\2\2\2\u01a6\u01a7\7K\2\2\u01a7\u01a8\7U\2"+ - "\2\u01a8L\3\2\2\2\u01a9\u01aa\7L\2\2\u01aa\u01ab\7Q\2\2\u01ab\u01ac\7"+ - "K\2\2\u01ac\u01ad\7P\2\2\u01adN\3\2\2\2\u01ae\u01af\7N\2\2\u01af\u01b0"+ - "\7G\2\2\u01b0\u01b1\7H\2\2\u01b1\u01b2\7V\2\2\u01b2P\3\2\2\2\u01b3\u01b4"+ - "\7N\2\2\u01b4\u01b5\7K\2\2\u01b5\u01b6\7M\2\2\u01b6\u01b7\7G\2\2\u01b7"+ - "R\3\2\2\2\u01b8\u01b9\7N\2\2\u01b9\u01ba\7K\2\2\u01ba\u01bb\7O\2\2\u01bb"+ - "\u01bc\7K\2\2\u01bc\u01bd\7V\2\2\u01bdT\3\2\2\2\u01be\u01bf\7O\2\2\u01bf"+ - "\u01c0\7C\2\2\u01c0\u01c1\7R\2\2\u01c1\u01c2\7R\2\2\u01c2\u01c3\7G\2\2"+ - "\u01c3\u01c4\7F\2\2\u01c4V\3\2\2\2\u01c5\u01c6\7O\2\2\u01c6\u01c7\7C\2"+ - "\2\u01c7\u01c8\7V\2\2\u01c8\u01c9\7E\2\2\u01c9\u01ca\7J\2\2\u01caX\3\2"+ - "\2\2\u01cb\u01cc\7P\2\2\u01cc\u01cd\7C\2\2\u01cd\u01ce\7V\2\2\u01ce\u01cf"+ - "\7W\2\2\u01cf\u01d0\7T\2\2\u01d0\u01d1\7C\2\2\u01d1\u01d2\7N\2\2\u01d2"+ - "Z\3\2\2\2\u01d3\u01d4\7P\2\2\u01d4\u01d5\7Q\2\2\u01d5\u01d6\7V\2\2\u01d6"+ - "\\\3\2\2\2\u01d7\u01d8\7P\2\2\u01d8\u01d9\7W\2\2\u01d9\u01da\7N\2\2\u01da"+ - "\u01db\7N\2\2\u01db^\3\2\2\2\u01dc\u01dd\7Q\2\2\u01dd\u01de\7P\2\2\u01de"+ - "`\3\2\2\2\u01df\u01e0\7Q\2\2\u01e0\u01e1\7R\2\2\u01e1\u01e2\7V\2\2\u01e2"+ - "\u01e3\7K\2\2\u01e3\u01e4\7O\2\2\u01e4\u01e5\7K\2\2\u01e5\u01e6\7\\\2"+ - "\2\u01e6\u01e7\7G\2\2\u01e7\u01e8\7F\2\2\u01e8b\3\2\2\2\u01e9\u01ea\7"+ - 
"Q\2\2\u01ea\u01eb\7T\2\2\u01ebd\3\2\2\2\u01ec\u01ed\7Q\2\2\u01ed\u01ee"+ - "\7T\2\2\u01ee\u01ef\7F\2\2\u01ef\u01f0\7G\2\2\u01f0\u01f1\7T\2\2\u01f1"+ - "f\3\2\2\2\u01f2\u01f3\7Q\2\2\u01f3\u01f4\7W\2\2\u01f4\u01f5\7V\2\2\u01f5"+ - "\u01f6\7G\2\2\u01f6\u01f7\7T\2\2\u01f7h\3\2\2\2\u01f8\u01f9\7R\2\2\u01f9"+ - "\u01fa\7C\2\2\u01fa\u01fb\7T\2\2\u01fb\u01fc\7U\2\2\u01fc\u01fd\7G\2\2"+ - "\u01fd\u01fe\7F\2\2\u01fej\3\2\2\2\u01ff\u0200\7R\2\2\u0200\u0201\7J\2"+ - "\2\u0201\u0202\7[\2\2\u0202\u0203\7U\2\2\u0203\u0204\7K\2\2\u0204\u0205"+ - "\7E\2\2\u0205\u0206\7C\2\2\u0206\u0207\7N\2\2\u0207l\3\2\2\2\u0208\u0209"+ - "\7R\2\2\u0209\u020a\7N\2\2\u020a\u020b\7C\2\2\u020b\u020c\7P\2\2\u020c"+ - "n\3\2\2\2\u020d\u020e\7T\2\2\u020e\u020f\7K\2\2\u020f\u0210\7I\2\2\u0210"+ - "\u0211\7J\2\2\u0211\u0212\7V\2\2\u0212p\3\2\2\2\u0213\u0214\7T\2\2\u0214"+ - "\u0215\7N\2\2\u0215\u0216\7K\2\2\u0216\u0217\7M\2\2\u0217\u0218\7G\2\2"+ - "\u0218r\3\2\2\2\u0219\u021a\7S\2\2\u021a\u021b\7W\2\2\u021b\u021c\7G\2"+ - "\2\u021c\u021d\7T\2\2\u021d\u021e\7[\2\2\u021et\3\2\2\2\u021f\u0220\7"+ - "U\2\2\u0220\u0221\7E\2\2\u0221\u0222\7J\2\2\u0222\u0223\7G\2\2\u0223\u0224"+ - "\7O\2\2\u0224\u0225\7C\2\2\u0225\u0226\7U\2\2\u0226v\3\2\2\2\u0227\u0228"+ - "\7U\2\2\u0228\u0229\7G\2\2\u0229\u022a\7N\2\2\u022a\u022b\7G\2\2\u022b"+ - "\u022c\7E\2\2\u022c\u022d\7V\2\2\u022dx\3\2\2\2\u022e\u022f\7U\2\2\u022f"+ - "\u0230\7J\2\2\u0230\u0231\7Q\2\2\u0231\u0232\7Y\2\2\u0232z\3\2\2\2\u0233"+ - "\u0234\7U\2\2\u0234\u0235\7[\2\2\u0235\u0236\7U\2\2\u0236|\3\2\2\2\u0237"+ - "\u0238\7V\2\2\u0238\u0239\7C\2\2\u0239\u023a\7D\2\2\u023a\u023b\7N\2\2"+ - "\u023b\u023c\7G\2\2\u023c~\3\2\2\2\u023d\u023e\7V\2\2\u023e\u023f\7C\2"+ - "\2\u023f\u0240\7D\2\2\u0240\u0241\7N\2\2\u0241\u0242\7G\2\2\u0242\u0243"+ - "\7U\2\2\u0243\u0080\3\2\2\2\u0244\u0245\7V\2\2\u0245\u0246\7G\2\2\u0246"+ - "\u0247\7Z\2\2\u0247\u0248\7V\2\2\u0248\u0082\3\2\2\2\u0249\u024a\7V\2"+ - "\2\u024a\u024b\7T\2\2\u024b\u024c\7W\2\2\u024c\u024d\7G\2\2\u024d\u0084"+ - "\3\2\2\2\u024e\u024f\7V\2\2\u024f\u0250\7[\2\2\u0250\u0251\7R\2\2\u0251"+ - "\u0252\7G\2\2\u0252\u0086\3\2\2\2\u0253\u0254\7V\2\2\u0254\u0255\7[\2"+ - "\2\u0255\u0256\7R\2\2\u0256\u0257\7G\2\2\u0257\u0258\7U\2\2\u0258\u0088"+ - "\3\2\2\2\u0259\u025a\7W\2\2\u025a\u025b\7U\2\2\u025b\u025c\7K\2\2\u025c"+ - "\u025d\7P\2\2\u025d\u025e\7I\2\2\u025e\u008a\3\2\2\2\u025f\u0260\7X\2"+ - "\2\u0260\u0261\7G\2\2\u0261\u0262\7T\2\2\u0262\u0263\7K\2\2\u0263\u0264"+ - "\7H\2\2\u0264\u0265\7[\2\2\u0265\u008c\3\2\2\2\u0266\u0267\7Y\2\2\u0267"+ - "\u0268\7J\2\2\u0268\u0269\7G\2\2\u0269\u026a\7T\2\2\u026a\u026b\7G\2\2"+ - "\u026b\u008e\3\2\2\2\u026c\u026d\7Y\2\2\u026d\u026e\7K\2\2\u026e\u026f"+ - "\7V\2\2\u026f\u0270\7J\2\2\u0270\u0090\3\2\2\2\u0271\u0272\7?\2\2\u0272"+ - "\u0092\3\2\2\2\u0273\u0274\7>\2\2\u0274\u027b\7@\2\2\u0275\u0276\7#\2"+ - "\2\u0276\u027b\7?\2\2\u0277\u0278\7>\2\2\u0278\u0279\7?\2\2\u0279\u027b"+ - "\7@\2\2\u027a\u0273\3\2\2\2\u027a\u0275\3\2\2\2\u027a\u0277\3\2\2\2\u027b"+ - "\u0094\3\2\2\2\u027c\u027d\7>\2\2\u027d\u0096\3\2\2\2\u027e\u027f\7>\2"+ - "\2\u027f\u0280\7?\2\2\u0280\u0098\3\2\2\2\u0281\u0282\7@\2\2\u0282\u009a"+ - "\3\2\2\2\u0283\u0284\7@\2\2\u0284\u0285\7?\2\2\u0285\u009c\3\2\2\2\u0286"+ - "\u0287\7-\2\2\u0287\u009e\3\2\2\2\u0288\u0289\7/\2\2\u0289\u00a0\3\2\2"+ - "\2\u028a\u028b\7,\2\2\u028b\u00a2\3\2\2\2\u028c\u028d\7\61\2\2\u028d\u00a4"+ - "\3\2\2\2\u028e\u028f\7\'\2\2\u028f\u00a6\3\2\2\2\u0290\u0291\7~\2\2\u0291"+ - "\u0292\7~\2\2\u0292\u00a8\3\2\2\2\u0293\u0294\7\60\2\2\u0294\u00aa\3\2"+ - 
"\2\2\u0295\u0296\7A\2\2\u0296\u00ac\3\2\2\2\u0297\u029d\7)\2\2\u0298\u029c"+ - "\n\2\2\2\u0299\u029a\7)\2\2\u029a\u029c\7)\2\2\u029b\u0298\3\2\2\2\u029b"+ - "\u0299\3\2\2\2\u029c\u029f\3\2\2\2\u029d\u029b\3\2\2\2\u029d\u029e\3\2"+ - "\2\2\u029e\u02a0\3\2\2\2\u029f\u029d\3\2\2\2\u02a0\u02a1\7)\2\2\u02a1"+ - "\u00ae\3\2\2\2\u02a2\u02a4\5\u00bf`\2\u02a3\u02a2\3\2\2\2\u02a4\u02a5"+ - "\3\2\2\2\u02a5\u02a3\3\2\2\2\u02a5\u02a6\3\2\2\2\u02a6\u00b0\3\2\2\2\u02a7"+ - "\u02a9\5\u00bf`\2\u02a8\u02a7\3\2\2\2\u02a9\u02aa\3\2\2\2\u02aa\u02a8"+ - "\3\2\2\2\u02aa\u02ab\3\2\2\2\u02ab\u02ac\3\2\2\2\u02ac\u02b0\5\u00a9U"+ - "\2\u02ad\u02af\5\u00bf`\2\u02ae\u02ad\3\2\2\2\u02af\u02b2\3\2\2\2\u02b0"+ - "\u02ae\3\2\2\2\u02b0\u02b1\3\2\2\2\u02b1\u02d2\3\2\2\2\u02b2\u02b0\3\2"+ - "\2\2\u02b3\u02b5\5\u00a9U\2\u02b4\u02b6\5\u00bf`\2\u02b5\u02b4\3\2\2\2"+ - "\u02b6\u02b7\3\2\2\2\u02b7\u02b5\3\2\2\2\u02b7\u02b8\3\2\2\2\u02b8\u02d2"+ - "\3\2\2\2\u02b9\u02bb\5\u00bf`\2\u02ba\u02b9\3\2\2\2\u02bb\u02bc\3\2\2"+ - "\2\u02bc\u02ba\3\2\2\2\u02bc\u02bd\3\2\2\2\u02bd\u02c5\3\2\2\2\u02be\u02c2"+ - "\5\u00a9U\2\u02bf\u02c1\5\u00bf`\2\u02c0\u02bf\3\2\2\2\u02c1\u02c4\3\2"+ - "\2\2\u02c2\u02c0\3\2\2\2\u02c2\u02c3\3\2\2\2\u02c3\u02c6\3\2\2\2\u02c4"+ - "\u02c2\3\2\2\2\u02c5\u02be\3\2\2\2\u02c5\u02c6\3\2\2\2\u02c6\u02c7\3\2"+ - "\2\2\u02c7\u02c8\5\u00bd_\2\u02c8\u02d2\3\2\2\2\u02c9\u02cb\5\u00a9U\2"+ - "\u02ca\u02cc\5\u00bf`\2\u02cb\u02ca\3\2\2\2\u02cc\u02cd\3\2\2\2\u02cd"+ - "\u02cb\3\2\2\2\u02cd\u02ce\3\2\2\2\u02ce\u02cf\3\2\2\2\u02cf\u02d0\5\u00bd"+ - "_\2\u02d0\u02d2\3\2\2\2\u02d1\u02a8\3\2\2\2\u02d1\u02b3\3\2\2\2\u02d1"+ - "\u02ba\3\2\2\2\u02d1\u02c9\3\2\2\2\u02d2\u00b2\3\2\2\2\u02d3\u02d6\5\u00c1"+ - "a\2\u02d4\u02d6\7a\2\2\u02d5\u02d3\3\2\2\2\u02d5\u02d4\3\2\2\2\u02d6\u02dc"+ - "\3\2\2\2\u02d7\u02db\5\u00c1a\2\u02d8\u02db\5\u00bf`\2\u02d9\u02db\t\3"+ - "\2\2\u02da\u02d7\3\2\2\2\u02da\u02d8\3\2\2\2\u02da\u02d9\3\2\2\2\u02db"+ - "\u02de\3\2\2\2\u02dc\u02da\3\2\2\2\u02dc\u02dd\3\2\2\2\u02dd\u00b4\3\2"+ - "\2\2\u02de\u02dc\3\2\2\2\u02df\u02e3\5\u00bf`\2\u02e0\u02e4\5\u00c1a\2"+ - "\u02e1\u02e4\5\u00bf`\2\u02e2\u02e4\t\4\2\2\u02e3\u02e0\3\2\2\2\u02e3"+ - "\u02e1\3\2\2\2\u02e3\u02e2\3\2\2\2\u02e4\u02e5\3\2\2\2\u02e5\u02e3\3\2"+ - "\2\2\u02e5\u02e6\3\2\2\2\u02e6\u00b6\3\2\2\2\u02e7\u02ec\5\u00c1a\2\u02e8"+ - "\u02ec\5\u00bf`\2\u02e9\u02ec\t\3\2\2\u02ea\u02ec\5\u00a1Q\2\u02eb\u02e7"+ - "\3\2\2\2\u02eb\u02e8\3\2\2\2\u02eb\u02e9\3\2\2\2\u02eb\u02ea\3\2\2\2\u02ec"+ - "\u02ed\3\2\2\2\u02ed\u02eb\3\2\2\2\u02ed\u02ee\3\2\2\2\u02ee\u00b8\3\2"+ - "\2\2\u02ef\u02f5\7$\2\2\u02f0\u02f4\n\5\2\2\u02f1\u02f2\7$\2\2\u02f2\u02f4"+ - "\7$\2\2\u02f3\u02f0\3\2\2\2\u02f3\u02f1\3\2\2\2\u02f4\u02f7\3\2\2\2\u02f5"+ - "\u02f3\3\2\2\2\u02f5\u02f6\3\2\2\2\u02f6\u02f8\3\2\2\2\u02f7\u02f5\3\2"+ - "\2\2\u02f8\u02f9\7$\2\2\u02f9\u00ba\3\2\2\2\u02fa\u0300\7b\2\2\u02fb\u02ff"+ - "\n\6\2\2\u02fc\u02fd\7b\2\2\u02fd\u02ff\7b\2\2\u02fe\u02fb\3\2\2\2\u02fe"+ - "\u02fc\3\2\2\2\u02ff\u0302\3\2\2\2\u0300\u02fe\3\2\2\2\u0300\u0301\3\2"+ - "\2\2\u0301\u0303\3\2\2\2\u0302\u0300\3\2\2\2\u0303\u0304\7b\2\2\u0304"+ - "\u00bc\3\2\2\2\u0305\u0307\7G\2\2\u0306\u0308\t\7\2\2\u0307\u0306\3\2"+ - "\2\2\u0307\u0308\3\2\2\2\u0308\u030a\3\2\2\2\u0309\u030b\5\u00bf`\2\u030a"+ - "\u0309\3\2\2\2\u030b\u030c\3\2\2\2\u030c\u030a\3\2\2\2\u030c\u030d\3\2"+ - "\2\2\u030d\u00be\3\2\2\2\u030e\u030f\t\b\2\2\u030f\u00c0\3\2\2\2\u0310"+ - "\u0311\t\t\2\2\u0311\u00c2\3\2\2\2\u0312\u0313\7/\2\2\u0313\u0314\7/\2"+ - "\2\u0314\u0318\3\2\2\2\u0315\u0317\n\n\2\2\u0316\u0315\3\2\2\2\u0317\u031a"+ - 
"\3\2\2\2\u0318\u0316\3\2\2\2\u0318\u0319\3\2\2\2\u0319\u031c\3\2\2\2\u031a"+ - "\u0318\3\2\2\2\u031b\u031d\7\17\2\2\u031c\u031b\3\2\2\2\u031c\u031d\3"+ - "\2\2\2\u031d\u031f\3\2\2\2\u031e\u0320\7\f\2\2\u031f\u031e\3\2\2\2\u031f"+ - "\u0320\3\2\2\2\u0320\u0321\3\2\2\2\u0321\u0322\bb\2\2\u0322\u00c4\3\2"+ - "\2\2\u0323\u0324\7\61\2\2\u0324\u0325\7,\2\2\u0325\u032a\3\2\2\2\u0326"+ - "\u0329\5\u00c5c\2\u0327\u0329\13\2\2\2\u0328\u0326\3\2\2\2\u0328\u0327"+ - "\3\2\2\2\u0329\u032c\3\2\2\2\u032a\u032b\3\2\2\2\u032a\u0328\3\2\2\2\u032b"+ - "\u032d\3\2\2\2\u032c\u032a\3\2\2\2\u032d\u032e\7,\2\2\u032e\u032f\7\61"+ - "\2\2\u032f\u0330\3\2\2\2\u0330\u0331\bc\2\2\u0331\u00c6\3\2\2\2\u0332"+ - "\u0334\t\13\2\2\u0333\u0332\3\2\2\2\u0334\u0335\3\2\2\2\u0335\u0333\3"+ - "\2\2\2\u0335\u0336\3\2\2\2\u0336\u0337\3\2\2\2\u0337\u0338\bd\2\2\u0338"+ - "\u00c8\3\2\2\2\u0339\u033a\13\2\2\2\u033a\u00ca\3\2\2\2\"\2\u027a\u029b"+ - "\u029d\u02a5\u02aa\u02b0\u02b7\u02bc\u02c2\u02c5\u02cd\u02d1\u02d5\u02da"+ - "\u02dc\u02e3\u02e5\u02eb\u02ed\u02f3\u02f5\u02fe\u0300\u0307\u030c\u0318"+ - "\u031c\u031f\u0328\u032a\u0335\3\2\3\2"; + "\3\2\2\2\2\u00bd\3\2\2\2\2\u00bf\3\2\2\2\2\u00c1\3\2\2\2\2\u00c3\3\2\2"+ + "\2\2\u00c5\3\2\2\2\2\u00c7\3\2\2\2\2\u00c9\3\2\2\2\2\u00cb\3\2\2\2\2\u00d3"+ + "\3\2\2\2\2\u00d5\3\2\2\2\2\u00d7\3\2\2\2\2\u00d9\3\2\2\2\3\u00db\3\2\2"+ + "\2\5\u00dd\3\2\2\2\7\u00df\3\2\2\2\t\u00e1\3\2\2\2\13\u00e3\3\2\2\2\r"+ + "\u00e7\3\2\2\2\17\u00ef\3\2\2\2\21\u00f8\3\2\2\2\23\u00fc\3\2\2\2\25\u0100"+ + "\3\2\2\2\27\u0103\3\2\2\2\31\u0107\3\2\2\2\33\u010f\3\2\2\2\35\u0112\3"+ + "\2\2\2\37\u0117\3\2\2\2!\u011f\3\2\2\2#\u0128\3\2\2\2%\u0130\3\2\2\2\'"+ + "\u0136\3\2\2\2)\u013b\3\2\2\2+\u0144\3\2\2\2-\u014d\3\2\2\2/\u0154\3\2"+ + "\2\2\61\u015f\3\2\2\2\63\u0166\3\2\2\2\65\u016e\3\2\2\2\67\u0176\3\2\2"+ + "\29\u017c\3\2\2\2;\u0183\3\2\2\2=\u0188\3\2\2\2?\u018d\3\2\2\2A\u0197"+ + "\3\2\2\2C\u01a0\3\2\2\2E\u01a6\3\2\2\2G\u01ad\3\2\2\2I\u01b0\3\2\2\2K"+ + "\u01b6\3\2\2\2M\u01b9\3\2\2\2O\u01be\3\2\2\2Q\u01c3\3\2\2\2S\u01c8\3\2"+ + "\2\2U\u01ce\3\2\2\2W\u01d5\3\2\2\2Y\u01db\3\2\2\2[\u01e3\3\2\2\2]\u01e7"+ + "\3\2\2\2_\u01ec\3\2\2\2a\u01ef\3\2\2\2c\u01f9\3\2\2\2e\u01fc\3\2\2\2g"+ + "\u0202\3\2\2\2i\u0208\3\2\2\2k\u020f\3\2\2\2m\u0218\3\2\2\2o\u021d\3\2"+ + "\2\2q\u0223\3\2\2\2s\u0229\3\2\2\2u\u022f\3\2\2\2w\u0237\3\2\2\2y\u023e"+ + "\3\2\2\2{\u0243\3\2\2\2}\u0247\3\2\2\2\177\u024d\3\2\2\2\u0081\u0254\3"+ + "\2\2\2\u0083\u0259\3\2\2\2\u0085\u025e\3\2\2\2\u0087\u0263\3\2\2\2\u0089"+ + "\u0269\3\2\2\2\u008b\u026f\3\2\2\2\u008d\u0276\3\2\2\2\u008f\u027c\3\2"+ + "\2\2\u0091\u0281\3\2\2\2\u0093\u0289\3\2\2\2\u0095\u028d\3\2\2\2\u0097"+ + "\u0294\3\2\2\2\u0099\u0297\3\2\2\2\u009b\u029a\3\2\2\2\u009d\u029e\3\2"+ + "\2\2\u009f\u02a4\3\2\2\2\u00a1\u02a6\3\2\2\2\u00a3\u02af\3\2\2\2\u00a5"+ + "\u02b1\3\2\2\2\u00a7\u02b3\3\2\2\2\u00a9\u02b6\3\2\2\2\u00ab\u02b8\3\2"+ + "\2\2\u00ad\u02bb\3\2\2\2\u00af\u02bd\3\2\2\2\u00b1\u02bf\3\2\2\2\u00b3"+ + "\u02c1\3\2\2\2\u00b5\u02c3\3\2\2\2\u00b7\u02c5\3\2\2\2\u00b9\u02c8\3\2"+ + "\2\2\u00bb\u02ca\3\2\2\2\u00bd\u02cc\3\2\2\2\u00bf\u02d8\3\2\2\2\u00c1"+ + "\u0306\3\2\2\2\u00c3\u030a\3\2\2\2\u00c5\u0314\3\2\2\2\u00c7\u0320\3\2"+ + "\2\2\u00c9\u0324\3\2\2\2\u00cb\u032f\3\2\2\2\u00cd\u033a\3\2\2\2\u00cf"+ + "\u0343\3\2\2\2\u00d1\u0345\3\2\2\2\u00d3\u0347\3\2\2\2\u00d5\u0358\3\2"+ + "\2\2\u00d7\u0368\3\2\2\2\u00d9\u036e\3\2\2\2\u00db\u00dc\7*\2\2\u00dc"+ + "\4\3\2\2\2\u00dd\u00de\7+\2\2\u00de\6\3\2\2\2\u00df\u00e0\7.\2\2\u00e0"+ + 
"\b\3\2\2\2\u00e1\u00e2\7<\2\2\u00e2\n\3\2\2\2\u00e3\u00e4\7C\2\2\u00e4"+ + "\u00e5\7N\2\2\u00e5\u00e6\7N\2\2\u00e6\f\3\2\2\2\u00e7\u00e8\7C\2\2\u00e8"+ + "\u00e9\7P\2\2\u00e9\u00ea\7C\2\2\u00ea\u00eb\7N\2\2\u00eb\u00ec\7[\2\2"+ + "\u00ec\u00ed\7\\\2\2\u00ed\u00ee\7G\2\2\u00ee\16\3\2\2\2\u00ef\u00f0\7"+ + "C\2\2\u00f0\u00f1\7P\2\2\u00f1\u00f2\7C\2\2\u00f2\u00f3\7N\2\2\u00f3\u00f4"+ + "\7[\2\2\u00f4\u00f5\7\\\2\2\u00f5\u00f6\7G\2\2\u00f6\u00f7\7F\2\2\u00f7"+ + "\20\3\2\2\2\u00f8\u00f9\7C\2\2\u00f9\u00fa\7P\2\2\u00fa\u00fb\7F\2\2\u00fb"+ + "\22\3\2\2\2\u00fc\u00fd\7C\2\2\u00fd\u00fe\7P\2\2\u00fe\u00ff\7[\2\2\u00ff"+ + "\24\3\2\2\2\u0100\u0101\7C\2\2\u0101\u0102\7U\2\2\u0102\26\3\2\2\2\u0103"+ + "\u0104\7C\2\2\u0104\u0105\7U\2\2\u0105\u0106\7E\2\2\u0106\30\3\2\2\2\u0107"+ + "\u0108\7D\2\2\u0108\u0109\7G\2\2\u0109\u010a\7V\2\2\u010a\u010b\7Y\2\2"+ + "\u010b\u010c\7G\2\2\u010c\u010d\7G\2\2\u010d\u010e\7P\2\2\u010e\32\3\2"+ + "\2\2\u010f\u0110\7D\2\2\u0110\u0111\7[\2\2\u0111\34\3\2\2\2\u0112\u0113"+ + "\7E\2\2\u0113\u0114\7C\2\2\u0114\u0115\7U\2\2\u0115\u0116\7V\2\2\u0116"+ + "\36\3\2\2\2\u0117\u0118\7E\2\2\u0118\u0119\7C\2\2\u0119\u011a\7V\2\2\u011a"+ + "\u011b\7C\2\2\u011b\u011c\7N\2\2\u011c\u011d\7Q\2\2\u011d\u011e\7I\2\2"+ + "\u011e \3\2\2\2\u011f\u0120\7E\2\2\u0120\u0121\7C\2\2\u0121\u0122\7V\2"+ + "\2\u0122\u0123\7C\2\2\u0123\u0124\7N\2\2\u0124\u0125\7Q\2\2\u0125\u0126"+ + "\7I\2\2\u0126\u0127\7U\2\2\u0127\"\3\2\2\2\u0128\u0129\7E\2\2\u0129\u012a"+ + "\7Q\2\2\u012a\u012b\7N\2\2\u012b\u012c\7W\2\2\u012c\u012d\7O\2\2\u012d"+ + "\u012e\7P\2\2\u012e\u012f\7U\2\2\u012f$\3\2\2\2\u0130\u0131\7F\2\2\u0131"+ + "\u0132\7G\2\2\u0132\u0133\7D\2\2\u0133\u0134\7W\2\2\u0134\u0135\7I\2\2"+ + "\u0135&\3\2\2\2\u0136\u0137\7F\2\2\u0137\u0138\7G\2\2\u0138\u0139\7U\2"+ + "\2\u0139\u013a\7E\2\2\u013a(\3\2\2\2\u013b\u013c\7F\2\2\u013c\u013d\7"+ + "G\2\2\u013d\u013e\7U\2\2\u013e\u013f\7E\2\2\u013f\u0140\7T\2\2\u0140\u0141"+ + "\7K\2\2\u0141\u0142\7D\2\2\u0142\u0143\7G\2\2\u0143*\3\2\2\2\u0144\u0145"+ + "\7F\2\2\u0145\u0146\7K\2\2\u0146\u0147\7U\2\2\u0147\u0148\7V\2\2\u0148"+ + "\u0149\7K\2\2\u0149\u014a\7P\2\2\u014a\u014b\7E\2\2\u014b\u014c\7V\2\2"+ + "\u014c,\3\2\2\2\u014d\u014e\7G\2\2\u014e\u014f\7U\2\2\u014f\u0150\7E\2"+ + "\2\u0150\u0151\7C\2\2\u0151\u0152\7R\2\2\u0152\u0153\7G\2\2\u0153.\3\2"+ + "\2\2\u0154\u0155\7G\2\2\u0155\u0156\7Z\2\2\u0156\u0157\7G\2\2\u0157\u0158"+ + "\7E\2\2\u0158\u0159\7W\2\2\u0159\u015a\7V\2\2\u015a\u015b\7C\2\2\u015b"+ + "\u015c\7D\2\2\u015c\u015d\7N\2\2\u015d\u015e\7G\2\2\u015e\60\3\2\2\2\u015f"+ + "\u0160\7G\2\2\u0160\u0161\7Z\2\2\u0161\u0162\7K\2\2\u0162\u0163\7U\2\2"+ + "\u0163\u0164\7V\2\2\u0164\u0165\7U\2\2\u0165\62\3\2\2\2\u0166\u0167\7"+ + "G\2\2\u0167\u0168\7Z\2\2\u0168\u0169\7R\2\2\u0169\u016a\7N\2\2\u016a\u016b"+ + "\7C\2\2\u016b\u016c\7K\2\2\u016c\u016d\7P\2\2\u016d\64\3\2\2\2\u016e\u016f"+ + "\7G\2\2\u016f\u0170\7Z\2\2\u0170\u0171\7V\2\2\u0171\u0172\7T\2\2\u0172"+ + "\u0173\7C\2\2\u0173\u0174\7E\2\2\u0174\u0175\7V\2\2\u0175\66\3\2\2\2\u0176"+ + "\u0177\7H\2\2\u0177\u0178\7C\2\2\u0178\u0179\7N\2\2\u0179\u017a\7U\2\2"+ + "\u017a\u017b\7G\2\2\u017b8\3\2\2\2\u017c\u017d\7H\2\2\u017d\u017e\7Q\2"+ + "\2\u017e\u017f\7T\2\2\u017f\u0180\7O\2\2\u0180\u0181\7C\2\2\u0181\u0182"+ + "\7V\2\2\u0182:\3\2\2\2\u0183\u0184\7H\2\2\u0184\u0185\7T\2\2\u0185\u0186"+ + "\7Q\2\2\u0186\u0187\7O\2\2\u0187<\3\2\2\2\u0188\u0189\7H\2\2\u0189\u018a"+ + "\7W\2\2\u018a\u018b\7N\2\2\u018b\u018c\7N\2\2\u018c>\3\2\2\2\u018d\u018e"+ + 
"\7H\2\2\u018e\u018f\7W\2\2\u018f\u0190\7P\2\2\u0190\u0191\7E\2\2\u0191"+ + "\u0192\7V\2\2\u0192\u0193\7K\2\2\u0193\u0194\7Q\2\2\u0194\u0195\7P\2\2"+ + "\u0195\u0196\7U\2\2\u0196@\3\2\2\2\u0197\u0198\7I\2\2\u0198\u0199\7T\2"+ + "\2\u0199\u019a\7C\2\2\u019a\u019b\7R\2\2\u019b\u019c\7J\2\2\u019c\u019d"+ + "\7X\2\2\u019d\u019e\7K\2\2\u019e\u019f\7\\\2\2\u019fB\3\2\2\2\u01a0\u01a1"+ + "\7I\2\2\u01a1\u01a2\7T\2\2\u01a2\u01a3\7Q\2\2\u01a3\u01a4\7W\2\2\u01a4"+ + "\u01a5\7R\2\2\u01a5D\3\2\2\2\u01a6\u01a7\7J\2\2\u01a7\u01a8\7C\2\2\u01a8"+ + "\u01a9\7X\2\2\u01a9\u01aa\7K\2\2\u01aa\u01ab\7P\2\2\u01ab\u01ac\7I\2\2"+ + "\u01acF\3\2\2\2\u01ad\u01ae\7K\2\2\u01ae\u01af\7P\2\2\u01afH\3\2\2\2\u01b0"+ + "\u01b1\7K\2\2\u01b1\u01b2\7P\2\2\u01b2\u01b3\7P\2\2\u01b3\u01b4\7G\2\2"+ + "\u01b4\u01b5\7T\2\2\u01b5J\3\2\2\2\u01b6\u01b7\7K\2\2\u01b7\u01b8\7U\2"+ + "\2\u01b8L\3\2\2\2\u01b9\u01ba\7L\2\2\u01ba\u01bb\7Q\2\2\u01bb\u01bc\7"+ + "K\2\2\u01bc\u01bd\7P\2\2\u01bdN\3\2\2\2\u01be\u01bf\7N\2\2\u01bf\u01c0"+ + "\7G\2\2\u01c0\u01c1\7H\2\2\u01c1\u01c2\7V\2\2\u01c2P\3\2\2\2\u01c3\u01c4"+ + "\7N\2\2\u01c4\u01c5\7K\2\2\u01c5\u01c6\7M\2\2\u01c6\u01c7\7G\2\2\u01c7"+ + "R\3\2\2\2\u01c8\u01c9\7N\2\2\u01c9\u01ca\7K\2\2\u01ca\u01cb\7O\2\2\u01cb"+ + "\u01cc\7K\2\2\u01cc\u01cd\7V\2\2\u01cdT\3\2\2\2\u01ce\u01cf\7O\2\2\u01cf"+ + "\u01d0\7C\2\2\u01d0\u01d1\7R\2\2\u01d1\u01d2\7R\2\2\u01d2\u01d3\7G\2\2"+ + "\u01d3\u01d4\7F\2\2\u01d4V\3\2\2\2\u01d5\u01d6\7O\2\2\u01d6\u01d7\7C\2"+ + "\2\u01d7\u01d8\7V\2\2\u01d8\u01d9\7E\2\2\u01d9\u01da\7J\2\2\u01daX\3\2"+ + "\2\2\u01db\u01dc\7P\2\2\u01dc\u01dd\7C\2\2\u01dd\u01de\7V\2\2\u01de\u01df"+ + "\7W\2\2\u01df\u01e0\7T\2\2\u01e0\u01e1\7C\2\2\u01e1\u01e2\7N\2\2\u01e2"+ + "Z\3\2\2\2\u01e3\u01e4\7P\2\2\u01e4\u01e5\7Q\2\2\u01e5\u01e6\7V\2\2\u01e6"+ + "\\\3\2\2\2\u01e7\u01e8\7P\2\2\u01e8\u01e9\7W\2\2\u01e9\u01ea\7N\2\2\u01ea"+ + "\u01eb\7N\2\2\u01eb^\3\2\2\2\u01ec\u01ed\7Q\2\2\u01ed\u01ee\7P\2\2\u01ee"+ + "`\3\2\2\2\u01ef\u01f0\7Q\2\2\u01f0\u01f1\7R\2\2\u01f1\u01f2\7V\2\2\u01f2"+ + "\u01f3\7K\2\2\u01f3\u01f4\7O\2\2\u01f4\u01f5\7K\2\2\u01f5\u01f6\7\\\2"+ + "\2\u01f6\u01f7\7G\2\2\u01f7\u01f8\7F\2\2\u01f8b\3\2\2\2\u01f9\u01fa\7"+ + "Q\2\2\u01fa\u01fb\7T\2\2\u01fbd\3\2\2\2\u01fc\u01fd\7Q\2\2\u01fd\u01fe"+ + "\7T\2\2\u01fe\u01ff\7F\2\2\u01ff\u0200\7G\2\2\u0200\u0201\7T\2\2\u0201"+ + "f\3\2\2\2\u0202\u0203\7Q\2\2\u0203\u0204\7W\2\2\u0204\u0205\7V\2\2\u0205"+ + "\u0206\7G\2\2\u0206\u0207\7T\2\2\u0207h\3\2\2\2\u0208\u0209\7R\2\2\u0209"+ + "\u020a\7C\2\2\u020a\u020b\7T\2\2\u020b\u020c\7U\2\2\u020c\u020d\7G\2\2"+ + "\u020d\u020e\7F\2\2\u020ej\3\2\2\2\u020f\u0210\7R\2\2\u0210\u0211\7J\2"+ + "\2\u0211\u0212\7[\2\2\u0212\u0213\7U\2\2\u0213\u0214\7K\2\2\u0214\u0215"+ + "\7E\2\2\u0215\u0216\7C\2\2\u0216\u0217\7N\2\2\u0217l\3\2\2\2\u0218\u0219"+ + "\7R\2\2\u0219\u021a\7N\2\2\u021a\u021b\7C\2\2\u021b\u021c\7P\2\2\u021c"+ + "n\3\2\2\2\u021d\u021e\7T\2\2\u021e\u021f\7K\2\2\u021f\u0220\7I\2\2\u0220"+ + "\u0221\7J\2\2\u0221\u0222\7V\2\2\u0222p\3\2\2\2\u0223\u0224\7T\2\2\u0224"+ + "\u0225\7N\2\2\u0225\u0226\7K\2\2\u0226\u0227\7M\2\2\u0227\u0228\7G\2\2"+ + "\u0228r\3\2\2\2\u0229\u022a\7S\2\2\u022a\u022b\7W\2\2\u022b\u022c\7G\2"+ + "\2\u022c\u022d\7T\2\2\u022d\u022e\7[\2\2\u022et\3\2\2\2\u022f\u0230\7"+ + "U\2\2\u0230\u0231\7E\2\2\u0231\u0232\7J\2\2\u0232\u0233\7G\2\2\u0233\u0234"+ + "\7O\2\2\u0234\u0235\7C\2\2\u0235\u0236\7U\2\2\u0236v\3\2\2\2\u0237\u0238"+ + "\7U\2\2\u0238\u0239\7G\2\2\u0239\u023a\7N\2\2\u023a\u023b\7G\2\2\u023b"+ + "\u023c\7E\2\2\u023c\u023d\7V\2\2\u023dx\3\2\2\2\u023e\u023f\7U\2\2\u023f"+ + 
"\u0240\7J\2\2\u0240\u0241\7Q\2\2\u0241\u0242\7Y\2\2\u0242z\3\2\2\2\u0243"+ + "\u0244\7U\2\2\u0244\u0245\7[\2\2\u0245\u0246\7U\2\2\u0246|\3\2\2\2\u0247"+ + "\u0248\7V\2\2\u0248\u0249\7C\2\2\u0249\u024a\7D\2\2\u024a\u024b\7N\2\2"+ + "\u024b\u024c\7G\2\2\u024c~\3\2\2\2\u024d\u024e\7V\2\2\u024e\u024f\7C\2"+ + "\2\u024f\u0250\7D\2\2\u0250\u0251\7N\2\2\u0251\u0252\7G\2\2\u0252\u0253"+ + "\7U\2\2\u0253\u0080\3\2\2\2\u0254\u0255\7V\2\2\u0255\u0256\7G\2\2\u0256"+ + "\u0257\7Z\2\2\u0257\u0258\7V\2\2\u0258\u0082\3\2\2\2\u0259\u025a\7V\2"+ + "\2\u025a\u025b\7T\2\2\u025b\u025c\7W\2\2\u025c\u025d\7G\2\2\u025d\u0084"+ + "\3\2\2\2\u025e\u025f\7V\2\2\u025f\u0260\7[\2\2\u0260\u0261\7R\2\2\u0261"+ + "\u0262\7G\2\2\u0262\u0086\3\2\2\2\u0263\u0264\7V\2\2\u0264\u0265\7[\2"+ + "\2\u0265\u0266\7R\2\2\u0266\u0267\7G\2\2\u0267\u0268\7U\2\2\u0268\u0088"+ + "\3\2\2\2\u0269\u026a\7W\2\2\u026a\u026b\7U\2\2\u026b\u026c\7K\2\2\u026c"+ + "\u026d\7P\2\2\u026d\u026e\7I\2\2\u026e\u008a\3\2\2\2\u026f\u0270\7X\2"+ + "\2\u0270\u0271\7G\2\2\u0271\u0272\7T\2\2\u0272\u0273\7K\2\2\u0273\u0274"+ + "\7H\2\2\u0274\u0275\7[\2\2\u0275\u008c\3\2\2\2\u0276\u0277\7Y\2\2\u0277"+ + "\u0278\7J\2\2\u0278\u0279\7G\2\2\u0279\u027a\7T\2\2\u027a\u027b\7G\2\2"+ + "\u027b\u008e\3\2\2\2\u027c\u027d\7Y\2\2\u027d\u027e\7K\2\2\u027e\u027f"+ + "\7V\2\2\u027f\u0280\7J\2\2\u0280\u0090\3\2\2\2\u0281\u0282\7}\2\2\u0282"+ + "\u0283\7G\2\2\u0283\u0284\7U\2\2\u0284\u0285\7E\2\2\u0285\u0286\7C\2\2"+ + "\u0286\u0287\7R\2\2\u0287\u0288\7G\2\2\u0288\u0092\3\2\2\2\u0289\u028a"+ + "\7}\2\2\u028a\u028b\7H\2\2\u028b\u028c\7P\2\2\u028c\u0094\3\2\2\2\u028d"+ + "\u028e\7}\2\2\u028e\u028f\7N\2\2\u028f\u0290\7K\2\2\u0290\u0291\7O\2\2"+ + "\u0291\u0292\7K\2\2\u0292\u0293\7V\2\2\u0293\u0096\3\2\2\2\u0294\u0295"+ + "\7}\2\2\u0295\u0296\7F\2\2\u0296\u0098\3\2\2\2\u0297\u0298\7}\2\2\u0298"+ + "\u0299\7V\2\2\u0299\u009a\3\2\2\2\u029a\u029b\7}\2\2\u029b\u029c\7V\2"+ + "\2\u029c\u029d\7U\2\2\u029d\u009c\3\2\2\2\u029e\u029f\7}\2\2\u029f\u02a0"+ + "\7I\2\2\u02a0\u02a1\7W\2\2\u02a1\u02a2\7K\2\2\u02a2\u02a3\7F\2\2\u02a3"+ + "\u009e\3\2\2\2\u02a4\u02a5\7\177\2\2\u02a5\u00a0\3\2\2\2\u02a6\u02a7\7"+ + "?\2\2\u02a7\u00a2\3\2\2\2\u02a8\u02a9\7>\2\2\u02a9\u02b0\7@\2\2\u02aa"+ + "\u02ab\7#\2\2\u02ab\u02b0\7?\2\2\u02ac\u02ad\7>\2\2\u02ad\u02ae\7?\2\2"+ + "\u02ae\u02b0\7@\2\2\u02af\u02a8\3\2\2\2\u02af\u02aa\3\2\2\2\u02af\u02ac"+ + "\3\2\2\2\u02b0\u00a4\3\2\2\2\u02b1\u02b2\7>\2\2\u02b2\u00a6\3\2\2\2\u02b3"+ + "\u02b4\7>\2\2\u02b4\u02b5\7?\2\2\u02b5\u00a8\3\2\2\2\u02b6\u02b7\7@\2"+ + "\2\u02b7\u00aa\3\2\2\2\u02b8\u02b9\7@\2\2\u02b9\u02ba\7?\2\2\u02ba\u00ac"+ + "\3\2\2\2\u02bb\u02bc\7-\2\2\u02bc\u00ae\3\2\2\2\u02bd\u02be\7/\2\2\u02be"+ + "\u00b0\3\2\2\2\u02bf\u02c0\7,\2\2\u02c0\u00b2\3\2\2\2\u02c1\u02c2\7\61"+ + "\2\2\u02c2\u00b4\3\2\2\2\u02c3\u02c4\7\'\2\2\u02c4\u00b6\3\2\2\2\u02c5"+ + "\u02c6\7~\2\2\u02c6\u02c7\7~\2\2\u02c7\u00b8\3\2\2\2\u02c8\u02c9\7\60"+ + "\2\2\u02c9\u00ba\3\2\2\2\u02ca\u02cb\7A\2\2\u02cb\u00bc\3\2\2\2\u02cc"+ + "\u02d2\7)\2\2\u02cd\u02d1\n\2\2\2\u02ce\u02cf\7)\2\2\u02cf\u02d1\7)\2"+ + "\2\u02d0\u02cd\3\2\2\2\u02d0\u02ce\3\2\2\2\u02d1\u02d4\3\2\2\2\u02d2\u02d0"+ + "\3\2\2\2\u02d2\u02d3\3\2\2\2\u02d3\u02d5\3\2\2\2\u02d4\u02d2\3\2\2\2\u02d5"+ + "\u02d6\7)\2\2\u02d6\u00be\3\2\2\2\u02d7\u02d9\5\u00cfh\2\u02d8\u02d7\3"+ + "\2\2\2\u02d9\u02da\3\2\2\2\u02da\u02d8\3\2\2\2\u02da\u02db\3\2\2\2\u02db"+ + "\u00c0\3\2\2\2\u02dc\u02de\5\u00cfh\2\u02dd\u02dc\3\2\2\2\u02de\u02df"+ + "\3\2\2\2\u02df\u02dd\3\2\2\2\u02df\u02e0\3\2\2\2\u02e0\u02e1\3\2\2\2\u02e1"+ + 
"\u02e5\5\u00b9]\2\u02e2\u02e4\5\u00cfh\2\u02e3\u02e2\3\2\2\2\u02e4\u02e7"+ + "\3\2\2\2\u02e5\u02e3\3\2\2\2\u02e5\u02e6\3\2\2\2\u02e6\u0307\3\2\2\2\u02e7"+ + "\u02e5\3\2\2\2\u02e8\u02ea\5\u00b9]\2\u02e9\u02eb\5\u00cfh\2\u02ea\u02e9"+ + "\3\2\2\2\u02eb\u02ec\3\2\2\2\u02ec\u02ea\3\2\2\2\u02ec\u02ed\3\2\2\2\u02ed"+ + "\u0307\3\2\2\2\u02ee\u02f0\5\u00cfh\2\u02ef\u02ee\3\2\2\2\u02f0\u02f1"+ + "\3\2\2\2\u02f1\u02ef\3\2\2\2\u02f1\u02f2\3\2\2\2\u02f2\u02fa\3\2\2\2\u02f3"+ + "\u02f7\5\u00b9]\2\u02f4\u02f6\5\u00cfh\2\u02f5\u02f4\3\2\2\2\u02f6\u02f9"+ + "\3\2\2\2\u02f7\u02f5\3\2\2\2\u02f7\u02f8\3\2\2\2\u02f8\u02fb\3\2\2\2\u02f9"+ + "\u02f7\3\2\2\2\u02fa\u02f3\3\2\2\2\u02fa\u02fb\3\2\2\2\u02fb\u02fc\3\2"+ + "\2\2\u02fc\u02fd\5\u00cdg\2\u02fd\u0307\3\2\2\2\u02fe\u0300\5\u00b9]\2"+ + "\u02ff\u0301\5\u00cfh\2\u0300\u02ff\3\2\2\2\u0301\u0302\3\2\2\2\u0302"+ + "\u0300\3\2\2\2\u0302\u0303\3\2\2\2\u0303\u0304\3\2\2\2\u0304\u0305\5\u00cd"+ + "g\2\u0305\u0307\3\2\2\2\u0306\u02dd\3\2\2\2\u0306\u02e8\3\2\2\2\u0306"+ + "\u02ef\3\2\2\2\u0306\u02fe\3\2\2\2\u0307\u00c2\3\2\2\2\u0308\u030b\5\u00d1"+ + "i\2\u0309\u030b\7a\2\2\u030a\u0308\3\2\2\2\u030a\u0309\3\2\2\2\u030b\u0311"+ + "\3\2\2\2\u030c\u0310\5\u00d1i\2\u030d\u0310\5\u00cfh\2\u030e\u0310\t\3"+ + "\2\2\u030f\u030c\3\2\2\2\u030f\u030d\3\2\2\2\u030f\u030e\3\2\2\2\u0310"+ + "\u0313\3\2\2\2\u0311\u030f\3\2\2\2\u0311\u0312\3\2\2\2\u0312\u00c4\3\2"+ + "\2\2\u0313\u0311\3\2\2\2\u0314\u0318\5\u00cfh\2\u0315\u0319\5\u00d1i\2"+ + "\u0316\u0319\5\u00cfh\2\u0317\u0319\t\4\2\2\u0318\u0315\3\2\2\2\u0318"+ + "\u0316\3\2\2\2\u0318\u0317\3\2\2\2\u0319\u031a\3\2\2\2\u031a\u0318\3\2"+ + "\2\2\u031a\u031b\3\2\2\2\u031b\u00c6\3\2\2\2\u031c\u0321\5\u00d1i\2\u031d"+ + "\u0321\5\u00cfh\2\u031e\u0321\t\3\2\2\u031f\u0321\5\u00b1Y\2\u0320\u031c"+ + "\3\2\2\2\u0320\u031d\3\2\2\2\u0320\u031e\3\2\2\2\u0320\u031f\3\2\2\2\u0321"+ + "\u0322\3\2\2\2\u0322\u0320\3\2\2\2\u0322\u0323\3\2\2\2\u0323\u00c8\3\2"+ + "\2\2\u0324\u032a\7$\2\2\u0325\u0329\n\5\2\2\u0326\u0327\7$\2\2\u0327\u0329"+ + "\7$\2\2\u0328\u0325\3\2\2\2\u0328\u0326\3\2\2\2\u0329\u032c\3\2\2\2\u032a"+ + "\u0328\3\2\2\2\u032a\u032b\3\2\2\2\u032b\u032d\3\2\2\2\u032c\u032a\3\2"+ + "\2\2\u032d\u032e\7$\2\2\u032e\u00ca\3\2\2\2\u032f\u0335\7b\2\2\u0330\u0334"+ + "\n\6\2\2\u0331\u0332\7b\2\2\u0332\u0334\7b\2\2\u0333\u0330\3\2\2\2\u0333"+ + "\u0331\3\2\2\2\u0334\u0337\3\2\2\2\u0335\u0333\3\2\2\2\u0335\u0336\3\2"+ + "\2\2\u0336\u0338\3\2\2\2\u0337\u0335\3\2\2\2\u0338\u0339\7b\2\2\u0339"+ + "\u00cc\3\2\2\2\u033a\u033c\7G\2\2\u033b\u033d\t\7\2\2\u033c\u033b\3\2"+ + "\2\2\u033c\u033d\3\2\2\2\u033d\u033f\3\2\2\2\u033e\u0340\5\u00cfh\2\u033f"+ + "\u033e\3\2\2\2\u0340\u0341\3\2\2\2\u0341\u033f\3\2\2\2\u0341\u0342\3\2"+ + "\2\2\u0342\u00ce\3\2\2\2\u0343\u0344\t\b\2\2\u0344\u00d0\3\2\2\2\u0345"+ + "\u0346\t\t\2\2\u0346\u00d2\3\2\2\2\u0347\u0348\7/\2\2\u0348\u0349\7/\2"+ + "\2\u0349\u034d\3\2\2\2\u034a\u034c\n\n\2\2\u034b\u034a\3\2\2\2\u034c\u034f"+ + "\3\2\2\2\u034d\u034b\3\2\2\2\u034d\u034e\3\2\2\2\u034e\u0351\3\2\2\2\u034f"+ + "\u034d\3\2\2\2\u0350\u0352\7\17\2\2\u0351\u0350\3\2\2\2\u0351\u0352\3"+ + "\2\2\2\u0352\u0354\3\2\2\2\u0353\u0355\7\f\2\2\u0354\u0353\3\2\2\2\u0354"+ + "\u0355\3\2\2\2\u0355\u0356\3\2\2\2\u0356\u0357\bj\2\2\u0357\u00d4\3\2"+ + "\2\2\u0358\u0359\7\61\2\2\u0359\u035a\7,\2\2\u035a\u035f\3\2\2\2\u035b"+ + "\u035e\5\u00d5k\2\u035c\u035e\13\2\2\2\u035d\u035b\3\2\2\2\u035d\u035c"+ + "\3\2\2\2\u035e\u0361\3\2\2\2\u035f\u0360\3\2\2\2\u035f\u035d\3\2\2\2\u0360"+ + 
"\u0362\3\2\2\2\u0361\u035f\3\2\2\2\u0362\u0363\7,\2\2\u0363\u0364\7\61"+ + "\2\2\u0364\u0365\3\2\2\2\u0365\u0366\bk\2\2\u0366\u00d6\3\2\2\2\u0367"+ + "\u0369\t\13\2\2\u0368\u0367\3\2\2\2\u0369\u036a\3\2\2\2\u036a\u0368\3"+ + "\2\2\2\u036a\u036b\3\2\2\2\u036b\u036c\3\2\2\2\u036c\u036d\bl\2\2\u036d"+ + "\u00d8\3\2\2\2\u036e\u036f\13\2\2\2\u036f\u00da\3\2\2\2\"\2\u02af\u02d0"+ + "\u02d2\u02da\u02df\u02e5\u02ec\u02f1\u02f7\u02fa\u0302\u0306\u030a\u030f"+ + "\u0311\u0318\u031a\u0320\u0322\u0328\u032a\u0333\u0335\u033c\u0341\u034d"+ + "\u0351\u0354\u035d\u035f\u036a\3\2\3\2"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseListener.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseListener.java index 48f6eb4a7c816..70fe777384c85 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseListener.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseListener.java @@ -1,8 +1,3 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ // ANTLR GENERATED CODE: DO NOT EDIT package org.elasticsearch.xpack.sql.parser; import org.antlr.v4.runtime.tree.ParseTreeListener; @@ -196,6 +191,16 @@ interface SqlBaseListener extends ParseTreeListener { * @param ctx the parse tree */ void exitQueryNoWith(SqlBaseParser.QueryNoWithContext ctx); + /** + * Enter a parse tree produced by {@link SqlBaseParser#limitClause}. + * @param ctx the parse tree + */ + void enterLimitClause(SqlBaseParser.LimitClauseContext ctx); + /** + * Exit a parse tree produced by {@link SqlBaseParser#limitClause}. + * @param ctx the parse tree + */ + void exitLimitClause(SqlBaseParser.LimitClauseContext ctx); /** * Enter a parse tree produced by the {@code queryPrimaryDefault} * labeled alternative in {@link SqlBaseParser#queryTerm}. @@ -514,6 +519,16 @@ interface SqlBaseListener extends ParseTreeListener { * @param ctx the parse tree */ void exitPattern(SqlBaseParser.PatternContext ctx); + /** + * Enter a parse tree produced by {@link SqlBaseParser#patternEscape}. + * @param ctx the parse tree + */ + void enterPatternEscape(SqlBaseParser.PatternEscapeContext ctx); + /** + * Exit a parse tree produced by {@link SqlBaseParser#patternEscape}. + * @param ctx the parse tree + */ + void exitPatternEscape(SqlBaseParser.PatternEscapeContext ctx); /** * Enter a parse tree produced by the {@code valueExpressionDefault} * labeled alternative in {@link SqlBaseParser#valueExpression}. @@ -611,17 +626,17 @@ interface SqlBaseListener extends ParseTreeListener { */ void exitStar(SqlBaseParser.StarContext ctx); /** - * Enter a parse tree produced by the {@code functionCall} + * Enter a parse tree produced by the {@code function} * labeled alternative in {@link SqlBaseParser#primaryExpression}. * @param ctx the parse tree */ - void enterFunctionCall(SqlBaseParser.FunctionCallContext ctx); + void enterFunction(SqlBaseParser.FunctionContext ctx); /** - * Exit a parse tree produced by the {@code functionCall} + * Exit a parse tree produced by the {@code function} * labeled alternative in {@link SqlBaseParser#primaryExpression}. 
* @param ctx the parse tree */ - void exitFunctionCall(SqlBaseParser.FunctionCallContext ctx); + void exitFunction(SqlBaseParser.FunctionContext ctx); /** * Enter a parse tree produced by the {@code subqueryExpression} * labeled alternative in {@link SqlBaseParser#primaryExpression}. @@ -670,6 +685,66 @@ interface SqlBaseListener extends ParseTreeListener { * @param ctx the parse tree */ void exitParenthesizedExpression(SqlBaseParser.ParenthesizedExpressionContext ctx); + /** + * Enter a parse tree produced by {@link SqlBaseParser#castExpression}. + * @param ctx the parse tree + */ + void enterCastExpression(SqlBaseParser.CastExpressionContext ctx); + /** + * Exit a parse tree produced by {@link SqlBaseParser#castExpression}. + * @param ctx the parse tree + */ + void exitCastExpression(SqlBaseParser.CastExpressionContext ctx); + /** + * Enter a parse tree produced by {@link SqlBaseParser#castTemplate}. + * @param ctx the parse tree + */ + void enterCastTemplate(SqlBaseParser.CastTemplateContext ctx); + /** + * Exit a parse tree produced by {@link SqlBaseParser#castTemplate}. + * @param ctx the parse tree + */ + void exitCastTemplate(SqlBaseParser.CastTemplateContext ctx); + /** + * Enter a parse tree produced by {@link SqlBaseParser#extractExpression}. + * @param ctx the parse tree + */ + void enterExtractExpression(SqlBaseParser.ExtractExpressionContext ctx); + /** + * Exit a parse tree produced by {@link SqlBaseParser#extractExpression}. + * @param ctx the parse tree + */ + void exitExtractExpression(SqlBaseParser.ExtractExpressionContext ctx); + /** + * Enter a parse tree produced by {@link SqlBaseParser#extractTemplate}. + * @param ctx the parse tree + */ + void enterExtractTemplate(SqlBaseParser.ExtractTemplateContext ctx); + /** + * Exit a parse tree produced by {@link SqlBaseParser#extractTemplate}. + * @param ctx the parse tree + */ + void exitExtractTemplate(SqlBaseParser.ExtractTemplateContext ctx); + /** + * Enter a parse tree produced by {@link SqlBaseParser#functionExpression}. + * @param ctx the parse tree + */ + void enterFunctionExpression(SqlBaseParser.FunctionExpressionContext ctx); + /** + * Exit a parse tree produced by {@link SqlBaseParser#functionExpression}. + * @param ctx the parse tree + */ + void exitFunctionExpression(SqlBaseParser.FunctionExpressionContext ctx); + /** + * Enter a parse tree produced by {@link SqlBaseParser#functionTemplate}. + * @param ctx the parse tree + */ + void enterFunctionTemplate(SqlBaseParser.FunctionTemplateContext ctx); + /** + * Exit a parse tree produced by {@link SqlBaseParser#functionTemplate}. + * @param ctx the parse tree + */ + void exitFunctionTemplate(SqlBaseParser.FunctionTemplateContext ctx); /** * Enter a parse tree produced by the {@code nullLiteral} * labeled alternative in {@link SqlBaseParser#constant}. @@ -730,6 +805,54 @@ interface SqlBaseListener extends ParseTreeListener { * @param ctx the parse tree */ void exitParamLiteral(SqlBaseParser.ParamLiteralContext ctx); + /** + * Enter a parse tree produced by the {@code dateEscapedLiteral} + * labeled alternative in {@link SqlBaseParser#constant}. + * @param ctx the parse tree + */ + void enterDateEscapedLiteral(SqlBaseParser.DateEscapedLiteralContext ctx); + /** + * Exit a parse tree produced by the {@code dateEscapedLiteral} + * labeled alternative in {@link SqlBaseParser#constant}. 
+ * @param ctx the parse tree + */ + void exitDateEscapedLiteral(SqlBaseParser.DateEscapedLiteralContext ctx); + /** + * Enter a parse tree produced by the {@code timeEscapedLiteral} + * labeled alternative in {@link SqlBaseParser#constant}. + * @param ctx the parse tree + */ + void enterTimeEscapedLiteral(SqlBaseParser.TimeEscapedLiteralContext ctx); + /** + * Exit a parse tree produced by the {@code timeEscapedLiteral} + * labeled alternative in {@link SqlBaseParser#constant}. + * @param ctx the parse tree + */ + void exitTimeEscapedLiteral(SqlBaseParser.TimeEscapedLiteralContext ctx); + /** + * Enter a parse tree produced by the {@code timestampEscapedLiteral} + * labeled alternative in {@link SqlBaseParser#constant}. + * @param ctx the parse tree + */ + void enterTimestampEscapedLiteral(SqlBaseParser.TimestampEscapedLiteralContext ctx); + /** + * Exit a parse tree produced by the {@code timestampEscapedLiteral} + * labeled alternative in {@link SqlBaseParser#constant}. + * @param ctx the parse tree + */ + void exitTimestampEscapedLiteral(SqlBaseParser.TimestampEscapedLiteralContext ctx); + /** + * Enter a parse tree produced by the {@code guidEscapedLiteral} + * labeled alternative in {@link SqlBaseParser#constant}. + * @param ctx the parse tree + */ + void enterGuidEscapedLiteral(SqlBaseParser.GuidEscapedLiteralContext ctx); + /** + * Exit a parse tree produced by the {@code guidEscapedLiteral} + * labeled alternative in {@link SqlBaseParser#constant}. + * @param ctx the parse tree + */ + void exitGuidEscapedLiteral(SqlBaseParser.GuidEscapedLiteralContext ctx); /** * Enter a parse tree produced by {@link SqlBaseParser#comparisonOperator}. * @param ctx the parse tree diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseParser.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseParser.java index 3535977943ba7..32a1b062feebf 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseParser.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseParser.java @@ -1,15 +1,13 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ // ANTLR GENERATED CODE: DO NOT EDIT package org.elasticsearch.xpack.sql.parser; import org.antlr.v4.runtime.atn.*; import org.antlr.v4.runtime.dfa.DFA; import org.antlr.v4.runtime.*; +import org.antlr.v4.runtime.misc.*; import org.antlr.v4.runtime.tree.*; import java.util.List; +import java.util.Iterator; +import java.util.ArrayList; @SuppressWarnings({"all", "warnings", "unchecked", "unused", "cast"}) class SqlBaseParser extends Parser { @@ -28,33 +26,39 @@ class SqlBaseParser extends Parser { NOT=45, NULL=46, ON=47, OPTIMIZED=48, OR=49, ORDER=50, OUTER=51, PARSED=52, PHYSICAL=53, PLAN=54, RIGHT=55, RLIKE=56, QUERY=57, SCHEMAS=58, SELECT=59, SHOW=60, SYS=61, TABLE=62, TABLES=63, TEXT=64, TRUE=65, TYPE=66, TYPES=67, - USING=68, VERIFY=69, WHERE=70, WITH=71, EQ=72, NEQ=73, LT=74, LTE=75, - GT=76, GTE=77, PLUS=78, MINUS=79, ASTERISK=80, SLASH=81, PERCENT=82, CONCAT=83, - DOT=84, PARAM=85, STRING=86, INTEGER_VALUE=87, DECIMAL_VALUE=88, IDENTIFIER=89, - DIGIT_IDENTIFIER=90, TABLE_IDENTIFIER=91, QUOTED_IDENTIFIER=92, BACKQUOTED_IDENTIFIER=93, - SIMPLE_COMMENT=94, BRACKETED_COMMENT=95, WS=96, UNRECOGNIZED=97, DELIMITER=98; + USING=68, VERIFY=69, WHERE=70, WITH=71, ESCAPE_ESC=72, FUNCTION_ESC=73, + LIMIT_ESC=74, DATE_ESC=75, TIME_ESC=76, TIMESTAMP_ESC=77, GUID_ESC=78, + ESC_END=79, EQ=80, NEQ=81, LT=82, LTE=83, GT=84, GTE=85, PLUS=86, MINUS=87, + ASTERISK=88, SLASH=89, PERCENT=90, CONCAT=91, DOT=92, PARAM=93, STRING=94, + INTEGER_VALUE=95, DECIMAL_VALUE=96, IDENTIFIER=97, DIGIT_IDENTIFIER=98, + TABLE_IDENTIFIER=99, QUOTED_IDENTIFIER=100, BACKQUOTED_IDENTIFIER=101, + SIMPLE_COMMENT=102, BRACKETED_COMMENT=103, WS=104, UNRECOGNIZED=105, DELIMITER=106; public static final int RULE_singleStatement = 0, RULE_singleExpression = 1, RULE_statement = 2, - RULE_query = 3, RULE_queryNoWith = 4, RULE_queryTerm = 5, RULE_orderBy = 6, - RULE_querySpecification = 7, RULE_fromClause = 8, RULE_groupBy = 9, RULE_groupingElement = 10, - RULE_groupingExpressions = 11, RULE_namedQuery = 12, RULE_setQuantifier = 13, - RULE_selectItem = 14, RULE_relation = 15, RULE_joinRelation = 16, RULE_joinType = 17, - RULE_joinCriteria = 18, RULE_relationPrimary = 19, RULE_expression = 20, - RULE_booleanExpression = 21, RULE_predicated = 22, RULE_predicate = 23, - RULE_pattern = 24, RULE_valueExpression = 25, RULE_primaryExpression = 26, - RULE_constant = 27, RULE_comparisonOperator = 28, RULE_booleanValue = 29, - RULE_dataType = 30, RULE_qualifiedName = 31, RULE_identifier = 32, RULE_tableIdentifier = 33, - RULE_quoteIdentifier = 34, RULE_unquoteIdentifier = 35, RULE_number = 36, - RULE_string = 37, RULE_nonReserved = 38; + RULE_query = 3, RULE_queryNoWith = 4, RULE_limitClause = 5, RULE_queryTerm = 6, + RULE_orderBy = 7, RULE_querySpecification = 8, RULE_fromClause = 9, RULE_groupBy = 10, + RULE_groupingElement = 11, RULE_groupingExpressions = 12, RULE_namedQuery = 13, + RULE_setQuantifier = 14, RULE_selectItem = 15, RULE_relation = 16, RULE_joinRelation = 17, + RULE_joinType = 18, RULE_joinCriteria = 19, RULE_relationPrimary = 20, + RULE_expression = 21, RULE_booleanExpression = 22, RULE_predicated = 23, + RULE_predicate = 24, RULE_pattern = 25, RULE_patternEscape = 26, RULE_valueExpression = 27, + RULE_primaryExpression = 28, RULE_castExpression = 29, RULE_castTemplate = 30, + RULE_extractExpression = 31, RULE_extractTemplate = 32, RULE_functionExpression = 33, + RULE_functionTemplate = 34, RULE_constant = 35, RULE_comparisonOperator = 36, + RULE_booleanValue = 37, RULE_dataType = 38, RULE_qualifiedName = 39, 
RULE_identifier = 40, + RULE_tableIdentifier = 41, RULE_quoteIdentifier = 42, RULE_unquoteIdentifier = 43, + RULE_number = 44, RULE_string = 45, RULE_nonReserved = 46; public static final String[] ruleNames = { "singleStatement", "singleExpression", "statement", "query", "queryNoWith", - "queryTerm", "orderBy", "querySpecification", "fromClause", "groupBy", - "groupingElement", "groupingExpressions", "namedQuery", "setQuantifier", + "limitClause", "queryTerm", "orderBy", "querySpecification", "fromClause", + "groupBy", "groupingElement", "groupingExpressions", "namedQuery", "setQuantifier", "selectItem", "relation", "joinRelation", "joinType", "joinCriteria", "relationPrimary", "expression", "booleanExpression", "predicated", "predicate", - "pattern", "valueExpression", "primaryExpression", "constant", "comparisonOperator", - "booleanValue", "dataType", "qualifiedName", "identifier", "tableIdentifier", - "quoteIdentifier", "unquoteIdentifier", "number", "string", "nonReserved" + "pattern", "patternEscape", "valueExpression", "primaryExpression", "castExpression", + "castTemplate", "extractExpression", "extractTemplate", "functionExpression", + "functionTemplate", "constant", "comparisonOperator", "booleanValue", + "dataType", "qualifiedName", "identifier", "tableIdentifier", "quoteIdentifier", + "unquoteIdentifier", "number", "string", "nonReserved" }; private static final String[] _LITERAL_NAMES = { @@ -68,8 +72,9 @@ class SqlBaseParser extends Parser { "'OR'", "'ORDER'", "'OUTER'", "'PARSED'", "'PHYSICAL'", "'PLAN'", "'RIGHT'", "'RLIKE'", "'QUERY'", "'SCHEMAS'", "'SELECT'", "'SHOW'", "'SYS'", "'TABLE'", "'TABLES'", "'TEXT'", "'TRUE'", "'TYPE'", "'TYPES'", "'USING'", "'VERIFY'", - "'WHERE'", "'WITH'", "'='", null, "'<'", "'<='", "'>'", "'>='", "'+'", - "'-'", "'*'", "'/'", "'%'", "'||'", "'.'", "'?'" + "'WHERE'", "'WITH'", "'{ESCAPE'", "'{FN'", "'{LIMIT'", "'{D'", "'{T'", + "'{TS'", "'{GUID'", "'}'", "'='", null, "'<'", "'<='", "'>'", "'>='", + "'+'", "'-'", "'*'", "'/'", "'%'", "'||'", "'.'", "'?'" }; private static final String[] _SYMBOLIC_NAMES = { null, null, null, null, null, "ALL", "ANALYZE", "ANALYZED", "AND", "ANY", @@ -80,12 +85,13 @@ class SqlBaseParser extends Parser { "LIMIT", "MAPPED", "MATCH", "NATURAL", "NOT", "NULL", "ON", "OPTIMIZED", "OR", "ORDER", "OUTER", "PARSED", "PHYSICAL", "PLAN", "RIGHT", "RLIKE", "QUERY", "SCHEMAS", "SELECT", "SHOW", "SYS", "TABLE", "TABLES", "TEXT", - "TRUE", "TYPE", "TYPES", "USING", "VERIFY", "WHERE", "WITH", "EQ", "NEQ", - "LT", "LTE", "GT", "GTE", "PLUS", "MINUS", "ASTERISK", "SLASH", "PERCENT", - "CONCAT", "DOT", "PARAM", "STRING", "INTEGER_VALUE", "DECIMAL_VALUE", - "IDENTIFIER", "DIGIT_IDENTIFIER", "TABLE_IDENTIFIER", "QUOTED_IDENTIFIER", - "BACKQUOTED_IDENTIFIER", "SIMPLE_COMMENT", "BRACKETED_COMMENT", "WS", - "UNRECOGNIZED", "DELIMITER" + "TRUE", "TYPE", "TYPES", "USING", "VERIFY", "WHERE", "WITH", "ESCAPE_ESC", + "FUNCTION_ESC", "LIMIT_ESC", "DATE_ESC", "TIME_ESC", "TIMESTAMP_ESC", + "GUID_ESC", "ESC_END", "EQ", "NEQ", "LT", "LTE", "GT", "GTE", "PLUS", + "MINUS", "ASTERISK", "SLASH", "PERCENT", "CONCAT", "DOT", "PARAM", "STRING", + "INTEGER_VALUE", "DECIMAL_VALUE", "IDENTIFIER", "DIGIT_IDENTIFIER", "TABLE_IDENTIFIER", + "QUOTED_IDENTIFIER", "BACKQUOTED_IDENTIFIER", "SIMPLE_COMMENT", "BRACKETED_COMMENT", + "WS", "UNRECOGNIZED", "DELIMITER" }; public static final Vocabulary VOCABULARY = new VocabularyImpl(_LITERAL_NAMES, _SYMBOLIC_NAMES); @@ -166,9 +172,9 @@ public final SingleStatementContext singleStatement() throws RecognitionExceptio 
try { enterOuterAlt(_localctx, 1); { - setState(78); + setState(94); statement(); - setState(79); + setState(95); match(EOF); } } @@ -213,9 +219,9 @@ public final SingleExpressionContext singleExpression() throws RecognitionExcept try { enterOuterAlt(_localctx, 1); { - setState(81); + setState(97); expression(); - setState(82); + setState(98); match(EOF); } } @@ -601,14 +607,14 @@ public final StatementContext statement() throws RecognitionException { enterRule(_localctx, 4, RULE_statement); int _la; try { - setState(193); + setState(209); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,21,_ctx) ) { case 1: _localctx = new StatementDefaultContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(84); + setState(100); query(); } break; @@ -616,27 +622,27 @@ public final StatementContext statement() throws RecognitionException { _localctx = new ExplainContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(85); + setState(101); match(EXPLAIN); - setState(99); + setState(115); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,2,_ctx) ) { case 1: { - setState(86); + setState(102); match(T__0); - setState(95); + setState(111); _errHandler.sync(this); _la = _input.LA(1); while (((((_la - 28)) & ~0x3f) == 0 && ((1L << (_la - 28)) & ((1L << (FORMAT - 28)) | (1L << (PLAN - 28)) | (1L << (VERIFY - 28)))) != 0)) { { - setState(93); + setState(109); switch (_input.LA(1)) { case PLAN: { - setState(87); + setState(103); match(PLAN); - setState(88); + setState(104); ((ExplainContext)_localctx).type = _input.LT(1); _la = _input.LA(1); if ( !((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ALL) | (1L << ANALYZED) | (1L << EXECUTABLE) | (1L << MAPPED) | (1L << OPTIMIZED) | (1L << PARSED))) != 0)) ) { @@ -648,9 +654,9 @@ public final StatementContext statement() throws RecognitionException { break; case FORMAT: { - setState(89); + setState(105); match(FORMAT); - setState(90); + setState(106); ((ExplainContext)_localctx).format = _input.LT(1); _la = _input.LA(1); if ( !(_la==GRAPHVIZ || _la==TEXT) ) { @@ -662,9 +668,9 @@ public final StatementContext statement() throws RecognitionException { break; case VERIFY: { - setState(91); + setState(107); match(VERIFY); - setState(92); + setState(108); ((ExplainContext)_localctx).verify = booleanValue(); } break; @@ -672,16 +678,16 @@ public final StatementContext statement() throws RecognitionException { throw new NoViableAltException(this); } } - setState(97); + setState(113); _errHandler.sync(this); _la = _input.LA(1); } - setState(98); + setState(114); match(T__1); } break; } - setState(101); + setState(117); statement(); } break; @@ -689,27 +695,27 @@ public final StatementContext statement() throws RecognitionException { _localctx = new DebugContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(102); + setState(118); match(DEBUG); - setState(114); + setState(130); _errHandler.sync(this); switch ( getInterpreter().adaptivePredict(_input,5,_ctx) ) { case 1: { - setState(103); + setState(119); match(T__0); - setState(110); + setState(126); _errHandler.sync(this); _la = _input.LA(1); while (_la==FORMAT || _la==PLAN) { { - setState(108); + setState(124); switch (_input.LA(1)) { case PLAN: { - setState(104); + setState(120); match(PLAN); - setState(105); + setState(121); ((DebugContext)_localctx).type = _input.LT(1); _la = _input.LA(1); if ( !(_la==ANALYZED || _la==OPTIMIZED) ) { @@ -721,9 +727,9 @@ public final StatementContext statement() throws RecognitionException { break; case FORMAT: { - 
setState(106); + setState(122); match(FORMAT); - setState(107); + setState(123); ((DebugContext)_localctx).format = _input.LT(1); _la = _input.LA(1); if ( !(_la==GRAPHVIZ || _la==TEXT) ) { @@ -737,16 +743,16 @@ public final StatementContext statement() throws RecognitionException { throw new NoViableAltException(this); } } - setState(112); + setState(128); _errHandler.sync(this); _la = _input.LA(1); } - setState(113); + setState(129); match(T__1); } break; } - setState(116); + setState(132); statement(); } break; @@ -754,24 +760,24 @@ public final StatementContext statement() throws RecognitionException { _localctx = new ShowTablesContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(117); + setState(133); match(SHOW); - setState(118); + setState(134); match(TABLES); - setState(123); + setState(139); _la = _input.LA(1); if (((((_la - 40)) & ~0x3f) == 0 && ((1L << (_la - 40)) & ((1L << (LIKE - 40)) | (1L << (PARAM - 40)) | (1L << (STRING - 40)))) != 0)) { { - setState(120); + setState(136); _la = _input.LA(1); if (_la==LIKE) { { - setState(119); + setState(135); match(LIKE); } } - setState(122); + setState(138); pattern(); } } @@ -782,18 +788,18 @@ public final StatementContext statement() throws RecognitionException { _localctx = new ShowColumnsContext(_localctx); enterOuterAlt(_localctx, 5); { - setState(125); + setState(141); match(SHOW); - setState(126); + setState(142); match(COLUMNS); - setState(127); + setState(143); _la = _input.LA(1); if ( !(_la==FROM || _la==IN) ) { _errHandler.recoverInline(this); } else { consume(); } - setState(128); + setState(144); tableIdentifier(); } break; @@ -801,14 +807,14 @@ public final StatementContext statement() throws RecognitionException { _localctx = new ShowColumnsContext(_localctx); enterOuterAlt(_localctx, 6); { - setState(129); + setState(145); _la = _input.LA(1); if ( !(_la==DESC || _la==DESCRIBE) ) { _errHandler.recoverInline(this); } else { consume(); } - setState(130); + setState(146); tableIdentifier(); } break; @@ -816,24 +822,24 @@ public final StatementContext statement() throws RecognitionException { _localctx = new ShowFunctionsContext(_localctx); enterOuterAlt(_localctx, 7); { - setState(131); + setState(147); match(SHOW); - setState(132); + setState(148); match(FUNCTIONS); - setState(137); + setState(153); _la = _input.LA(1); if (((((_la - 40)) & ~0x3f) == 0 && ((1L << (_la - 40)) & ((1L << (LIKE - 40)) | (1L << (PARAM - 40)) | (1L << (STRING - 40)))) != 0)) { { - setState(134); + setState(150); _la = _input.LA(1); if (_la==LIKE) { { - setState(133); + setState(149); match(LIKE); } } - setState(136); + setState(152); pattern(); } } @@ -844,9 +850,9 @@ public final StatementContext statement() throws RecognitionException { _localctx = new ShowSchemasContext(_localctx); enterOuterAlt(_localctx, 8); { - setState(139); + setState(155); match(SHOW); - setState(140); + setState(156); match(SCHEMAS); } break; @@ -854,9 +860,9 @@ public final StatementContext statement() throws RecognitionException { _localctx = new SysCatalogsContext(_localctx); enterOuterAlt(_localctx, 9); { - setState(141); + setState(157); match(SYS); - setState(142); + setState(158); match(CATALOGS); } break; @@ -864,69 +870,69 @@ public final StatementContext statement() throws RecognitionException { _localctx = new SysTablesContext(_localctx); enterOuterAlt(_localctx, 10); { - setState(143); + setState(159); match(SYS); - setState(144); + setState(160); match(TABLES); - setState(150); + setState(166); _la = _input.LA(1); if (_la==CATALOG) { { - 
setState(145); + setState(161); match(CATALOG); - setState(147); + setState(163); _la = _input.LA(1); if (_la==LIKE) { { - setState(146); + setState(162); match(LIKE); } } - setState(149); + setState(165); ((SysTablesContext)_localctx).clusterPattern = pattern(); } } - setState(156); + setState(172); _la = _input.LA(1); if (((((_la - 40)) & ~0x3f) == 0 && ((1L << (_la - 40)) & ((1L << (LIKE - 40)) | (1L << (PARAM - 40)) | (1L << (STRING - 40)))) != 0)) { { - setState(153); + setState(169); _la = _input.LA(1); if (_la==LIKE) { { - setState(152); + setState(168); match(LIKE); } } - setState(155); + setState(171); ((SysTablesContext)_localctx).tablePattern = pattern(); } } - setState(167); + setState(183); _la = _input.LA(1); if (_la==TYPE) { { - setState(158); + setState(174); match(TYPE); - setState(159); + setState(175); string(); - setState(164); + setState(180); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(160); + setState(176); match(T__2); - setState(161); + setState(177); string(); } } - setState(166); + setState(182); _errHandler.sync(this); _la = _input.LA(1); } @@ -939,55 +945,55 @@ public final StatementContext statement() throws RecognitionException { _localctx = new SysColumnsContext(_localctx); enterOuterAlt(_localctx, 11); { - setState(169); + setState(185); match(SYS); - setState(170); + setState(186); match(COLUMNS); - setState(173); + setState(189); _la = _input.LA(1); if (_la==CATALOG) { { - setState(171); + setState(187); match(CATALOG); - setState(172); + setState(188); ((SysColumnsContext)_localctx).cluster = string(); } } - setState(180); + setState(196); _la = _input.LA(1); if (_la==TABLE) { { - setState(175); + setState(191); match(TABLE); - setState(177); + setState(193); _la = _input.LA(1); if (_la==LIKE) { { - setState(176); + setState(192); match(LIKE); } } - setState(179); + setState(195); ((SysColumnsContext)_localctx).indexPattern = pattern(); } } - setState(186); + setState(202); _la = _input.LA(1); if (((((_la - 40)) & ~0x3f) == 0 && ((1L << (_la - 40)) & ((1L << (LIKE - 40)) | (1L << (PARAM - 40)) | (1L << (STRING - 40)))) != 0)) { { - setState(183); + setState(199); _la = _input.LA(1); if (_la==LIKE) { { - setState(182); + setState(198); match(LIKE); } } - setState(185); + setState(201); ((SysColumnsContext)_localctx).columnPattern = pattern(); } } @@ -998,9 +1004,9 @@ public final StatementContext statement() throws RecognitionException { _localctx = new SysTypesContext(_localctx); enterOuterAlt(_localctx, 12); { - setState(188); + setState(204); match(SYS); - setState(189); + setState(205); match(TYPES); } break; @@ -1008,11 +1014,11 @@ public final StatementContext statement() throws RecognitionException { _localctx = new SysTableTypesContext(_localctx); enterOuterAlt(_localctx, 13); { - setState(190); + setState(206); match(SYS); - setState(191); + setState(207); match(TABLE); - setState(192); + setState(208); match(TYPES); } break; @@ -1066,34 +1072,34 @@ public final QueryContext query() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(204); + setState(220); _la = _input.LA(1); if (_la==WITH) { { - setState(195); + setState(211); match(WITH); - setState(196); + setState(212); namedQuery(); - setState(201); + setState(217); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(197); + setState(213); match(T__2); - setState(198); + setState(214); namedQuery(); } } - setState(203); + setState(219); _errHandler.sync(this); _la = _input.LA(1); } } } - setState(206); + 
setState(222); queryNoWith(); } } @@ -1109,7 +1115,6 @@ public final QueryContext query() throws RecognitionException { } public static class QueryNoWithContext extends ParserRuleContext { - public Token limit; public QueryTermContext queryTerm() { return getRuleContext(QueryTermContext.class,0); } @@ -1121,9 +1126,9 @@ public List orderBy() { public OrderByContext orderBy(int i) { return getRuleContext(OrderByContext.class,i); } - public TerminalNode LIMIT() { return getToken(SqlBaseParser.LIMIT, 0); } - public TerminalNode INTEGER_VALUE() { return getToken(SqlBaseParser.INTEGER_VALUE, 0); } - public TerminalNode ALL() { return getToken(SqlBaseParser.ALL, 0); } + public LimitClauseContext limitClause() { + return getRuleContext(LimitClauseContext.class,0); + } public QueryNoWithContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @@ -1150,54 +1155,126 @@ public final QueryNoWithContext queryNoWith() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(208); + setState(224); queryTerm(); - setState(219); + setState(235); _la = _input.LA(1); if (_la==ORDER) { { - setState(209); + setState(225); match(ORDER); - setState(210); + setState(226); match(BY); - setState(211); + setState(227); orderBy(); - setState(216); + setState(232); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(212); + setState(228); match(T__2); - setState(213); + setState(229); orderBy(); } } - setState(218); + setState(234); _errHandler.sync(this); _la = _input.LA(1); } } } - setState(223); + setState(238); _la = _input.LA(1); - if (_la==LIMIT) { + if (_la==LIMIT || _la==LIMIT_ESC) { + { + setState(237); + limitClause(); + } + } + + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + public static class LimitClauseContext extends ParserRuleContext { + public Token limit; + public TerminalNode LIMIT() { return getToken(SqlBaseParser.LIMIT, 0); } + public TerminalNode INTEGER_VALUE() { return getToken(SqlBaseParser.INTEGER_VALUE, 0); } + public TerminalNode ALL() { return getToken(SqlBaseParser.ALL, 0); } + public TerminalNode LIMIT_ESC() { return getToken(SqlBaseParser.LIMIT_ESC, 0); } + public TerminalNode ESC_END() { return getToken(SqlBaseParser.ESC_END, 0); } + public LimitClauseContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_limitClause; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterLimitClause(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitLimitClause(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor)visitor).visitLimitClause(this); + else return visitor.visitChildren(this); + } + } + + public final LimitClauseContext limitClause() throws RecognitionException { + LimitClauseContext _localctx = new LimitClauseContext(_ctx, getState()); + enterRule(_localctx, 10, RULE_limitClause); + int _la; + try { + setState(245); + switch (_input.LA(1)) { + case LIMIT: + enterOuterAlt(_localctx, 1); { - setState(221); + setState(240); match(LIMIT); - setState(222); - ((QueryNoWithContext)_localctx).limit = 
_input.LT(1); + setState(241); + ((LimitClauseContext)_localctx).limit = _input.LT(1); _la = _input.LA(1); if ( !(_la==ALL || _la==INTEGER_VALUE) ) { - ((QueryNoWithContext)_localctx).limit = (Token)_errHandler.recoverInline(this); + ((LimitClauseContext)_localctx).limit = (Token)_errHandler.recoverInline(this); } else { consume(); } } - } - + break; + case LIMIT_ESC: + enterOuterAlt(_localctx, 2); + { + setState(242); + match(LIMIT_ESC); + setState(243); + ((LimitClauseContext)_localctx).limit = _input.LT(1); + _la = _input.LA(1); + if ( !(_la==ALL || _la==INTEGER_VALUE) ) { + ((LimitClauseContext)_localctx).limit = (Token)_errHandler.recoverInline(this); + } else { + consume(); + } + setState(244); + match(ESC_END); + } + break; + default: + throw new NoViableAltException(this); } } catch (RecognitionException re) { @@ -1263,15 +1340,15 @@ public T accept(ParseTreeVisitor visitor) { public final QueryTermContext queryTerm() throws RecognitionException { QueryTermContext _localctx = new QueryTermContext(_ctx, getState()); - enterRule(_localctx, 10, RULE_queryTerm); + enterRule(_localctx, 12, RULE_queryTerm); try { - setState(230); + setState(252); switch (_input.LA(1)) { case SELECT: _localctx = new QueryPrimaryDefaultContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(225); + setState(247); querySpecification(); } break; @@ -1279,11 +1356,11 @@ public final QueryTermContext queryTerm() throws RecognitionException { _localctx = new SubqueryContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(226); + setState(248); match(T__0); - setState(227); + setState(249); queryNoWith(); - setState(228); + setState(250); match(T__1); } break; @@ -1330,18 +1407,18 @@ public T accept(ParseTreeVisitor visitor) { public final OrderByContext orderBy() throws RecognitionException { OrderByContext _localctx = new OrderByContext(_ctx, getState()); - enterRule(_localctx, 12, RULE_orderBy); + enterRule(_localctx, 14, RULE_orderBy); int _la; try { enterOuterAlt(_localctx, 1); { - setState(232); + setState(254); expression(); - setState(234); + setState(256); _la = _input.LA(1); if (_la==ASC || _la==DESC) { { - setState(233); + setState(255); ((OrderByContext)_localctx).ordering = _input.LT(1); _la = _input.LA(1); if ( !(_la==ASC || _la==DESC) ) { @@ -1415,80 +1492,80 @@ public T accept(ParseTreeVisitor visitor) { public final QuerySpecificationContext querySpecification() throws RecognitionException { QuerySpecificationContext _localctx = new QuerySpecificationContext(_ctx, getState()); - enterRule(_localctx, 14, RULE_querySpecification); + enterRule(_localctx, 16, RULE_querySpecification); int _la; try { enterOuterAlt(_localctx, 1); { - setState(236); + setState(258); match(SELECT); - setState(238); + setState(260); _la = _input.LA(1); if (_la==ALL || _la==DISTINCT) { { - setState(237); + setState(259); setQuantifier(); } } - setState(240); + setState(262); selectItem(); - setState(245); + setState(267); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(241); + setState(263); match(T__2); - setState(242); + setState(264); selectItem(); } } - setState(247); + setState(269); _errHandler.sync(this); _la = _input.LA(1); } - setState(249); + setState(271); _la = _input.LA(1); if (_la==FROM) { { - setState(248); + setState(270); fromClause(); } } - setState(253); + setState(275); _la = _input.LA(1); if (_la==WHERE) { { - setState(251); + setState(273); match(WHERE); - setState(252); + setState(274); ((QuerySpecificationContext)_localctx).where = booleanExpression(0); } 
} - setState(258); + setState(280); _la = _input.LA(1); if (_la==GROUP) { { - setState(255); + setState(277); match(GROUP); - setState(256); + setState(278); match(BY); - setState(257); + setState(279); groupBy(); } } - setState(262); + setState(284); _la = _input.LA(1); if (_la==HAVING) { { - setState(260); + setState(282); match(HAVING); - setState(261); + setState(283); ((QuerySpecificationContext)_localctx).having = booleanExpression(0); } } @@ -1535,28 +1612,28 @@ public T accept(ParseTreeVisitor visitor) { public final FromClauseContext fromClause() throws RecognitionException { FromClauseContext _localctx = new FromClauseContext(_ctx, getState()); - enterRule(_localctx, 16, RULE_fromClause); + enterRule(_localctx, 18, RULE_fromClause); int _la; try { enterOuterAlt(_localctx, 1); { - setState(264); + setState(286); match(FROM); - setState(265); + setState(287); relation(); - setState(270); + setState(292); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(266); + setState(288); match(T__2); - setState(267); + setState(289); relation(); } } - setState(272); + setState(294); _errHandler.sync(this); _la = _input.LA(1); } @@ -1604,35 +1681,35 @@ public T accept(ParseTreeVisitor visitor) { public final GroupByContext groupBy() throws RecognitionException { GroupByContext _localctx = new GroupByContext(_ctx, getState()); - enterRule(_localctx, 18, RULE_groupBy); + enterRule(_localctx, 20, RULE_groupBy); int _la; try { enterOuterAlt(_localctx, 1); { - setState(274); + setState(296); _la = _input.LA(1); if (_la==ALL || _la==DISTINCT) { { - setState(273); + setState(295); setQuantifier(); } } - setState(276); + setState(298); groupingElement(); - setState(281); + setState(303); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(277); + setState(299); match(T__2); - setState(278); + setState(300); groupingElement(); } } - setState(283); + setState(305); _errHandler.sync(this); _la = _input.LA(1); } @@ -1682,12 +1759,12 @@ public T accept(ParseTreeVisitor visitor) { public final GroupingElementContext groupingElement() throws RecognitionException { GroupingElementContext _localctx = new GroupingElementContext(_ctx, getState()); - enterRule(_localctx, 20, RULE_groupingElement); + enterRule(_localctx, 22, RULE_groupingElement); try { _localctx = new SingleGroupingSetContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(284); + setState(306); groupingExpressions(); } } @@ -1730,50 +1807,50 @@ public T accept(ParseTreeVisitor visitor) { public final GroupingExpressionsContext groupingExpressions() throws RecognitionException { GroupingExpressionsContext _localctx = new GroupingExpressionsContext(_ctx, getState()); - enterRule(_localctx, 22, RULE_groupingExpressions); + enterRule(_localctx, 24, RULE_groupingExpressions); int _la; try { - setState(299); + setState(321); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,40,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,41,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(286); + setState(308); match(T__0); - setState(295); + setState(317); _la = _input.LA(1); - if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << T__0) | (1L << ANALYZE) | (1L << ANALYZED) | (1L << CAST) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXISTS) | (1L << EXPLAIN) | (1L << EXTRACT) | (1L << FALSE) | (1L << FORMAT) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << MAPPED) | (1L << MATCH) | (1L << NOT) | (1L << NULL) | (1L << 
OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << RLIKE) | (1L << QUERY) | (1L << SCHEMAS) | (1L << SHOW) | (1L << SYS) | (1L << TABLES))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (TEXT - 64)) | (1L << (TRUE - 64)) | (1L << (TYPE - 64)) | (1L << (TYPES - 64)) | (1L << (VERIFY - 64)) | (1L << (PLUS - 64)) | (1L << (MINUS - 64)) | (1L << (ASTERISK - 64)) | (1L << (PARAM - 64)) | (1L << (STRING - 64)) | (1L << (INTEGER_VALUE - 64)) | (1L << (DECIMAL_VALUE - 64)) | (1L << (IDENTIFIER - 64)) | (1L << (DIGIT_IDENTIFIER - 64)) | (1L << (QUOTED_IDENTIFIER - 64)) | (1L << (BACKQUOTED_IDENTIFIER - 64)))) != 0)) { + if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << T__0) | (1L << ANALYZE) | (1L << ANALYZED) | (1L << CAST) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXISTS) | (1L << EXPLAIN) | (1L << EXTRACT) | (1L << FALSE) | (1L << FORMAT) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << MAPPED) | (1L << MATCH) | (1L << NOT) | (1L << NULL) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << RLIKE) | (1L << QUERY) | (1L << SCHEMAS) | (1L << SHOW) | (1L << SYS) | (1L << TABLES))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (TEXT - 64)) | (1L << (TRUE - 64)) | (1L << (TYPE - 64)) | (1L << (TYPES - 64)) | (1L << (VERIFY - 64)) | (1L << (FUNCTION_ESC - 64)) | (1L << (DATE_ESC - 64)) | (1L << (TIME_ESC - 64)) | (1L << (TIMESTAMP_ESC - 64)) | (1L << (GUID_ESC - 64)) | (1L << (PLUS - 64)) | (1L << (MINUS - 64)) | (1L << (ASTERISK - 64)) | (1L << (PARAM - 64)) | (1L << (STRING - 64)) | (1L << (INTEGER_VALUE - 64)) | (1L << (DECIMAL_VALUE - 64)) | (1L << (IDENTIFIER - 64)) | (1L << (DIGIT_IDENTIFIER - 64)) | (1L << (QUOTED_IDENTIFIER - 64)) | (1L << (BACKQUOTED_IDENTIFIER - 64)))) != 0)) { { - setState(287); + setState(309); expression(); - setState(292); + setState(314); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(288); + setState(310); match(T__2); - setState(289); + setState(311); expression(); } } - setState(294); + setState(316); _errHandler.sync(this); _la = _input.LA(1); } } } - setState(297); + setState(319); match(T__1); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(298); + setState(320); expression(); } break; @@ -1820,19 +1897,19 @@ public T accept(ParseTreeVisitor visitor) { public final NamedQueryContext namedQuery() throws RecognitionException { NamedQueryContext _localctx = new NamedQueryContext(_ctx, getState()); - enterRule(_localctx, 24, RULE_namedQuery); + enterRule(_localctx, 26, RULE_namedQuery); try { enterOuterAlt(_localctx, 1); { - setState(301); + setState(323); ((NamedQueryContext)_localctx).name = identifier(); - setState(302); + setState(324); match(AS); - setState(303); + setState(325); match(T__0); - setState(304); + setState(326); queryNoWith(); - setState(305); + setState(327); match(T__1); } } @@ -1871,12 +1948,12 @@ public T accept(ParseTreeVisitor visitor) { public final SetQuantifierContext setQuantifier() throws RecognitionException { SetQuantifierContext _localctx = new SetQuantifierContext(_ctx, getState()); - enterRule(_localctx, 26, RULE_setQuantifier); + enterRule(_localctx, 28, RULE_setQuantifier); int _la; try { enterOuterAlt(_localctx, 1); { - setState(307); + setState(329); _la = _input.LA(1); if ( !(_la==ALL || _la==DISTINCT) ) { _errHandler.recoverInline(this); @@ -1933,28 +2010,28 @@ public T accept(ParseTreeVisitor visitor) { public final SelectItemContext selectItem() 
throws RecognitionException { SelectItemContext _localctx = new SelectItemContext(_ctx, getState()); - enterRule(_localctx, 28, RULE_selectItem); + enterRule(_localctx, 30, RULE_selectItem); int _la; try { _localctx = new SelectExpressionContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(309); + setState(331); expression(); - setState(314); + setState(336); _la = _input.LA(1); if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << AS) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FORMAT) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << MAPPED) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << RLIKE) | (1L << QUERY) | (1L << SCHEMAS) | (1L << SHOW) | (1L << SYS) | (1L << TABLES))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (TEXT - 64)) | (1L << (TYPE - 64)) | (1L << (TYPES - 64)) | (1L << (VERIFY - 64)) | (1L << (IDENTIFIER - 64)) | (1L << (DIGIT_IDENTIFIER - 64)) | (1L << (QUOTED_IDENTIFIER - 64)) | (1L << (BACKQUOTED_IDENTIFIER - 64)))) != 0)) { { - setState(311); + setState(333); _la = _input.LA(1); if (_la==AS) { { - setState(310); + setState(332); match(AS); } } - setState(313); + setState(335); identifier(); } } @@ -2003,24 +2080,24 @@ public T accept(ParseTreeVisitor visitor) { public final RelationContext relation() throws RecognitionException { RelationContext _localctx = new RelationContext(_ctx, getState()); - enterRule(_localctx, 30, RULE_relation); + enterRule(_localctx, 32, RULE_relation); int _la; try { enterOuterAlt(_localctx, 1); { - setState(316); + setState(338); relationPrimary(); - setState(320); + setState(342); _errHandler.sync(this); _la = _input.LA(1); while ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << FULL) | (1L << INNER) | (1L << JOIN) | (1L << LEFT) | (1L << NATURAL) | (1L << RIGHT))) != 0)) { { { - setState(317); + setState(339); joinRelation(); } } - setState(322); + setState(344); _errHandler.sync(this); _la = _input.LA(1); } @@ -2071,10 +2148,10 @@ public T accept(ParseTreeVisitor visitor) { public final JoinRelationContext joinRelation() throws RecognitionException { JoinRelationContext _localctx = new JoinRelationContext(_ctx, getState()); - enterRule(_localctx, 32, RULE_joinRelation); + enterRule(_localctx, 34, RULE_joinRelation); int _la; try { - setState(334); + setState(356); switch (_input.LA(1)) { case FULL: case INNER: @@ -2084,18 +2161,18 @@ public final JoinRelationContext joinRelation() throws RecognitionException { enterOuterAlt(_localctx, 1); { { - setState(323); + setState(345); joinType(); } - setState(324); + setState(346); match(JOIN); - setState(325); + setState(347); ((JoinRelationContext)_localctx).right = relationPrimary(); - setState(327); + setState(349); _la = _input.LA(1); if (_la==ON || _la==USING) { { - setState(326); + setState(348); joinCriteria(); } } @@ -2105,13 +2182,13 @@ public final JoinRelationContext joinRelation() throws RecognitionException { case NATURAL: enterOuterAlt(_localctx, 2); { - setState(329); + setState(351); match(NATURAL); - setState(330); + setState(352); joinType(); - setState(331); + setState(353); match(JOIN); - setState(332); + setState(354); ((JoinRelationContext)_localctx).right = relationPrimary(); } break; @@ -2157,20 +2234,20 @@ public T accept(ParseTreeVisitor visitor) { public final JoinTypeContext joinType() throws RecognitionException { JoinTypeContext _localctx = new JoinTypeContext(_ctx, getState()); - enterRule(_localctx, 34, 
RULE_joinType); + enterRule(_localctx, 36, RULE_joinType); int _la; try { - setState(351); + setState(373); switch (_input.LA(1)) { case INNER: case JOIN: enterOuterAlt(_localctx, 1); { - setState(337); + setState(359); _la = _input.LA(1); if (_la==INNER) { { - setState(336); + setState(358); match(INNER); } } @@ -2180,13 +2257,13 @@ public final JoinTypeContext joinType() throws RecognitionException { case LEFT: enterOuterAlt(_localctx, 2); { - setState(339); + setState(361); match(LEFT); - setState(341); + setState(363); _la = _input.LA(1); if (_la==OUTER) { { - setState(340); + setState(362); match(OUTER); } } @@ -2196,13 +2273,13 @@ public final JoinTypeContext joinType() throws RecognitionException { case RIGHT: enterOuterAlt(_localctx, 3); { - setState(343); + setState(365); match(RIGHT); - setState(345); + setState(367); _la = _input.LA(1); if (_la==OUTER) { { - setState(344); + setState(366); match(OUTER); } } @@ -2212,13 +2289,13 @@ public final JoinTypeContext joinType() throws RecognitionException { case FULL: enterOuterAlt(_localctx, 4); { - setState(347); + setState(369); match(FULL); - setState(349); + setState(371); _la = _input.LA(1); if (_la==OUTER) { { - setState(348); + setState(370); match(OUTER); } } @@ -2273,46 +2350,46 @@ public T accept(ParseTreeVisitor visitor) { public final JoinCriteriaContext joinCriteria() throws RecognitionException { JoinCriteriaContext _localctx = new JoinCriteriaContext(_ctx, getState()); - enterRule(_localctx, 36, RULE_joinCriteria); + enterRule(_localctx, 38, RULE_joinCriteria); int _la; try { - setState(367); + setState(389); switch (_input.LA(1)) { case ON: enterOuterAlt(_localctx, 1); { - setState(353); + setState(375); match(ON); - setState(354); + setState(376); booleanExpression(0); } break; case USING: enterOuterAlt(_localctx, 2); { - setState(355); + setState(377); match(USING); - setState(356); + setState(378); match(T__0); - setState(357); + setState(379); identifier(); - setState(362); + setState(384); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(358); + setState(380); match(T__2); - setState(359); + setState(381); identifier(); } } - setState(364); + setState(386); _errHandler.sync(this); _la = _input.LA(1); } - setState(365); + setState(387); match(T__1); } break; @@ -2414,32 +2491,32 @@ public T accept(ParseTreeVisitor visitor) { public final RelationPrimaryContext relationPrimary() throws RecognitionException { RelationPrimaryContext _localctx = new RelationPrimaryContext(_ctx, getState()); - enterRule(_localctx, 38, RULE_relationPrimary); + enterRule(_localctx, 40, RULE_relationPrimary); int _la; try { - setState(394); + setState(416); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,59,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,60,_ctx) ) { case 1: _localctx = new TableNameContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(369); + setState(391); tableIdentifier(); - setState(374); + setState(396); _la = _input.LA(1); if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << AS) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FORMAT) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << MAPPED) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << RLIKE) | (1L << QUERY) | (1L << SCHEMAS) | (1L << SHOW) | (1L << SYS) | (1L << TABLES))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (TEXT - 64)) | (1L << (TYPE - 
64)) | (1L << (TYPES - 64)) | (1L << (VERIFY - 64)) | (1L << (IDENTIFIER - 64)) | (1L << (DIGIT_IDENTIFIER - 64)) | (1L << (QUOTED_IDENTIFIER - 64)) | (1L << (BACKQUOTED_IDENTIFIER - 64)))) != 0)) { { - setState(371); + setState(393); _la = _input.LA(1); if (_la==AS) { { - setState(370); + setState(392); match(AS); } } - setState(373); + setState(395); qualifiedName(); } } @@ -2450,26 +2527,26 @@ public final RelationPrimaryContext relationPrimary() throws RecognitionExceptio _localctx = new AliasedQueryContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(376); + setState(398); match(T__0); - setState(377); + setState(399); queryNoWith(); - setState(378); + setState(400); match(T__1); - setState(383); + setState(405); _la = _input.LA(1); if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << AS) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FORMAT) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << MAPPED) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << RLIKE) | (1L << QUERY) | (1L << SCHEMAS) | (1L << SHOW) | (1L << SYS) | (1L << TABLES))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (TEXT - 64)) | (1L << (TYPE - 64)) | (1L << (TYPES - 64)) | (1L << (VERIFY - 64)) | (1L << (IDENTIFIER - 64)) | (1L << (DIGIT_IDENTIFIER - 64)) | (1L << (QUOTED_IDENTIFIER - 64)) | (1L << (BACKQUOTED_IDENTIFIER - 64)))) != 0)) { { - setState(380); + setState(402); _la = _input.LA(1); if (_la==AS) { { - setState(379); + setState(401); match(AS); } } - setState(382); + setState(404); qualifiedName(); } } @@ -2480,26 +2557,26 @@ public final RelationPrimaryContext relationPrimary() throws RecognitionExceptio _localctx = new AliasedRelationContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(385); + setState(407); match(T__0); - setState(386); + setState(408); relation(); - setState(387); + setState(409); match(T__1); - setState(392); + setState(414); _la = _input.LA(1); if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << AS) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FORMAT) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << MAPPED) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << RLIKE) | (1L << QUERY) | (1L << SCHEMAS) | (1L << SHOW) | (1L << SYS) | (1L << TABLES))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (TEXT - 64)) | (1L << (TYPE - 64)) | (1L << (TYPES - 64)) | (1L << (VERIFY - 64)) | (1L << (IDENTIFIER - 64)) | (1L << (DIGIT_IDENTIFIER - 64)) | (1L << (QUOTED_IDENTIFIER - 64)) | (1L << (BACKQUOTED_IDENTIFIER - 64)))) != 0)) { { - setState(389); + setState(411); _la = _input.LA(1); if (_la==AS) { { - setState(388); + setState(410); match(AS); } } - setState(391); + setState(413); qualifiedName(); } } @@ -2544,11 +2621,11 @@ public T accept(ParseTreeVisitor visitor) { public final ExpressionContext expression() throws RecognitionException { ExpressionContext _localctx = new ExpressionContext(_ctx, getState()); - enterRule(_localctx, 40, RULE_expression); + enterRule(_localctx, 42, RULE_expression); try { enterOuterAlt(_localctx, 1); { - setState(396); + setState(418); booleanExpression(0); } } @@ -2750,25 +2827,25 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc int _parentState = getState(); BooleanExpressionContext _localctx = new BooleanExpressionContext(_ctx, 
_parentState); BooleanExpressionContext _prevctx = _localctx; - int _startState = 42; - enterRecursionRule(_localctx, 42, RULE_booleanExpression, _p); + int _startState = 44; + enterRecursionRule(_localctx, 44, RULE_booleanExpression, _p); int _la; try { int _alt; enterOuterAlt(_localctx, 1); { - setState(447); + setState(469); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,63,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,64,_ctx) ) { case 1: { _localctx = new LogicalNotContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(399); + setState(421); match(NOT); - setState(400); + setState(422); booleanExpression(8); } break; @@ -2777,13 +2854,13 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new ExistsContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(401); + setState(423); match(EXISTS); - setState(402); + setState(424); match(T__0); - setState(403); + setState(425); query(); - setState(404); + setState(426); match(T__1); } break; @@ -2792,29 +2869,29 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new StringQueryContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(406); + setState(428); match(QUERY); - setState(407); + setState(429); match(T__0); - setState(408); + setState(430); ((StringQueryContext)_localctx).queryString = string(); - setState(413); + setState(435); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(409); + setState(431); match(T__2); - setState(410); + setState(432); ((StringQueryContext)_localctx).options = string(); } } - setState(415); + setState(437); _errHandler.sync(this); _la = _input.LA(1); } - setState(416); + setState(438); match(T__1); } break; @@ -2823,33 +2900,33 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new MatchQueryContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(418); + setState(440); match(MATCH); - setState(419); + setState(441); match(T__0); - setState(420); + setState(442); ((MatchQueryContext)_localctx).singleField = qualifiedName(); - setState(421); + setState(443); match(T__2); - setState(422); + setState(444); ((MatchQueryContext)_localctx).queryString = string(); - setState(427); + setState(449); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(423); + setState(445); match(T__2); - setState(424); + setState(446); ((MatchQueryContext)_localctx).options = string(); } } - setState(429); + setState(451); _errHandler.sync(this); _la = _input.LA(1); } - setState(430); + setState(452); match(T__1); } break; @@ -2858,33 +2935,33 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new MultiMatchQueryContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(432); + setState(454); match(MATCH); - setState(433); + setState(455); match(T__0); - setState(434); + setState(456); ((MultiMatchQueryContext)_localctx).multiFields = string(); - setState(435); + setState(457); match(T__2); - setState(436); + setState(458); ((MultiMatchQueryContext)_localctx).queryString = string(); - setState(441); + setState(463); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(437); + setState(459); match(T__2); - setState(438); + setState(460); ((MultiMatchQueryContext)_localctx).options = string(); } } - setState(443); + setState(465); _errHandler.sync(this); _la = 
_input.LA(1); } - setState(444); + setState(466); match(T__1); } break; @@ -2893,33 +2970,33 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new BooleanDefaultContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(446); + setState(468); predicated(); } break; } _ctx.stop = _input.LT(-1); - setState(457); + setState(479); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,65,_ctx); + _alt = getInterpreter().adaptivePredict(_input,66,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(455); + setState(477); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,64,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,65,_ctx) ) { case 1: { _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(449); + setState(471); if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)"); - setState(450); + setState(472); ((LogicalBinaryContext)_localctx).operator = match(AND); - setState(451); + setState(473); ((LogicalBinaryContext)_localctx).right = booleanExpression(3); } break; @@ -2928,20 +3005,20 @@ private BooleanExpressionContext booleanExpression(int _p) throws RecognitionExc _localctx = new LogicalBinaryContext(new BooleanExpressionContext(_parentctx, _parentState)); ((LogicalBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_booleanExpression); - setState(452); + setState(474); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(453); + setState(475); ((LogicalBinaryContext)_localctx).operator = match(OR); - setState(454); + setState(476); ((LogicalBinaryContext)_localctx).right = booleanExpression(2); } break; } } } - setState(459); + setState(481); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,65,_ctx); + _alt = getInterpreter().adaptivePredict(_input,66,_ctx); } } } @@ -2984,18 +3061,18 @@ public T accept(ParseTreeVisitor visitor) { public final PredicatedContext predicated() throws RecognitionException { PredicatedContext _localctx = new PredicatedContext(_ctx, getState()); - enterRule(_localctx, 44, RULE_predicated); + enterRule(_localctx, 46, RULE_predicated); try { enterOuterAlt(_localctx, 1); { - setState(460); + setState(482); valueExpression(0); - setState(462); + setState(484); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,66,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,67,_ctx) ) { case 1: { - setState(461); + setState(483); predicate(); } break; @@ -3068,145 +3145,145 @@ public T accept(ParseTreeVisitor visitor) { public final PredicateContext predicate() throws RecognitionException { PredicateContext _localctx = new PredicateContext(_ctx, getState()); - enterRule(_localctx, 46, RULE_predicate); + enterRule(_localctx, 48, RULE_predicate); int _la; try { - setState(510); + setState(532); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,74,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,75,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(465); + setState(487); _la = _input.LA(1); if (_la==NOT) { { - setState(464); + 
setState(486); match(NOT); } } - setState(467); + setState(489); ((PredicateContext)_localctx).kind = match(BETWEEN); - setState(468); + setState(490); ((PredicateContext)_localctx).lower = valueExpression(0); - setState(469); + setState(491); match(AND); - setState(470); + setState(492); ((PredicateContext)_localctx).upper = valueExpression(0); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(473); + setState(495); _la = _input.LA(1); if (_la==NOT) { { - setState(472); + setState(494); match(NOT); } } - setState(475); + setState(497); ((PredicateContext)_localctx).kind = match(IN); - setState(476); + setState(498); match(T__0); - setState(477); + setState(499); expression(); - setState(482); + setState(504); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(478); + setState(500); match(T__2); - setState(479); + setState(501); expression(); } } - setState(484); + setState(506); _errHandler.sync(this); _la = _input.LA(1); } - setState(485); + setState(507); match(T__1); } break; case 3: enterOuterAlt(_localctx, 3); { - setState(488); + setState(510); _la = _input.LA(1); if (_la==NOT) { { - setState(487); + setState(509); match(NOT); } } - setState(490); + setState(512); ((PredicateContext)_localctx).kind = match(IN); - setState(491); + setState(513); match(T__0); - setState(492); + setState(514); query(); - setState(493); + setState(515); match(T__1); } break; case 4: enterOuterAlt(_localctx, 4); { - setState(496); + setState(518); _la = _input.LA(1); if (_la==NOT) { { - setState(495); + setState(517); match(NOT); } } - setState(498); + setState(520); ((PredicateContext)_localctx).kind = match(LIKE); - setState(499); + setState(521); pattern(); } break; case 5: enterOuterAlt(_localctx, 5); { - setState(501); + setState(523); _la = _input.LA(1); if (_la==NOT) { { - setState(500); + setState(522); match(NOT); } } - setState(503); + setState(525); ((PredicateContext)_localctx).kind = match(RLIKE); - setState(504); + setState(526); ((PredicateContext)_localctx).regex = string(); } break; case 6: enterOuterAlt(_localctx, 6); { - setState(505); + setState(527); match(IS); - setState(507); + setState(529); _la = _input.LA(1); if (_la==NOT) { { - setState(506); + setState(528); match(NOT); } } - setState(509); + setState(531); ((PredicateContext)_localctx).kind = match(NULL); } break; @@ -3225,14 +3302,12 @@ public final PredicateContext predicate() throws RecognitionException { public static class PatternContext extends ParserRuleContext { public StringContext value; - public StringContext escape; - public List string() { - return getRuleContexts(StringContext.class); + public StringContext string() { + return getRuleContext(StringContext.class,0); } - public StringContext string(int i) { - return getRuleContext(StringContext.class,i); + public PatternEscapeContext patternEscape() { + return getRuleContext(PatternEscapeContext.class,0); } - public TerminalNode ESCAPE() { return getToken(SqlBaseParser.ESCAPE, 0); } public PatternContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } @@ -3254,21 +3329,19 @@ public T accept(ParseTreeVisitor visitor) { public final PatternContext pattern() throws RecognitionException { PatternContext _localctx = new PatternContext(_ctx, getState()); - enterRule(_localctx, 48, RULE_pattern); + enterRule(_localctx, 50, RULE_pattern); try { enterOuterAlt(_localctx, 1); { - setState(512); + setState(534); ((PatternContext)_localctx).value = string(); - setState(515); + setState(536); 
_errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,75,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,76,_ctx) ) { case 1: { - setState(513); - match(ESCAPE); - setState(514); - ((PatternContext)_localctx).escape = string(); + setState(535); + patternEscape(); } break; } @@ -3285,19 +3358,86 @@ public final PatternContext pattern() throws RecognitionException { return _localctx; } - public static class ValueExpressionContext extends ParserRuleContext { - public ValueExpressionContext(ParserRuleContext parent, int invokingState) { + public static class PatternEscapeContext extends ParserRuleContext { + public StringContext escape; + public TerminalNode ESCAPE() { return getToken(SqlBaseParser.ESCAPE, 0); } + public StringContext string() { + return getRuleContext(StringContext.class,0); + } + public TerminalNode ESCAPE_ESC() { return getToken(SqlBaseParser.ESCAPE_ESC, 0); } + public PatternEscapeContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); } - @Override public int getRuleIndex() { return RULE_valueExpression; } - - public ValueExpressionContext() { } - public void copyFrom(ValueExpressionContext ctx) { - super.copyFrom(ctx); + @Override public int getRuleIndex() { return RULE_patternEscape; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterPatternEscape(this); } - } - public static class ValueExpressionDefaultContext extends ValueExpressionContext { - public PrimaryExpressionContext primaryExpression() { + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitPatternEscape(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor)visitor).visitPatternEscape(this); + else return visitor.visitChildren(this); + } + } + + public final PatternEscapeContext patternEscape() throws RecognitionException { + PatternEscapeContext _localctx = new PatternEscapeContext(_ctx, getState()); + enterRule(_localctx, 52, RULE_patternEscape); + try { + setState(544); + switch (_input.LA(1)) { + case ESCAPE: + enterOuterAlt(_localctx, 1); + { + setState(538); + match(ESCAPE); + setState(539); + ((PatternEscapeContext)_localctx).escape = string(); + } + break; + case ESCAPE_ESC: + enterOuterAlt(_localctx, 2); + { + setState(540); + match(ESCAPE_ESC); + setState(541); + ((PatternEscapeContext)_localctx).escape = string(); + setState(542); + match(ESC_END); + } + break; + default: + throw new NoViableAltException(this); + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + public static class ValueExpressionContext extends ParserRuleContext { + public ValueExpressionContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_valueExpression; } + + public ValueExpressionContext() { } + public void copyFrom(ValueExpressionContext ctx) { + super.copyFrom(ctx); + } + } + public static class ValueExpressionDefaultContext extends ValueExpressionContext { + public PrimaryExpressionContext primaryExpression() { return getRuleContext(PrimaryExpressionContext.class,0); } public ValueExpressionDefaultContext(ValueExpressionContext ctx) { copyFrom(ctx); } @@ -3404,14 
+3544,14 @@ private ValueExpressionContext valueExpression(int _p) throws RecognitionExcepti int _parentState = getState(); ValueExpressionContext _localctx = new ValueExpressionContext(_ctx, _parentState); ValueExpressionContext _prevctx = _localctx; - int _startState = 50; - enterRecursionRule(_localctx, 50, RULE_valueExpression, _p); + int _startState = 54; + enterRecursionRule(_localctx, 54, RULE_valueExpression, _p); int _la; try { int _alt; enterOuterAlt(_localctx, 1); { - setState(521); + setState(550); switch (_input.LA(1)) { case T__0: case ANALYZE: @@ -3444,6 +3584,11 @@ private ValueExpressionContext valueExpression(int _p) throws RecognitionExcepti case TYPE: case TYPES: case VERIFY: + case FUNCTION_ESC: + case DATE_ESC: + case TIME_ESC: + case TIMESTAMP_ESC: + case GUID_ESC: case ASTERISK: case PARAM: case STRING: @@ -3458,7 +3603,7 @@ private ValueExpressionContext valueExpression(int _p) throws RecognitionExcepti _ctx = _localctx; _prevctx = _localctx; - setState(518); + setState(547); primaryExpression(); } break; @@ -3468,7 +3613,7 @@ private ValueExpressionContext valueExpression(int _p) throws RecognitionExcepti _localctx = new ArithmeticUnaryContext(_localctx); _ctx = _localctx; _prevctx = _localctx; - setState(519); + setState(548); ((ArithmeticUnaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -3476,7 +3621,7 @@ private ValueExpressionContext valueExpression(int _p) throws RecognitionExcepti } else { consume(); } - setState(520); + setState(549); valueExpression(4); } break; @@ -3484,33 +3629,33 @@ private ValueExpressionContext valueExpression(int _p) throws RecognitionExcepti throw new NoViableAltException(this); } _ctx.stop = _input.LT(-1); - setState(535); + setState(564); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,78,_ctx); + _alt = getInterpreter().adaptivePredict(_input,80,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { if ( _parseListeners!=null ) triggerExitRuleEvent(); _prevctx = _localctx; { - setState(533); + setState(562); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,77,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,79,_ctx) ) { case 1: { _localctx = new ArithmeticBinaryContext(new ValueExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_valueExpression); - setState(523); + setState(552); if (!(precpred(_ctx, 3))) throw new FailedPredicateException(this, "precpred(_ctx, 3)"); - setState(524); + setState(553); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); - if ( !(((((_la - 80)) & ~0x3f) == 0 && ((1L << (_la - 80)) & ((1L << (ASTERISK - 80)) | (1L << (SLASH - 80)) | (1L << (PERCENT - 80)))) != 0)) ) { + if ( !(((((_la - 88)) & ~0x3f) == 0 && ((1L << (_la - 88)) & ((1L << (ASTERISK - 88)) | (1L << (SLASH - 88)) | (1L << (PERCENT - 88)))) != 0)) ) { ((ArithmeticBinaryContext)_localctx).operator = (Token)_errHandler.recoverInline(this); } else { consume(); } - setState(525); + setState(554); ((ArithmeticBinaryContext)_localctx).right = valueExpression(4); } break; @@ -3519,9 +3664,9 @@ private ValueExpressionContext valueExpression(int _p) throws RecognitionExcepti _localctx = new ArithmeticBinaryContext(new ValueExpressionContext(_parentctx, _parentState)); ((ArithmeticBinaryContext)_localctx).left = _prevctx; 
pushNewRecursionContext(_localctx, _startState, RULE_valueExpression); - setState(526); + setState(555); if (!(precpred(_ctx, 2))) throw new FailedPredicateException(this, "precpred(_ctx, 2)"); - setState(527); + setState(556); ((ArithmeticBinaryContext)_localctx).operator = _input.LT(1); _la = _input.LA(1); if ( !(_la==PLUS || _la==MINUS) ) { @@ -3529,7 +3674,7 @@ private ValueExpressionContext valueExpression(int _p) throws RecognitionExcepti } else { consume(); } - setState(528); + setState(557); ((ArithmeticBinaryContext)_localctx).right = valueExpression(3); } break; @@ -3538,20 +3683,20 @@ private ValueExpressionContext valueExpression(int _p) throws RecognitionExcepti _localctx = new ComparisonContext(new ValueExpressionContext(_parentctx, _parentState)); ((ComparisonContext)_localctx).left = _prevctx; pushNewRecursionContext(_localctx, _startState, RULE_valueExpression); - setState(529); + setState(558); if (!(precpred(_ctx, 1))) throw new FailedPredicateException(this, "precpred(_ctx, 1)"); - setState(530); + setState(559); comparisonOperator(); - setState(531); + setState(560); ((ComparisonContext)_localctx).right = valueExpression(2); } break; } } } - setState(537); + setState(566); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,78,_ctx); + _alt = getInterpreter().adaptivePredict(_input,80,_ctx); } } } @@ -3597,13 +3742,8 @@ public T accept(ParseTreeVisitor visitor) { } } public static class CastContext extends PrimaryExpressionContext { - public TerminalNode CAST() { return getToken(SqlBaseParser.CAST, 0); } - public ExpressionContext expression() { - return getRuleContext(ExpressionContext.class,0); - } - public TerminalNode AS() { return getToken(SqlBaseParser.AS, 0); } - public DataTypeContext dataType() { - return getRuleContext(DataTypeContext.class,0); + public CastExpressionContext castExpression() { + return getRuleContext(CastExpressionContext.class,0); } public CastContext(PrimaryExpressionContext ctx) { copyFrom(ctx); } @Override @@ -3659,14 +3799,8 @@ public T accept(ParseTreeVisitor visitor) { } } public static class ExtractContext extends PrimaryExpressionContext { - public IdentifierContext field; - public TerminalNode EXTRACT() { return getToken(SqlBaseParser.EXTRACT, 0); } - public TerminalNode FROM() { return getToken(SqlBaseParser.FROM, 0); } - public ValueExpressionContext valueExpression() { - return getRuleContext(ValueExpressionContext.class,0); - } - public IdentifierContext identifier() { - return getRuleContext(IdentifierContext.class,0); + public ExtractExpressionContext extractExpression() { + return getRuleContext(ExtractExpressionContext.class,0); } public ExtractContext(PrimaryExpressionContext ctx) { copyFrom(ctx); } @Override @@ -3713,17 +3847,514 @@ public QualifiedNameContext qualifiedName() { public void enterRule(ParseTreeListener listener) { if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterStar(this); } - @Override - public void exitRule(ParseTreeListener listener) { - if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitStar(this); + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitStar(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor)visitor).visitStar(this); + else return visitor.visitChildren(this); + } + } + public static class FunctionContext extends PrimaryExpressionContext { + public 
FunctionExpressionContext functionExpression() { + return getRuleContext(FunctionExpressionContext.class,0); + } + public FunctionContext(PrimaryExpressionContext ctx) { copyFrom(ctx); } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterFunction(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitFunction(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor)visitor).visitFunction(this); + else return visitor.visitChildren(this); + } + } + public static class SubqueryExpressionContext extends PrimaryExpressionContext { + public QueryContext query() { + return getRuleContext(QueryContext.class,0); + } + public SubqueryExpressionContext(PrimaryExpressionContext ctx) { copyFrom(ctx); } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterSubqueryExpression(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitSubqueryExpression(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor)visitor).visitSubqueryExpression(this); + else return visitor.visitChildren(this); + } + } + + public final PrimaryExpressionContext primaryExpression() throws RecognitionException { + PrimaryExpressionContext _localctx = new PrimaryExpressionContext(_ctx, getState()); + enterRule(_localctx, 56, RULE_primaryExpression); + int _la; + try { + setState(588); + _errHandler.sync(this); + switch ( getInterpreter().adaptivePredict(_input,82,_ctx) ) { + case 1: + _localctx = new CastContext(_localctx); + enterOuterAlt(_localctx, 1); + { + setState(567); + castExpression(); + } + break; + case 2: + _localctx = new ExtractContext(_localctx); + enterOuterAlt(_localctx, 2); + { + setState(568); + extractExpression(); + } + break; + case 3: + _localctx = new ConstantDefaultContext(_localctx); + enterOuterAlt(_localctx, 3); + { + setState(569); + constant(); + } + break; + case 4: + _localctx = new StarContext(_localctx); + enterOuterAlt(_localctx, 4); + { + setState(570); + match(ASTERISK); + } + break; + case 5: + _localctx = new StarContext(_localctx); + enterOuterAlt(_localctx, 5); + { + setState(574); + _la = _input.LA(1); + if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FORMAT) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << MAPPED) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << RLIKE) | (1L << QUERY) | (1L << SCHEMAS) | (1L << SHOW) | (1L << SYS) | (1L << TABLES))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (TEXT - 64)) | (1L << (TYPE - 64)) | (1L << (TYPES - 64)) | (1L << (VERIFY - 64)) | (1L << (IDENTIFIER - 64)) | (1L << (DIGIT_IDENTIFIER - 64)) | (1L << (QUOTED_IDENTIFIER - 64)) | (1L << (BACKQUOTED_IDENTIFIER - 64)))) != 0)) { + { + setState(571); + qualifiedName(); + setState(572); + match(DOT); + } + } + + setState(576); + match(ASTERISK); + } + break; + case 6: + _localctx = new FunctionContext(_localctx); + enterOuterAlt(_localctx, 6); + { + setState(577); + functionExpression(); + } 
+ break; + case 7: + _localctx = new SubqueryExpressionContext(_localctx); + enterOuterAlt(_localctx, 7); + { + setState(578); + match(T__0); + setState(579); + query(); + setState(580); + match(T__1); + } + break; + case 8: + _localctx = new ColumnReferenceContext(_localctx); + enterOuterAlt(_localctx, 8); + { + setState(582); + identifier(); + } + break; + case 9: + _localctx = new DereferenceContext(_localctx); + enterOuterAlt(_localctx, 9); + { + setState(583); + qualifiedName(); + } + break; + case 10: + _localctx = new ParenthesizedExpressionContext(_localctx); + enterOuterAlt(_localctx, 10); + { + setState(584); + match(T__0); + setState(585); + expression(); + setState(586); + match(T__1); + } + break; + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + public static class CastExpressionContext extends ParserRuleContext { + public CastTemplateContext castTemplate() { + return getRuleContext(CastTemplateContext.class,0); + } + public TerminalNode FUNCTION_ESC() { return getToken(SqlBaseParser.FUNCTION_ESC, 0); } + public TerminalNode ESC_END() { return getToken(SqlBaseParser.ESC_END, 0); } + public CastExpressionContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_castExpression; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterCastExpression(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitCastExpression(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor)visitor).visitCastExpression(this); + else return visitor.visitChildren(this); + } + } + + public final CastExpressionContext castExpression() throws RecognitionException { + CastExpressionContext _localctx = new CastExpressionContext(_ctx, getState()); + enterRule(_localctx, 58, RULE_castExpression); + try { + setState(595); + switch (_input.LA(1)) { + case CAST: + enterOuterAlt(_localctx, 1); + { + setState(590); + castTemplate(); + } + break; + case FUNCTION_ESC: + enterOuterAlt(_localctx, 2); + { + setState(591); + match(FUNCTION_ESC); + setState(592); + castTemplate(); + setState(593); + match(ESC_END); + } + break; + default: + throw new NoViableAltException(this); + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + public static class CastTemplateContext extends ParserRuleContext { + public TerminalNode CAST() { return getToken(SqlBaseParser.CAST, 0); } + public ExpressionContext expression() { + return getRuleContext(ExpressionContext.class,0); + } + public TerminalNode AS() { return getToken(SqlBaseParser.AS, 0); } + public DataTypeContext dataType() { + return getRuleContext(DataTypeContext.class,0); + } + public CastTemplateContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_castTemplate; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterCastTemplate(this); + } + 
@Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitCastTemplate(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor)visitor).visitCastTemplate(this); + else return visitor.visitChildren(this); + } + } + + public final CastTemplateContext castTemplate() throws RecognitionException { + CastTemplateContext _localctx = new CastTemplateContext(_ctx, getState()); + enterRule(_localctx, 60, RULE_castTemplate); + try { + enterOuterAlt(_localctx, 1); + { + setState(597); + match(CAST); + setState(598); + match(T__0); + setState(599); + expression(); + setState(600); + match(AS); + setState(601); + dataType(); + setState(602); + match(T__1); + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + public static class ExtractExpressionContext extends ParserRuleContext { + public ExtractTemplateContext extractTemplate() { + return getRuleContext(ExtractTemplateContext.class,0); + } + public TerminalNode FUNCTION_ESC() { return getToken(SqlBaseParser.FUNCTION_ESC, 0); } + public TerminalNode ESC_END() { return getToken(SqlBaseParser.ESC_END, 0); } + public ExtractExpressionContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_extractExpression; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterExtractExpression(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitExtractExpression(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor)visitor).visitExtractExpression(this); + else return visitor.visitChildren(this); + } + } + + public final ExtractExpressionContext extractExpression() throws RecognitionException { + ExtractExpressionContext _localctx = new ExtractExpressionContext(_ctx, getState()); + enterRule(_localctx, 62, RULE_extractExpression); + try { + setState(609); + switch (_input.LA(1)) { + case EXTRACT: + enterOuterAlt(_localctx, 1); + { + setState(604); + extractTemplate(); + } + break; + case FUNCTION_ESC: + enterOuterAlt(_localctx, 2); + { + setState(605); + match(FUNCTION_ESC); + setState(606); + extractTemplate(); + setState(607); + match(ESC_END); + } + break; + default: + throw new NoViableAltException(this); + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + public static class ExtractTemplateContext extends ParserRuleContext { + public IdentifierContext field; + public TerminalNode EXTRACT() { return getToken(SqlBaseParser.EXTRACT, 0); } + public TerminalNode FROM() { return getToken(SqlBaseParser.FROM, 0); } + public ValueExpressionContext valueExpression() { + return getRuleContext(ValueExpressionContext.class,0); + } + public IdentifierContext identifier() { + return getRuleContext(IdentifierContext.class,0); + } + public ExtractTemplateContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int 
getRuleIndex() { return RULE_extractTemplate; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterExtractTemplate(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitExtractTemplate(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor)visitor).visitExtractTemplate(this); + else return visitor.visitChildren(this); + } + } + + public final ExtractTemplateContext extractTemplate() throws RecognitionException { + ExtractTemplateContext _localctx = new ExtractTemplateContext(_ctx, getState()); + enterRule(_localctx, 64, RULE_extractTemplate); + try { + enterOuterAlt(_localctx, 1); + { + setState(611); + match(EXTRACT); + setState(612); + match(T__0); + setState(613); + ((ExtractTemplateContext)_localctx).field = identifier(); + setState(614); + match(FROM); + setState(615); + valueExpression(0); + setState(616); + match(T__1); + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + + public static class FunctionExpressionContext extends ParserRuleContext { + public FunctionTemplateContext functionTemplate() { + return getRuleContext(FunctionTemplateContext.class,0); + } + public TerminalNode FUNCTION_ESC() { return getToken(SqlBaseParser.FUNCTION_ESC, 0); } + public FunctionExpressionContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_functionExpression; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterFunctionExpression(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitFunctionExpression(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor)visitor).visitFunctionExpression(this); + else return visitor.visitChildren(this); + } + } + + public final FunctionExpressionContext functionExpression() throws RecognitionException { + FunctionExpressionContext _localctx = new FunctionExpressionContext(_ctx, getState()); + enterRule(_localctx, 66, RULE_functionExpression); + try { + setState(623); + switch (_input.LA(1)) { + case ANALYZE: + case ANALYZED: + case CATALOGS: + case COLUMNS: + case DEBUG: + case EXECUTABLE: + case EXPLAIN: + case FORMAT: + case FUNCTIONS: + case GRAPHVIZ: + case MAPPED: + case OPTIMIZED: + case PARSED: + case PHYSICAL: + case PLAN: + case RLIKE: + case QUERY: + case SCHEMAS: + case SHOW: + case SYS: + case TABLES: + case TEXT: + case TYPE: + case TYPES: + case VERIFY: + case IDENTIFIER: + case DIGIT_IDENTIFIER: + case QUOTED_IDENTIFIER: + case BACKQUOTED_IDENTIFIER: + enterOuterAlt(_localctx, 1); + { + setState(618); + functionTemplate(); + } + break; + case FUNCTION_ESC: + enterOuterAlt(_localctx, 2); + { + setState(619); + match(FUNCTION_ESC); + setState(620); + functionTemplate(); + setState(621); + match(ESC_END); + } + break; + default: + throw new NoViableAltException(this); + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + 
_errHandler.recover(this, re); } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor)visitor).visitStar(this); - else return visitor.visitChildren(this); + finally { + exitRule(); } + return _localctx; } - public static class FunctionCallContext extends PrimaryExpressionContext { + + public static class FunctionTemplateContext extends ParserRuleContext { public IdentifierContext identifier() { return getRuleContext(IdentifierContext.class,0); } @@ -3736,206 +4367,72 @@ public ExpressionContext expression(int i) { public SetQuantifierContext setQuantifier() { return getRuleContext(SetQuantifierContext.class,0); } - public FunctionCallContext(PrimaryExpressionContext ctx) { copyFrom(ctx); } - @Override - public void enterRule(ParseTreeListener listener) { - if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterFunctionCall(this); - } - @Override - public void exitRule(ParseTreeListener listener) { - if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitFunctionCall(this); - } - @Override - public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor)visitor).visitFunctionCall(this); - else return visitor.visitChildren(this); - } - } - public static class SubqueryExpressionContext extends PrimaryExpressionContext { - public QueryContext query() { - return getRuleContext(QueryContext.class,0); + public FunctionTemplateContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); } - public SubqueryExpressionContext(PrimaryExpressionContext ctx) { copyFrom(ctx); } + @Override public int getRuleIndex() { return RULE_functionTemplate; } @Override public void enterRule(ParseTreeListener listener) { - if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterSubqueryExpression(this); + if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterFunctionTemplate(this); } @Override public void exitRule(ParseTreeListener listener) { - if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitSubqueryExpression(this); + if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitFunctionTemplate(this); } @Override public T accept(ParseTreeVisitor visitor) { - if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor)visitor).visitSubqueryExpression(this); + if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor)visitor).visitFunctionTemplate(this); else return visitor.visitChildren(this); } } - public final PrimaryExpressionContext primaryExpression() throws RecognitionException { - PrimaryExpressionContext _localctx = new PrimaryExpressionContext(_ctx, getState()); - enterRule(_localctx, 52, RULE_primaryExpression); + public final FunctionTemplateContext functionTemplate() throws RecognitionException { + FunctionTemplateContext _localctx = new FunctionTemplateContext(_ctx, getState()); + enterRule(_localctx, 68, RULE_functionTemplate); int _la; try { - setState(587); - _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,83,_ctx) ) { - case 1: - _localctx = new CastContext(_localctx); - enterOuterAlt(_localctx, 1); - { - setState(538); - match(CAST); - setState(539); - match(T__0); - setState(540); - expression(); - setState(541); - match(AS); - setState(542); - dataType(); - setState(543); - match(T__1); - } - break; - case 2: - _localctx = new ExtractContext(_localctx); - enterOuterAlt(_localctx, 2); - { - 
setState(545); - match(EXTRACT); - setState(546); - match(T__0); - setState(547); - ((ExtractContext)_localctx).field = identifier(); - setState(548); - match(FROM); - setState(549); - valueExpression(0); - setState(550); - match(T__1); - } - break; - case 3: - _localctx = new ConstantDefaultContext(_localctx); - enterOuterAlt(_localctx, 3); - { - setState(552); - constant(); - } - break; - case 4: - _localctx = new StarContext(_localctx); - enterOuterAlt(_localctx, 4); - { - setState(553); - match(ASTERISK); - } - break; - case 5: - _localctx = new StarContext(_localctx); - enterOuterAlt(_localctx, 5); + enterOuterAlt(_localctx, 1); + { + setState(625); + identifier(); + setState(626); + match(T__0); + setState(638); + _la = _input.LA(1); + if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << T__0) | (1L << ALL) | (1L << ANALYZE) | (1L << ANALYZED) | (1L << CAST) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << DEBUG) | (1L << DISTINCT) | (1L << EXECUTABLE) | (1L << EXISTS) | (1L << EXPLAIN) | (1L << EXTRACT) | (1L << FALSE) | (1L << FORMAT) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << MAPPED) | (1L << MATCH) | (1L << NOT) | (1L << NULL) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << RLIKE) | (1L << QUERY) | (1L << SCHEMAS) | (1L << SHOW) | (1L << SYS) | (1L << TABLES))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (TEXT - 64)) | (1L << (TRUE - 64)) | (1L << (TYPE - 64)) | (1L << (TYPES - 64)) | (1L << (VERIFY - 64)) | (1L << (FUNCTION_ESC - 64)) | (1L << (DATE_ESC - 64)) | (1L << (TIME_ESC - 64)) | (1L << (TIMESTAMP_ESC - 64)) | (1L << (GUID_ESC - 64)) | (1L << (PLUS - 64)) | (1L << (MINUS - 64)) | (1L << (ASTERISK - 64)) | (1L << (PARAM - 64)) | (1L << (STRING - 64)) | (1L << (INTEGER_VALUE - 64)) | (1L << (DECIMAL_VALUE - 64)) | (1L << (IDENTIFIER - 64)) | (1L << (DIGIT_IDENTIFIER - 64)) | (1L << (QUOTED_IDENTIFIER - 64)) | (1L << (BACKQUOTED_IDENTIFIER - 64)))) != 0)) { { - setState(557); + setState(628); _la = _input.LA(1); - if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FORMAT) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << MAPPED) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << RLIKE) | (1L << QUERY) | (1L << SCHEMAS) | (1L << SHOW) | (1L << SYS) | (1L << TABLES))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (TEXT - 64)) | (1L << (TYPE - 64)) | (1L << (TYPES - 64)) | (1L << (VERIFY - 64)) | (1L << (IDENTIFIER - 64)) | (1L << (DIGIT_IDENTIFIER - 64)) | (1L << (QUOTED_IDENTIFIER - 64)) | (1L << (BACKQUOTED_IDENTIFIER - 64)))) != 0)) { + if (_la==ALL || _la==DISTINCT) { { - setState(554); - qualifiedName(); - setState(555); - match(DOT); + setState(627); + setQuantifier(); } } - setState(559); - match(ASTERISK); - } - break; - case 6: - _localctx = new FunctionCallContext(_localctx); - enterOuterAlt(_localctx, 6); - { - setState(560); - identifier(); - setState(561); - match(T__0); - setState(573); + setState(630); + expression(); + setState(635); + _errHandler.sync(this); _la = _input.LA(1); - if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << T__0) | (1L << ALL) | (1L << ANALYZE) | (1L << ANALYZED) | (1L << CAST) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << DEBUG) | (1L << DISTINCT) | (1L << EXECUTABLE) | (1L << EXISTS) | (1L << EXPLAIN) | (1L << EXTRACT) | (1L << FALSE) | (1L << FORMAT) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L 
<< MAPPED) | (1L << MATCH) | (1L << NOT) | (1L << NULL) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << RLIKE) | (1L << QUERY) | (1L << SCHEMAS) | (1L << SHOW) | (1L << SYS) | (1L << TABLES))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (TEXT - 64)) | (1L << (TRUE - 64)) | (1L << (TYPE - 64)) | (1L << (TYPES - 64)) | (1L << (VERIFY - 64)) | (1L << (PLUS - 64)) | (1L << (MINUS - 64)) | (1L << (ASTERISK - 64)) | (1L << (PARAM - 64)) | (1L << (STRING - 64)) | (1L << (INTEGER_VALUE - 64)) | (1L << (DECIMAL_VALUE - 64)) | (1L << (IDENTIFIER - 64)) | (1L << (DIGIT_IDENTIFIER - 64)) | (1L << (QUOTED_IDENTIFIER - 64)) | (1L << (BACKQUOTED_IDENTIFIER - 64)))) != 0)) { + while (_la==T__2) { { - setState(563); - _la = _input.LA(1); - if (_la==ALL || _la==DISTINCT) { - { - setState(562); - setQuantifier(); - } - } - - setState(565); + { + setState(631); + match(T__2); + setState(632); expression(); - setState(570); - _errHandler.sync(this); - _la = _input.LA(1); - while (_la==T__2) { - { - { - setState(566); - match(T__2); - setState(567); - expression(); - } - } - setState(572); - _errHandler.sync(this); - _la = _input.LA(1); } } + setState(637); + _errHandler.sync(this); + _la = _input.LA(1); } - - setState(575); - match(T__1); - } - break; - case 7: - _localctx = new SubqueryExpressionContext(_localctx); - enterOuterAlt(_localctx, 7); - { - setState(577); - match(T__0); - setState(578); - query(); - setState(579); - match(T__1); - } - break; - case 8: - _localctx = new ColumnReferenceContext(_localctx); - enterOuterAlt(_localctx, 8); - { - setState(581); - identifier(); - } - break; - case 9: - _localctx = new DereferenceContext(_localctx); - enterOuterAlt(_localctx, 9); - { - setState(582); - qualifiedName(); - } - break; - case 10: - _localctx = new ParenthesizedExpressionContext(_localctx); - enterOuterAlt(_localctx, 10); - { - setState(583); - match(T__0); - setState(584); - expression(); - setState(585); - match(T__1); } - break; + } + + setState(640); + match(T__1); } } catch (RecognitionException re) { @@ -3977,6 +4474,27 @@ public T accept(ParseTreeVisitor visitor) { else return visitor.visitChildren(this); } } + public static class TimestampEscapedLiteralContext extends ConstantContext { + public TerminalNode TIMESTAMP_ESC() { return getToken(SqlBaseParser.TIMESTAMP_ESC, 0); } + public StringContext string() { + return getRuleContext(StringContext.class,0); + } + public TerminalNode ESC_END() { return getToken(SqlBaseParser.ESC_END, 0); } + public TimestampEscapedLiteralContext(ConstantContext ctx) { copyFrom(ctx); } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterTimestampEscapedLiteral(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitTimestampEscapedLiteral(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor)visitor).visitTimestampEscapedLiteral(this); + else return visitor.visitChildren(this); + } + } public static class StringLiteralContext extends ConstantContext { public List STRING() { return getTokens(SqlBaseParser.STRING); } public TerminalNode STRING(int i) { @@ -4014,6 +4532,48 @@ public T accept(ParseTreeVisitor visitor) { else return visitor.visitChildren(this); } } + public static class TimeEscapedLiteralContext extends ConstantContext { + 
public TerminalNode TIME_ESC() { return getToken(SqlBaseParser.TIME_ESC, 0); } + public StringContext string() { + return getRuleContext(StringContext.class,0); + } + public TerminalNode ESC_END() { return getToken(SqlBaseParser.ESC_END, 0); } + public TimeEscapedLiteralContext(ConstantContext ctx) { copyFrom(ctx); } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterTimeEscapedLiteral(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitTimeEscapedLiteral(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor)visitor).visitTimeEscapedLiteral(this); + else return visitor.visitChildren(this); + } + } + public static class DateEscapedLiteralContext extends ConstantContext { + public TerminalNode DATE_ESC() { return getToken(SqlBaseParser.DATE_ESC, 0); } + public StringContext string() { + return getRuleContext(StringContext.class,0); + } + public TerminalNode ESC_END() { return getToken(SqlBaseParser.ESC_END, 0); } + public DateEscapedLiteralContext(ConstantContext ctx) { copyFrom(ctx); } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterDateEscapedLiteral(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitDateEscapedLiteral(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor)visitor).visitDateEscapedLiteral(this); + else return visitor.visitChildren(this); + } + } public static class NumericLiteralContext extends ConstantContext { public NumberContext number() { return getRuleContext(NumberContext.class,0); @@ -4052,19 +4612,40 @@ public T accept(ParseTreeVisitor visitor) { else return visitor.visitChildren(this); } } + public static class GuidEscapedLiteralContext extends ConstantContext { + public TerminalNode GUID_ESC() { return getToken(SqlBaseParser.GUID_ESC, 0); } + public StringContext string() { + return getRuleContext(StringContext.class,0); + } + public TerminalNode ESC_END() { return getToken(SqlBaseParser.ESC_END, 0); } + public GuidEscapedLiteralContext(ConstantContext ctx) { copyFrom(ctx); } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterGuidEscapedLiteral(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitGuidEscapedLiteral(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor)visitor).visitGuidEscapedLiteral(this); + else return visitor.visitChildren(this); + } + } public final ConstantContext constant() throws RecognitionException { ConstantContext _localctx = new ConstantContext(_ctx, getState()); - enterRule(_localctx, 54, RULE_constant); + enterRule(_localctx, 70, RULE_constant); try { int _alt; - setState(598); + setState(667); switch (_input.LA(1)) { case NULL: _localctx = new NullLiteralContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(589); + setState(642); match(NULL); } break; @@ -4073,7 +4654,7 @@ public final ConstantContext 
constant() throws RecognitionException { _localctx = new NumericLiteralContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(590); + setState(643); number(); } break; @@ -4082,7 +4663,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new BooleanLiteralContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(591); + setState(644); booleanValue(); } break; @@ -4090,7 +4671,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new StringLiteralContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(593); + setState(646); _errHandler.sync(this); _alt = 1; do { @@ -4098,7 +4679,7 @@ public final ConstantContext constant() throws RecognitionException { case 1: { { - setState(592); + setState(645); match(STRING); } } @@ -4106,9 +4687,9 @@ public final ConstantContext constant() throws RecognitionException { default: throw new NoViableAltException(this); } - setState(595); + setState(648); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,84,_ctx); + _alt = getInterpreter().adaptivePredict(_input,89,_ctx); } while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ); } break; @@ -4116,10 +4697,58 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new ParamLiteralContext(_localctx); enterOuterAlt(_localctx, 5); { - setState(597); + setState(650); match(PARAM); } break; + case DATE_ESC: + _localctx = new DateEscapedLiteralContext(_localctx); + enterOuterAlt(_localctx, 6); + { + setState(651); + match(DATE_ESC); + setState(652); + string(); + setState(653); + match(ESC_END); + } + break; + case TIME_ESC: + _localctx = new TimeEscapedLiteralContext(_localctx); + enterOuterAlt(_localctx, 7); + { + setState(655); + match(TIME_ESC); + setState(656); + string(); + setState(657); + match(ESC_END); + } + break; + case TIMESTAMP_ESC: + _localctx = new TimestampEscapedLiteralContext(_localctx); + enterOuterAlt(_localctx, 8); + { + setState(659); + match(TIMESTAMP_ESC); + setState(660); + string(); + setState(661); + match(ESC_END); + } + break; + case GUID_ESC: + _localctx = new GuidEscapedLiteralContext(_localctx); + enterOuterAlt(_localctx, 9); + { + setState(663); + match(GUID_ESC); + setState(664); + string(); + setState(665); + match(ESC_END); + } + break; default: throw new NoViableAltException(this); } @@ -4163,14 +4792,14 @@ public T accept(ParseTreeVisitor visitor) { public final ComparisonOperatorContext comparisonOperator() throws RecognitionException { ComparisonOperatorContext _localctx = new ComparisonOperatorContext(_ctx, getState()); - enterRule(_localctx, 56, RULE_comparisonOperator); + enterRule(_localctx, 72, RULE_comparisonOperator); int _la; try { enterOuterAlt(_localctx, 1); { - setState(600); + setState(669); _la = _input.LA(1); - if ( !(((((_la - 72)) & ~0x3f) == 0 && ((1L << (_la - 72)) & ((1L << (EQ - 72)) | (1L << (NEQ - 72)) | (1L << (LT - 72)) | (1L << (LTE - 72)) | (1L << (GT - 72)) | (1L << (GTE - 72)))) != 0)) ) { + if ( !(((((_la - 80)) & ~0x3f) == 0 && ((1L << (_la - 80)) & ((1L << (EQ - 80)) | (1L << (NEQ - 80)) | (1L << (LT - 80)) | (1L << (LTE - 80)) | (1L << (GT - 80)) | (1L << (GTE - 80)))) != 0)) ) { _errHandler.recoverInline(this); } else { consume(); @@ -4212,12 +4841,12 @@ public T accept(ParseTreeVisitor visitor) { public final BooleanValueContext booleanValue() throws RecognitionException { BooleanValueContext _localctx = new BooleanValueContext(_ctx, getState()); - enterRule(_localctx, 58, 
RULE_booleanValue); + enterRule(_localctx, 74, RULE_booleanValue); int _la; try { enterOuterAlt(_localctx, 1); { - setState(602); + setState(671); _la = _input.LA(1); if ( !(_la==FALSE || _la==TRUE) ) { _errHandler.recoverInline(this); @@ -4270,12 +4899,12 @@ public T accept(ParseTreeVisitor visitor) { public final DataTypeContext dataType() throws RecognitionException { DataTypeContext _localctx = new DataTypeContext(_ctx, getState()); - enterRule(_localctx, 60, RULE_dataType); + enterRule(_localctx, 76, RULE_dataType); try { _localctx = new PrimitiveDataTypeContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(604); + setState(673); identifier(); } } @@ -4322,30 +4951,30 @@ public T accept(ParseTreeVisitor visitor) { public final QualifiedNameContext qualifiedName() throws RecognitionException { QualifiedNameContext _localctx = new QualifiedNameContext(_ctx, getState()); - enterRule(_localctx, 62, RULE_qualifiedName); + enterRule(_localctx, 78, RULE_qualifiedName); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(611); + setState(680); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,86,_ctx); + _alt = getInterpreter().adaptivePredict(_input,91,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(606); + setState(675); identifier(); - setState(607); + setState(676); match(DOT); } } } - setState(613); + setState(682); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,86,_ctx); + _alt = getInterpreter().adaptivePredict(_input,91,_ctx); } - setState(614); + setState(683); identifier(); } } @@ -4388,15 +5017,15 @@ public T accept(ParseTreeVisitor visitor) { public final IdentifierContext identifier() throws RecognitionException { IdentifierContext _localctx = new IdentifierContext(_ctx, getState()); - enterRule(_localctx, 64, RULE_identifier); + enterRule(_localctx, 80, RULE_identifier); try { - setState(618); + setState(687); switch (_input.LA(1)) { case QUOTED_IDENTIFIER: case BACKQUOTED_IDENTIFIER: enterOuterAlt(_localctx, 1); { - setState(616); + setState(685); quoteIdentifier(); } break; @@ -4429,7 +5058,7 @@ public final IdentifierContext identifier() throws RecognitionException { case DIGIT_IDENTIFIER: enterOuterAlt(_localctx, 2); { - setState(617); + setState(686); unquoteIdentifier(); } break; @@ -4479,46 +5108,46 @@ public T accept(ParseTreeVisitor visitor) { public final TableIdentifierContext tableIdentifier() throws RecognitionException { TableIdentifierContext _localctx = new TableIdentifierContext(_ctx, getState()); - enterRule(_localctx, 66, RULE_tableIdentifier); + enterRule(_localctx, 82, RULE_tableIdentifier); int _la; try { - setState(632); + setState(701); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,90,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,95,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(623); + setState(692); _la = _input.LA(1); if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FORMAT) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << MAPPED) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << RLIKE) | (1L << QUERY) | (1L << SCHEMAS) | (1L << SHOW) | (1L << SYS) | (1L << TABLES))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (TEXT - 64)) | (1L << (TYPE - 64)) | (1L << (TYPES - 64)) | (1L << (VERIFY 
- 64)) | (1L << (IDENTIFIER - 64)) | (1L << (DIGIT_IDENTIFIER - 64)) | (1L << (QUOTED_IDENTIFIER - 64)) | (1L << (BACKQUOTED_IDENTIFIER - 64)))) != 0)) { { - setState(620); + setState(689); ((TableIdentifierContext)_localctx).catalog = identifier(); - setState(621); + setState(690); match(T__3); } } - setState(625); + setState(694); match(TABLE_IDENTIFIER); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(629); + setState(698); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,89,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,94,_ctx) ) { case 1: { - setState(626); + setState(695); ((TableIdentifierContext)_localctx).catalog = identifier(); - setState(627); + setState(696); match(T__3); } break; } - setState(631); + setState(700); ((TableIdentifierContext)_localctx).name = identifier(); } break; @@ -4583,15 +5212,15 @@ public T accept(ParseTreeVisitor visitor) { public final QuoteIdentifierContext quoteIdentifier() throws RecognitionException { QuoteIdentifierContext _localctx = new QuoteIdentifierContext(_ctx, getState()); - enterRule(_localctx, 68, RULE_quoteIdentifier); + enterRule(_localctx, 84, RULE_quoteIdentifier); try { - setState(636); + setState(705); switch (_input.LA(1)) { case QUOTED_IDENTIFIER: _localctx = new QuotedIdentifierContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(634); + setState(703); match(QUOTED_IDENTIFIER); } break; @@ -4599,7 +5228,7 @@ public final QuoteIdentifierContext quoteIdentifier() throws RecognitionExceptio _localctx = new BackQuotedIdentifierContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(635); + setState(704); match(BACKQUOTED_IDENTIFIER); } break; @@ -4669,15 +5298,15 @@ public T accept(ParseTreeVisitor visitor) { public final UnquoteIdentifierContext unquoteIdentifier() throws RecognitionException { UnquoteIdentifierContext _localctx = new UnquoteIdentifierContext(_ctx, getState()); - enterRule(_localctx, 70, RULE_unquoteIdentifier); + enterRule(_localctx, 86, RULE_unquoteIdentifier); try { - setState(641); + setState(710); switch (_input.LA(1)) { case IDENTIFIER: _localctx = new UnquotedIdentifierContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(638); + setState(707); match(IDENTIFIER); } break; @@ -4709,7 +5338,7 @@ public final UnquoteIdentifierContext unquoteIdentifier() throws RecognitionExce _localctx = new UnquotedIdentifierContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(639); + setState(708); nonReserved(); } break; @@ -4717,7 +5346,7 @@ public final UnquoteIdentifierContext unquoteIdentifier() throws RecognitionExce _localctx = new DigitIdentifierContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(640); + setState(709); match(DIGIT_IDENTIFIER); } break; @@ -4784,15 +5413,15 @@ public T accept(ParseTreeVisitor visitor) { public final NumberContext number() throws RecognitionException { NumberContext _localctx = new NumberContext(_ctx, getState()); - enterRule(_localctx, 72, RULE_number); + enterRule(_localctx, 88, RULE_number); try { - setState(645); + setState(714); switch (_input.LA(1)) { case DECIMAL_VALUE: _localctx = new DecimalLiteralContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(643); + setState(712); match(DECIMAL_VALUE); } break; @@ -4800,7 +5429,7 @@ public final NumberContext number() throws RecognitionException { _localctx = new IntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(644); + setState(713); match(INTEGER_VALUE); } break; @@ -4843,12 +5472,12 @@ public T 
accept(ParseTreeVisitor visitor) { public final StringContext string() throws RecognitionException { StringContext _localctx = new StringContext(_ctx, getState()); - enterRule(_localctx, 74, RULE_string); + enterRule(_localctx, 90, RULE_string); int _la; try { enterOuterAlt(_localctx, 1); { - setState(647); + setState(716); _la = _input.LA(1); if ( !(_la==PARAM || _la==STRING) ) { _errHandler.recoverInline(this); @@ -4915,12 +5544,12 @@ public T accept(ParseTreeVisitor visitor) { public final NonReservedContext nonReserved() throws RecognitionException { NonReservedContext _localctx = new NonReservedContext(_ctx, getState()); - enterRule(_localctx, 76, RULE_nonReserved); + enterRule(_localctx, 92, RULE_nonReserved); int _la; try { enterOuterAlt(_localctx, 1); { - setState(649); + setState(718); _la = _input.LA(1); if ( !(((((_la - 6)) & ~0x3f) == 0 && ((1L << (_la - 6)) & ((1L << (ANALYZE - 6)) | (1L << (ANALYZED - 6)) | (1L << (CATALOGS - 6)) | (1L << (COLUMNS - 6)) | (1L << (DEBUG - 6)) | (1L << (EXECUTABLE - 6)) | (1L << (EXPLAIN - 6)) | (1L << (FORMAT - 6)) | (1L << (FUNCTIONS - 6)) | (1L << (GRAPHVIZ - 6)) | (1L << (MAPPED - 6)) | (1L << (OPTIMIZED - 6)) | (1L << (PARSED - 6)) | (1L << (PHYSICAL - 6)) | (1L << (PLAN - 6)) | (1L << (RLIKE - 6)) | (1L << (QUERY - 6)) | (1L << (SCHEMAS - 6)) | (1L << (SHOW - 6)) | (1L << (SYS - 6)) | (1L << (TABLES - 6)) | (1L << (TEXT - 6)) | (1L << (TYPE - 6)) | (1L << (TYPES - 6)) | (1L << (VERIFY - 6)))) != 0)) ) { _errHandler.recoverInline(this); @@ -4942,9 +5571,9 @@ public final NonReservedContext nonReserved() throws RecognitionException { public boolean sempred(RuleContext _localctx, int ruleIndex, int predIndex) { switch (ruleIndex) { - case 21: + case 22: return booleanExpression_sempred((BooleanExpressionContext)_localctx, predIndex); - case 25: + case 27: return valueExpression_sempred((ValueExpressionContext)_localctx, predIndex); } return true; @@ -4971,266 +5600,291 @@ private boolean valueExpression_sempred(ValueExpressionContext _localctx, int pr } public static final String _serializedATN = - "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\3d\u028e\4\2\t\2\4"+ + "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\3l\u02d3\4\2\t\2\4"+ "\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t"+ "\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22"+ "\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31\t\31"+ "\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36\t\36\4\37\t\37\4 \t \4!"+ - "\t!\4\"\t\"\4#\t#\4$\t$\4%\t%\4&\t&\4\'\t\'\4(\t(\3\2\3\2\3\2\3\3\3\3"+ - "\3\3\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\7\4`\n\4\f\4\16\4c\13\4\3\4\5"+ - "\4f\n\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\7\4o\n\4\f\4\16\4r\13\4\3\4\5\4u\n"+ - "\4\3\4\3\4\3\4\3\4\5\4{\n\4\3\4\5\4~\n\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3"+ - "\4\3\4\5\4\u0089\n\4\3\4\5\4\u008c\n\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4"+ - "\5\4\u0096\n\4\3\4\5\4\u0099\n\4\3\4\5\4\u009c\n\4\3\4\5\4\u009f\n\4\3"+ - "\4\3\4\3\4\3\4\7\4\u00a5\n\4\f\4\16\4\u00a8\13\4\5\4\u00aa\n\4\3\4\3\4"+ - "\3\4\3\4\5\4\u00b0\n\4\3\4\3\4\5\4\u00b4\n\4\3\4\5\4\u00b7\n\4\3\4\5\4"+ - "\u00ba\n\4\3\4\5\4\u00bd\n\4\3\4\3\4\3\4\3\4\3\4\5\4\u00c4\n\4\3\5\3\5"+ - "\3\5\3\5\7\5\u00ca\n\5\f\5\16\5\u00cd\13\5\5\5\u00cf\n\5\3\5\3\5\3\6\3"+ - "\6\3\6\3\6\3\6\3\6\7\6\u00d9\n\6\f\6\16\6\u00dc\13\6\5\6\u00de\n\6\3\6"+ - "\3\6\5\6\u00e2\n\6\3\7\3\7\3\7\3\7\3\7\5\7\u00e9\n\7\3\b\3\b\5\b\u00ed"+ - "\n\b\3\t\3\t\5\t\u00f1\n\t\3\t\3\t\3\t\7\t\u00f6\n\t\f\t\16\t\u00f9\13"+ - 
"\t\3\t\5\t\u00fc\n\t\3\t\3\t\5\t\u0100\n\t\3\t\3\t\3\t\5\t\u0105\n\t\3"+ - "\t\3\t\5\t\u0109\n\t\3\n\3\n\3\n\3\n\7\n\u010f\n\n\f\n\16\n\u0112\13\n"+ - "\3\13\5\13\u0115\n\13\3\13\3\13\3\13\7\13\u011a\n\13\f\13\16\13\u011d"+ - "\13\13\3\f\3\f\3\r\3\r\3\r\3\r\7\r\u0125\n\r\f\r\16\r\u0128\13\r\5\r\u012a"+ - "\n\r\3\r\3\r\5\r\u012e\n\r\3\16\3\16\3\16\3\16\3\16\3\16\3\17\3\17\3\20"+ - "\3\20\5\20\u013a\n\20\3\20\5\20\u013d\n\20\3\21\3\21\7\21\u0141\n\21\f"+ - "\21\16\21\u0144\13\21\3\22\3\22\3\22\3\22\5\22\u014a\n\22\3\22\3\22\3"+ - "\22\3\22\3\22\5\22\u0151\n\22\3\23\5\23\u0154\n\23\3\23\3\23\5\23\u0158"+ - "\n\23\3\23\3\23\5\23\u015c\n\23\3\23\3\23\5\23\u0160\n\23\5\23\u0162\n"+ - "\23\3\24\3\24\3\24\3\24\3\24\3\24\3\24\7\24\u016b\n\24\f\24\16\24\u016e"+ - "\13\24\3\24\3\24\5\24\u0172\n\24\3\25\3\25\5\25\u0176\n\25\3\25\5\25\u0179"+ - "\n\25\3\25\3\25\3\25\3\25\5\25\u017f\n\25\3\25\5\25\u0182\n\25\3\25\3"+ - "\25\3\25\3\25\5\25\u0188\n\25\3\25\5\25\u018b\n\25\5\25\u018d\n\25\3\26"+ - "\3\26\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27\3\27"+ - "\7\27\u019e\n\27\f\27\16\27\u01a1\13\27\3\27\3\27\3\27\3\27\3\27\3\27"+ - "\3\27\3\27\3\27\7\27\u01ac\n\27\f\27\16\27\u01af\13\27\3\27\3\27\3\27"+ - "\3\27\3\27\3\27\3\27\3\27\3\27\7\27\u01ba\n\27\f\27\16\27\u01bd\13\27"+ - "\3\27\3\27\3\27\5\27\u01c2\n\27\3\27\3\27\3\27\3\27\3\27\3\27\7\27\u01ca"+ - "\n\27\f\27\16\27\u01cd\13\27\3\30\3\30\5\30\u01d1\n\30\3\31\5\31\u01d4"+ - "\n\31\3\31\3\31\3\31\3\31\3\31\3\31\5\31\u01dc\n\31\3\31\3\31\3\31\3\31"+ - "\3\31\7\31\u01e3\n\31\f\31\16\31\u01e6\13\31\3\31\3\31\3\31\5\31\u01eb"+ - "\n\31\3\31\3\31\3\31\3\31\3\31\3\31\5\31\u01f3\n\31\3\31\3\31\3\31\5\31"+ - "\u01f8\n\31\3\31\3\31\3\31\3\31\5\31\u01fe\n\31\3\31\5\31\u0201\n\31\3"+ - "\32\3\32\3\32\5\32\u0206\n\32\3\33\3\33\3\33\3\33\5\33\u020c\n\33\3\33"+ - "\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\3\33\7\33\u0218\n\33\f\33\16"+ - "\33\u021b\13\33\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34"+ - "\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34\5\34\u0230\n\34\3\34\3\34\3\34"+ - "\3\34\5\34\u0236\n\34\3\34\3\34\3\34\7\34\u023b\n\34\f\34\16\34\u023e"+ - "\13\34\5\34\u0240\n\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3\34\3"+ - "\34\3\34\3\34\5\34\u024e\n\34\3\35\3\35\3\35\3\35\6\35\u0254\n\35\r\35"+ - "\16\35\u0255\3\35\5\35\u0259\n\35\3\36\3\36\3\37\3\37\3 \3 \3!\3!\3!\7"+ - "!\u0264\n!\f!\16!\u0267\13!\3!\3!\3\"\3\"\5\"\u026d\n\"\3#\3#\3#\5#\u0272"+ - "\n#\3#\3#\3#\3#\5#\u0278\n#\3#\5#\u027b\n#\3$\3$\5$\u027f\n$\3%\3%\3%"+ - "\5%\u0284\n%\3&\3&\5&\u0288\n&\3\'\3\'\3(\3(\3(\2\4,\64)\2\4\6\b\n\f\16"+ - "\20\22\24\26\30\32\34\36 \"$&(*,.\60\62\64\668:<>@BDFHJLN\2\20\b\2\7\7"+ - "\t\t\31\31,,\62\62\66\66\4\2\"\"BB\4\2\t\t\62\62\4\2\37\37%%\3\2\25\26"+ - "\4\2\7\7YY\4\2\r\r\25\25\4\2\7\7\27\27\3\2PQ\3\2RT\3\2JO\4\2\35\35CC\3"+ - "\2WX\20\2\b\t\22\24\31\31\33\33\36\36!\",,\62\62\668:<>?ABDEGG\u02e8\2"+ - "P\3\2\2\2\4S\3\2\2\2\6\u00c3\3\2\2\2\b\u00ce\3\2\2\2\n\u00d2\3\2\2\2\f"+ - "\u00e8\3\2\2\2\16\u00ea\3\2\2\2\20\u00ee\3\2\2\2\22\u010a\3\2\2\2\24\u0114"+ - "\3\2\2\2\26\u011e\3\2\2\2\30\u012d\3\2\2\2\32\u012f\3\2\2\2\34\u0135\3"+ - "\2\2\2\36\u0137\3\2\2\2 \u013e\3\2\2\2\"\u0150\3\2\2\2$\u0161\3\2\2\2"+ - "&\u0171\3\2\2\2(\u018c\3\2\2\2*\u018e\3\2\2\2,\u01c1\3\2\2\2.\u01ce\3"+ - "\2\2\2\60\u0200\3\2\2\2\62\u0202\3\2\2\2\64\u020b\3\2\2\2\66\u024d\3\2"+ - "\2\28\u0258\3\2\2\2:\u025a\3\2\2\2<\u025c\3\2\2\2>\u025e\3\2\2\2@\u0265"+ - "\3\2\2\2B\u026c\3\2\2\2D\u027a\3\2\2\2F\u027e\3\2\2\2H\u0283\3\2\2\2J"+ - 
"\u0287\3\2\2\2L\u0289\3\2\2\2N\u028b\3\2\2\2PQ\5\6\4\2QR\7\2\2\3R\3\3"+ - "\2\2\2ST\5*\26\2TU\7\2\2\3U\5\3\2\2\2V\u00c4\5\b\5\2We\7\33\2\2Xa\7\3"+ - "\2\2YZ\78\2\2Z`\t\2\2\2[\\\7\36\2\2\\`\t\3\2\2]^\7G\2\2^`\5<\37\2_Y\3"+ - "\2\2\2_[\3\2\2\2_]\3\2\2\2`c\3\2\2\2a_\3\2\2\2ab\3\2\2\2bd\3\2\2\2ca\3"+ - "\2\2\2df\7\4\2\2eX\3\2\2\2ef\3\2\2\2fg\3\2\2\2g\u00c4\5\6\4\2ht\7\24\2"+ - "\2ip\7\3\2\2jk\78\2\2ko\t\4\2\2lm\7\36\2\2mo\t\3\2\2nj\3\2\2\2nl\3\2\2"+ - "\2or\3\2\2\2pn\3\2\2\2pq\3\2\2\2qs\3\2\2\2rp\3\2\2\2su\7\4\2\2ti\3\2\2"+ - "\2tu\3\2\2\2uv\3\2\2\2v\u00c4\5\6\4\2wx\7>\2\2x}\7A\2\2y{\7*\2\2zy\3\2"+ - "\2\2z{\3\2\2\2{|\3\2\2\2|~\5\62\32\2}z\3\2\2\2}~\3\2\2\2~\u00c4\3\2\2"+ - "\2\177\u0080\7>\2\2\u0080\u0081\7\23\2\2\u0081\u0082\t\5\2\2\u0082\u00c4"+ - "\5D#\2\u0083\u0084\t\6\2\2\u0084\u00c4\5D#\2\u0085\u0086\7>\2\2\u0086"+ - "\u008b\7!\2\2\u0087\u0089\7*\2\2\u0088\u0087\3\2\2\2\u0088\u0089\3\2\2"+ - "\2\u0089\u008a\3\2\2\2\u008a\u008c\5\62\32\2\u008b\u0088\3\2\2\2\u008b"+ - "\u008c\3\2\2\2\u008c\u00c4\3\2\2\2\u008d\u008e\7>\2\2\u008e\u00c4\7<\2"+ - "\2\u008f\u0090\7?\2\2\u0090\u00c4\7\22\2\2\u0091\u0092\7?\2\2\u0092\u0098"+ - "\7A\2\2\u0093\u0095\7\21\2\2\u0094\u0096\7*\2\2\u0095\u0094\3\2\2\2\u0095"+ - "\u0096\3\2\2\2\u0096\u0097\3\2\2\2\u0097\u0099\5\62\32\2\u0098\u0093\3"+ - "\2\2\2\u0098\u0099\3\2\2\2\u0099\u009e\3\2\2\2\u009a\u009c\7*\2\2\u009b"+ - "\u009a\3\2\2\2\u009b\u009c\3\2\2\2\u009c\u009d\3\2\2\2\u009d\u009f\5\62"+ - "\32\2\u009e\u009b\3\2\2\2\u009e\u009f\3\2\2\2\u009f\u00a9\3\2\2\2\u00a0"+ - "\u00a1\7D\2\2\u00a1\u00a6\5L\'\2\u00a2\u00a3\7\5\2\2\u00a3\u00a5\5L\'"+ - "\2\u00a4\u00a2\3\2\2\2\u00a5\u00a8\3\2\2\2\u00a6\u00a4\3\2\2\2\u00a6\u00a7"+ - "\3\2\2\2\u00a7\u00aa\3\2\2\2\u00a8\u00a6\3\2\2\2\u00a9\u00a0\3\2\2\2\u00a9"+ - "\u00aa\3\2\2\2\u00aa\u00c4\3\2\2\2\u00ab\u00ac\7?\2\2\u00ac\u00af\7\23"+ - "\2\2\u00ad\u00ae\7\21\2\2\u00ae\u00b0\5L\'\2\u00af\u00ad\3\2\2\2\u00af"+ - "\u00b0\3\2\2\2\u00b0\u00b6\3\2\2\2\u00b1\u00b3\7@\2\2\u00b2\u00b4\7*\2"+ - "\2\u00b3\u00b2\3\2\2\2\u00b3\u00b4\3\2\2\2\u00b4\u00b5\3\2\2\2\u00b5\u00b7"+ - "\5\62\32\2\u00b6\u00b1\3\2\2\2\u00b6\u00b7\3\2\2\2\u00b7\u00bc\3\2\2\2"+ - "\u00b8\u00ba\7*\2\2\u00b9\u00b8\3\2\2\2\u00b9\u00ba\3\2\2\2\u00ba\u00bb"+ - "\3\2\2\2\u00bb\u00bd\5\62\32\2\u00bc\u00b9\3\2\2\2\u00bc\u00bd\3\2\2\2"+ - "\u00bd\u00c4\3\2\2\2\u00be\u00bf\7?\2\2\u00bf\u00c4\7E\2\2\u00c0\u00c1"+ - "\7?\2\2\u00c1\u00c2\7@\2\2\u00c2\u00c4\7E\2\2\u00c3V\3\2\2\2\u00c3W\3"+ - "\2\2\2\u00c3h\3\2\2\2\u00c3w\3\2\2\2\u00c3\177\3\2\2\2\u00c3\u0083\3\2"+ - "\2\2\u00c3\u0085\3\2\2\2\u00c3\u008d\3\2\2\2\u00c3\u008f\3\2\2\2\u00c3"+ - "\u0091\3\2\2\2\u00c3\u00ab\3\2\2\2\u00c3\u00be\3\2\2\2\u00c3\u00c0\3\2"+ - "\2\2\u00c4\7\3\2\2\2\u00c5\u00c6\7I\2\2\u00c6\u00cb\5\32\16\2\u00c7\u00c8"+ - "\7\5\2\2\u00c8\u00ca\5\32\16\2\u00c9\u00c7\3\2\2\2\u00ca\u00cd\3\2\2\2"+ - "\u00cb\u00c9\3\2\2\2\u00cb\u00cc\3\2\2\2\u00cc\u00cf\3\2\2\2\u00cd\u00cb"+ - "\3\2\2\2\u00ce\u00c5\3\2\2\2\u00ce\u00cf\3\2\2\2\u00cf\u00d0\3\2\2\2\u00d0"+ - "\u00d1\5\n\6\2\u00d1\t\3\2\2\2\u00d2\u00dd\5\f\7\2\u00d3\u00d4\7\64\2"+ - "\2\u00d4\u00d5\7\17\2\2\u00d5\u00da\5\16\b\2\u00d6\u00d7\7\5\2\2\u00d7"+ - "\u00d9\5\16\b\2\u00d8\u00d6\3\2\2\2\u00d9\u00dc\3\2\2\2\u00da\u00d8\3"+ - "\2\2\2\u00da\u00db\3\2\2\2\u00db\u00de\3\2\2\2\u00dc\u00da\3\2\2\2\u00dd"+ - "\u00d3\3\2\2\2\u00dd\u00de\3\2\2\2\u00de\u00e1\3\2\2\2\u00df\u00e0\7+"+ - "\2\2\u00e0\u00e2\t\7\2\2\u00e1\u00df\3\2\2\2\u00e1\u00e2\3\2\2\2\u00e2"+ - "\13\3\2\2\2\u00e3\u00e9\5\20\t\2\u00e4\u00e5\7\3\2\2\u00e5\u00e6\5\n\6"+ - 
"\2\u00e6\u00e7\7\4\2\2\u00e7\u00e9\3\2\2\2\u00e8\u00e3\3\2\2\2\u00e8\u00e4"+ - "\3\2\2\2\u00e9\r\3\2\2\2\u00ea\u00ec\5*\26\2\u00eb\u00ed\t\b\2\2\u00ec"+ - "\u00eb\3\2\2\2\u00ec\u00ed\3\2\2\2\u00ed\17\3\2\2\2\u00ee\u00f0\7=\2\2"+ - "\u00ef\u00f1\5\34\17\2\u00f0\u00ef\3\2\2\2\u00f0\u00f1\3\2\2\2\u00f1\u00f2"+ - "\3\2\2\2\u00f2\u00f7\5\36\20\2\u00f3\u00f4\7\5\2\2\u00f4\u00f6\5\36\20"+ - "\2\u00f5\u00f3\3\2\2\2\u00f6\u00f9\3\2\2\2\u00f7\u00f5\3\2\2\2\u00f7\u00f8"+ - "\3\2\2\2\u00f8\u00fb\3\2\2\2\u00f9\u00f7\3\2\2\2\u00fa\u00fc\5\22\n\2"+ - "\u00fb\u00fa\3\2\2\2\u00fb\u00fc\3\2\2\2\u00fc\u00ff\3\2\2\2\u00fd\u00fe"+ - "\7H\2\2\u00fe\u0100\5,\27\2\u00ff\u00fd\3\2\2\2\u00ff\u0100\3\2\2\2\u0100"+ - "\u0104\3\2\2\2\u0101\u0102\7#\2\2\u0102\u0103\7\17\2\2\u0103\u0105\5\24"+ - "\13\2\u0104\u0101\3\2\2\2\u0104\u0105\3\2\2\2\u0105\u0108\3\2\2\2\u0106"+ - "\u0107\7$\2\2\u0107\u0109\5,\27\2\u0108\u0106\3\2\2\2\u0108\u0109\3\2"+ - "\2\2\u0109\21\3\2\2\2\u010a\u010b\7\37\2\2\u010b\u0110\5 \21\2\u010c\u010d"+ - "\7\5\2\2\u010d\u010f\5 \21\2\u010e\u010c\3\2\2\2\u010f\u0112\3\2\2\2\u0110"+ - "\u010e\3\2\2\2\u0110\u0111\3\2\2\2\u0111\23\3\2\2\2\u0112\u0110\3\2\2"+ - "\2\u0113\u0115\5\34\17\2\u0114\u0113\3\2\2\2\u0114\u0115\3\2\2\2\u0115"+ - "\u0116\3\2\2\2\u0116\u011b\5\26\f\2\u0117\u0118\7\5\2\2\u0118\u011a\5"+ - "\26\f\2\u0119\u0117\3\2\2\2\u011a\u011d\3\2\2\2\u011b\u0119\3\2\2\2\u011b"+ - "\u011c\3\2\2\2\u011c\25\3\2\2\2\u011d\u011b\3\2\2\2\u011e\u011f\5\30\r"+ - "\2\u011f\27\3\2\2\2\u0120\u0129\7\3\2\2\u0121\u0126\5*\26\2\u0122\u0123"+ - "\7\5\2\2\u0123\u0125\5*\26\2\u0124\u0122\3\2\2\2\u0125\u0128\3\2\2\2\u0126"+ - "\u0124\3\2\2\2\u0126\u0127\3\2\2\2\u0127\u012a\3\2\2\2\u0128\u0126\3\2"+ - "\2\2\u0129\u0121\3\2\2\2\u0129\u012a\3\2\2\2\u012a\u012b\3\2\2\2\u012b"+ - "\u012e\7\4\2\2\u012c\u012e\5*\26\2\u012d\u0120\3\2\2\2\u012d\u012c\3\2"+ - "\2\2\u012e\31\3\2\2\2\u012f\u0130\5B\"\2\u0130\u0131\7\f\2\2\u0131\u0132"+ - "\7\3\2\2\u0132\u0133\5\n\6\2\u0133\u0134\7\4\2\2\u0134\33\3\2\2\2\u0135"+ - "\u0136\t\t\2\2\u0136\35\3\2\2\2\u0137\u013c\5*\26\2\u0138\u013a\7\f\2"+ - "\2\u0139\u0138\3\2\2\2\u0139\u013a\3\2\2\2\u013a\u013b\3\2\2\2\u013b\u013d"+ - "\5B\"\2\u013c\u0139\3\2\2\2\u013c\u013d\3\2\2\2\u013d\37\3\2\2\2\u013e"+ - "\u0142\5(\25\2\u013f\u0141\5\"\22\2\u0140\u013f\3\2\2\2\u0141\u0144\3"+ - "\2\2\2\u0142\u0140\3\2\2\2\u0142\u0143\3\2\2\2\u0143!\3\2\2\2\u0144\u0142"+ - "\3\2\2\2\u0145\u0146\5$\23\2\u0146\u0147\7(\2\2\u0147\u0149\5(\25\2\u0148"+ - "\u014a\5&\24\2\u0149\u0148\3\2\2\2\u0149\u014a\3\2\2\2\u014a\u0151\3\2"+ - "\2\2\u014b\u014c\7.\2\2\u014c\u014d\5$\23\2\u014d\u014e\7(\2\2\u014e\u014f"+ - "\5(\25\2\u014f\u0151\3\2\2\2\u0150\u0145\3\2\2\2\u0150\u014b\3\2\2\2\u0151"+ - "#\3\2\2\2\u0152\u0154\7&\2\2\u0153\u0152\3\2\2\2\u0153\u0154\3\2\2\2\u0154"+ - "\u0162\3\2\2\2\u0155\u0157\7)\2\2\u0156\u0158\7\65\2\2\u0157\u0156\3\2"+ - "\2\2\u0157\u0158\3\2\2\2\u0158\u0162\3\2\2\2\u0159\u015b\79\2\2\u015a"+ - "\u015c\7\65\2\2\u015b\u015a\3\2\2\2\u015b\u015c\3\2\2\2\u015c\u0162\3"+ - "\2\2\2\u015d\u015f\7 \2\2\u015e\u0160\7\65\2\2\u015f\u015e\3\2\2\2\u015f"+ - "\u0160\3\2\2\2\u0160\u0162\3\2\2\2\u0161\u0153\3\2\2\2\u0161\u0155\3\2"+ - "\2\2\u0161\u0159\3\2\2\2\u0161\u015d\3\2\2\2\u0162%\3\2\2\2\u0163\u0164"+ - "\7\61\2\2\u0164\u0172\5,\27\2\u0165\u0166\7F\2\2\u0166\u0167\7\3\2\2\u0167"+ - "\u016c\5B\"\2\u0168\u0169\7\5\2\2\u0169\u016b\5B\"\2\u016a\u0168\3\2\2"+ - "\2\u016b\u016e\3\2\2\2\u016c\u016a\3\2\2\2\u016c\u016d\3\2\2\2\u016d\u016f"+ - 
"\3\2\2\2\u016e\u016c\3\2\2\2\u016f\u0170\7\4\2\2\u0170\u0172\3\2\2\2\u0171"+ - "\u0163\3\2\2\2\u0171\u0165\3\2\2\2\u0172\'\3\2\2\2\u0173\u0178\5D#\2\u0174"+ - "\u0176\7\f\2\2\u0175\u0174\3\2\2\2\u0175\u0176\3\2\2\2\u0176\u0177\3\2"+ - "\2\2\u0177\u0179\5@!\2\u0178\u0175\3\2\2\2\u0178\u0179\3\2\2\2\u0179\u018d"+ - "\3\2\2\2\u017a\u017b\7\3\2\2\u017b\u017c\5\n\6\2\u017c\u0181\7\4\2\2\u017d"+ - "\u017f\7\f\2\2\u017e\u017d\3\2\2\2\u017e\u017f\3\2\2\2\u017f\u0180\3\2"+ - "\2\2\u0180\u0182\5@!\2\u0181\u017e\3\2\2\2\u0181\u0182\3\2\2\2\u0182\u018d"+ - "\3\2\2\2\u0183\u0184\7\3\2\2\u0184\u0185\5 \21\2\u0185\u018a\7\4\2\2\u0186"+ - "\u0188\7\f\2\2\u0187\u0186\3\2\2\2\u0187\u0188\3\2\2\2\u0188\u0189\3\2"+ - "\2\2\u0189\u018b\5@!\2\u018a\u0187\3\2\2\2\u018a\u018b\3\2\2\2\u018b\u018d"+ - "\3\2\2\2\u018c\u0173\3\2\2\2\u018c\u017a\3\2\2\2\u018c\u0183\3\2\2\2\u018d"+ - ")\3\2\2\2\u018e\u018f\5,\27\2\u018f+\3\2\2\2\u0190\u0191\b\27\1\2\u0191"+ - "\u0192\7/\2\2\u0192\u01c2\5,\27\n\u0193\u0194\7\32\2\2\u0194\u0195\7\3"+ - "\2\2\u0195\u0196\5\b\5\2\u0196\u0197\7\4\2\2\u0197\u01c2\3\2\2\2\u0198"+ - "\u0199\7;\2\2\u0199\u019a\7\3\2\2\u019a\u019f\5L\'\2\u019b\u019c\7\5\2"+ - "\2\u019c\u019e\5L\'\2\u019d\u019b\3\2\2\2\u019e\u01a1\3\2\2\2\u019f\u019d"+ - "\3\2\2\2\u019f\u01a0\3\2\2\2\u01a0\u01a2\3\2\2\2\u01a1\u019f\3\2\2\2\u01a2"+ - "\u01a3\7\4\2\2\u01a3\u01c2\3\2\2\2\u01a4\u01a5\7-\2\2\u01a5\u01a6\7\3"+ - "\2\2\u01a6\u01a7\5@!\2\u01a7\u01a8\7\5\2\2\u01a8\u01ad\5L\'\2\u01a9\u01aa"+ - "\7\5\2\2\u01aa\u01ac\5L\'\2\u01ab\u01a9\3\2\2\2\u01ac\u01af\3\2\2\2\u01ad"+ - "\u01ab\3\2\2\2\u01ad\u01ae\3\2\2\2\u01ae\u01b0\3\2\2\2\u01af\u01ad\3\2"+ - "\2\2\u01b0\u01b1\7\4\2\2\u01b1\u01c2\3\2\2\2\u01b2\u01b3\7-\2\2\u01b3"+ - "\u01b4\7\3\2\2\u01b4\u01b5\5L\'\2\u01b5\u01b6\7\5\2\2\u01b6\u01bb\5L\'"+ - "\2\u01b7\u01b8\7\5\2\2\u01b8\u01ba\5L\'\2\u01b9\u01b7\3\2\2\2\u01ba\u01bd"+ - "\3\2\2\2\u01bb\u01b9\3\2\2\2\u01bb\u01bc\3\2\2\2\u01bc\u01be\3\2\2\2\u01bd"+ - "\u01bb\3\2\2\2\u01be\u01bf\7\4\2\2\u01bf\u01c2\3\2\2\2\u01c0\u01c2\5."+ - "\30\2\u01c1\u0190\3\2\2\2\u01c1\u0193\3\2\2\2\u01c1\u0198\3\2\2\2\u01c1"+ - "\u01a4\3\2\2\2\u01c1\u01b2\3\2\2\2\u01c1\u01c0\3\2\2\2\u01c2\u01cb\3\2"+ - "\2\2\u01c3\u01c4\f\4\2\2\u01c4\u01c5\7\n\2\2\u01c5\u01ca\5,\27\5\u01c6"+ - "\u01c7\f\3\2\2\u01c7\u01c8\7\63\2\2\u01c8\u01ca\5,\27\4\u01c9\u01c3\3"+ - "\2\2\2\u01c9\u01c6\3\2\2\2\u01ca\u01cd\3\2\2\2\u01cb\u01c9\3\2\2\2\u01cb"+ - "\u01cc\3\2\2\2\u01cc-\3\2\2\2\u01cd\u01cb\3\2\2\2\u01ce\u01d0\5\64\33"+ - "\2\u01cf\u01d1\5\60\31\2\u01d0\u01cf\3\2\2\2\u01d0\u01d1\3\2\2\2\u01d1"+ - "/\3\2\2\2\u01d2\u01d4\7/\2\2\u01d3\u01d2\3\2\2\2\u01d3\u01d4\3\2\2\2\u01d4"+ - "\u01d5\3\2\2\2\u01d5\u01d6\7\16\2\2\u01d6\u01d7\5\64\33\2\u01d7\u01d8"+ - "\7\n\2\2\u01d8\u01d9\5\64\33\2\u01d9\u0201\3\2\2\2\u01da\u01dc\7/\2\2"+ - "\u01db\u01da\3\2\2\2\u01db\u01dc\3\2\2\2\u01dc\u01dd\3\2\2\2\u01dd\u01de"+ - "\7%\2\2\u01de\u01df\7\3\2\2\u01df\u01e4\5*\26\2\u01e0\u01e1\7\5\2\2\u01e1"+ - "\u01e3\5*\26\2\u01e2\u01e0\3\2\2\2\u01e3\u01e6\3\2\2\2\u01e4\u01e2\3\2"+ - "\2\2\u01e4\u01e5\3\2\2\2\u01e5\u01e7\3\2\2\2\u01e6\u01e4\3\2\2\2\u01e7"+ - "\u01e8\7\4\2\2\u01e8\u0201\3\2\2\2\u01e9\u01eb\7/\2\2\u01ea\u01e9\3\2"+ - "\2\2\u01ea\u01eb\3\2\2\2\u01eb\u01ec\3\2\2\2\u01ec\u01ed\7%\2\2\u01ed"+ - "\u01ee\7\3\2\2\u01ee\u01ef\5\b\5\2\u01ef\u01f0\7\4\2\2\u01f0\u0201\3\2"+ - "\2\2\u01f1\u01f3\7/\2\2\u01f2\u01f1\3\2\2\2\u01f2\u01f3\3\2\2\2\u01f3"+ - "\u01f4\3\2\2\2\u01f4\u01f5\7*\2\2\u01f5\u0201\5\62\32\2\u01f6\u01f8\7"+ - "/\2\2\u01f7\u01f6\3\2\2\2\u01f7\u01f8\3\2\2\2\u01f8\u01f9\3\2\2\2\u01f9"+ - 
"\u01fa\7:\2\2\u01fa\u0201\5L\'\2\u01fb\u01fd\7\'\2\2\u01fc\u01fe\7/\2"+ - "\2\u01fd\u01fc\3\2\2\2\u01fd\u01fe\3\2\2\2\u01fe\u01ff\3\2\2\2\u01ff\u0201"+ - "\7\60\2\2\u0200\u01d3\3\2\2\2\u0200\u01db\3\2\2\2\u0200\u01ea\3\2\2\2"+ - "\u0200\u01f2\3\2\2\2\u0200\u01f7\3\2\2\2\u0200\u01fb\3\2\2\2\u0201\61"+ - "\3\2\2\2\u0202\u0205\5L\'\2\u0203\u0204\7\30\2\2\u0204\u0206\5L\'\2\u0205"+ - "\u0203\3\2\2\2\u0205\u0206\3\2\2\2\u0206\63\3\2\2\2\u0207\u0208\b\33\1"+ - "\2\u0208\u020c\5\66\34\2\u0209\u020a\t\n\2\2\u020a\u020c\5\64\33\6\u020b"+ - "\u0207\3\2\2\2\u020b\u0209\3\2\2\2\u020c\u0219\3\2\2\2\u020d\u020e\f\5"+ - "\2\2\u020e\u020f\t\13\2\2\u020f\u0218\5\64\33\6\u0210\u0211\f\4\2\2\u0211"+ - "\u0212\t\n\2\2\u0212\u0218\5\64\33\5\u0213\u0214\f\3\2\2\u0214\u0215\5"+ - ":\36\2\u0215\u0216\5\64\33\4\u0216\u0218\3\2\2\2\u0217\u020d\3\2\2\2\u0217"+ - "\u0210\3\2\2\2\u0217\u0213\3\2\2\2\u0218\u021b\3\2\2\2\u0219\u0217\3\2"+ - "\2\2\u0219\u021a\3\2\2\2\u021a\65\3\2\2\2\u021b\u0219\3\2\2\2\u021c\u021d"+ - "\7\20\2\2\u021d\u021e\7\3\2\2\u021e\u021f\5*\26\2\u021f\u0220\7\f\2\2"+ - "\u0220\u0221\5> \2\u0221\u0222\7\4\2\2\u0222\u024e\3\2\2\2\u0223\u0224"+ - "\7\34\2\2\u0224\u0225\7\3\2\2\u0225\u0226\5B\"\2\u0226\u0227\7\37\2\2"+ - "\u0227\u0228\5\64\33\2\u0228\u0229\7\4\2\2\u0229\u024e\3\2\2\2\u022a\u024e"+ - "\58\35\2\u022b\u024e\7R\2\2\u022c\u022d\5@!\2\u022d\u022e\7V\2\2\u022e"+ - "\u0230\3\2\2\2\u022f\u022c\3\2\2\2\u022f\u0230\3\2\2\2\u0230\u0231\3\2"+ - "\2\2\u0231\u024e\7R\2\2\u0232\u0233\5B\"\2\u0233\u023f\7\3\2\2\u0234\u0236"+ - "\5\34\17\2\u0235\u0234\3\2\2\2\u0235\u0236\3\2\2\2\u0236\u0237\3\2\2\2"+ - "\u0237\u023c\5*\26\2\u0238\u0239\7\5\2\2\u0239\u023b\5*\26\2\u023a\u0238"+ - "\3\2\2\2\u023b\u023e\3\2\2\2\u023c\u023a\3\2\2\2\u023c\u023d\3\2\2\2\u023d"+ - "\u0240\3\2\2\2\u023e\u023c\3\2\2\2\u023f\u0235\3\2\2\2\u023f\u0240\3\2"+ - "\2\2\u0240\u0241\3\2\2\2\u0241\u0242\7\4\2\2\u0242\u024e\3\2\2\2\u0243"+ - "\u0244\7\3\2\2\u0244\u0245\5\b\5\2\u0245\u0246\7\4\2\2\u0246\u024e\3\2"+ - "\2\2\u0247\u024e\5B\"\2\u0248\u024e\5@!\2\u0249\u024a\7\3\2\2\u024a\u024b"+ - "\5*\26\2\u024b\u024c\7\4\2\2\u024c\u024e\3\2\2\2\u024d\u021c\3\2\2\2\u024d"+ - "\u0223\3\2\2\2\u024d\u022a\3\2\2\2\u024d\u022b\3\2\2\2\u024d\u022f\3\2"+ - "\2\2\u024d\u0232\3\2\2\2\u024d\u0243\3\2\2\2\u024d\u0247\3\2\2\2\u024d"+ - "\u0248\3\2\2\2\u024d\u0249\3\2\2\2\u024e\67\3\2\2\2\u024f\u0259\7\60\2"+ - "\2\u0250\u0259\5J&\2\u0251\u0259\5<\37\2\u0252\u0254\7X\2\2\u0253\u0252"+ - "\3\2\2\2\u0254\u0255\3\2\2\2\u0255\u0253\3\2\2\2\u0255\u0256\3\2\2\2\u0256"+ - "\u0259\3\2\2\2\u0257\u0259\7W\2\2\u0258\u024f\3\2\2\2\u0258\u0250\3\2"+ - "\2\2\u0258\u0251\3\2\2\2\u0258\u0253\3\2\2\2\u0258\u0257\3\2\2\2\u0259"+ - "9\3\2\2\2\u025a\u025b\t\f\2\2\u025b;\3\2\2\2\u025c\u025d\t\r\2\2\u025d"+ - "=\3\2\2\2\u025e\u025f\5B\"\2\u025f?\3\2\2\2\u0260\u0261\5B\"\2\u0261\u0262"+ - "\7V\2\2\u0262\u0264\3\2\2\2\u0263\u0260\3\2\2\2\u0264\u0267\3\2\2\2\u0265"+ - "\u0263\3\2\2\2\u0265\u0266\3\2\2\2\u0266\u0268\3\2\2\2\u0267\u0265\3\2"+ - "\2\2\u0268\u0269\5B\"\2\u0269A\3\2\2\2\u026a\u026d\5F$\2\u026b\u026d\5"+ - "H%\2\u026c\u026a\3\2\2\2\u026c\u026b\3\2\2\2\u026dC\3\2\2\2\u026e\u026f"+ - "\5B\"\2\u026f\u0270\7\6\2\2\u0270\u0272\3\2\2\2\u0271\u026e\3\2\2\2\u0271"+ - "\u0272\3\2\2\2\u0272\u0273\3\2\2\2\u0273\u027b\7]\2\2\u0274\u0275\5B\""+ - "\2\u0275\u0276\7\6\2\2\u0276\u0278\3\2\2\2\u0277\u0274\3\2\2\2\u0277\u0278"+ - "\3\2\2\2\u0278\u0279\3\2\2\2\u0279\u027b\5B\"\2\u027a\u0271\3\2\2\2\u027a"+ - "\u0277\3\2\2\2\u027bE\3\2\2\2\u027c\u027f\7^\2\2\u027d\u027f\7_\2\2\u027e"+ 
- "\u027c\3\2\2\2\u027e\u027d\3\2\2\2\u027fG\3\2\2\2\u0280\u0284\7[\2\2\u0281"+ - "\u0284\5N(\2\u0282\u0284\7\\\2\2\u0283\u0280\3\2\2\2\u0283\u0281\3\2\2"+ - "\2\u0283\u0282\3\2\2\2\u0284I\3\2\2\2\u0285\u0288\7Z\2\2\u0286\u0288\7"+ - "Y\2\2\u0287\u0285\3\2\2\2\u0287\u0286\3\2\2\2\u0288K\3\2\2\2\u0289\u028a"+ - "\t\16\2\2\u028aM\3\2\2\2\u028b\u028c\t\17\2\2\u028cO\3\2\2\2`_aenptz}"+ - "\u0088\u008b\u0095\u0098\u009b\u009e\u00a6\u00a9\u00af\u00b3\u00b6\u00b9"+ - "\u00bc\u00c3\u00cb\u00ce\u00da\u00dd\u00e1\u00e8\u00ec\u00f0\u00f7\u00fb"+ - "\u00ff\u0104\u0108\u0110\u0114\u011b\u0126\u0129\u012d\u0139\u013c\u0142"+ - "\u0149\u0150\u0153\u0157\u015b\u015f\u0161\u016c\u0171\u0175\u0178\u017e"+ - "\u0181\u0187\u018a\u018c\u019f\u01ad\u01bb\u01c1\u01c9\u01cb\u01d0\u01d3"+ - "\u01db\u01e4\u01ea\u01f2\u01f7\u01fd\u0200\u0205\u020b\u0217\u0219\u022f"+ - "\u0235\u023c\u023f\u024d\u0255\u0258\u0265\u026c\u0271\u0277\u027a\u027e"+ - "\u0283\u0287"; + "\t!\4\"\t\"\4#\t#\4$\t$\4%\t%\4&\t&\4\'\t\'\4(\t(\4)\t)\4*\t*\4+\t+\4"+ + ",\t,\4-\t-\4.\t.\4/\t/\4\60\t\60\3\2\3\2\3\2\3\3\3\3\3\3\3\4\3\4\3\4\3"+ + "\4\3\4\3\4\3\4\3\4\3\4\7\4p\n\4\f\4\16\4s\13\4\3\4\5\4v\n\4\3\4\3\4\3"+ + "\4\3\4\3\4\3\4\3\4\7\4\177\n\4\f\4\16\4\u0082\13\4\3\4\5\4\u0085\n\4\3"+ + "\4\3\4\3\4\3\4\5\4\u008b\n\4\3\4\5\4\u008e\n\4\3\4\3\4\3\4\3\4\3\4\3\4"+ + "\3\4\3\4\3\4\5\4\u0099\n\4\3\4\5\4\u009c\n\4\3\4\3\4\3\4\3\4\3\4\3\4\3"+ + "\4\3\4\5\4\u00a6\n\4\3\4\5\4\u00a9\n\4\3\4\5\4\u00ac\n\4\3\4\5\4\u00af"+ + "\n\4\3\4\3\4\3\4\3\4\7\4\u00b5\n\4\f\4\16\4\u00b8\13\4\5\4\u00ba\n\4\3"+ + "\4\3\4\3\4\3\4\5\4\u00c0\n\4\3\4\3\4\5\4\u00c4\n\4\3\4\5\4\u00c7\n\4\3"+ + "\4\5\4\u00ca\n\4\3\4\5\4\u00cd\n\4\3\4\3\4\3\4\3\4\3\4\5\4\u00d4\n\4\3"+ + "\5\3\5\3\5\3\5\7\5\u00da\n\5\f\5\16\5\u00dd\13\5\5\5\u00df\n\5\3\5\3\5"+ + "\3\6\3\6\3\6\3\6\3\6\3\6\7\6\u00e9\n\6\f\6\16\6\u00ec\13\6\5\6\u00ee\n"+ + "\6\3\6\5\6\u00f1\n\6\3\7\3\7\3\7\3\7\3\7\5\7\u00f8\n\7\3\b\3\b\3\b\3\b"+ + "\3\b\5\b\u00ff\n\b\3\t\3\t\5\t\u0103\n\t\3\n\3\n\5\n\u0107\n\n\3\n\3\n"+ + "\3\n\7\n\u010c\n\n\f\n\16\n\u010f\13\n\3\n\5\n\u0112\n\n\3\n\3\n\5\n\u0116"+ + "\n\n\3\n\3\n\3\n\5\n\u011b\n\n\3\n\3\n\5\n\u011f\n\n\3\13\3\13\3\13\3"+ + "\13\7\13\u0125\n\13\f\13\16\13\u0128\13\13\3\f\5\f\u012b\n\f\3\f\3\f\3"+ + "\f\7\f\u0130\n\f\f\f\16\f\u0133\13\f\3\r\3\r\3\16\3\16\3\16\3\16\7\16"+ + "\u013b\n\16\f\16\16\16\u013e\13\16\5\16\u0140\n\16\3\16\3\16\5\16\u0144"+ + "\n\16\3\17\3\17\3\17\3\17\3\17\3\17\3\20\3\20\3\21\3\21\5\21\u0150\n\21"+ + "\3\21\5\21\u0153\n\21\3\22\3\22\7\22\u0157\n\22\f\22\16\22\u015a\13\22"+ + "\3\23\3\23\3\23\3\23\5\23\u0160\n\23\3\23\3\23\3\23\3\23\3\23\5\23\u0167"+ + "\n\23\3\24\5\24\u016a\n\24\3\24\3\24\5\24\u016e\n\24\3\24\3\24\5\24\u0172"+ + "\n\24\3\24\3\24\5\24\u0176\n\24\5\24\u0178\n\24\3\25\3\25\3\25\3\25\3"+ + "\25\3\25\3\25\7\25\u0181\n\25\f\25\16\25\u0184\13\25\3\25\3\25\5\25\u0188"+ + "\n\25\3\26\3\26\5\26\u018c\n\26\3\26\5\26\u018f\n\26\3\26\3\26\3\26\3"+ + "\26\5\26\u0195\n\26\3\26\5\26\u0198\n\26\3\26\3\26\3\26\3\26\5\26\u019e"+ + "\n\26\3\26\5\26\u01a1\n\26\5\26\u01a3\n\26\3\27\3\27\3\30\3\30\3\30\3"+ + "\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\7\30\u01b4\n\30\f\30"+ + "\16\30\u01b7\13\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\7\30\u01c2"+ + "\n\30\f\30\16\30\u01c5\13\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3"+ + "\30\7\30\u01d0\n\30\f\30\16\30\u01d3\13\30\3\30\3\30\3\30\5\30\u01d8\n"+ + "\30\3\30\3\30\3\30\3\30\3\30\3\30\7\30\u01e0\n\30\f\30\16\30\u01e3\13"+ + "\30\3\31\3\31\5\31\u01e7\n\31\3\32\5\32\u01ea\n\32\3\32\3\32\3\32\3\32"+ + 
"\3\32\3\32\5\32\u01f2\n\32\3\32\3\32\3\32\3\32\3\32\7\32\u01f9\n\32\f"+ + "\32\16\32\u01fc\13\32\3\32\3\32\3\32\5\32\u0201\n\32\3\32\3\32\3\32\3"+ + "\32\3\32\3\32\5\32\u0209\n\32\3\32\3\32\3\32\5\32\u020e\n\32\3\32\3\32"+ + "\3\32\3\32\5\32\u0214\n\32\3\32\5\32\u0217\n\32\3\33\3\33\5\33\u021b\n"+ + "\33\3\34\3\34\3\34\3\34\3\34\3\34\5\34\u0223\n\34\3\35\3\35\3\35\3\35"+ + "\5\35\u0229\n\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\7\35"+ + "\u0235\n\35\f\35\16\35\u0238\13\35\3\36\3\36\3\36\3\36\3\36\3\36\3\36"+ + "\5\36\u0241\n\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36"+ + "\3\36\5\36\u024f\n\36\3\37\3\37\3\37\3\37\3\37\5\37\u0256\n\37\3 \3 \3"+ + " \3 \3 \3 \3 \3!\3!\3!\3!\3!\5!\u0264\n!\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3"+ + "#\3#\3#\3#\3#\5#\u0272\n#\3$\3$\3$\5$\u0277\n$\3$\3$\3$\7$\u027c\n$\f"+ + "$\16$\u027f\13$\5$\u0281\n$\3$\3$\3%\3%\3%\3%\6%\u0289\n%\r%\16%\u028a"+ + "\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\5%\u029e\n%\3&\3&"+ + "\3\'\3\'\3(\3(\3)\3)\3)\7)\u02a9\n)\f)\16)\u02ac\13)\3)\3)\3*\3*\5*\u02b2"+ + "\n*\3+\3+\3+\5+\u02b7\n+\3+\3+\3+\3+\5+\u02bd\n+\3+\5+\u02c0\n+\3,\3,"+ + "\5,\u02c4\n,\3-\3-\3-\5-\u02c9\n-\3.\3.\5.\u02cd\n.\3/\3/\3\60\3\60\3"+ + "\60\2\4.8\61\2\4\6\b\n\f\16\20\22\24\26\30\32\34\36 \"$&(*,.\60\62\64"+ + "\668:<>@BDFHJLNPRTVXZ\\^\2\20\b\2\7\7\t\t\31\31,,\62\62\66\66\4\2\"\""+ + "BB\4\2\t\t\62\62\4\2\37\37%%\3\2\25\26\4\2\7\7aa\4\2\r\r\25\25\4\2\7\7"+ + "\27\27\3\2XY\3\2Z\\\3\2RW\4\2\35\35CC\3\2_`\20\2\b\t\22\24\31\31\33\33"+ + "\36\36!\",,\62\62\668:<>?ABDEGG\u032e\2`\3\2\2\2\4c\3\2\2\2\6\u00d3\3"+ + "\2\2\2\b\u00de\3\2\2\2\n\u00e2\3\2\2\2\f\u00f7\3\2\2\2\16\u00fe\3\2\2"+ + "\2\20\u0100\3\2\2\2\22\u0104\3\2\2\2\24\u0120\3\2\2\2\26\u012a\3\2\2\2"+ + "\30\u0134\3\2\2\2\32\u0143\3\2\2\2\34\u0145\3\2\2\2\36\u014b\3\2\2\2 "+ + "\u014d\3\2\2\2\"\u0154\3\2\2\2$\u0166\3\2\2\2&\u0177\3\2\2\2(\u0187\3"+ + "\2\2\2*\u01a2\3\2\2\2,\u01a4\3\2\2\2.\u01d7\3\2\2\2\60\u01e4\3\2\2\2\62"+ + "\u0216\3\2\2\2\64\u0218\3\2\2\2\66\u0222\3\2\2\28\u0228\3\2\2\2:\u024e"+ + "\3\2\2\2<\u0255\3\2\2\2>\u0257\3\2\2\2@\u0263\3\2\2\2B\u0265\3\2\2\2D"+ + "\u0271\3\2\2\2F\u0273\3\2\2\2H\u029d\3\2\2\2J\u029f\3\2\2\2L\u02a1\3\2"+ + "\2\2N\u02a3\3\2\2\2P\u02aa\3\2\2\2R\u02b1\3\2\2\2T\u02bf\3\2\2\2V\u02c3"+ + "\3\2\2\2X\u02c8\3\2\2\2Z\u02cc\3\2\2\2\\\u02ce\3\2\2\2^\u02d0\3\2\2\2"+ + "`a\5\6\4\2ab\7\2\2\3b\3\3\2\2\2cd\5,\27\2de\7\2\2\3e\5\3\2\2\2f\u00d4"+ + "\5\b\5\2gu\7\33\2\2hq\7\3\2\2ij\78\2\2jp\t\2\2\2kl\7\36\2\2lp\t\3\2\2"+ + "mn\7G\2\2np\5L\'\2oi\3\2\2\2ok\3\2\2\2om\3\2\2\2ps\3\2\2\2qo\3\2\2\2q"+ + "r\3\2\2\2rt\3\2\2\2sq\3\2\2\2tv\7\4\2\2uh\3\2\2\2uv\3\2\2\2vw\3\2\2\2"+ + "w\u00d4\5\6\4\2x\u0084\7\24\2\2y\u0080\7\3\2\2z{\78\2\2{\177\t\4\2\2|"+ + "}\7\36\2\2}\177\t\3\2\2~z\3\2\2\2~|\3\2\2\2\177\u0082\3\2\2\2\u0080~\3"+ + "\2\2\2\u0080\u0081\3\2\2\2\u0081\u0083\3\2\2\2\u0082\u0080\3\2\2\2\u0083"+ + "\u0085\7\4\2\2\u0084y\3\2\2\2\u0084\u0085\3\2\2\2\u0085\u0086\3\2\2\2"+ + "\u0086\u00d4\5\6\4\2\u0087\u0088\7>\2\2\u0088\u008d\7A\2\2\u0089\u008b"+ + "\7*\2\2\u008a\u0089\3\2\2\2\u008a\u008b\3\2\2\2\u008b\u008c\3\2\2\2\u008c"+ + "\u008e\5\64\33\2\u008d\u008a\3\2\2\2\u008d\u008e\3\2\2\2\u008e\u00d4\3"+ + "\2\2\2\u008f\u0090\7>\2\2\u0090\u0091\7\23\2\2\u0091\u0092\t\5\2\2\u0092"+ + "\u00d4\5T+\2\u0093\u0094\t\6\2\2\u0094\u00d4\5T+\2\u0095\u0096\7>\2\2"+ + "\u0096\u009b\7!\2\2\u0097\u0099\7*\2\2\u0098\u0097\3\2\2\2\u0098\u0099"+ + "\3\2\2\2\u0099\u009a\3\2\2\2\u009a\u009c\5\64\33\2\u009b\u0098\3\2\2\2"+ + 
"\u009b\u009c\3\2\2\2\u009c\u00d4\3\2\2\2\u009d\u009e\7>\2\2\u009e\u00d4"+ + "\7<\2\2\u009f\u00a0\7?\2\2\u00a0\u00d4\7\22\2\2\u00a1\u00a2\7?\2\2\u00a2"+ + "\u00a8\7A\2\2\u00a3\u00a5\7\21\2\2\u00a4\u00a6\7*\2\2\u00a5\u00a4\3\2"+ + "\2\2\u00a5\u00a6\3\2\2\2\u00a6\u00a7\3\2\2\2\u00a7\u00a9\5\64\33\2\u00a8"+ + "\u00a3\3\2\2\2\u00a8\u00a9\3\2\2\2\u00a9\u00ae\3\2\2\2\u00aa\u00ac\7*"+ + "\2\2\u00ab\u00aa\3\2\2\2\u00ab\u00ac\3\2\2\2\u00ac\u00ad\3\2\2\2\u00ad"+ + "\u00af\5\64\33\2\u00ae\u00ab\3\2\2\2\u00ae\u00af\3\2\2\2\u00af\u00b9\3"+ + "\2\2\2\u00b0\u00b1\7D\2\2\u00b1\u00b6\5\\/\2\u00b2\u00b3\7\5\2\2\u00b3"+ + "\u00b5\5\\/\2\u00b4\u00b2\3\2\2\2\u00b5\u00b8\3\2\2\2\u00b6\u00b4\3\2"+ + "\2\2\u00b6\u00b7\3\2\2\2\u00b7\u00ba\3\2\2\2\u00b8\u00b6\3\2\2\2\u00b9"+ + "\u00b0\3\2\2\2\u00b9\u00ba\3\2\2\2\u00ba\u00d4\3\2\2\2\u00bb\u00bc\7?"+ + "\2\2\u00bc\u00bf\7\23\2\2\u00bd\u00be\7\21\2\2\u00be\u00c0\5\\/\2\u00bf"+ + "\u00bd\3\2\2\2\u00bf\u00c0\3\2\2\2\u00c0\u00c6\3\2\2\2\u00c1\u00c3\7@"+ + "\2\2\u00c2\u00c4\7*\2\2\u00c3\u00c2\3\2\2\2\u00c3\u00c4\3\2\2\2\u00c4"+ + "\u00c5\3\2\2\2\u00c5\u00c7\5\64\33\2\u00c6\u00c1\3\2\2\2\u00c6\u00c7\3"+ + "\2\2\2\u00c7\u00cc\3\2\2\2\u00c8\u00ca\7*\2\2\u00c9\u00c8\3\2\2\2\u00c9"+ + "\u00ca\3\2\2\2\u00ca\u00cb\3\2\2\2\u00cb\u00cd\5\64\33\2\u00cc\u00c9\3"+ + "\2\2\2\u00cc\u00cd\3\2\2\2\u00cd\u00d4\3\2\2\2\u00ce\u00cf\7?\2\2\u00cf"+ + "\u00d4\7E\2\2\u00d0\u00d1\7?\2\2\u00d1\u00d2\7@\2\2\u00d2\u00d4\7E\2\2"+ + "\u00d3f\3\2\2\2\u00d3g\3\2\2\2\u00d3x\3\2\2\2\u00d3\u0087\3\2\2\2\u00d3"+ + "\u008f\3\2\2\2\u00d3\u0093\3\2\2\2\u00d3\u0095\3\2\2\2\u00d3\u009d\3\2"+ + "\2\2\u00d3\u009f\3\2\2\2\u00d3\u00a1\3\2\2\2\u00d3\u00bb\3\2\2\2\u00d3"+ + "\u00ce\3\2\2\2\u00d3\u00d0\3\2\2\2\u00d4\7\3\2\2\2\u00d5\u00d6\7I\2\2"+ + "\u00d6\u00db\5\34\17\2\u00d7\u00d8\7\5\2\2\u00d8\u00da\5\34\17\2\u00d9"+ + "\u00d7\3\2\2\2\u00da\u00dd\3\2\2\2\u00db\u00d9\3\2\2\2\u00db\u00dc\3\2"+ + "\2\2\u00dc\u00df\3\2\2\2\u00dd\u00db\3\2\2\2\u00de\u00d5\3\2\2\2\u00de"+ + "\u00df\3\2\2\2\u00df\u00e0\3\2\2\2\u00e0\u00e1\5\n\6\2\u00e1\t\3\2\2\2"+ + "\u00e2\u00ed\5\16\b\2\u00e3\u00e4\7\64\2\2\u00e4\u00e5\7\17\2\2\u00e5"+ + "\u00ea\5\20\t\2\u00e6\u00e7\7\5\2\2\u00e7\u00e9\5\20\t\2\u00e8\u00e6\3"+ + "\2\2\2\u00e9\u00ec\3\2\2\2\u00ea\u00e8\3\2\2\2\u00ea\u00eb\3\2\2\2\u00eb"+ + "\u00ee\3\2\2\2\u00ec\u00ea\3\2\2\2\u00ed\u00e3\3\2\2\2\u00ed\u00ee\3\2"+ + "\2\2\u00ee\u00f0\3\2\2\2\u00ef\u00f1\5\f\7\2\u00f0\u00ef\3\2\2\2\u00f0"+ + "\u00f1\3\2\2\2\u00f1\13\3\2\2\2\u00f2\u00f3\7+\2\2\u00f3\u00f8\t\7\2\2"+ + "\u00f4\u00f5\7L\2\2\u00f5\u00f6\t\7\2\2\u00f6\u00f8\7Q\2\2\u00f7\u00f2"+ + "\3\2\2\2\u00f7\u00f4\3\2\2\2\u00f8\r\3\2\2\2\u00f9\u00ff\5\22\n\2\u00fa"+ + "\u00fb\7\3\2\2\u00fb\u00fc\5\n\6\2\u00fc\u00fd\7\4\2\2\u00fd\u00ff\3\2"+ + "\2\2\u00fe\u00f9\3\2\2\2\u00fe\u00fa\3\2\2\2\u00ff\17\3\2\2\2\u0100\u0102"+ + "\5,\27\2\u0101\u0103\t\b\2\2\u0102\u0101\3\2\2\2\u0102\u0103\3\2\2\2\u0103"+ + "\21\3\2\2\2\u0104\u0106\7=\2\2\u0105\u0107\5\36\20\2\u0106\u0105\3\2\2"+ + "\2\u0106\u0107\3\2\2\2\u0107\u0108\3\2\2\2\u0108\u010d\5 \21\2\u0109\u010a"+ + "\7\5\2\2\u010a\u010c\5 \21\2\u010b\u0109\3\2\2\2\u010c\u010f\3\2\2\2\u010d"+ + "\u010b\3\2\2\2\u010d\u010e\3\2\2\2\u010e\u0111\3\2\2\2\u010f\u010d\3\2"+ + "\2\2\u0110\u0112\5\24\13\2\u0111\u0110\3\2\2\2\u0111\u0112\3\2\2\2\u0112"+ + "\u0115\3\2\2\2\u0113\u0114\7H\2\2\u0114\u0116\5.\30\2\u0115\u0113\3\2"+ + "\2\2\u0115\u0116\3\2\2\2\u0116\u011a\3\2\2\2\u0117\u0118\7#\2\2\u0118"+ + "\u0119\7\17\2\2\u0119\u011b\5\26\f\2\u011a\u0117\3\2\2\2\u011a\u011b\3"+ + 
"\2\2\2\u011b\u011e\3\2\2\2\u011c\u011d\7$\2\2\u011d\u011f\5.\30\2\u011e"+ + "\u011c\3\2\2\2\u011e\u011f\3\2\2\2\u011f\23\3\2\2\2\u0120\u0121\7\37\2"+ + "\2\u0121\u0126\5\"\22\2\u0122\u0123\7\5\2\2\u0123\u0125\5\"\22\2\u0124"+ + "\u0122\3\2\2\2\u0125\u0128\3\2\2\2\u0126\u0124\3\2\2\2\u0126\u0127\3\2"+ + "\2\2\u0127\25\3\2\2\2\u0128\u0126\3\2\2\2\u0129\u012b\5\36\20\2\u012a"+ + "\u0129\3\2\2\2\u012a\u012b\3\2\2\2\u012b\u012c\3\2\2\2\u012c\u0131\5\30"+ + "\r\2\u012d\u012e\7\5\2\2\u012e\u0130\5\30\r\2\u012f\u012d\3\2\2\2\u0130"+ + "\u0133\3\2\2\2\u0131\u012f\3\2\2\2\u0131\u0132\3\2\2\2\u0132\27\3\2\2"+ + "\2\u0133\u0131\3\2\2\2\u0134\u0135\5\32\16\2\u0135\31\3\2\2\2\u0136\u013f"+ + "\7\3\2\2\u0137\u013c\5,\27\2\u0138\u0139\7\5\2\2\u0139\u013b\5,\27\2\u013a"+ + "\u0138\3\2\2\2\u013b\u013e\3\2\2\2\u013c\u013a\3\2\2\2\u013c\u013d\3\2"+ + "\2\2\u013d\u0140\3\2\2\2\u013e\u013c\3\2\2\2\u013f\u0137\3\2\2\2\u013f"+ + "\u0140\3\2\2\2\u0140\u0141\3\2\2\2\u0141\u0144\7\4\2\2\u0142\u0144\5,"+ + "\27\2\u0143\u0136\3\2\2\2\u0143\u0142\3\2\2\2\u0144\33\3\2\2\2\u0145\u0146"+ + "\5R*\2\u0146\u0147\7\f\2\2\u0147\u0148\7\3\2\2\u0148\u0149\5\n\6\2\u0149"+ + "\u014a\7\4\2\2\u014a\35\3\2\2\2\u014b\u014c\t\t\2\2\u014c\37\3\2\2\2\u014d"+ + "\u0152\5,\27\2\u014e\u0150\7\f\2\2\u014f\u014e\3\2\2\2\u014f\u0150\3\2"+ + "\2\2\u0150\u0151\3\2\2\2\u0151\u0153\5R*\2\u0152\u014f\3\2\2\2\u0152\u0153"+ + "\3\2\2\2\u0153!\3\2\2\2\u0154\u0158\5*\26\2\u0155\u0157\5$\23\2\u0156"+ + "\u0155\3\2\2\2\u0157\u015a\3\2\2\2\u0158\u0156\3\2\2\2\u0158\u0159\3\2"+ + "\2\2\u0159#\3\2\2\2\u015a\u0158\3\2\2\2\u015b\u015c\5&\24\2\u015c\u015d"+ + "\7(\2\2\u015d\u015f\5*\26\2\u015e\u0160\5(\25\2\u015f\u015e\3\2\2\2\u015f"+ + "\u0160\3\2\2\2\u0160\u0167\3\2\2\2\u0161\u0162\7.\2\2\u0162\u0163\5&\24"+ + "\2\u0163\u0164\7(\2\2\u0164\u0165\5*\26\2\u0165\u0167\3\2\2\2\u0166\u015b"+ + "\3\2\2\2\u0166\u0161\3\2\2\2\u0167%\3\2\2\2\u0168\u016a\7&\2\2\u0169\u0168"+ + "\3\2\2\2\u0169\u016a\3\2\2\2\u016a\u0178\3\2\2\2\u016b\u016d\7)\2\2\u016c"+ + "\u016e\7\65\2\2\u016d\u016c\3\2\2\2\u016d\u016e\3\2\2\2\u016e\u0178\3"+ + "\2\2\2\u016f\u0171\79\2\2\u0170\u0172\7\65\2\2\u0171\u0170\3\2\2\2\u0171"+ + "\u0172\3\2\2\2\u0172\u0178\3\2\2\2\u0173\u0175\7 \2\2\u0174\u0176\7\65"+ + "\2\2\u0175\u0174\3\2\2\2\u0175\u0176\3\2\2\2\u0176\u0178\3\2\2\2\u0177"+ + "\u0169\3\2\2\2\u0177\u016b\3\2\2\2\u0177\u016f\3\2\2\2\u0177\u0173\3\2"+ + "\2\2\u0178\'\3\2\2\2\u0179\u017a\7\61\2\2\u017a\u0188\5.\30\2\u017b\u017c"+ + "\7F\2\2\u017c\u017d\7\3\2\2\u017d\u0182\5R*\2\u017e\u017f\7\5\2\2\u017f"+ + "\u0181\5R*\2\u0180\u017e\3\2\2\2\u0181\u0184\3\2\2\2\u0182\u0180\3\2\2"+ + "\2\u0182\u0183\3\2\2\2\u0183\u0185\3\2\2\2\u0184\u0182\3\2\2\2\u0185\u0186"+ + "\7\4\2\2\u0186\u0188\3\2\2\2\u0187\u0179\3\2\2\2\u0187\u017b\3\2\2\2\u0188"+ + ")\3\2\2\2\u0189\u018e\5T+\2\u018a\u018c\7\f\2\2\u018b\u018a\3\2\2\2\u018b"+ + "\u018c\3\2\2\2\u018c\u018d\3\2\2\2\u018d\u018f\5P)\2\u018e\u018b\3\2\2"+ + "\2\u018e\u018f\3\2\2\2\u018f\u01a3\3\2\2\2\u0190\u0191\7\3\2\2\u0191\u0192"+ + "\5\n\6\2\u0192\u0197\7\4\2\2\u0193\u0195\7\f\2\2\u0194\u0193\3\2\2\2\u0194"+ + "\u0195\3\2\2\2\u0195\u0196\3\2\2\2\u0196\u0198\5P)\2\u0197\u0194\3\2\2"+ + "\2\u0197\u0198\3\2\2\2\u0198\u01a3\3\2\2\2\u0199\u019a\7\3\2\2\u019a\u019b"+ + "\5\"\22\2\u019b\u01a0\7\4\2\2\u019c\u019e\7\f\2\2\u019d\u019c\3\2\2\2"+ + "\u019d\u019e\3\2\2\2\u019e\u019f\3\2\2\2\u019f\u01a1\5P)\2\u01a0\u019d"+ + "\3\2\2\2\u01a0\u01a1\3\2\2\2\u01a1\u01a3\3\2\2\2\u01a2\u0189\3\2\2\2\u01a2"+ + 
"\u0190\3\2\2\2\u01a2\u0199\3\2\2\2\u01a3+\3\2\2\2\u01a4\u01a5\5.\30\2"+ + "\u01a5-\3\2\2\2\u01a6\u01a7\b\30\1\2\u01a7\u01a8\7/\2\2\u01a8\u01d8\5"+ + ".\30\n\u01a9\u01aa\7\32\2\2\u01aa\u01ab\7\3\2\2\u01ab\u01ac\5\b\5\2\u01ac"+ + "\u01ad\7\4\2\2\u01ad\u01d8\3\2\2\2\u01ae\u01af\7;\2\2\u01af\u01b0\7\3"+ + "\2\2\u01b0\u01b5\5\\/\2\u01b1\u01b2\7\5\2\2\u01b2\u01b4\5\\/\2\u01b3\u01b1"+ + "\3\2\2\2\u01b4\u01b7\3\2\2\2\u01b5\u01b3\3\2\2\2\u01b5\u01b6\3\2\2\2\u01b6"+ + "\u01b8\3\2\2\2\u01b7\u01b5\3\2\2\2\u01b8\u01b9\7\4\2\2\u01b9\u01d8\3\2"+ + "\2\2\u01ba\u01bb\7-\2\2\u01bb\u01bc\7\3\2\2\u01bc\u01bd\5P)\2\u01bd\u01be"+ + "\7\5\2\2\u01be\u01c3\5\\/\2\u01bf\u01c0\7\5\2\2\u01c0\u01c2\5\\/\2\u01c1"+ + "\u01bf\3\2\2\2\u01c2\u01c5\3\2\2\2\u01c3\u01c1\3\2\2\2\u01c3\u01c4\3\2"+ + "\2\2\u01c4\u01c6\3\2\2\2\u01c5\u01c3\3\2\2\2\u01c6\u01c7\7\4\2\2\u01c7"+ + "\u01d8\3\2\2\2\u01c8\u01c9\7-\2\2\u01c9\u01ca\7\3\2\2\u01ca\u01cb\5\\"+ + "/\2\u01cb\u01cc\7\5\2\2\u01cc\u01d1\5\\/\2\u01cd\u01ce\7\5\2\2\u01ce\u01d0"+ + "\5\\/\2\u01cf\u01cd\3\2\2\2\u01d0\u01d3\3\2\2\2\u01d1\u01cf\3\2\2\2\u01d1"+ + "\u01d2\3\2\2\2\u01d2\u01d4\3\2\2\2\u01d3\u01d1\3\2\2\2\u01d4\u01d5\7\4"+ + "\2\2\u01d5\u01d8\3\2\2\2\u01d6\u01d8\5\60\31\2\u01d7\u01a6\3\2\2\2\u01d7"+ + "\u01a9\3\2\2\2\u01d7\u01ae\3\2\2\2\u01d7\u01ba\3\2\2\2\u01d7\u01c8\3\2"+ + "\2\2\u01d7\u01d6\3\2\2\2\u01d8\u01e1\3\2\2\2\u01d9\u01da\f\4\2\2\u01da"+ + "\u01db\7\n\2\2\u01db\u01e0\5.\30\5\u01dc\u01dd\f\3\2\2\u01dd\u01de\7\63"+ + "\2\2\u01de\u01e0\5.\30\4\u01df\u01d9\3\2\2\2\u01df\u01dc\3\2\2\2\u01e0"+ + "\u01e3\3\2\2\2\u01e1\u01df\3\2\2\2\u01e1\u01e2\3\2\2\2\u01e2/\3\2\2\2"+ + "\u01e3\u01e1\3\2\2\2\u01e4\u01e6\58\35\2\u01e5\u01e7\5\62\32\2\u01e6\u01e5"+ + "\3\2\2\2\u01e6\u01e7\3\2\2\2\u01e7\61\3\2\2\2\u01e8\u01ea\7/\2\2\u01e9"+ + "\u01e8\3\2\2\2\u01e9\u01ea\3\2\2\2\u01ea\u01eb\3\2\2\2\u01eb\u01ec\7\16"+ + "\2\2\u01ec\u01ed\58\35\2\u01ed\u01ee\7\n\2\2\u01ee\u01ef\58\35\2\u01ef"+ + "\u0217\3\2\2\2\u01f0\u01f2\7/\2\2\u01f1\u01f0\3\2\2\2\u01f1\u01f2\3\2"+ + "\2\2\u01f2\u01f3\3\2\2\2\u01f3\u01f4\7%\2\2\u01f4\u01f5\7\3\2\2\u01f5"+ + "\u01fa\5,\27\2\u01f6\u01f7\7\5\2\2\u01f7\u01f9\5,\27\2\u01f8\u01f6\3\2"+ + "\2\2\u01f9\u01fc\3\2\2\2\u01fa\u01f8\3\2\2\2\u01fa\u01fb\3\2\2\2\u01fb"+ + "\u01fd\3\2\2\2\u01fc\u01fa\3\2\2\2\u01fd\u01fe\7\4\2\2\u01fe\u0217\3\2"+ + "\2\2\u01ff\u0201\7/\2\2\u0200\u01ff\3\2\2\2\u0200\u0201\3\2\2\2\u0201"+ + "\u0202\3\2\2\2\u0202\u0203\7%\2\2\u0203\u0204\7\3\2\2\u0204\u0205\5\b"+ + "\5\2\u0205\u0206\7\4\2\2\u0206\u0217\3\2\2\2\u0207\u0209\7/\2\2\u0208"+ + "\u0207\3\2\2\2\u0208\u0209\3\2\2\2\u0209\u020a\3\2\2\2\u020a\u020b\7*"+ + "\2\2\u020b\u0217\5\64\33\2\u020c\u020e\7/\2\2\u020d\u020c\3\2\2\2\u020d"+ + "\u020e\3\2\2\2\u020e\u020f\3\2\2\2\u020f\u0210\7:\2\2\u0210\u0217\5\\"+ + "/\2\u0211\u0213\7\'\2\2\u0212\u0214\7/\2\2\u0213\u0212\3\2\2\2\u0213\u0214"+ + "\3\2\2\2\u0214\u0215\3\2\2\2\u0215\u0217\7\60\2\2\u0216\u01e9\3\2\2\2"+ + "\u0216\u01f1\3\2\2\2\u0216\u0200\3\2\2\2\u0216\u0208\3\2\2\2\u0216\u020d"+ + "\3\2\2\2\u0216\u0211\3\2\2\2\u0217\63\3\2\2\2\u0218\u021a\5\\/\2\u0219"+ + "\u021b\5\66\34\2\u021a\u0219\3\2\2\2\u021a\u021b\3\2\2\2\u021b\65\3\2"+ + "\2\2\u021c\u021d\7\30\2\2\u021d\u0223\5\\/\2\u021e\u021f\7J\2\2\u021f"+ + "\u0220\5\\/\2\u0220\u0221\7Q\2\2\u0221\u0223\3\2\2\2\u0222\u021c\3\2\2"+ + "\2\u0222\u021e\3\2\2\2\u0223\67\3\2\2\2\u0224\u0225\b\35\1\2\u0225\u0229"+ + "\5:\36\2\u0226\u0227\t\n\2\2\u0227\u0229\58\35\6\u0228\u0224\3\2\2\2\u0228"+ + "\u0226\3\2\2\2\u0229\u0236\3\2\2\2\u022a\u022b\f\5\2\2\u022b\u022c\t\13"+ + 
"\2\2\u022c\u0235\58\35\6\u022d\u022e\f\4\2\2\u022e\u022f\t\n\2\2\u022f"+ + "\u0235\58\35\5\u0230\u0231\f\3\2\2\u0231\u0232\5J&\2\u0232\u0233\58\35"+ + "\4\u0233\u0235\3\2\2\2\u0234\u022a\3\2\2\2\u0234\u022d\3\2\2\2\u0234\u0230"+ + "\3\2\2\2\u0235\u0238\3\2\2\2\u0236\u0234\3\2\2\2\u0236\u0237\3\2\2\2\u0237"+ + "9\3\2\2\2\u0238\u0236\3\2\2\2\u0239\u024f\5<\37\2\u023a\u024f\5@!\2\u023b"+ + "\u024f\5H%\2\u023c\u024f\7Z\2\2\u023d\u023e\5P)\2\u023e\u023f\7^\2\2\u023f"+ + "\u0241\3\2\2\2\u0240\u023d\3\2\2\2\u0240\u0241\3\2\2\2\u0241\u0242\3\2"+ + "\2\2\u0242\u024f\7Z\2\2\u0243\u024f\5D#\2\u0244\u0245\7\3\2\2\u0245\u0246"+ + "\5\b\5\2\u0246\u0247\7\4\2\2\u0247\u024f\3\2\2\2\u0248\u024f\5R*\2\u0249"+ + "\u024f\5P)\2\u024a\u024b\7\3\2\2\u024b\u024c\5,\27\2\u024c\u024d\7\4\2"+ + "\2\u024d\u024f\3\2\2\2\u024e\u0239\3\2\2\2\u024e\u023a\3\2\2\2\u024e\u023b"+ + "\3\2\2\2\u024e\u023c\3\2\2\2\u024e\u0240\3\2\2\2\u024e\u0243\3\2\2\2\u024e"+ + "\u0244\3\2\2\2\u024e\u0248\3\2\2\2\u024e\u0249\3\2\2\2\u024e\u024a\3\2"+ + "\2\2\u024f;\3\2\2\2\u0250\u0256\5> \2\u0251\u0252\7K\2\2\u0252\u0253\5"+ + "> \2\u0253\u0254\7Q\2\2\u0254\u0256\3\2\2\2\u0255\u0250\3\2\2\2\u0255"+ + "\u0251\3\2\2\2\u0256=\3\2\2\2\u0257\u0258\7\20\2\2\u0258\u0259\7\3\2\2"+ + "\u0259\u025a\5,\27\2\u025a\u025b\7\f\2\2\u025b\u025c\5N(\2\u025c\u025d"+ + "\7\4\2\2\u025d?\3\2\2\2\u025e\u0264\5B\"\2\u025f\u0260\7K\2\2\u0260\u0261"+ + "\5B\"\2\u0261\u0262\7Q\2\2\u0262\u0264\3\2\2\2\u0263\u025e\3\2\2\2\u0263"+ + "\u025f\3\2\2\2\u0264A\3\2\2\2\u0265\u0266\7\34\2\2\u0266\u0267\7\3\2\2"+ + "\u0267\u0268\5R*\2\u0268\u0269\7\37\2\2\u0269\u026a\58\35\2\u026a\u026b"+ + "\7\4\2\2\u026bC\3\2\2\2\u026c\u0272\5F$\2\u026d\u026e\7K\2\2\u026e\u026f"+ + "\5F$\2\u026f\u0270\7Q\2\2\u0270\u0272\3\2\2\2\u0271\u026c\3\2\2\2\u0271"+ + "\u026d\3\2\2\2\u0272E\3\2\2\2\u0273\u0274\5R*\2\u0274\u0280\7\3\2\2\u0275"+ + "\u0277\5\36\20\2\u0276\u0275\3\2\2\2\u0276\u0277\3\2\2\2\u0277\u0278\3"+ + "\2\2\2\u0278\u027d\5,\27\2\u0279\u027a\7\5\2\2\u027a\u027c\5,\27\2\u027b"+ + "\u0279\3\2\2\2\u027c\u027f\3\2\2\2\u027d\u027b\3\2\2\2\u027d\u027e\3\2"+ + "\2\2\u027e\u0281\3\2\2\2\u027f\u027d\3\2\2\2\u0280\u0276\3\2\2\2\u0280"+ + "\u0281\3\2\2\2\u0281\u0282\3\2\2\2\u0282\u0283\7\4\2\2\u0283G\3\2\2\2"+ + "\u0284\u029e\7\60\2\2\u0285\u029e\5Z.\2\u0286\u029e\5L\'\2\u0287\u0289"+ + "\7`\2\2\u0288\u0287\3\2\2\2\u0289\u028a\3\2\2\2\u028a\u0288\3\2\2\2\u028a"+ + "\u028b\3\2\2\2\u028b\u029e\3\2\2\2\u028c\u029e\7_\2\2\u028d\u028e\7M\2"+ + "\2\u028e\u028f\5\\/\2\u028f\u0290\7Q\2\2\u0290\u029e\3\2\2\2\u0291\u0292"+ + "\7N\2\2\u0292\u0293\5\\/\2\u0293\u0294\7Q\2\2\u0294\u029e\3\2\2\2\u0295"+ + "\u0296\7O\2\2\u0296\u0297\5\\/\2\u0297\u0298\7Q\2\2\u0298\u029e\3\2\2"+ + "\2\u0299\u029a\7P\2\2\u029a\u029b\5\\/\2\u029b\u029c\7Q\2\2\u029c\u029e"+ + "\3\2\2\2\u029d\u0284\3\2\2\2\u029d\u0285\3\2\2\2\u029d\u0286\3\2\2\2\u029d"+ + "\u0288\3\2\2\2\u029d\u028c\3\2\2\2\u029d\u028d\3\2\2\2\u029d\u0291\3\2"+ + "\2\2\u029d\u0295\3\2\2\2\u029d\u0299\3\2\2\2\u029eI\3\2\2\2\u029f\u02a0"+ + "\t\f\2\2\u02a0K\3\2\2\2\u02a1\u02a2\t\r\2\2\u02a2M\3\2\2\2\u02a3\u02a4"+ + "\5R*\2\u02a4O\3\2\2\2\u02a5\u02a6\5R*\2\u02a6\u02a7\7^\2\2\u02a7\u02a9"+ + "\3\2\2\2\u02a8\u02a5\3\2\2\2\u02a9\u02ac\3\2\2\2\u02aa\u02a8\3\2\2\2\u02aa"+ + "\u02ab\3\2\2\2\u02ab\u02ad\3\2\2\2\u02ac\u02aa\3\2\2\2\u02ad\u02ae\5R"+ + "*\2\u02aeQ\3\2\2\2\u02af\u02b2\5V,\2\u02b0\u02b2\5X-\2\u02b1\u02af\3\2"+ + "\2\2\u02b1\u02b0\3\2\2\2\u02b2S\3\2\2\2\u02b3\u02b4\5R*\2\u02b4\u02b5"+ + "\7\6\2\2\u02b5\u02b7\3\2\2\2\u02b6\u02b3\3\2\2\2\u02b6\u02b7\3\2\2\2\u02b7"+ + 
"\u02b8\3\2\2\2\u02b8\u02c0\7e\2\2\u02b9\u02ba\5R*\2\u02ba\u02bb\7\6\2"+ + "\2\u02bb\u02bd\3\2\2\2\u02bc\u02b9\3\2\2\2\u02bc\u02bd\3\2\2\2\u02bd\u02be"+ + "\3\2\2\2\u02be\u02c0\5R*\2\u02bf\u02b6\3\2\2\2\u02bf\u02bc\3\2\2\2\u02c0"+ + "U\3\2\2\2\u02c1\u02c4\7f\2\2\u02c2\u02c4\7g\2\2\u02c3\u02c1\3\2\2\2\u02c3"+ + "\u02c2\3\2\2\2\u02c4W\3\2\2\2\u02c5\u02c9\7c\2\2\u02c6\u02c9\5^\60\2\u02c7"+ + "\u02c9\7d\2\2\u02c8\u02c5\3\2\2\2\u02c8\u02c6\3\2\2\2\u02c8\u02c7\3\2"+ + "\2\2\u02c9Y\3\2\2\2\u02ca\u02cd\7b\2\2\u02cb\u02cd\7a\2\2\u02cc\u02ca"+ + "\3\2\2\2\u02cc\u02cb\3\2\2\2\u02cd[\3\2\2\2\u02ce\u02cf\t\16\2\2\u02cf"+ + "]\3\2\2\2\u02d0\u02d1\t\17\2\2\u02d1_\3\2\2\2eoqu~\u0080\u0084\u008a\u008d"+ + "\u0098\u009b\u00a5\u00a8\u00ab\u00ae\u00b6\u00b9\u00bf\u00c3\u00c6\u00c9"+ + "\u00cc\u00d3\u00db\u00de\u00ea\u00ed\u00f0\u00f7\u00fe\u0102\u0106\u010d"+ + "\u0111\u0115\u011a\u011e\u0126\u012a\u0131\u013c\u013f\u0143\u014f\u0152"+ + "\u0158\u015f\u0166\u0169\u016d\u0171\u0175\u0177\u0182\u0187\u018b\u018e"+ + "\u0194\u0197\u019d\u01a0\u01a2\u01b5\u01c3\u01d1\u01d7\u01df\u01e1\u01e6"+ + "\u01e9\u01f1\u01fa\u0200\u0208\u020d\u0213\u0216\u021a\u0222\u0228\u0234"+ + "\u0236\u0240\u024e\u0255\u0263\u0271\u0276\u027d\u0280\u028a\u029d\u02aa"+ + "\u02b1\u02b6\u02bc\u02bf\u02c3\u02c8\u02cc"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseVisitor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseVisitor.java index 35ce6cd0029d6..6745b3fa89ba9 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseVisitor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseVisitor.java @@ -1,8 +1,3 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. - */ // ANTLR GENERATED CODE: DO NOT EDIT package org.elasticsearch.xpack.sql.parser; import org.antlr.v4.runtime.tree.ParseTreeVisitor; @@ -123,6 +118,12 @@ interface SqlBaseVisitor extends ParseTreeVisitor { * @return the visitor result */ T visitQueryNoWith(SqlBaseParser.QueryNoWithContext ctx); + /** + * Visit a parse tree produced by {@link SqlBaseParser#limitClause}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitLimitClause(SqlBaseParser.LimitClauseContext ctx); /** * Visit a parse tree produced by the {@code queryPrimaryDefault} * labeled alternative in {@link SqlBaseParser#queryTerm}. @@ -311,6 +312,12 @@ interface SqlBaseVisitor extends ParseTreeVisitor { * @return the visitor result */ T visitPattern(SqlBaseParser.PatternContext ctx); + /** + * Visit a parse tree produced by {@link SqlBaseParser#patternEscape}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitPatternEscape(SqlBaseParser.PatternEscapeContext ctx); /** * Visit a parse tree produced by the {@code valueExpressionDefault} * labeled alternative in {@link SqlBaseParser#valueExpression}. @@ -368,12 +375,12 @@ interface SqlBaseVisitor extends ParseTreeVisitor { */ T visitStar(SqlBaseParser.StarContext ctx); /** - * Visit a parse tree produced by the {@code functionCall} + * Visit a parse tree produced by the {@code function} * labeled alternative in {@link SqlBaseParser#primaryExpression}. 
* @param ctx the parse tree * @return the visitor result */ - T visitFunctionCall(SqlBaseParser.FunctionCallContext ctx); + T visitFunction(SqlBaseParser.FunctionContext ctx); /** * Visit a parse tree produced by the {@code subqueryExpression} * labeled alternative in {@link SqlBaseParser#primaryExpression}. @@ -402,6 +409,42 @@ interface SqlBaseVisitor extends ParseTreeVisitor { * @return the visitor result */ T visitParenthesizedExpression(SqlBaseParser.ParenthesizedExpressionContext ctx); + /** + * Visit a parse tree produced by {@link SqlBaseParser#castExpression}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitCastExpression(SqlBaseParser.CastExpressionContext ctx); + /** + * Visit a parse tree produced by {@link SqlBaseParser#castTemplate}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitCastTemplate(SqlBaseParser.CastTemplateContext ctx); + /** + * Visit a parse tree produced by {@link SqlBaseParser#extractExpression}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitExtractExpression(SqlBaseParser.ExtractExpressionContext ctx); + /** + * Visit a parse tree produced by {@link SqlBaseParser#extractTemplate}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitExtractTemplate(SqlBaseParser.ExtractTemplateContext ctx); + /** + * Visit a parse tree produced by {@link SqlBaseParser#functionExpression}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitFunctionExpression(SqlBaseParser.FunctionExpressionContext ctx); + /** + * Visit a parse tree produced by {@link SqlBaseParser#functionTemplate}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitFunctionTemplate(SqlBaseParser.FunctionTemplateContext ctx); /** * Visit a parse tree produced by the {@code nullLiteral} * labeled alternative in {@link SqlBaseParser#constant}. @@ -437,6 +480,34 @@ interface SqlBaseVisitor extends ParseTreeVisitor { * @return the visitor result */ T visitParamLiteral(SqlBaseParser.ParamLiteralContext ctx); + /** + * Visit a parse tree produced by the {@code dateEscapedLiteral} + * labeled alternative in {@link SqlBaseParser#constant}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitDateEscapedLiteral(SqlBaseParser.DateEscapedLiteralContext ctx); + /** + * Visit a parse tree produced by the {@code timeEscapedLiteral} + * labeled alternative in {@link SqlBaseParser#constant}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitTimeEscapedLiteral(SqlBaseParser.TimeEscapedLiteralContext ctx); + /** + * Visit a parse tree produced by the {@code timestampEscapedLiteral} + * labeled alternative in {@link SqlBaseParser#constant}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitTimestampEscapedLiteral(SqlBaseParser.TimestampEscapedLiteralContext ctx); + /** + * Visit a parse tree produced by the {@code guidEscapedLiteral} + * labeled alternative in {@link SqlBaseParser#constant}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitGuidEscapedLiteral(SqlBaseParser.GuidEscapedLiteralContext ctx); /** * Visit a parse tree produced by {@link SqlBaseParser#comparisonOperator}. 
 * @param ctx the parse tree
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlParser.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlParser.java
index b7fe9178f911f..2824b5502a860 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlParser.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlParser.java
@@ -33,10 +33,13 @@
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
+import java.util.Locale;
 import java.util.Map;
 import java.util.function.BiFunction;
 import java.util.function.Function;
 
+import static java.lang.String.format;
+
 public class SqlParser {
 
     private static final Logger log = Loggers.getLogger(SqlParser.class);
@@ -102,16 +105,30 @@ private <T> T invokeParser(String sql, List<SqlTypedParamValue> params, Function
         if (DEBUG) {
             debug(parser);
+            tokenStream.fill();
+
+            for (Token t : tokenStream.getTokens()) {
+                String symbolicName = SqlBaseLexer.VOCABULARY.getSymbolicName(t.getType());
+                String literalName = SqlBaseLexer.VOCABULARY.getLiteralName(t.getType());
+                log.info(format(Locale.ROOT, " %-15s '%s'",
+                        symbolicName == null ? literalName : symbolicName,
+                        t.getText()));
+            }
         }
 
         ParserRuleContext tree = parseFunction.apply(parser);
 
+        if (DEBUG) {
+            log.info("Parse tree: {}", tree.toStringTree());
+        }
+
         return visitor.apply(new AstBuilder(paramTokens), tree);
     }
 
     private void debug(SqlBaseParser parser) {
-        parser.getInterpreter().setPredictionMode(PredictionMode.SLL);
+        // when debugging, use the exact prediction mode (needed for diagnostics as well)
+        parser.getInterpreter().setPredictionMode(PredictionMode.LL_EXACT_AMBIG_DETECTION);
 
         parser.addParseListener(parser.new TraceListener());
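
For context on the prediction-mode line above: SLL is ANTLR's fast, approximate prediction strategy, while LL_EXACT_AMBIG_DETECTION forces full LL prediction and reports every true grammar ambiguity, which is what a diagnostic listener needs. A minimal sketch of that debug wiring, hand-written here as an illustration rather than part of the patch, assuming the generated SqlBaseLexer/SqlBaseParser classes and a singleStatement start rule from the grammar:

    package org.elasticsearch.xpack.sql.parser; // assumed, so the generated classes are visible

    import org.antlr.v4.runtime.ANTLRInputStream;
    import org.antlr.v4.runtime.CommonTokenStream;
    import org.antlr.v4.runtime.DiagnosticErrorListener;
    import org.antlr.v4.runtime.atn.PredictionMode;

    public class ParserDebugSketch {
        public static void main(String[] args) {
            CommonTokenStream tokens = new CommonTokenStream(new SqlBaseLexer(new ANTLRInputStream("SELECT 1")));
            SqlBaseParser parser = new SqlBaseParser(tokens);
            // exact prediction: slower, but true ambiguities are reported instead of silently resolved
            parser.getInterpreter().setPredictionMode(PredictionMode.LL_EXACT_AMBIG_DETECTION);
            parser.addErrorListener(new DiagnosticErrorListener());
            parser.singleStatement();
        }
    }
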
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java
index dd0456e9aefc8..e691aef8d3e61 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/planner/QueryTranslator.java
@@ -414,6 +414,9 @@ protected QueryTranslation asQuery(BinaryExpression e, boolean onAggs) {
                 FieldAttribute fa = (FieldAttribute) e.left();
                 inexact = fa.isInexact();
                 target = nameOf(inexact ? fa : fa.exactAttribute());
+            } else {
+                throw new SqlIllegalArgumentException("Scalar function ({}) not allowed (yet) as arguments for LIKE",
+                        Expressions.name(e.left()));
             }
 
             if (e instanceof Like) {
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/util/StringUtils.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/util/StringUtils.java
index e8bb9368d69ad..9570eaf1b6a06 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/util/StringUtils.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/util/StringUtils.java
@@ -57,6 +57,33 @@ public static String camelCaseToUnderscore(String string) {
         }
         return sb.toString().toUpperCase(Locale.ROOT);
     }
+
+    // UPPER_UNDERSCORE (e.g. CAMEL_CASE) to lower camelCase (camelCase)
+    public static String underscoreToLowerCamelCase(String string) {
+        if (!Strings.hasText(string)) {
+            return EMPTY;
+        }
+        StringBuilder sb = new StringBuilder();
+        String s = string.trim().toLowerCase(Locale.ROOT);
+
+        boolean previousCharWasUnderscore = false;
+        for (int i = 0; i < s.length(); i++) {
+            char ch = s.charAt(i);
+            if (ch == '_') {
+                previousCharWasUnderscore = true;
+            } else if (previousCharWasUnderscore) {
+                sb.append(Character.toUpperCase(ch));
+                previousCharWasUnderscore = false;
+            } else {
+                sb.append(ch);
+            }
+        }
+        return sb.toString();
+    }
 
     public static String nullAsEmpty(String string) {
         return string == null ? EMPTY : string;
diff --git a/x-pack/plugin/sql/src/main/resources/org/elasticsearch/xpack/sql/plugin/sql_whitelist.txt b/x-pack/plugin/sql/src/main/resources/org/elasticsearch/xpack/sql/plugin/sql_whitelist.txt
index 8dae4f8c0d1d6..73a002c249f3c 100644
--- a/x-pack/plugin/sql/src/main/resources/org/elasticsearch/xpack/sql/plugin/sql_whitelist.txt
+++ b/x-pack/plugin/sql/src/main/resources/org/elasticsearch/xpack/sql/plugin/sql_whitelist.txt
@@ -9,4 +9,14 @@ class org.elasticsearch.xpack.sql.expression.function.scalar.whitelist.InternalSqlScriptUtils {
   Integer dateTimeChrono(long, String, String)
+  Integer ascii(String)
+  Integer bitLength(String)
+  String character(Number)
+  Integer charLength(String)
+  String lcase(String)
+  String ucase(String)
+  Integer length(String)
+  String rtrim(String)
+  String ltrim(String)
+  String space(Number)
 }
\ No newline at end of file
diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java
index 60875e0194a0c..dce665a97e95d 100644
--- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java
+++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/analysis/analyzer/VerifierErrorMessagesTests.java
@@ -159,4 +159,14 @@ public void testGroupByOrderByScore() {
         assertEquals("1:44: Cannot order by non-grouped column [SCORE()], expected [int]",
             verify("SELECT int FROM test GROUP BY int ORDER BY SCORE()"));
     }
+
+    public void testHavingOnColumn() {
+        assertEquals("1:42: Cannot filter HAVING on non-aggregate [int]; consider using WHERE instead",
+            verify("SELECT int FROM test GROUP BY int HAVING int > 2"));
+    }
+
+    public void testHavingOnScalar() {
+        assertEquals("1:42: Cannot filter HAVING on non-aggregate [int]; consider using WHERE instead",
+            verify("SELECT int FROM test GROUP BY int HAVING 2 < ABS(int)"));
+    }
 }
\ No newline at end of file
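
The hunks above are tied together by a naming convention: the whitelist methods use the lower-camel forms (bitLength, charLength, lcase) of the UPPER_UNDERSCORE StringOperation constants exercised in the tests below, and the new underscoreToLowerCamelCase helper is the kind of mapping such names would be derived with (an inference from the matching names, not something the patch states). A small hand-written check of the conversion:

    import org.elasticsearch.xpack.sql.util.StringUtils;

    public class CamelCaseCheck {
        public static void main(String[] args) {
            // expected outputs follow directly from the helper's loop above
            System.out.println(StringUtils.underscoreToLowerCamelCase("BIT_LENGTH"));  // bitLength
            System.out.println(StringUtils.underscoreToLowerCamelCase("CHAR_LENGTH")); // charLength
            System.out.println(StringUtils.underscoreToLowerCamelCase("LCASE"));       // lcase
        }
    }
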
diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/StringFunctionProcessorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/StringFunctionProcessorTests.java
new file mode 100644
index 0000000000000..dcfb8d278ff3f
--- /dev/null
+++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/StringFunctionProcessorTests.java
@@ -0,0 +1,160 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.sql.expression.function.scalar.string;
+
+import org.elasticsearch.common.io.stream.Writeable.Reader;
+import org.elasticsearch.test.AbstractWireSerializingTestCase;
+import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
+import org.elasticsearch.xpack.sql.expression.function.scalar.string.StringProcessor.StringOperation;
+
+import java.io.IOException;
+
+public class StringFunctionProcessorTests extends AbstractWireSerializingTestCase<StringProcessor> {
+    public static StringProcessor randomStringFunctionProcessor() {
+        return new StringProcessor(randomFrom(StringOperation.values()));
+    }
+
+    @Override
+    protected StringProcessor createTestInstance() {
+        return randomStringFunctionProcessor();
+    }
+
+    @Override
+    protected Reader<StringProcessor> instanceReader() {
+        return StringProcessor::new;
+    }
+
+    @Override
+    protected StringProcessor mutateInstance(StringProcessor instance) throws IOException {
+        return new StringProcessor(randomValueOtherThan(instance.processor(), () -> randomFrom(StringOperation.values())));
+    }
+
+    private void stringCharInputValidation(StringProcessor proc) {
+        SqlIllegalArgumentException siae = expectThrows(SqlIllegalArgumentException.class, () -> proc.process(123));
+        assertEquals("A string/char is required; received [123]", siae.getMessage());
+    }
+
+    private void numericInputValidation(StringProcessor proc) {
+        SqlIllegalArgumentException siae = expectThrows(SqlIllegalArgumentException.class, () -> proc.process("A"));
+        assertEquals("A number is required; received [A]", siae.getMessage());
+    }
+
+    public void testAscii() {
+        StringProcessor proc = new StringProcessor(StringOperation.ASCII);
+        assertNull(proc.process(null));
+        assertEquals(65, proc.process("A"));
+        // accepts chars as well
+        assertEquals(65, proc.process('A'));
+        // for longer strings, only the first character counts
+        assertEquals(65, proc.process("Alpha"));
+        // validate input
+        stringCharInputValidation(proc);
+    }
+
+    public void testChar() {
+        StringProcessor proc = new StringProcessor(StringOperation.CHAR);
+        assertNull(proc.process(null));
+        assertEquals("A", proc.process(65));
+        // out-of-range code points yield null
+        assertNull(proc.process(256));
+        assertNull(proc.process(-1));
+        // validate input
+        numericInputValidation(proc);
+    }
+
+    public void testLCase() {
+        StringProcessor proc = new StringProcessor(StringOperation.LCASE);
+        assertNull(proc.process(null));
+        assertEquals("fulluppercase", proc.process("FULLUPPERCASE"));
+        assertEquals("someuppercase", proc.process("SomeUpPerCasE"));
+        assertEquals("fulllowercase", proc.process("fulllowercase"));
+        assertEquals("a", proc.process('A'));
+
+        stringCharInputValidation(proc);
+    }
+
+    public void testUCase() {
+        StringProcessor proc = new StringProcessor(StringOperation.UCASE);
+        assertNull(proc.process(null));
+        assertEquals("FULLLOWERCASE", proc.process("fulllowercase"));
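+        // mixed-case and already-upper-case inputs normalize the same way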
+        assertEquals("SOMELOWERCASE", proc.process("SomeLoweRCasE"));
+        assertEquals("FULLUPPERCASE", proc.process("FULLUPPERCASE"));
+        assertEquals("A", proc.process('a'));
+
+        stringCharInputValidation(proc);
+    }
+
+    public void testLength() {
+        StringProcessor proc = new StringProcessor(StringOperation.LENGTH);
+        assertNull(proc.process(null));
+        assertEquals(7, proc.process("foo bar"));
+        assertEquals(0, proc.process(""));
+        assertEquals(0, proc.process("   "));
+        // trailing whitespace is not counted
+        assertEquals(7, proc.process("foo bar   "));
+        assertEquals(10, proc.process("   foo bar   "));
+        assertEquals(1, proc.process('f'));
+
+        stringCharInputValidation(proc);
+    }
+
+    public void testRTrim() {
+        StringProcessor proc = new StringProcessor(StringOperation.RTRIM);
+        assertNull(proc.process(null));
+        assertEquals("foo bar", proc.process("foo bar"));
+        assertEquals("", proc.process(""));
+        assertEquals("", proc.process("   "));
+        assertEquals("foo bar", proc.process("foo bar   "));
+        assertEquals("   foo bar", proc.process("   foo bar   "));
+        assertEquals("f", proc.process('f'));
+
+        stringCharInputValidation(proc);
+    }
+
+    public void testLTrim() {
+        StringProcessor proc = new StringProcessor(StringOperation.LTRIM);
+        assertNull(proc.process(null));
+        assertEquals("foo bar", proc.process("foo bar"));
+        assertEquals("", proc.process(""));
+        assertEquals("", proc.process("   "));
+        assertEquals("foo bar", proc.process("   foo bar"));
+        assertEquals("foo bar   ", proc.process("   foo bar   "));
+        assertEquals("f", proc.process('f'));
+
+        stringCharInputValidation(proc);
+    }
+
+    public void testSpace() {
+        StringProcessor proc = new StringProcessor(StringOperation.SPACE);
+        int count = 7;
+        assertNull(proc.process(null));
+        assertEquals("       ", proc.process(count));
+        assertEquals(count, ((String) proc.process(count)).length());
+        assertNotNull(proc.process(0));
+        assertEquals("", proc.process(0));
+        assertNull(proc.process(-1));
+
+        numericInputValidation(proc);
+    }
+
+    public void testBitLength() {
+        StringProcessor proc = new StringProcessor(StringOperation.BIT_LENGTH);
+        assertNull(proc.process(null));
+        assertEquals(56, proc.process("foo bar"));
+        assertEquals(0, proc.process(""));
+        assertEquals(8, proc.process('f'));
+
+        stringCharInputValidation(proc);
+    }
+
+    public void testCharLength() {
+        StringProcessor proc = new StringProcessor(StringOperation.CHAR_LENGTH);
+        assertNull(proc.process(null));
+        assertEquals(7, proc.process("foo bar"));
+        assertEquals(0, proc.process(""));
+        assertEquals(1, proc.process('f'));
+        assertEquals(1, proc.process('€'));
+
+        stringCharInputValidation(proc);
+    }
+}
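
The processor tests above lean on the standard wire-serialization harness: createTestInstance produces a random StringProcessor, instanceReader re-reads it from the wire, and mutateInstance must return a differing instance so the equality checks are actually exercised. Roughly, the round-trip the base class performs looks like the following (a simplified sketch; the real checks live in AbstractWireSerializingTestCase):

    package org.elasticsearch.xpack.sql.expression.function.scalar.string; // assumed, for constructor access

    import org.elasticsearch.common.io.stream.BytesStreamOutput;

    public class RoundTripSketch {
        public static void main(String[] args) throws Exception {
            StringProcessor original = new StringProcessor(StringProcessor.StringOperation.LCASE);
            BytesStreamOutput out = new BytesStreamOutput();
            original.writeTo(out);                                               // serialize
            StringProcessor copy = new StringProcessor(out.bytes().streamInput()); // deserialize via the Reader
            if (!original.equals(copy)) {
                throw new AssertionError("round trip lost information");
            }
        }
    }
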
diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/EscapedFunctionsTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/EscapedFunctionsTests.java
new file mode 100644
index 0000000000000..11ad24582efb6
--- /dev/null
+++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/EscapedFunctionsTests.java
@@ -0,0 +1,237 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.sql.parser;
+
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.xpack.sql.SqlIllegalArgumentException;
+import org.elasticsearch.xpack.sql.expression.Expression;
+import org.elasticsearch.xpack.sql.expression.Literal;
+import org.elasticsearch.xpack.sql.expression.UnresolvedAttribute;
+import org.elasticsearch.xpack.sql.expression.function.Function;
+import org.elasticsearch.xpack.sql.expression.function.UnresolvedFunction;
+import org.elasticsearch.xpack.sql.expression.regex.Like;
+import org.elasticsearch.xpack.sql.expression.regex.LikePattern;
+import org.elasticsearch.xpack.sql.plan.logical.Limit;
+import org.elasticsearch.xpack.sql.plan.logical.LogicalPlan;
+import org.elasticsearch.xpack.sql.plan.logical.With;
+import org.elasticsearch.xpack.sql.proto.SqlTypedParamValue;
+import org.elasticsearch.xpack.sql.type.DataType;
+import org.junit.Assert;
+
+import java.util.List;
+import java.util.Locale;
+
+import static java.lang.String.format;
+import static java.util.Arrays.asList;
+import static org.hamcrest.Matchers.instanceOf;
+import static org.hamcrest.Matchers.is;
+
+public class EscapedFunctionsTests extends ESTestCase {
+
+    private final SqlParser parser = new SqlParser();
+
+    private Literal dateLiteral(String date) {
+        Expression exp = parser.createExpression(format(Locale.ROOT, "{d '%s'}", date));
+        assertThat(exp, instanceOf(Literal.class));
+        return (Literal) exp;
+    }
+
+    private Literal timeLiteral(String date) {
+        Expression exp = parser.createExpression(format(Locale.ROOT, "{t '%s'}", date));
+        assertThat(exp, instanceOf(Literal.class));
+        return (Literal) exp;
+    }
+
+    private Literal timestampLiteral(String date) {
+        Expression exp = parser.createExpression(format(Locale.ROOT, "{ts '%s'}", date));
+        assertThat(exp, instanceOf(Literal.class));
+        return (Literal) exp;
+    }
+
+    private Literal guidLiteral(String guid) {
+        Expression exp = parser.createExpression(format(Locale.ROOT, "{guid '%s'}", guid));
+        assertThat(exp, instanceOf(Literal.class));
+        return (Literal) exp;
+    }
+
+    private Limit limit(int limit) {
+        LogicalPlan plan = parser.createStatement(format(Locale.ROOT, "SELECT * FROM emp {limit %d}", limit));
+        assertThat(plan, instanceOf(With.class));
+        With with = (With) plan;
+        Limit limitPlan = (Limit) (with.child());
+        assertThat(limitPlan.limit(), instanceOf(Literal.class));
+        return limitPlan;
+    }
+
+    private LikePattern likeEscape(String like, String character) {
+        Expression exp = parser.createExpression(format(Locale.ROOT, "exp LIKE '%s' {escape '%s'}", like, character));
+        assertThat(exp, instanceOf(Like.class));
+        return ((Like) exp).right();
+    }
+
+    private Function function(String name) {
+        Expression exp = parser.createExpression(format(Locale.ROOT, "{fn %s}", name));
+        assertThat(exp, instanceOf(Function.class));
+        return (Function) exp;
+    }
+
+    public void testFunctionNoArg() {
+        Function f = function("SCORE()");
+        assertEquals("SCORE", f.functionName());
+    }
+
+    public void testFunctionOneArg() {
+        Function f = function("ABS(foo)");
+        assertEquals("ABS", f.functionName());
+        assertEquals(1, f.arguments().size());
+        Expression arg = f.arguments().get(0);
+        assertThat(arg, instanceOf(UnresolvedAttribute.class));
+        UnresolvedAttribute ua = (UnresolvedAttribute) arg;
+        assertThat(ua.name(), is("foo"));
+    }
+
+    public void testFunctionOneArgFunction() {
+        Function f = function("ABS({fn SCORE()})");
+        assertEquals("ABS", f.functionName());
+        assertEquals(1, f.arguments().size());
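+        // the inner {fn SCORE()} escape should parse into an unresolved function argument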
Expression arg = f.arguments().get(0); + assertThat(arg, instanceOf(UnresolvedFunction.class)); + UnresolvedFunction uf = (UnresolvedFunction) arg; + assertThat(uf.name(), is("SCORE")); + } + + public void testFunctionFloorWithExtract() { + Function f = function("CAST({fn FLOOR({fn EXTRACT(YEAR FROM \"foo\")})} AS int)"); + assertEquals("CAST", f.functionName()); + assertEquals(1, f.arguments().size()); + Expression arg = f.arguments().get(0); + assertThat(arg, instanceOf(UnresolvedFunction.class)); + f = (Function) arg; + assertEquals("FLOOR", f.functionName()); + assertEquals(1, f.arguments().size()); + arg = f.arguments().get(0); + assertThat(arg, instanceOf(UnresolvedFunction.class)); + UnresolvedFunction uf = (UnresolvedFunction) arg; + assertThat(uf.name(), is("YEAR")); + } + + public void testFunctionWithFunctionWithArg() { + Function f = function("POWER(foo, {fn POWER({fn SCORE()}, {fN SCORE()})})"); + assertEquals("POWER", f.functionName()); + assertEquals(2, f.arguments().size()); + Expression arg = f.arguments().get(1); + assertThat(arg, instanceOf(UnresolvedFunction.class)); + UnresolvedFunction uf = (UnresolvedFunction) arg; + assertThat(uf.name(), is("POWER")); + assertEquals(2, uf.arguments().size()); + + List<Expression> args = uf.arguments(); + arg = args.get(0); + assertThat(arg, instanceOf(UnresolvedFunction.class)); + uf = (UnresolvedFunction) arg; + assertThat(uf.name(), is("SCORE")); + + arg = args.get(1); + assertThat(arg, instanceOf(UnresolvedFunction.class)); + uf = (UnresolvedFunction) arg; + assertThat(uf.name(), is("SCORE")); + } + + public void testFunctionWithFunctionWithArgAndParams() { + Function f = (Function) parser.createExpression("POWER(?, {fn POWER({fn ABS(?)}, {fN ABS(?)})})", + asList(new SqlTypedParamValue(DataType.LONG, 1), + new SqlTypedParamValue(DataType.LONG, 1), + new SqlTypedParamValue(DataType.LONG, 1))); + + assertEquals("POWER", f.functionName()); + assertEquals(2, f.arguments().size()); + Expression arg = f.arguments().get(1); + assertThat(arg, instanceOf(UnresolvedFunction.class)); + UnresolvedFunction uf = (UnresolvedFunction) arg; + assertThat(uf.name(), is("POWER")); + assertEquals(2, uf.arguments().size()); + + List<Expression> args = uf.arguments(); + arg = args.get(0); + assertThat(arg, instanceOf(UnresolvedFunction.class)); + uf = (UnresolvedFunction) arg; + assertThat(uf.name(), is("ABS")); + + arg = args.get(1); + assertThat(arg, instanceOf(UnresolvedFunction.class)); + uf = (UnresolvedFunction) arg; + assertThat(uf.name(), is("ABS")); + } + + public void testDateLiteral() { + Literal l = dateLiteral("2012-01-01"); + assertThat(l.dataType(), is(DataType.DATE)); + } + + public void testDateLiteralValidation() { + ParsingException ex = expectThrows(ParsingException.class, () -> dateLiteral("2012-13-01")); + assertEquals("line 1:2: Invalid date received; Cannot parse \"2012-13-01\": Value 13 for monthOfYear must be in the range [1,12]", + ex.getMessage()); + } + + public void testTimeLiteralUnsupported() { + SqlIllegalArgumentException ex = expectThrows(SqlIllegalArgumentException.class, () -> timeLiteral("10:10:10")); + assertThat(ex.getMessage(), is("Time (only) literals are not supported; a date component is required as well")); + } + + public void testTimeLiteralValidation() { + ParsingException ex = expectThrows(ParsingException.class, () -> timeLiteral("10:10:65")); + assertEquals("line 1:2: Invalid time received; Cannot parse \"10:10:65\": Value 65 for secondOfMinute must be in the range [0,59]", + ex.getMessage()); + } + + public void 
testTimestampLiteral() { + Literal l = timestampLiteral("2012-01-01 10:01:02.3456"); + assertThat(l.dataType(), is(DataType.DATE)); + } + + public void testTimestampLiteralValidation() { + ParsingException ex = expectThrows(ParsingException.class, () -> timestampLiteral("2012-01-01T10:01:02.3456")); + assertEquals( + "line 1:2: Invalid timestamp received; Invalid format: \"2012-01-01T10:01:02.3456\" is malformed at \"T10:01:02.3456\"", + ex.getMessage()); + } + + public void testGUID() { + Literal l = guidLiteral("12345678-90ab-cdef-0123-456789abcdef"); + assertThat(l.dataType(), is(DataType.KEYWORD)); + + l = guidLiteral("12345678-90AB-cdef-0123-456789ABCdef"); + assertThat(l.dataType(), is(DataType.KEYWORD)); + } + + public void testGUIDValidationHexa() { + ParsingException ex = expectThrows(ParsingException.class, () -> guidLiteral("12345678-90ab-cdef-0123-456789abcdeH")); + assertEquals("line 1:8: Invalid GUID, expected hexadecimal at offset[35], found [H]", ex.getMessage()); + } + + public void testGUIDValidationGroups() { + ParsingException ex = expectThrows(ParsingException.class, () -> guidLiteral("12345678A90ab-cdef-0123-456789abcdeH")); + assertEquals("line 1:8: Invalid GUID, expected group separator at offset [8], found [A]", ex.getMessage()); + } + + public void testGUIDValidationLength() { + ParsingException ex = expectThrows(ParsingException.class, () -> guidLiteral("12345678A90")); + assertEquals("line 1:8: Invalid GUID, too short", ex.getMessage()); + } + + + public void testLimit() { + Limit limit = limit(10); + Literal l = (Literal) limit.limit(); + Assert.assertThat(l.value(), is(10)); + } + + public void testLikeEscape() { + LikePattern pattern = likeEscape("|%tring", "|"); + assertThat(pattern.escape(), is('|')); + } +} \ No newline at end of file diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java index 2a3d87b65c964..71f4dab679c99 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/planner/QueryTranslatorTests.java @@ -139,4 +139,14 @@ public void testDateRangeCast() { assertEquals("date", rq.field()); assertEquals(DateTime.parse("1969-05-13T12:34:56Z"), rq.lower()); } + + public void testLikeConstructsNotSupported() { + LogicalPlan p = plan("SELECT LTRIM(keyword) lt FROM test WHERE LTRIM(keyword) LIKE '%a%'"); + assertTrue(p instanceof Project); + p = ((Project) p).child(); + assertTrue(p instanceof Filter); + Expression condition = ((Filter) p).condition(); + SqlIllegalArgumentException ex = expectThrows(SqlIllegalArgumentException.class, () -> QueryTranslator.toQuery(condition, false)); + assertEquals("Scalar function (LTRIM(keyword)) not allowed (yet) as arguments for LIKE", ex.getMessage()); + } } \ No newline at end of file diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/EmailService.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/EmailService.java index e0687ee5d6316..3d2ea583eddf2 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/EmailService.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/EmailService.java @@ -79,6 +79,10 @@ public class EmailService extends NotificationService { 
Setting.affixKeySetting("xpack.notification.email.account.", "smtp.local_address", (key) -> Setting.simpleString(key, Property.Dynamic, Property.NodeScope)); + private static final Setting.AffixSetting<String> SETTING_SMTP_SSL_TRUST_ADDRESS = + Setting.affixKeySetting("xpack.notification.email.account.", "smtp.ssl.trust", + (key) -> Setting.simpleString(key, Property.Dynamic, Property.NodeScope)); + private static final Setting.AffixSetting<Integer> SETTING_SMTP_LOCAL_PORT = Setting.affixKeySetting("xpack.notification.email.account.", "smtp.local_port", (key) -> Setting.intSetting(key, 25, Property.Dynamic, Property.NodeScope)); @@ -110,6 +114,7 @@ public EmailService(Settings settings, @Nullable CryptoService cryptoService, Cl clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_TIMEOUT, (s, o) -> {}, (s, o) -> {}); clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_CONNECTION_TIMEOUT, (s, o) -> {}, (s, o) -> {}); clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_WRITE_TIMEOUT, (s, o) -> {}, (s, o) -> {}); + clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_SSL_TRUST_ADDRESS, (s, o) -> {}, (s, o) -> {}); clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_LOCAL_ADDRESS, (s, o) -> {}, (s, o) -> {}); clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_LOCAL_PORT, (s, o) -> {}, (s, o) -> {}); clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_SEND_PARTIAL, (s, o) -> {}, (s, o) -> {}); @@ -167,7 +172,7 @@ public static List<Setting<?>> getSettings() { return Arrays.asList(SETTING_DEFAULT_ACCOUNT, SETTING_PROFILE, SETTING_EMAIL_DEFAULTS, SETTING_SMTP_AUTH, SETTING_SMTP_HOST, SETTING_SMTP_PASSWORD, SETTING_SMTP_PORT, SETTING_SMTP_STARTTLS_ENABLE, SETTING_SMTP_USER, SETTING_SMTP_STARTTLS_REQUIRED, SETTING_SMTP_TIMEOUT, SETTING_SMTP_CONNECTION_TIMEOUT, SETTING_SMTP_WRITE_TIMEOUT, SETTING_SMTP_LOCAL_ADDRESS, - SETTING_SMTP_LOCAL_PORT, SETTING_SMTP_SEND_PARTIAL, SETTING_SMTP_WAIT_ON_QUIT); + SETTING_SMTP_LOCAL_PORT, SETTING_SMTP_SEND_PARTIAL, SETTING_SMTP_WAIT_ON_QUIT, SETTING_SMTP_SSL_TRUST_ADDRESS); } } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/slack/message/SlackMessage.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/slack/message/SlackMessage.java index 9f7cd36c9106a..ffff28ce862bd 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/slack/message/SlackMessage.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/slack/message/SlackMessage.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.xpack.watcher.common.text.TextTemplate; import org.elasticsearch.xpack.watcher.common.text.TextTemplateEngine; +import org.elasticsearch.common.Nullable; import java.io.IOException; import java.util.ArrayList; @@ -29,7 +30,11 @@ public class SlackMessage implements MessageElement { final String text; final Attachment[] attachments; - public SlackMessage(String from, String[] to, String icon, String text, Attachment[] attachments) { + public SlackMessage(String from, String[] to, String icon, @Nullable String text, @Nullable Attachment[] attachments) { + if (text == null && attachments == null) { + throw new IllegalArgumentException("Both text and attachments cannot be null."); + } + this.from = from; this.to = to; this.icon = icon; diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/email/EmailServiceTests.java
b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/email/EmailServiceTests.java index d2d4e585afbb7..88bc500f10a2d 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/email/EmailServiceTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/email/EmailServiceTests.java @@ -68,6 +68,7 @@ public void testAccountSmtpPropertyConfiguration() { .put("xpack.notification.email.account.account4.smtp.local_port", "1025") .put("xpack.notification.email.account.account5.smtp.host", "localhost") .put("xpack.notification.email.account.account5.smtp.wait_on_quit", true) + .put("xpack.notification.email.account.account5.smtp.ssl.trust", "host1,host2,host3") .build(); EmailService emailService = new EmailService(settings, null, new ClusterSettings(Settings.EMPTY, new HashSet<>(EmailService.getSettings()))); @@ -100,5 +101,6 @@ public void testAccountSmtpPropertyConfiguration() { Account account5 = emailService.getAccount("account5"); Properties properties5 = account5.getConfig().smtp.properties; assertThat(properties5, hasEntry("mail.smtp.quitwait", "true")); + assertThat(properties5, hasEntry("mail.smtp.ssl.trust", "host1,host2,host3")); } } diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/slack/message/SlackMessageTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/slack/message/SlackMessageTests.java index 0432fa41d864c..14d732064e5e0 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/slack/message/SlackMessageTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/slack/message/SlackMessageTests.java @@ -49,7 +49,7 @@ public void testToXContent() throws Exception { } String icon = randomBoolean() ? null : randomAlphaOfLength(10); String text = randomBoolean() ? null : randomAlphaOfLength(50); - Attachment[] attachments = randomBoolean() ? null : new Attachment[randomIntBetween(0, 2)]; + Attachment[] attachments = (text != null && randomBoolean()) ? null : new Attachment[randomIntBetween(0, 2)]; if (attachments != null) { for (int i = 0; i < attachments.length; i++) { String fallback = randomBoolean() ? 
null : randomAlphaOfLength(10); @@ -461,6 +461,7 @@ public void testTemplateParseSelfGenerated() throws Exception { assertThat(parsed, equalTo(template)); } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/31948") public void testTemplateRender() throws Exception { Settings settings = SlackMessageDefaultsTests.randomSettings(); SlackMessageDefaults defaults = new SlackMessageDefaults(settings); @@ -481,7 +482,7 @@ public void testTemplateRender() throws Exception { if (randomBoolean()) { templateBuilder.setText(randomAlphaOfLength(10)); } - if (randomBoolean()) { + if (templateBuilder.text == null || randomBoolean()) { int count = randomIntBetween(0, 3); for (int i = 0; i < count; i++) { Attachment.Template.Builder attachmentBuilder = createRandomAttachmentTemplateBuilder(); @@ -600,6 +601,22 @@ public void testUrlPathIsFiltered() throws Exception { } } + public void testCanHaveNullText() throws Exception { + SlackMessage slackMessage = new SlackMessage("from", new String[] {"to"}, "icon", null, new Attachment[1]); + assertNull(slackMessage.getText()); + assertNotNull(slackMessage.getAttachments()); + } + + public void testCanHaveNullAttachments() throws Exception { + SlackMessage slackMessage = new SlackMessage("from", new String[] {"to"}, "icon", "text", null); + assertNotNull(slackMessage.getText()); + assertNull(slackMessage.getAttachments()); + } + + public void testCannotHaveNullAttachmentsAndNullText() throws Exception { + expectThrows(IllegalArgumentException.class, () -> new SlackMessage("from", new String[]{"to"}, "icon", null, null)); + } + private static void writeFieldIfNotNull(XContentBuilder builder, String field, Object value) throws IOException { if (value != null) { builder.field(field, value); diff --git a/x-pack/protocol/LICENSE.txt b/x-pack/protocol/LICENSE.txt new file mode 100644 index 0000000000000..d645695673349 --- /dev/null +++ b/x-pack/protocol/LICENSE.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. 
+ + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/x-pack/protocol/build.gradle b/x-pack/protocol/build.gradle new file mode 100644 index 0000000000000..7ca81c05e3947 --- /dev/null +++ b/x-pack/protocol/build.gradle @@ -0,0 +1,29 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +apply plugin: 'elasticsearch.build' + +description = 'Request and Response objects for x-pack that are used by the' + + ' high level rest client and x-pack itself' + +dependencies { + compileOnly "org.elasticsearch:elasticsearch:${version}" + + testCompile "org.elasticsearch.test:framework:${version}" +} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackInfoRequest.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/XPackInfoRequest.java similarity index 71% rename from x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackInfoRequest.java rename to x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/XPackInfoRequest.java index b3c88be93aaca..ce43b763e2313 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackInfoRequest.java +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/XPackInfoRequest.java @@ -1,9 +1,22 @@ /* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. */ -package org.elasticsearch.xpack.core.action; +package org.elasticsearch.protocol.xpack; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; @@ -14,6 +27,9 @@ import java.util.EnumSet; import java.util.Locale; +/** + * Fetch information about X-Pack from the cluster. + */ public class XPackInfoRequest extends ActionRequest { public enum Category { diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/XPackInfoResponse.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/XPackInfoResponse.java new file mode 100644 index 0000000000000..3b9032f092185 --- /dev/null +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/XPackInfoResponse.java @@ -0,0 +1,500 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.protocol.xpack; + +import org.elasticsearch.Version; +import org.elasticsearch.action.ActionResponse; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.Strings; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.ObjectParser.ValueType; +import org.elasticsearch.common.xcontent.ToXContentObject; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.protocol.xpack.license.LicenseStatus; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.EnumSet; +import java.util.HashMap; +import java.util.HashSet; +import java.util.List; +import java.util.Map; +import java.util.Objects; +import java.util.Set; +import java.util.concurrent.TimeUnit; +import java.util.stream.Collectors; + +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; +import static org.elasticsearch.common.xcontent.ConstructingObjectParser.optionalConstructorArg; + +public class XPackInfoResponse extends ActionResponse implements ToXContentObject { + /** + * Value of the license's expiration time if it should never expire. + */ + public static final long BASIC_SELF_GENERATED_LICENSE_EXPIRATION_MILLIS = Long.MAX_VALUE - TimeUnit.HOURS.toMillis(24 * 365); + // TODO move this constant to License.java once we move License.java to the protocol jar + + @Nullable private BuildInfo buildInfo; + @Nullable private LicenseInfo licenseInfo; + @Nullable private FeatureSetsInfo featureSetsInfo; + + public XPackInfoResponse() {} + + public XPackInfoResponse(@Nullable BuildInfo buildInfo, @Nullable LicenseInfo licenseInfo, @Nullable FeatureSetsInfo featureSetsInfo) { + this.buildInfo = buildInfo; + this.licenseInfo = licenseInfo; + this.featureSetsInfo = featureSetsInfo; + } + + /** + * @return The build info (incl. build hash and timestamp) + */ + public BuildInfo getBuildInfo() { + return buildInfo; + } + + /** + * @return The current license info (incl. UID, type/mode, status and expiry date). May return {@code null} when no + * license is currently installed. + */ + public LicenseInfo getLicenseInfo() { + return licenseInfo; + } + + /** + * @return The current status of the feature sets in X-Pack. Feature sets describe the features available/enabled in X-Pack.
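+ * May return {@code null} when the feature set information was not requested.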
+ */ + public FeatureSetsInfo getFeatureSetsInfo() { + return featureSetsInfo; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + super.writeTo(out); + out.writeOptionalWriteable(buildInfo); + out.writeOptionalWriteable(licenseInfo); + out.writeOptionalWriteable(featureSetsInfo); + } + + @Override + public void readFrom(StreamInput in) throws IOException { + this.buildInfo = in.readOptionalWriteable(BuildInfo::new); + this.licenseInfo = in.readOptionalWriteable(LicenseInfo::new); + this.featureSetsInfo = in.readOptionalWriteable(FeatureSetsInfo::new); + } + + @Override + public boolean equals(Object other) { + if (other == null || other.getClass() != getClass()) return false; + if (this == other) return true; + XPackInfoResponse rhs = (XPackInfoResponse) other; + return Objects.equals(buildInfo, rhs.buildInfo) + && Objects.equals(licenseInfo, rhs.licenseInfo) + && Objects.equals(featureSetsInfo, rhs.featureSetsInfo); + } + + @Override + public int hashCode() { + return Objects.hash(buildInfo, licenseInfo, featureSetsInfo); + } + + @Override + public String toString() { + return Strings.toString(this, true, false); + } + + private static final ConstructingObjectParser<XPackInfoResponse, Void> PARSER = new ConstructingObjectParser<>( + "xpack_info_response", true, (a, v) -> { + BuildInfo buildInfo = (BuildInfo) a[0]; + LicenseInfo licenseInfo = (LicenseInfo) a[1]; + @SuppressWarnings("unchecked") // This is how constructing object parser works + List<FeatureSetsInfo.FeatureSet> featureSets = (List<FeatureSetsInfo.FeatureSet>) a[2]; + FeatureSetsInfo featureSetsInfo = featureSets == null ? null : new FeatureSetsInfo(new HashSet<>(featureSets)); + return new XPackInfoResponse(buildInfo, licenseInfo, featureSetsInfo); + }); + static { + PARSER.declareObject(optionalConstructorArg(), BuildInfo.PARSER, new ParseField("build")); + /* + * licenseInfo is sort of "double optional" because it is + * optional but it can also be sent as `null`. + */ + PARSER.declareField(optionalConstructorArg(), (p, v) -> { + if (p.currentToken() == XContentParser.Token.VALUE_NULL) { + return null; + } + return LicenseInfo.PARSER.parse(p, v); + }, + new ParseField("license"), ValueType.OBJECT_OR_NULL); + PARSER.declareNamedObjects(optionalConstructorArg(), + (p, c, name) -> FeatureSetsInfo.FeatureSet.PARSER.parse(p, name), + new ParseField("features")); + } + public static XPackInfoResponse fromXContent(XContentParser parser) throws IOException { + return PARSER.parse(parser, null); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + + if (buildInfo != null) { + builder.field("build", buildInfo, params); + } + + EnumSet<XPackInfoRequest.Category> categories = XPackInfoRequest.Category + .toSet(Strings.splitStringByCommaToArray(params.param("categories", "_all"))); + if (licenseInfo != null) { + builder.field("license", licenseInfo, params); + } else if (categories.contains(XPackInfoRequest.Category.LICENSE)) { + // if the user requested the license info, and there is no license, we should send + // back an explicit null value (indicating there is no license). 
This is different + // than not adding the license info at all + builder.nullField("license"); + } + + if (featureSetsInfo != null) { + builder.field("features", featureSetsInfo, params); + } + + if (params.paramAsBoolean("human", true)) { + builder.field("tagline", "You know, for X"); + } + + return builder.endObject(); + } + + public static class LicenseInfo implements ToXContentObject, Writeable { + private final String uid; + private final String type; + private final String mode; + private final LicenseStatus status; + private final long expiryDate; + + public LicenseInfo(String uid, String type, String mode, LicenseStatus status, long expiryDate) { + this.uid = uid; + this.type = type; + this.mode = mode; + this.status = status; + this.expiryDate = expiryDate; + } + + public LicenseInfo(StreamInput in) throws IOException { + this(in.readString(), in.readString(), in.readString(), LicenseStatus.readFrom(in), in.readLong()); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(uid); + out.writeString(type); + out.writeString(mode); + status.writeTo(out); + out.writeLong(expiryDate); + } + + public String getUid() { + return uid; + } + + public String getType() { + return type; + } + + public String getMode() { + return mode; + } + + public long getExpiryDate() { + return expiryDate; + } + + public LicenseStatus getStatus() { + return status; + } + + @Override + public boolean equals(Object other) { + if (other == null || other.getClass() != getClass()) return false; + if (this == other) return true; + LicenseInfo rhs = (LicenseInfo) other; + return Objects.equals(uid, rhs.uid) + && Objects.equals(type, rhs.type) + && Objects.equals(mode, rhs.mode) + && Objects.equals(status, rhs.status) + && expiryDate == rhs.expiryDate; + } + + @Override + public int hashCode() { + return Objects.hash(uid, type, mode, status, expiryDate); + } + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "license_info", true, (a, v) -> { + String uid = (String) a[0]; + String type = (String) a[1]; + String mode = (String) a[2]; + LicenseStatus status = LicenseStatus.fromString((String) a[3]); + Long expiryDate = (Long) a[4]; + long primitiveExpiryDate = expiryDate == null ? 
BASIC_SELF_GENERATED_LICENSE_EXPIRATION_MILLIS : expiryDate; + return new LicenseInfo(uid, type, mode, status, primitiveExpiryDate); + }); + static { + PARSER.declareString(constructorArg(), new ParseField("uid")); + PARSER.declareString(constructorArg(), new ParseField("type")); + PARSER.declareString(constructorArg(), new ParseField("mode")); + PARSER.declareString(constructorArg(), new ParseField("status")); + PARSER.declareLong(optionalConstructorArg(), new ParseField("expiry_date_in_millis")); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject() + .field("uid", uid) + .field("type", type) + .field("mode", mode) + .field("status", status.label()); + if (expiryDate != BASIC_SELF_GENERATED_LICENSE_EXPIRATION_MILLIS) { + builder.timeField("expiry_date_in_millis", "expiry_date", expiryDate); + } + return builder.endObject(); + } + } + + public static class BuildInfo implements ToXContentObject, Writeable { + private final String hash; + private final String timestamp; + + public BuildInfo(String hash, String timestamp) { + this.hash = hash; + this.timestamp = timestamp; + } + + public BuildInfo(StreamInput input) throws IOException { + this(input.readString(), input.readString()); + } + + @Override + public void writeTo(StreamOutput output) throws IOException { + output.writeString(hash); + output.writeString(timestamp); + } + + public String getHash() { + return hash; + } + + public String getTimestamp() { + return timestamp; + } + + @Override + public boolean equals(Object other) { + if (other == null || other.getClass() != getClass()) return false; + if (this == other) return true; + BuildInfo rhs = (BuildInfo) other; + return Objects.equals(hash, rhs.hash) + && Objects.equals(timestamp, rhs.timestamp); + } + + @Override + public int hashCode() { + return Objects.hash(hash, timestamp); + } + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "build_info", true, (a, v) -> new BuildInfo((String) a[0], (String) a[1])); + static { + PARSER.declareString(constructorArg(), new ParseField("hash")); + PARSER.declareString(constructorArg(), new ParseField("date")); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + return builder.startObject() + .field("hash", hash) + .field("date", timestamp) + .endObject(); + } + } + + public static class FeatureSetsInfo implements ToXContentObject, Writeable { + private final Map featureSets; + + public FeatureSetsInfo(Set featureSets) { + Map map = new HashMap<>(featureSets.size()); + for (FeatureSet featureSet : featureSets) { + map.put(featureSet.name, featureSet); + } + this.featureSets = Collections.unmodifiableMap(map); + } + + public FeatureSetsInfo(StreamInput in) throws IOException { + int size = in.readVInt(); + Map featureSets = new HashMap<>(size); + for (int i = 0; i < size; i++) { + FeatureSet featureSet = new FeatureSet(in); + featureSets.put(featureSet.name, featureSet); + } + this.featureSets = Collections.unmodifiableMap(featureSets); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeVInt(featureSets.size()); + for (FeatureSet featureSet : featureSets.values()) { + featureSet.writeTo(out); + } + } + + public Map getFeatureSets() { + return featureSets; + } + + @Override + public boolean equals(Object other) { + if (other == null || other.getClass() != getClass()) return false; + if (this == other) return true; + 
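// the map is keyed by feature set name, so this compares each FeatureSet by value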
FeatureSetsInfo rhs = (FeatureSetsInfo) other; + return Objects.equals(featureSets, rhs.featureSets); + } + + @Override + public int hashCode() { + return Objects.hash(featureSets); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + List names = new ArrayList<>(this.featureSets.keySet()).stream().sorted().collect(Collectors.toList()); + for (String name : names) { + builder.field(name, featureSets.get(name), params); + } + return builder.endObject(); + } + + public static class FeatureSet implements ToXContentObject, Writeable { + private final String name; + @Nullable private final String description; + private final boolean available; + private final boolean enabled; + @Nullable private final Map nativeCodeInfo; + + public FeatureSet(String name, @Nullable String description, boolean available, boolean enabled, + @Nullable Map nativeCodeInfo) { + this.name = name; + this.description = description; + this.available = available; + this.enabled = enabled; + this.nativeCodeInfo = nativeCodeInfo; + } + + public FeatureSet(StreamInput in) throws IOException { + this(in.readString(), in.readOptionalString(), in.readBoolean(), in.readBoolean(), + in.getVersion().onOrAfter(Version.V_5_4_0) ? in.readMap() : null); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(name); + out.writeOptionalString(description); + out.writeBoolean(available); + out.writeBoolean(enabled); + if (out.getVersion().onOrAfter(Version.V_5_4_0)) { + out.writeMap(nativeCodeInfo); + } + } + + public String name() { + return name; + } + + @Nullable + public String description() { + return description; + } + + public boolean available() { + return available; + } + + public boolean enabled() { + return enabled; + } + + @Nullable + public Map nativeCodeInfo() { + return nativeCodeInfo; + } + + @Override + public boolean equals(Object other) { + if (other == null || other.getClass() != getClass()) return false; + if (this == other) return true; + FeatureSet rhs = (FeatureSet) other; + return Objects.equals(name, rhs.name) + && Objects.equals(description, rhs.description) + && available == rhs.available + && enabled == rhs.enabled + && Objects.equals(nativeCodeInfo, rhs.nativeCodeInfo); + } + + @Override + public int hashCode() { + return Objects.hash(name, description, available, enabled, nativeCodeInfo); + } + + private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( + "feature_set", true, (a, name) -> { + String description = (String) a[0]; + boolean available = (Boolean) a[1]; + boolean enabled = (Boolean) a[2]; + @SuppressWarnings("unchecked") // Matches up with declaration below + Map nativeCodeInfo = (Map) a[3]; + return new FeatureSet(name, description, available, enabled, nativeCodeInfo); + }); + static { + PARSER.declareString(optionalConstructorArg(), new ParseField("description")); + PARSER.declareBoolean(constructorArg(), new ParseField("available")); + PARSER.declareBoolean(constructorArg(), new ParseField("enabled")); + PARSER.declareObject(optionalConstructorArg(), (p, name) -> p.map(), new ParseField("native_code_info")); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + if (description != null) { + builder.field("description", description); + } + builder.field("available", available); + builder.field("enabled", enabled); + if (nativeCodeInfo != null) { + 
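// nativeCodeInfo is optional on the wire, so it is only rendered when the feature set supplied it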
builder.field("native_code_info", nativeCodeInfo); + } + return builder.endObject(); + } + } + } +} diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/license/LicenseStatus.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/license/LicenseStatus.java new file mode 100644 index 0000000000000..ea3e4f8a8965b --- /dev/null +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/license/LicenseStatus.java @@ -0,0 +1,67 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.protocol.xpack.license; + +import java.io.IOException; + +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; + +/** + * Status of an X-Pack license. + */ +public enum LicenseStatus implements Writeable { + + ACTIVE("active"), + INVALID("invalid"), + EXPIRED("expired"); + + private final String label; + + LicenseStatus(String label) { + this.label = label; + } + + public String label() { + return label; + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeString(label); + } + + public static LicenseStatus readFrom(StreamInput in) throws IOException { + return fromString(in.readString()); + } + + public static LicenseStatus fromString(String value) { + switch (value) { + case "active": + return ACTIVE; + case "invalid": + return INVALID; + case "expired": + return EXPIRED; + default: + throw new IllegalArgumentException("unknown license status [" + value + "]"); + } + } +} diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/license/package-info.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/license/package-info.java new file mode 100644 index 0000000000000..ca859f29e440b --- /dev/null +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/license/package-info.java @@ -0,0 +1,24 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +/** + * Request and Response objects for the default distribution's License + * APIs. + */ +package org.elasticsearch.protocol.xpack.license; diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/package-info.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/package-info.java new file mode 100644 index 0000000000000..fab18ccc637e8 --- /dev/null +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/package-info.java @@ -0,0 +1,23 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +/** + * Request and Response objects for miscellaneous X-Pack APIs. + */ +package org.elasticsearch.protocol.xpack; diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/security/package-info.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/security/package-info.java new file mode 100644 index 0000000000000..216990d9f0ec0 --- /dev/null +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/security/package-info.java @@ -0,0 +1,24 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +/** + * Request and Response objects for the default distribution's Security + * APIs. + */ +package org.elasticsearch.protocol.xpack.security; diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/watcher/package-info.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/watcher/package-info.java new file mode 100644 index 0000000000000..d34fd598ab170 --- /dev/null +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/watcher/package-info.java @@ -0,0 +1,24 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +/** + * Request and Response objects for the default distribution's Watcher + * APIs. + */ +package org.elasticsearch.protocol.xpack.watcher; diff --git a/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/XPackInfoResponseTests.java b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/XPackInfoResponseTests.java new file mode 100644 index 0000000000000..820b8200ed940 --- /dev/null +++ b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/XPackInfoResponseTests.java @@ -0,0 +1,160 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.protocol.xpack; + +import org.elasticsearch.common.xcontent.ToXContent; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.protocol.xpack.XPackInfoResponse.BuildInfo; +import org.elasticsearch.protocol.xpack.XPackInfoResponse.LicenseInfo; +import org.elasticsearch.protocol.xpack.XPackInfoResponse.FeatureSetsInfo; +import org.elasticsearch.protocol.xpack.XPackInfoResponse.FeatureSetsInfo.FeatureSet; +import org.elasticsearch.protocol.xpack.license.LicenseStatus; +import org.elasticsearch.test.AbstractStreamableXContentTestCase; + +import java.util.HashMap; +import java.util.HashSet; +import java.util.Map; +import java.util.Set; +import java.util.function.Function; +import java.util.function.Predicate; +import java.io.IOException; + +public class XPackInfoResponseTests extends AbstractStreamableXContentTestCase { + @Override + protected XPackInfoResponse doParseInstance(XContentParser parser) throws IOException { + return XPackInfoResponse.fromXContent(parser); + } + + @Override + protected XPackInfoResponse createBlankInstance() { + return new XPackInfoResponse(); + } + + @Override + protected Predicate getRandomFieldsExcludeFilter() { + return path -> path.equals("features") + || (path.startsWith("features") && path.endsWith("native_code_info")); + } + + @Override + protected ToXContent.Params getToXContentParams() { + Map params = new HashMap<>(); + if (randomBoolean()) { + params.put("human", randomBoolean() ? "true" : "false"); + } + if (randomBoolean()) { + params.put("categories", "_none"); + } + return new ToXContent.MapParams(params); + } + + @Override + protected XPackInfoResponse createTestInstance() { + return new XPackInfoResponse( + randomBoolean() ? 
null : randomBuildInfo(), + randomBoolean() ? null : randomLicenseInfo(), + randomBoolean() ? null : randomFeatureSetsInfo()); + } + + @Override + protected XPackInfoResponse mutateInstance(XPackInfoResponse response) { + @SuppressWarnings("unchecked") + Function mutator = randomFrom( + r -> new XPackInfoResponse( + mutateBuildInfo(r.getBuildInfo()), + r.getLicenseInfo(), + r.getFeatureSetsInfo()), + r -> new XPackInfoResponse( + r.getBuildInfo(), + mutateLicenseInfo(r.getLicenseInfo()), + r.getFeatureSetsInfo()), + r -> new XPackInfoResponse( + r.getBuildInfo(), + r.getLicenseInfo(), + mutateFeatureSetsInfo(r.getFeatureSetsInfo()))); + return mutator.apply(response); + } + + private BuildInfo randomBuildInfo() { + return new BuildInfo( + randomAlphaOfLength(10), + randomAlphaOfLength(15)); + } + + private BuildInfo mutateBuildInfo(BuildInfo buildInfo) { + if (buildInfo == null) { + return randomBuildInfo(); + } + return null; + } + + private LicenseInfo randomLicenseInfo() { + return new LicenseInfo( + randomAlphaOfLength(10), + randomAlphaOfLength(4), + randomAlphaOfLength(5), + randomFrom(LicenseStatus.values()), + randomLong()); + } + + private LicenseInfo mutateLicenseInfo(LicenseInfo licenseInfo) { + if (licenseInfo == null) { + return randomLicenseInfo(); + } + return null; + } + + private FeatureSetsInfo randomFeatureSetsInfo() { + int size = between(0, 10); + Set featureSets = new HashSet<>(size); + while (featureSets.size() < size) { + featureSets.add(randomFeatureSet()); + } + return new FeatureSetsInfo(featureSets); + } + + private FeatureSetsInfo mutateFeatureSetsInfo(FeatureSetsInfo featureSetsInfo) { + if (featureSetsInfo == null) { + return randomFeatureSetsInfo(); + } + return null; + } + + private FeatureSet randomFeatureSet() { + return new FeatureSet( + randomAlphaOfLength(5), + randomBoolean() ? null : randomAlphaOfLength(20), + randomBoolean(), + randomBoolean(), + randomNativeCodeInfo()); + } + + private Map randomNativeCodeInfo() { + if (randomBoolean()) { + return null; + } + int size = between(0, 10); + Map nativeCodeInfo = new HashMap<>(size); + while (nativeCodeInfo.size() < size) { + nativeCodeInfo.put(randomAlphaOfLength(5), randomAlphaOfLength(5)); + } + return nativeCodeInfo; + } +} diff --git a/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/license/LicenseStatusTests.java b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/license/LicenseStatusTests.java new file mode 100644 index 0000000000000..c256e7562f7f8 --- /dev/null +++ b/x-pack/protocol/src/test/java/org/elasticsearch/protocol/xpack/license/LicenseStatusTests.java @@ -0,0 +1,30 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
diff --git a/x-pack/qa/ml-native-multi-node-tests/build.gradle b/x-pack/qa/ml-native-multi-node-tests/build.gradle new file mode 100644 index 0000000000000..286d4daee8aa5 --- /dev/null +++ b/x-pack/qa/ml-native-multi-node-tests/build.gradle @@ -0,0 +1,85 @@ +import org.elasticsearch.gradle.LoggedExec + +apply plugin: 'elasticsearch.standalone-rest-test' +apply plugin: 'elasticsearch.rest-test' + +dependencies { + testCompile project(path: xpackModule('core'), configuration: 'runtime') + testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') + testCompile project(path: xpackModule('ml'), configuration: 'runtime') + testCompile project(path: xpackModule('ml'), configuration: 'testArtifacts') +} + +integTestRunner { + /* + * We have to disable setting the number of available processors as tests in the same JVM randomize processors and will step on each + * other if we allow them to set the number of available processors as it's set-once in Netty. + */ + systemProperty 'es.set.netty.runtime.available.processors', 'false' +} + +// location of generated keystores and certificates +File keystoreDir = new File(project.buildDir, 'keystore') + +// Generate the node's keystore +File nodeKeystore = new File(keystoreDir, 'test-node.jks') +task createNodeKeyStore(type: LoggedExec) { + doFirst { + if (nodeKeystore.parentFile.exists() == false) { + nodeKeystore.parentFile.mkdirs() + } + if (nodeKeystore.exists()) { + delete nodeKeystore + } + } + executable = new File(project.runtimeJavaHome, 'bin/keytool') + standardInput = new ByteArrayInputStream('FirstName LastName\nUnit\nOrganization\nCity\nState\nNL\nyes\n\n'.getBytes('UTF-8')) + args '-genkey', + '-alias', 'test-node', + '-keystore', nodeKeystore, + '-keyalg', 'RSA', + '-keysize', '2048', + '-validity', '712', + '-dname', 'CN=smoke-test-plugins-ssl', + '-keypass', 'keypass', + '-storepass', 'keypass' +} + +// Add keystores to the test classpath: the tests expect to find them there +sourceSets.test.resources.srcDir(keystoreDir) +processTestResources.dependsOn(createNodeKeyStore) + +integTestCluster { + dependsOn createNodeKeyStore + setting 'xpack.security.enabled', 'true' + setting 'xpack.ml.enabled', 'true' + setting 'logger.org.elasticsearch.xpack.ml.datafeed', 'TRACE' + setting 'xpack.monitoring.enabled', 'false' + setting 'xpack.security.authc.token.enabled', 'true' + setting 'xpack.security.transport.ssl.enabled', 'true' + setting 'xpack.security.transport.ssl.keystore.path', nodeKeystore.name + setting 'xpack.security.transport.ssl.verification_mode', 'certificate' + setting 'xpack.security.audit.enabled', 'true' + setting 'xpack.license.self_generated.type', 'trial' + + keystoreSetting 'bootstrap.password', 'x-pack-test-password' + keystoreSetting 'xpack.security.transport.ssl.keystore.secure_password', 'keypass' + + numNodes = 3 + + setupCommand 'setupDummyUser', + 'bin/elasticsearch-users', 'useradd', 'x_pack_rest_user', '-p', 'x-pack-test-password', '-r', 'superuser' + + extraConfigFile nodeKeystore.name, nodeKeystore + + waitCondition = { node, ant -> + File tmpFile = new File(node.cwd, 'wait.success') + ant.get(src: "http://${node.httpUri()}/_cluster/health?wait_for_nodes=>=${numNodes}&wait_for_status=yellow", + dest: tmpFile.toString(), + username: 'x_pack_rest_user', + password: 'x-pack-test-password', + ignoreerrors: true, + retries: 10) + return tmpFile.exists() + } +}
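If the keytool invocation ever needs sanity checking, the generated JKS file opens with the plain java.security API; a hypothetical standalone helper (not part of the build), with the path and passwords mirroring the createNodeKeyStore task above:

    import java.io.InputStream;
    import java.nio.file.Files;
    import java.nio.file.Paths;
    import java.security.KeyStore;

    public class VerifyNodeKeystore {
        public static void main(String[] args) throws Exception {
            try (InputStream in = Files.newInputStream(Paths.get("build/keystore/test-node.jks"))) {
                KeyStore ks = KeyStore.getInstance("JKS");
                ks.load(in, "keypass".toCharArray()); // the -storepass given to keytool
                System.out.println("test-node alias present: " + ks.containsAlias("test-node"));
            }
        }
    }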
"http://${node.httpUri()}/_cluster/health?wait_for_nodes=>=${numNodes}&wait_for_status=yellow", + dest: tmpFile.toString(), + username: 'x_pack_rest_user', + password: 'x-pack-test-password', + ignoreerrors: true, + retries: 10) + return tmpFile.exists() + } +} diff --git a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectMemoryLimitIT.java b/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectMemoryLimitIT.java similarity index 97% rename from x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectMemoryLimitIT.java rename to x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectMemoryLimitIT.java index e9ba002779e37..2a332fb05d555 100644 --- a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectMemoryLimitIT.java +++ b/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectMemoryLimitIT.java @@ -37,7 +37,6 @@ public void cleanUpTest() { cleanUp(); } - @AwaitsFix(bugUrl = "https://github.com/elastic/ml-cpp/pulls/122") public void testTooManyPartitions() throws Exception { Detector.Builder detector = new Detector.Builder("count", null); detector.setPartitionFieldName("user"); @@ -63,7 +62,7 @@ public void testTooManyPartitions() throws Exception { long timestamp = now - 8 * bucketSpan.seconds(); List data = new ArrayList<>(); while (timestamp < now) { - for (int i = 0; i < 10000; i++) { + for (int i = 0; i < 11000; i++) { // It's important that the values used here are either always represented in less than 16 UTF-8 bytes or // always represented in more than 22 UTF-8 bytes. Otherwise platform differences in when the small string // optimisation is used will make the results of this test very different for the different platforms. 
@@ -83,7 +82,6 @@ public void testTooManyPartitions() throws Exception { assertThat(modelSizeStats.getMemoryStatus(), equalTo(ModelSizeStats.MemoryStatus.HARD_LIMIT)); } - @AwaitsFix(bugUrl = "https://github.com/elastic/ml-cpp/pulls/122") public void testTooManyByFields() throws Exception { Detector.Builder detector = new Detector.Builder("count", null); detector.setByFieldName("user"); @@ -129,7 +127,6 @@ public void testTooManyByFields() throws Exception { assertThat(modelSizeStats.getMemoryStatus(), equalTo(ModelSizeStats.MemoryStatus.HARD_LIMIT)); } - @AwaitsFix(bugUrl = "https://github.com/elastic/ml-cpp/pulls/122") public void testTooManyByAndOverFields() throws Exception { Detector.Builder detector = new Detector.Builder("count", null); detector.setByFieldName("department"); @@ -179,7 +176,6 @@ public void testTooManyByAndOverFields() throws Exception { assertThat(modelSizeStats.getMemoryStatus(), equalTo(ModelSizeStats.MemoryStatus.HARD_LIMIT)); } - @AwaitsFix(bugUrl = "https://github.com/elastic/ml-cpp/pulls/122") public void testManyDistinctOverFields() throws Exception { Detector.Builder detector = new Detector.Builder("sum", "value"); detector.setOverFieldName("user"); diff --git a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/BasicRenormalizationIT.java b/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/BasicRenormalizationIT.java similarity index 100% rename from x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/BasicRenormalizationIT.java rename to x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/BasicRenormalizationIT.java diff --git a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/CategorizationIT.java b/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/CategorizationIT.java similarity index 100% rename from x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/CategorizationIT.java rename to x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/CategorizationIT.java diff --git a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsIT.java b/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsIT.java similarity index 100% rename from x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsIT.java rename to x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsIT.java diff --git a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsRestIT.java b/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsRestIT.java similarity index 100% rename from x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsRestIT.java rename to x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DatafeedJobsRestIT.java diff --git a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DeleteExpiredDataIT.java b/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DeleteExpiredDataIT.java similarity index 100% rename from x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DeleteExpiredDataIT.java rename to 
x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DeleteExpiredDataIT.java diff --git a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DetectionRulesIT.java b/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DetectionRulesIT.java similarity index 100% rename from x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DetectionRulesIT.java rename to x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/DetectionRulesIT.java diff --git a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ForecastIT.java b/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ForecastIT.java similarity index 100% rename from x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ForecastIT.java rename to x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ForecastIT.java diff --git a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/InterimResultsDeletedAfterReopeningJobIT.java b/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/InterimResultsDeletedAfterReopeningJobIT.java similarity index 100% rename from x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/InterimResultsDeletedAfterReopeningJobIT.java rename to x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/InterimResultsDeletedAfterReopeningJobIT.java diff --git a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java b/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java similarity index 100% rename from x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java rename to x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java diff --git a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlNativeAutodetectIntegTestCase.java b/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlNativeAutodetectIntegTestCase.java similarity index 96% rename from x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlNativeAutodetectIntegTestCase.java rename to x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlNativeAutodetectIntegTestCase.java index 4e6fb03497e6a..c9956e6092fc5 100644 --- a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlNativeAutodetectIntegTestCase.java +++ b/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlNativeAutodetectIntegTestCase.java @@ -6,6 +6,7 @@ package org.elasticsearch.xpack.ml.integration; import org.elasticsearch.action.admin.cluster.node.tasks.list.ListTasksRequest; +import org.elasticsearch.action.get.GetResponse; import org.elasticsearch.action.search.SearchResponse; import org.elasticsearch.client.Client; import org.elasticsearch.cluster.ClusterModule; @@ -342,21 +343,17 @@ protected void waitForecastToFinish(String jobId, String forecastId) throws Exce } protected ForecastRequestStats getForecastStats(String jobId, String forecastId) { - SearchResponse searchResponse = 
client().prepareSearch(AnomalyDetectorsIndex.jobResultsAliasedName(jobId)) - .setQuery(QueryBuilders.boolQuery() - .filter(QueryBuilders.termQuery(Result.RESULT_TYPE.getPreferredName(), ForecastRequestStats.RESULT_TYPE_VALUE)) - .filter(QueryBuilders.termQuery(Job.ID.getPreferredName(), jobId)) - .filter(QueryBuilders.termQuery(ForecastRequestStats.FORECAST_ID.getPreferredName(), forecastId))) + GetResponse getResponse = client().prepareGet() + .setIndex(AnomalyDetectorsIndex.jobResultsAliasedName(jobId)) + .setId(ForecastRequestStats.documentId(jobId, forecastId)) .execute().actionGet(); - SearchHits hits = searchResponse.getHits(); - if (hits.getTotalHits() == 0) { + + if (getResponse.isExists() == false) { return null; } - assertThat(hits.getTotalHits(), equalTo(1L)); - try { - XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser( + try (XContentParser parser = XContentFactory.xContent(XContentType.JSON).createParser( NamedXContentRegistry.EMPTY, DeprecationHandler.THROW_UNSUPPORTED_OPERATION, - hits.getHits()[0].getSourceRef().streamInput()); + getResponse.getSourceAsBytesRef().streamInput())) { return ForecastRequestStats.STRICT_PARSER.apply(parser, null); } catch (IOException e) { throw new IllegalStateException(e); diff --git a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ModelPlotsIT.java b/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ModelPlotsIT.java similarity index 100% rename from x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ModelPlotsIT.java rename to x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ModelPlotsIT.java diff --git a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/OverallBucketsIT.java b/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/OverallBucketsIT.java similarity index 100% rename from x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/OverallBucketsIT.java rename to x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/OverallBucketsIT.java diff --git a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/PersistJobIT.java b/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/PersistJobIT.java similarity index 100% rename from x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/PersistJobIT.java rename to x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/PersistJobIT.java diff --git a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ReopenJobWithGapIT.java b/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ReopenJobWithGapIT.java similarity index 100% rename from x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ReopenJobWithGapIT.java rename to x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ReopenJobWithGapIT.java diff --git a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/RestoreModelSnapshotIT.java b/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/RestoreModelSnapshotIT.java similarity index 91% rename from 
x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/RestoreModelSnapshotIT.java rename to x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/RestoreModelSnapshotIT.java index c5bc7c4ed1426..9ff80bc739b16 100644 --- a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/RestoreModelSnapshotIT.java +++ b/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/RestoreModelSnapshotIT.java @@ -23,7 +23,11 @@ import java.util.Map; import java.util.stream.Collectors; +import static org.hamcrest.Matchers.anyOf; +import static org.hamcrest.Matchers.empty; import static org.hamcrest.Matchers.equalTo; +import static org.hamcrest.Matchers.greaterThan; +import static org.hamcrest.Matchers.nullValue; /** * This test aims to catch regressions where, @@ -69,7 +73,9 @@ public void test() throws Exception { String forecastId = forecast(job.getId(), TimeValue.timeValueHours(3), null); waitForecastToFinish(job.getId(), forecastId); ForecastRequestStats forecastStats = getForecastStats(job.getId(), forecastId); - assertThat(forecastStats.getStatus(), equalTo(ForecastRequestStats.ForecastRequestStatus.FINISHED)); + assertThat(forecastStats.getMessages(), anyOf(nullValue(), empty())); + assertThat(forecastStats.getMemoryUsage(), greaterThan(0L)); + assertEquals(3L, forecastStats.getRecordCount()); closeJob(job.getId()); diff --git a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/RevertModelSnapshotIT.java b/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/RevertModelSnapshotIT.java similarity index 100% rename from x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/RevertModelSnapshotIT.java rename to x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/RevertModelSnapshotIT.java diff --git a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ScheduledEventsIT.java b/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ScheduledEventsIT.java similarity index 100% rename from x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ScheduledEventsIT.java rename to x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/ScheduledEventsIT.java diff --git a/x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/UpdateInterimResultsIT.java b/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/UpdateInterimResultsIT.java similarity index 100% rename from x-pack/qa/ml-native-tests/src/test/java/org/elasticsearch/xpack/ml/integration/UpdateInterimResultsIT.java rename to x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/UpdateInterimResultsIT.java diff --git a/x-pack/qa/ml-native-tests/build.gradle b/x-pack/qa/ml-native-tests/build.gradle deleted file mode 100644 index 657aa7cfef68b..0000000000000 --- a/x-pack/qa/ml-native-tests/build.gradle +++ /dev/null @@ -1,84 +0,0 @@ -import org.elasticsearch.gradle.LoggedExec - -apply plugin: 'elasticsearch.standalone-rest-test' -apply plugin: 'elasticsearch.rest-test' - -dependencies { - testCompile project(path: xpackModule('core'), configuration: 'runtime') - testCompile project(path: xpackModule('core'), configuration: 'testArtifacts') - testCompile project(path: xpackModule('ml'), configuration:
'runtime') - testCompile project(path: xpackModule('ml'), configuration: 'testArtifacts') -} - -integTestRunner { - /* - * We have to disable setting the number of available processors as tests in the same JVM randomize processors and will step on each - * other if we allow them to set the number of available processors as it's set-once in Netty. - */ - systemProperty 'es.set.netty.runtime.available.processors', 'false' -} - -// location of generated keystores and certificates -File keystoreDir = new File(project.buildDir, 'keystore') - -// Generate the node's keystore -File nodeKeystore = new File(keystoreDir, 'test-node.jks') -task createNodeKeyStore(type: LoggedExec) { - doFirst { - if (nodeKeystore.parentFile.exists() == false) { - nodeKeystore.parentFile.mkdirs() - } - if (nodeKeystore.exists()) { - delete nodeKeystore - } - } - executable = new File(project.runtimeJavaHome, 'bin/keytool') - standardInput = new ByteArrayInputStream('FirstName LastName\nUnit\nOrganization\nCity\nState\nNL\nyes\n\n'.getBytes('UTF-8')) - args '-genkey', - '-alias', 'test-node', - '-keystore', nodeKeystore, - '-keyalg', 'RSA', - '-keysize', '2048', - '-validity', '712', - '-dname', 'CN=smoke-test-plugins-ssl', - '-keypass', 'keypass', - '-storepass', 'keypass' -} - -// Add keystores to test classpath: it expects it there -sourceSets.test.resources.srcDir(keystoreDir) -processTestResources.dependsOn(createNodeKeyStore) - -integTestCluster { - dependsOn createNodeKeyStore - setting 'xpack.security.enabled', 'true' - setting 'xpack.ml.enabled', 'true' - setting 'logger.org.elasticsearch.xpack.ml.datafeed', 'TRACE' - setting 'xpack.monitoring.enabled', 'false' - setting 'xpack.security.authc.token.enabled', 'true' - setting 'xpack.security.transport.ssl.enabled', 'true' - setting 'xpack.security.transport.ssl.keystore.path', nodeKeystore.name - setting 'xpack.security.transport.ssl.verification_mode', 'certificate' - setting 'xpack.security.audit.enabled', 'true' - setting 'xpack.license.self_generated.type', 'trial' - setting 'xpack.ml.min_disk_space_off_heap', '200mb' - - keystoreSetting 'bootstrap.password', 'x-pack-test-password' - keystoreSetting 'xpack.security.transport.ssl.keystore.secure_password', 'keypass' - - setupCommand 'setupDummyUser', - 'bin/elasticsearch-users', 'useradd', 'x_pack_rest_user', '-p', 'x-pack-test-password', '-r', 'superuser' - - extraConfigFile nodeKeystore.name, nodeKeystore - - waitCondition = { node, ant -> - File tmpFile = new File(node.cwd, 'wait.success') - ant.get(src: "http://${node.httpUri()}/_cluster/health?wait_for_nodes=>=${numNodes}&wait_for_status=yellow", - dest: tmpFile.toString(), - username: 'x_pack_rest_user', - password: 'x-pack-test-password', - ignoreerrors: true, - retries: 10) - return tmpFile.exists() - } -} diff --git a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/10_basic.yml b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/10_basic.yml index 3dd1f708959ed..66c92797eef2a 100644 --- a/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/10_basic.yml +++ b/x-pack/qa/rolling-upgrade/src/test/resources/rest-api-spec/test/mixed_cluster/10_basic.yml @@ -6,7 +6,10 @@ wait_for_active_shards: all body: settings: - number_of_replicas: 0 +# we use 1 replica to make sure we don't have shards relocating. 
Relocating a shard with +# a scroll on it prevents shards from moving back onto a node where a scroll is running (it holds the shard lock) +# see https://github.com/elastic/elasticsearch/issues/31827 + number_of_replicas: 1 index.routing.allocation.include.upgraded: true - do: diff --git a/x-pack/qa/smoke-test-plugins-ssl/src/test/java/org/elasticsearch/smoketest/SmokeTestMonitoringWithSecurityIT.java b/x-pack/qa/smoke-test-plugins-ssl/src/test/java/org/elasticsearch/smoketest/SmokeTestMonitoringWithSecurityIT.java index c427d8bf32c86..52c2a7dfa2d2d 100644 --- a/x-pack/qa/smoke-test-plugins-ssl/src/test/java/org/elasticsearch/smoketest/SmokeTestMonitoringWithSecurityIT.java +++ b/x-pack/qa/smoke-test-plugins-ssl/src/test/java/org/elasticsearch/smoketest/SmokeTestMonitoringWithSecurityIT.java @@ -127,6 +127,7 @@ private boolean getMonitoringUsageExportersDefined() throws Exception { return monitoringUsage.get().getExporters().isEmpty() == false; } + @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/31940") public void testHTTPExporterWithSSL() throws Exception { // Ensures that the exporter is actually on assertBusy(() -> assertThat("[_http] exporter is not defined", getMonitoringUsageExportersDefined(), is(true))); diff --git a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/cli/ShowTestCase.java b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/cli/ShowTestCase.java index 2605f6c27ce9b..32d1a67e5620f 100644 --- a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/cli/ShowTestCase.java +++ b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/cli/ShowTestCase.java @@ -48,6 +48,9 @@ public void testShowFunctionsLikePrefix() throws IOException { assertThat(readLine(), containsString("----------")); assertThat(readLine(), RegexMatcher.matches("\\s*LOG\\s*\\|\\s*SCALAR\\s*")); assertThat(readLine(), RegexMatcher.matches("\\s*LOG10\\s*\\|\\s*SCALAR\\s*")); + assertThat(readLine(), RegexMatcher.matches("\\s*LCASE\\s*\\|\\s*SCALAR\\s*")); + assertThat(readLine(), RegexMatcher.matches("\\s*LENGTH\\s*\\|\\s*SCALAR\\s*")); + assertThat(readLine(), RegexMatcher.matches("\\s*LTRIM\\s*\\|\\s*SCALAR\\s*")); assertEquals("", readLine()); } diff --git a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/CsvSpecTestCase.java b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/CsvSpecTestCase.java index 99e8432370471..4aa599290e6e1 100644 --- a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/CsvSpecTestCase.java +++ b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/CsvSpecTestCase.java @@ -37,6 +37,7 @@ public static List<Object[]> readScriptSpec() throws Exception { tests.addAll(readScriptSpec("/alias.csv-spec", parser)); tests.addAll(readScriptSpec("/nulls.csv-spec", parser)); tests.addAll(readScriptSpec("/nested.csv-spec", parser)); + tests.addAll(readScriptSpec("/functions.csv-spec", parser)); return tests; } diff --git a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/SqlSpecTestCase.java b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/SqlSpecTestCase.java index 3b5cae742d34b..b782e1474ea85 100644 --- a/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/SqlSpecTestCase.java +++ b/x-pack/qa/sql/src/main/java/org/elasticsearch/xpack/qa/sql/jdbc/SqlSpecTestCase.java @@ -34,6 +34,7 @@ public static List<Object[]> readScriptSpec() throws Exception { tests.addAll(readScriptSpec("/math.sql-spec", parser)); tests.addAll(readScriptSpec("/agg.sql-spec", parser));
tests.addAll(readScriptSpec("/arithmetic.sql-spec", parser)); + tests.addAll(readScriptSpec("/string-functions.sql-spec", parser)); return tests; } diff --git a/x-pack/qa/sql/src/main/resources/command.csv-spec b/x-pack/qa/sql/src/main/resources/command.csv-spec index d54fb6bf1554c..47615a4c3ae84 100644 --- a/x-pack/qa/sql/src/main/resources/command.csv-spec +++ b/x-pack/qa/sql/src/main/resources/command.csv-spec @@ -69,6 +69,16 @@ SIN |SCALAR SINH |SCALAR SQRT |SCALAR TAN |SCALAR +ASCII |SCALAR +CHAR |SCALAR +BIT_LENGTH |SCALAR +CHAR_LENGTH |SCALAR +LCASE |SCALAR +LENGTH |SCALAR +LTRIM |SCALAR +RTRIM |SCALAR +SPACE |SCALAR +UCASE |SCALAR SCORE |SCORE ; @@ -90,6 +100,7 @@ ACOS |SCALAR ASIN |SCALAR ATAN |SCALAR ATAN2 |SCALAR +ASCII |SCALAR ; showFunctionsWithPatternChar diff --git a/x-pack/qa/sql/src/main/resources/docs.csv-spec b/x-pack/qa/sql/src/main/resources/docs.csv-spec index 54509b21df36f..8d7debee331fc 100644 --- a/x-pack/qa/sql/src/main/resources/docs.csv-spec +++ b/x-pack/qa/sql/src/main/resources/docs.csv-spec @@ -222,6 +222,16 @@ SIN |SCALAR SINH |SCALAR SQRT |SCALAR TAN |SCALAR +ASCII |SCALAR +CHAR |SCALAR +BIT_LENGTH |SCALAR +CHAR_LENGTH |SCALAR +LCASE |SCALAR +LENGTH |SCALAR +LTRIM |SCALAR +RTRIM |SCALAR +SPACE |SCALAR +UCASE |SCALAR SCORE |SCORE // end::showFunctions @@ -249,7 +259,8 @@ ABS |SCALAR ACOS |SCALAR ASIN |SCALAR ATAN |SCALAR -ATAN2 |SCALAR +ATAN2 |SCALAR +ASCII |SCALAR // end::showFunctionsLikeWildcard ; diff --git a/x-pack/qa/sql/src/main/resources/functions.csv-spec b/x-pack/qa/sql/src/main/resources/functions.csv-spec new file mode 100644 index 0000000000000..09320c3d384c0 --- /dev/null +++ b/x-pack/qa/sql/src/main/resources/functions.csv-spec @@ -0,0 +1,30 @@ +bitLengthGroupByAndOrderBy +SELECT BIT_LENGTH(first_name), COUNT(*) count FROM "test_emp" GROUP BY BIT_LENGTH(first_name) ORDER BY BIT_LENGTH(first_name) LIMIT 10; + +BIT_LENGTH(first_name):i| count:l +24 |4 +32 |11 +40 |16 +48 |24 +56 |19 +64 |14 +72 |10 +80 |1 +88 |1 +; + +bitLengthOrderByFieldWithWhere +SELECT BIT_LENGTH(first_name) len, first_name FROM "test_emp" WHERE BIT_LENGTH(first_name) > 64 ORDER BY first_name LIMIT 10; + +len:i | first_name:s +80 |Adamantios +72 |Alejandro +72 |Alejandro +72 |Chirstian +72 |Cristinel +72 |Duangkaew +72 |Eberhardt +72 |Margareta +72 |Prasadram +88 |Sreekrishna +; diff --git a/x-pack/qa/sql/src/main/resources/string-functions.sql-spec b/x-pack/qa/sql/src/main/resources/string-functions.sql-spec new file mode 100644 index 0000000000000..d9a35edf1b030 --- /dev/null +++ b/x-pack/qa/sql/src/main/resources/string-functions.sql-spec @@ -0,0 +1,76 @@ +stringAscii +SELECT ASCII(first_name) s FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no; +stringChar +SELECT CHAR(emp_no % 10000) m, first_name FROM "test_emp" WHERE emp_no < 10010 ORDER BY emp_no; + +stringAsciiFilter +SELECT emp_no, ASCII(first_name) a FROM "test_emp" WHERE ASCII(first_name) < 10010 ORDER BY emp_no; + +stringAsciiEqualsConstant +SELECT emp_no, ASCII(first_name) a, first_name name FROM "test_emp" WHERE ASCII(first_name) = 65 ORDER BY emp_no; + +//https://github.com/elastic/elasticsearch/issues/31863 +//stringSelectConstantAsciiEqualsConstant +//SELECT ASCII('A') = 65 a FROM "test_emp" WHERE ASCII('A') = 65 ORDER BY emp_no; + +stringCharFilter +SELECT emp_no, CHAR(emp_no % 10000) m FROM "test_emp" WHERE CHAR(emp_no % 10000) = 'A'; + +lcaseFilter +SELECT LCASE(first_name) lc, CHAR(ASCII(LCASE(first_name))) chr FROM "test_emp" WHERE CHAR(ASCII(LCASE(first_name))) = 'a'; + +ltrimFilter +SELECT 
LTRIM(first_name) lt FROM "test_emp" WHERE LTRIM(first_name) = 'Bob'; + +//Not yet supported +//ltrimFilterWithLike +//SELECT LTRIM("first_name") lt FROM "test_emp" WHERE LTRIM("first_name") LIKE '%a%'; + +rtrimFilter +SELECT RTRIM(first_name) rt FROM "test_emp" WHERE RTRIM(first_name) = 'Johnny'; + +spaceFilter +SELECT SPACE(languages) spaces, languages FROM "test_emp" WHERE SPACE(languages) = ' '; + +spaceFilterWithLengthFunctions +SELECT SPACE(languages) spaces, languages, first_name FROM "test_emp" WHERE CHAR_LENGTH(SPACE(languages)) = 3 ORDER BY first_name; + +ucaseFilter +SELECT UCASE(gender) uppercased, COUNT(*) count FROM "test_emp" WHERE UCASE(gender) = 'F' GROUP BY UCASE(gender); + +// +// Group and order by +// +asciiGroupByAndOrderBy +SELECT ASCII(first_name) A, COUNT(*) count FROM "test_emp" WHERE ASCII(first_name) < 75 GROUP BY ASCII(first_name) ORDER BY ASCII(first_name) DESC; + +charGroupByAndOrderBy +SELECT CHAR(emp_no % 10000) C FROM "test_emp" WHERE emp_no > 10010 GROUP BY CHAR(emp_no % 10000) ORDER BY CHAR(emp_no % 10000) DESC LIMIT 20; + +//this would fail because H2 returns the result of char_length as Long, while we use a DataType of type String (size Integer.MAX_VALUE) and we return an Integer +//CAST is used as a "workaround" +charLengthGroupByAndHavingAndOrderBy +SELECT CAST(CHAR_LENGTH("first_name") AS INT) cl, COUNT(*) count FROM "test_emp" GROUP BY "first_name" HAVING COUNT(*)>1 ORDER BY CHAR_LENGTH("first_name"); + +//this one, without ORDER BY, would return different results from H2. In ES, the default ordering of the composite aggregation +//values is "asc" while in H2 there is no default ordering +lcaseGroupByAndOrderBy +SELECT LCASE(first_name) lc, CHAR(ASCII(LCASE(first_name))) chr FROM "test_emp" GROUP BY LCASE(first_name) ORDER BY LCASE(first_name); + +ucaseGroupByAndOrderBy +SELECT UCASE(gender) uc, COUNT(*) count FROM "test_emp" GROUP BY UCASE(gender) ORDER BY UCASE(gender) DESC; + +rtrimGroupByAndOrderBy +SELECT RTRIM(first_name) rt FROM "test_emp" GROUP BY RTRIM(first_name) HAVING COUNT(*)>1; + +ltrimGroupByAndOrderBy +SELECT LTRIM(first_name) lt FROM "test_emp" GROUP BY LTRIM(first_name) HAVING COUNT(*)>1; + +spaceGroupByWithCharLength +SELECT CAST(CHAR_LENGTH(SPACE(languages)) AS INT) cls FROM "test_emp" GROUP BY CHAR_LENGTH(SPACE(languages)); + +spaceGroupByAndOrderBy +SELECT SPACE("languages") s, COUNT(*) count FROM "test_emp" GROUP BY SPACE("languages") ORDER BY SPACE(languages); + +spaceGroupByAndOrderByWithCharLength +SELECT SPACE("languages") s, COUNT(*) count, CAST(CHAR_LENGTH(SPACE("languages")) AS INT) cls FROM "test_emp" WHERE "languages" IS NOT NULL GROUP BY SPACE("languages") ORDER BY SPACE("languages");