From e5b20de6fdf47d33ae0bb30d0145691d46fc07a8 Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Fri, 29 Jun 2018 12:24:22 +0100 Subject: [PATCH 001/107] WIP --- .../analysis/common/CommonAnalysisPlugin.java | 20 +++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java index ca2f74b5efee0..5729c70f48275 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java @@ -111,9 +111,16 @@ import org.apache.lucene.analysis.tr.ApostropheFilter; import org.apache.lucene.analysis.tr.TurkishAnalyzer; import org.apache.lucene.analysis.util.ElisionFilter; +import org.apache.lucene.util.SetOnce; +import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; import org.elasticsearch.common.logging.DeprecationLogger; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.regex.Regex; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; +import org.elasticsearch.env.Environment; +import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.index.analysis.AnalyzerProvider; import org.elasticsearch.index.analysis.CharFilterFactory; import org.elasticsearch.index.analysis.PreBuiltAnalyzerProviderFactory; @@ -127,10 +134,15 @@ import org.elasticsearch.indices.analysis.PreBuiltCacheFactory.CachingStrategy; import org.elasticsearch.plugins.AnalysisPlugin; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.script.ScriptService; +import org.elasticsearch.threadpool.ThreadPool; +import org.elasticsearch.watcher.ResourceWatcherService; import org.tartarus.snowball.ext.DutchStemmer; import org.tartarus.snowball.ext.FrenchStemmer; import java.util.ArrayList; +import java.util.Collection; +import java.util.Collections; import java.util.List; import java.util.Map; import java.util.TreeMap; @@ -141,6 +153,14 @@ public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin { private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(Loggers.getLogger(CommonAnalysisPlugin.class)); + private final SetOnce scriptService = new SetOnce<>(); + + @Override + public Collection createComponents(Client client, ClusterService clusterService, ThreadPool threadPool, ResourceWatcherService resourceWatcherService, ScriptService scriptService, NamedXContentRegistry xContentRegistry, Environment environment, NodeEnvironment nodeEnvironment, NamedWriteableRegistry namedWriteableRegistry) { + this.scriptService.set(scriptService); + return Collections.emptyList(); + } + @Override public Map>> getAnalyzers() { Map>> analyzers = new TreeMap<>(); From da0fd1e966055b1cc407c509cbbf04516da5f378 Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Mon, 2 Jul 2018 15:47:22 +0100 Subject: [PATCH 002/107] WIP --- .../ScriptedConditionTokenFilterFactory.java | 92 +++++++++++++++++++ 1 file changed, 92 insertions(+) create mode 100644 modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterFactory.java diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterFactory.java 
b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterFactory.java new file mode 100644 index 0000000000000..96a60808ec040 --- /dev/null +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterFactory.java @@ -0,0 +1,92 @@ +package org.elasticsearch.analysis.common; + +import org.apache.lucene.analysis.TokenStream; +import org.apache.lucene.analysis.miscellaneous.ConditionalTokenFilter; +import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; +import org.apache.lucene.analysis.tokenattributes.OffsetAttribute; +import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute; +import org.apache.lucene.analysis.tokenattributes.PositionLengthAttribute; +import org.apache.lucene.analysis.tokenattributes.TypeAttribute; +import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.AbstractTokenFilterFactory; +import org.elasticsearch.index.analysis.ReferringFilterFactory; +import org.elasticsearch.index.analysis.TokenFilterFactory; +import org.elasticsearch.script.Script; +import org.elasticsearch.script.ScriptContext; +import org.elasticsearch.script.ScriptService; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.function.Function; + +public class ScriptedConditionTokenFilterFactory extends AbstractTokenFilterFactory implements ReferringFilterFactory { + + private final ConditionFactory factory; + private final List filters = new ArrayList<>(); + private final List filterNames; + + public ScriptedConditionTokenFilterFactory(IndexSettings indexSettings, String name, + Settings settings, ScriptService scriptService) { + super(indexSettings, name, settings); + + Settings scriptSettings = settings.getAsSettings("script"); + Script script = Script.parse(scriptSettings); + this.factory = scriptService.compile(script, CONTEXT); + + this.filterNames = settings.getAsList("filters"); + } + + @Override + public TokenStream create(TokenStream tokenStream) { + Function filter = in -> { + for (TokenFilterFactory tff : filters) { + in = tff.create(in); + } + return in; + }; + ConditionScript script = factory.newInstance(); + return new ConditionalTokenFilter(tokenStream, filter) { + + CharTermAttribute termAtt = addAttribute(CharTermAttribute.class); + PositionIncrementAttribute posIncAtt = addAttribute(PositionIncrementAttribute.class); + PositionLengthAttribute posLenAtt = addAttribute(PositionLengthAttribute.class); + OffsetAttribute offsetAtt = addAttribute(OffsetAttribute.class); + TypeAttribute typeAtt = addAttribute(TypeAttribute.class); + + @Override + protected boolean shouldFilter() { + return script.execute(termAtt, posIncAtt.getPositionIncrement(), offsetAtt.startOffset(), offsetAtt.endOffset(), + posLenAtt.getPositionLength(), typeAtt.type()); + } + }; + } + + @Override + public void setReferences(Map factories) { + for (String filter : filterNames) { + TokenFilterFactory tff = factories.get(filter); + if (tff == null) { + throw new IllegalArgumentException("ScriptedConditionTokenFilter [" + name() + + "] refers to undefined token filter [" + filter + "]"); + } + filters.add(tff); + } + } + + public static abstract class ConditionScript { + + public abstract boolean execute(CharSequence term, int posInc, int startOffset, int endOffset, int posLen, String type); + + } + + public 
interface ConditionFactory { + ConditionScript newInstance(); + } + + public static final String[] PARAMETERS = new String[] {"term", "posInc", "startOffset", "endOffset", "posLen", "type"}; + public static final ScriptContext CONTEXT = new ScriptContext<>("similarity", ConditionFactory.class); +} From df9bffc375865d5260305cae5a9670eee24cad8a Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Wed, 4 Jul 2018 13:36:01 +0100 Subject: [PATCH 003/107] WIP --- .../analysis/common/CommonAnalysisPlugin.java | 2 ++ .../ScriptedConditionTokenFilterFactory.java | 6 +++- .../ScriptedConditionTokenFilterTests.java | 32 +++++++++++++++++++ 3 files changed, 39 insertions(+), 1 deletion(-) create mode 100644 modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterTests.java diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java index 5729c70f48275..f7aa2a429e68e 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java @@ -222,6 +222,8 @@ public Map> getTokenFilters() { filters.put("classic", ClassicFilterFactory::new); filters.put("czech_stem", CzechStemTokenFilterFactory::new); filters.put("common_grams", requriesAnalysisSettings(CommonGramsTokenFilterFactory::new)); + filters.put("condition", + requriesAnalysisSettings((i, e, n, s) -> new ScriptedConditionTokenFilterFactory(i, n, s, scriptService.get()))); filters.put("decimal_digit", DecimalDigitFilterFactory::new); filters.put("delimited_payload_filter", LegacyDelimitedPayloadTokenFilterFactory::new); filters.put("delimited_payload", DelimitedPayloadTokenFilterFactory::new); diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterFactory.java index 96a60808ec040..615187e6b1bab 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterFactory.java @@ -16,6 +16,7 @@ import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptService; +import org.elasticsearch.script.ScriptType; import java.io.IOException; import java.util.ArrayList; @@ -35,6 +36,9 @@ public ScriptedConditionTokenFilterFactory(IndexSettings indexSettings, String n Settings scriptSettings = settings.getAsSettings("script"); Script script = Script.parse(scriptSettings); + if (script.getType() != ScriptType.INLINE) { + throw new IllegalArgumentException("Cannot use stored scripts in tokenfilter [" + name + "]"); + } this.factory = scriptService.compile(script, CONTEXT); this.filterNames = settings.getAsList("filters"); @@ -88,5 +92,5 @@ public interface ConditionFactory { } public static final String[] PARAMETERS = new String[] {"term", "posInc", "startOffset", "endOffset", "posLen", "type"}; - public static final ScriptContext CONTEXT = new ScriptContext<>("similarity", ConditionFactory.class); + public static final ScriptContext CONTEXT = new ScriptContext<>("condition", ConditionFactory.class); } diff --git 
a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterTests.java new file mode 100644 index 0000000000000..8d42c4ee77f0e --- /dev/null +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterTests.java @@ -0,0 +1,32 @@ +package org.elasticsearch.analysis.common; + +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.env.Environment; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.script.ScriptService; +import org.elasticsearch.test.ESTokenStreamTestCase; +import org.elasticsearch.test.IndexSettingsModule; + +public class ScriptedConditionTokenFilterTests extends ESTokenStreamTestCase { + + public void testSimpleCondition() throws Exception { + Settings settings = Settings.builder() + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .build(); + Settings indexSettings = Settings.builder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put("index.analysis.filter.cond.type", "condition") + .put("index.analysis.filter.cond.script", "return \"two\".equals(term)") + .putList("index.analysis.filter.cond.filters", "uppercase") + .put("index.analysis.analyzer.myAnalyzer.type", "custom") + .put("index.analysis.analyzer.myAnalyzer.tokenizer", "standard") + .putList("index.analysis.analyzer.myAnalyzer.filter", "cond") + .build(); + IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings); + + ScriptService scriptService = new ScriptService() + } + +} From 402ed36560b8aa48ca8e090083f71d78b93fd4a7 Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Wed, 11 Jul 2018 11:29:18 +0100 Subject: [PATCH 004/107] WIP --- .../ScriptedConditionTokenFilterFactory.java | 32 +++++++------- .../ScriptedConditionTokenFilterTests.java | 44 ++++++++++++++++++- .../painless/spi/org.elasticsearch.txt | 10 +++++ .../painless/AnalysisScriptTests.java | 32 ++++++++++++++ .../elasticsearch/script/AnalysisScript.java | 43 ++++++++++++++++++ .../elasticsearch/script/ScriptModule.java | 1 + 6 files changed, 143 insertions(+), 19 deletions(-) create mode 100644 modules/lang-painless/src/test/java/org/elasticsearch/painless/AnalysisScriptTests.java create mode 100644 server/src/main/java/org/elasticsearch/script/AnalysisScript.java diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterFactory.java index 615187e6b1bab..509cd71990670 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterFactory.java @@ -3,6 +3,7 @@ import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.miscellaneous.ConditionalTokenFilter; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; +import org.apache.lucene.analysis.tokenattributes.KeywordAttribute; import org.apache.lucene.analysis.tokenattributes.OffsetAttribute; import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute; import 
org.apache.lucene.analysis.tokenattributes.PositionLengthAttribute; @@ -13,6 +14,7 @@ import org.elasticsearch.index.analysis.AbstractTokenFilterFactory; import org.elasticsearch.index.analysis.ReferringFilterFactory; import org.elasticsearch.index.analysis.TokenFilterFactory; +import org.elasticsearch.script.AnalysisScript; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptService; @@ -26,7 +28,7 @@ public class ScriptedConditionTokenFilterFactory extends AbstractTokenFilterFactory implements ReferringFilterFactory { - private final ConditionFactory factory; + private final AnalysisScript.Factory factory; private final List filters = new ArrayList<>(); private final List filterNames; @@ -39,7 +41,7 @@ public ScriptedConditionTokenFilterFactory(IndexSettings indexSettings, String n if (script.getType() != ScriptType.INLINE) { throw new IllegalArgumentException("Cannot use stored scripts in tokenfilter [" + name + "]"); } - this.factory = scriptService.compile(script, CONTEXT); + this.factory = scriptService.compile(script, AnalysisScript.CONTEXT); this.filterNames = settings.getAsList("filters"); } @@ -52,7 +54,8 @@ public TokenStream create(TokenStream tokenStream) { } return in; }; - ConditionScript script = factory.newInstance(); + AnalysisScript script = factory.newInstance(); + final AnalysisScript.Term term = new AnalysisScript.Term(); return new ConditionalTokenFilter(tokenStream, filter) { CharTermAttribute termAtt = addAttribute(CharTermAttribute.class); @@ -60,11 +63,18 @@ public TokenStream create(TokenStream tokenStream) { PositionLengthAttribute posLenAtt = addAttribute(PositionLengthAttribute.class); OffsetAttribute offsetAtt = addAttribute(OffsetAttribute.class); TypeAttribute typeAtt = addAttribute(TypeAttribute.class); + KeywordAttribute keywordAtt = addAttribute(KeywordAttribute.class); @Override protected boolean shouldFilter() { - return script.execute(termAtt, posIncAtt.getPositionIncrement(), offsetAtt.startOffset(), offsetAtt.endOffset(), - posLenAtt.getPositionLength(), typeAtt.type()); + term.term = termAtt; + term.posInc = posIncAtt.getPositionIncrement(); + term.posLen = posLenAtt.getPositionLength(); + term.startOffset = offsetAtt.startOffset(); + term.endOffset = offsetAtt.endOffset(); + term.type = typeAtt.type(); + term.isKeyword = keywordAtt.isKeyword(); + return script.execute(term); } }; } @@ -81,16 +91,4 @@ public void setReferences(Map factories) { } } - public static abstract class ConditionScript { - - public abstract boolean execute(CharSequence term, int posInc, int startOffset, int endOffset, int posLen, String type); - - } - - public interface ConditionFactory { - ConditionScript newInstance(); - } - - public static final String[] PARAMETERS = new String[] {"term", "posInc", "startOffset", "endOffset", "posLen", "type"}; - public static final ScriptContext CONTEXT = new ScriptContext<>("condition", ConditionFactory.class); } diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterTests.java index 8d42c4ee77f0e..25977af09c6b0 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterTests.java @@ -4,11 +4,22 @@ import 
org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; +import org.elasticsearch.env.TestEnvironment; import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.analysis.IndexAnalyzers; +import org.elasticsearch.index.analysis.NamedAnalyzer; +import org.elasticsearch.indices.analysis.AnalysisModule; +import org.elasticsearch.plugins.AnalysisPlugin; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.script.AnalysisScript; +import org.elasticsearch.script.Script; +import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptService; import org.elasticsearch.test.ESTokenStreamTestCase; import org.elasticsearch.test.IndexSettingsModule; +import java.util.Collections; + public class ScriptedConditionTokenFilterTests extends ESTokenStreamTestCase { public void testSimpleCondition() throws Exception { @@ -18,7 +29,7 @@ public void testSimpleCondition() throws Exception { Settings indexSettings = Settings.builder() .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .put("index.analysis.filter.cond.type", "condition") - .put("index.analysis.filter.cond.script", "return \"two\".equals(term)") + .put("index.analysis.filter.cond.script.source", "return \"two\".equals(term.term)") .putList("index.analysis.filter.cond.filters", "uppercase") .put("index.analysis.analyzer.myAnalyzer.type", "custom") .put("index.analysis.analyzer.myAnalyzer.tokenizer", "standard") @@ -26,7 +37,36 @@ public void testSimpleCondition() throws Exception { .build(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings); - ScriptService scriptService = new ScriptService() + AnalysisScript.Factory factory = () -> new AnalysisScript() { + @Override + public boolean execute(Term term) { + return "two".contentEquals(term.term); + } + }; + + ScriptService scriptService = new ScriptService(indexSettings, Collections.emptyMap(), Collections.emptyMap()){ + @Override + public FactoryType compile(Script script, ScriptContext context) { + assertEquals(context, AnalysisScript.CONTEXT); + assertEquals(new Script("return \"two\".equals(term.term)"), script); + return (FactoryType) factory; + } + }; + + CommonAnalysisPlugin plugin = new CommonAnalysisPlugin(); + plugin.createComponents(null, null, null, null, scriptService, null, null, null, null); + AnalysisModule module + = new AnalysisModule(TestEnvironment.newEnvironment(settings), Collections.singletonList(plugin)); + + IndexAnalyzers analyzers = module.getAnalysisRegistry().build(idxSettings); + + try (NamedAnalyzer analyzer = analyzers.get("myAnalyzer")) { + assertNotNull(analyzer); + assertAnalyzesTo(analyzer, "one two three", new String[]{ + "one", "TWO", "three" + }); + } + } } diff --git a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/org.elasticsearch.txt b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/org.elasticsearch.txt index 6495659d9cdc0..d06741bf27efe 100644 --- a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/org.elasticsearch.txt +++ b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/org.elasticsearch.txt @@ -175,3 +175,13 @@ class org.elasticsearch.index.similarity.ScriptedSimilarity$Doc { int getLength() float getFreq() } + +class org.elasticsearch.script.AnalysisScript$Term { + CharSequence term + int posInc + int posLen + int startOffset + int endOffset + String type + boolean 
isKeyword +} diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/AnalysisScriptTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/AnalysisScriptTests.java new file mode 100644 index 0000000000000..88f2493490d02 --- /dev/null +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/AnalysisScriptTests.java @@ -0,0 +1,32 @@ +package org.elasticsearch.painless; + +import org.elasticsearch.painless.spi.Whitelist; +import org.elasticsearch.script.AnalysisScript; +import org.elasticsearch.script.ScriptContext; + +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +public class AnalysisScriptTests extends ScriptTestCase { + + @Override + protected Map, List> scriptContexts() { + Map, List> contexts = new HashMap<>(); + contexts.put(AnalysisScript.CONTEXT, Whitelist.BASE_WHITELISTS); + return contexts; + } + + public void testAnalysisScript() { + AnalysisScript.Factory factory = scriptEngine.compile("test", "return \"one\".contentEquals(term.term)", + AnalysisScript.CONTEXT, Collections.emptyMap()); + + AnalysisScript script = factory.newInstance(); + AnalysisScript.Term term = new AnalysisScript.Term(); + term.term = "one"; + assertTrue(script.execute(term)); + term.term = "two"; + assertFalse(script.execute(term)); + } +} diff --git a/server/src/main/java/org/elasticsearch/script/AnalysisScript.java b/server/src/main/java/org/elasticsearch/script/AnalysisScript.java new file mode 100644 index 0000000000000..02d5b085b2158 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/script/AnalysisScript.java @@ -0,0 +1,43 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.script; + +public abstract class AnalysisScript { + + public static class Term { + public CharSequence term; + public int posInc; + public int posLen; + public int startOffset; + public int endOffset; + public String type; + public boolean isKeyword; + } + + public abstract boolean execute(Term term); + + public interface Factory { + AnalysisScript newInstance(); + } + + public static final String[] PARAMETERS = new String[]{ "term" }; + public static final ScriptContext CONTEXT = new ScriptContext<>("analysis", Factory.class); + +} diff --git a/server/src/main/java/org/elasticsearch/script/ScriptModule.java b/server/src/main/java/org/elasticsearch/script/ScriptModule.java index f0e075eac7d93..d746f5ba20e03 100644 --- a/server/src/main/java/org/elasticsearch/script/ScriptModule.java +++ b/server/src/main/java/org/elasticsearch/script/ScriptModule.java @@ -54,6 +54,7 @@ public class ScriptModule { SimilarityWeightScript.CONTEXT, TemplateScript.CONTEXT, MovingFunctionScript.CONTEXT, + AnalysisScript.CONTEXT, ScriptedMetricAggContexts.InitScript.CONTEXT, ScriptedMetricAggContexts.MapScript.CONTEXT, ScriptedMetricAggContexts.CombineScript.CONTEXT, From fb7c21df96f3a016839456028fa418e9186274d9 Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Wed, 11 Jul 2018 11:44:28 +0100 Subject: [PATCH 005/107] WIP --- .../ScriptedConditionTokenFilterFactory.java | 19 ++++++++++--------- .../ScriptedConditionTokenFilterTests.java | 8 +++----- .../painless/AnalysisScriptTests.java | 12 ++++++------ ...ript.java => AnalysisPredicateScript.java} | 13 +++++++++++-- .../elasticsearch/script/ScriptModule.java | 2 +- 5 files changed, 31 insertions(+), 23 deletions(-) rename server/src/main/java/org/elasticsearch/script/{AnalysisScript.java => AnalysisPredicateScript.java} (80%) diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterFactory.java index 509cd71990670..59c70faaed90f 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterFactory.java @@ -8,31 +8,32 @@ import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute; import org.apache.lucene.analysis.tokenattributes.PositionLengthAttribute; import org.apache.lucene.analysis.tokenattributes.TypeAttribute; -import org.apache.lucene.util.BytesRef; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.analysis.AbstractTokenFilterFactory; import org.elasticsearch.index.analysis.ReferringFilterFactory; import org.elasticsearch.index.analysis.TokenFilterFactory; -import org.elasticsearch.script.AnalysisScript; +import org.elasticsearch.script.AnalysisPredicateScript; import org.elasticsearch.script.Script; -import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.ScriptType; -import java.io.IOException; import java.util.ArrayList; import java.util.List; import java.util.Map; import java.util.function.Function; +/** + * A factory for a conditional token filter that only applies child filters if the underlying token + * matches an {@link AnalysisPredicateScript} + */ public class ScriptedConditionTokenFilterFactory extends 
AbstractTokenFilterFactory implements ReferringFilterFactory { - private final AnalysisScript.Factory factory; + private final AnalysisPredicateScript.Factory factory; private final List filters = new ArrayList<>(); private final List filterNames; - public ScriptedConditionTokenFilterFactory(IndexSettings indexSettings, String name, + ScriptedConditionTokenFilterFactory(IndexSettings indexSettings, String name, Settings settings, ScriptService scriptService) { super(indexSettings, name, settings); @@ -41,7 +42,7 @@ public ScriptedConditionTokenFilterFactory(IndexSettings indexSettings, String n if (script.getType() != ScriptType.INLINE) { throw new IllegalArgumentException("Cannot use stored scripts in tokenfilter [" + name + "]"); } - this.factory = scriptService.compile(script, AnalysisScript.CONTEXT); + this.factory = scriptService.compile(script, AnalysisPredicateScript.CONTEXT); this.filterNames = settings.getAsList("filters"); } @@ -54,8 +55,8 @@ public TokenStream create(TokenStream tokenStream) { } return in; }; - AnalysisScript script = factory.newInstance(); - final AnalysisScript.Term term = new AnalysisScript.Term(); + AnalysisPredicateScript script = factory.newInstance(); + final AnalysisPredicateScript.Term term = new AnalysisPredicateScript.Term(); return new ConditionalTokenFilter(tokenStream, filter) { CharTermAttribute termAtt = addAttribute(CharTermAttribute.class); diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterTests.java index 25977af09c6b0..6dd3aec8b664c 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterTests.java @@ -9,9 +9,7 @@ import org.elasticsearch.index.analysis.IndexAnalyzers; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.indices.analysis.AnalysisModule; -import org.elasticsearch.plugins.AnalysisPlugin; -import org.elasticsearch.plugins.Plugin; -import org.elasticsearch.script.AnalysisScript; +import org.elasticsearch.script.AnalysisPredicateScript; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptService; @@ -37,7 +35,7 @@ public void testSimpleCondition() throws Exception { .build(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings); - AnalysisScript.Factory factory = () -> new AnalysisScript() { + AnalysisPredicateScript.Factory factory = () -> new AnalysisPredicateScript() { @Override public boolean execute(Term term) { return "two".contentEquals(term.term); @@ -47,7 +45,7 @@ public boolean execute(Term term) { ScriptService scriptService = new ScriptService(indexSettings, Collections.emptyMap(), Collections.emptyMap()){ @Override public FactoryType compile(Script script, ScriptContext context) { - assertEquals(context, AnalysisScript.CONTEXT); + assertEquals(context, AnalysisPredicateScript.CONTEXT); assertEquals(new Script("return \"two\".equals(term.term)"), script); return (FactoryType) factory; } diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/AnalysisScriptTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/AnalysisScriptTests.java index 88f2493490d02..43d31258c69f1 100644 --- 
a/modules/lang-painless/src/test/java/org/elasticsearch/painless/AnalysisScriptTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/AnalysisScriptTests.java @@ -1,7 +1,7 @@ package org.elasticsearch.painless; import org.elasticsearch.painless.spi.Whitelist; -import org.elasticsearch.script.AnalysisScript; +import org.elasticsearch.script.AnalysisPredicateScript; import org.elasticsearch.script.ScriptContext; import java.util.Collections; @@ -14,16 +14,16 @@ public class AnalysisScriptTests extends ScriptTestCase { @Override protected Map, List> scriptContexts() { Map, List> contexts = new HashMap<>(); - contexts.put(AnalysisScript.CONTEXT, Whitelist.BASE_WHITELISTS); + contexts.put(AnalysisPredicateScript.CONTEXT, Whitelist.BASE_WHITELISTS); return contexts; } public void testAnalysisScript() { - AnalysisScript.Factory factory = scriptEngine.compile("test", "return \"one\".contentEquals(term.term)", - AnalysisScript.CONTEXT, Collections.emptyMap()); + AnalysisPredicateScript.Factory factory = scriptEngine.compile("test", "return \"one\".contentEquals(term.term)", + AnalysisPredicateScript.CONTEXT, Collections.emptyMap()); - AnalysisScript script = factory.newInstance(); - AnalysisScript.Term term = new AnalysisScript.Term(); + AnalysisPredicateScript script = factory.newInstance(); + AnalysisPredicateScript.Term term = new AnalysisPredicateScript.Term(); term.term = "one"; assertTrue(script.execute(term)); term.term = "two"; diff --git a/server/src/main/java/org/elasticsearch/script/AnalysisScript.java b/server/src/main/java/org/elasticsearch/script/AnalysisPredicateScript.java similarity index 80% rename from server/src/main/java/org/elasticsearch/script/AnalysisScript.java rename to server/src/main/java/org/elasticsearch/script/AnalysisPredicateScript.java index 02d5b085b2158..2d4ef769892c5 100644 --- a/server/src/main/java/org/elasticsearch/script/AnalysisScript.java +++ b/server/src/main/java/org/elasticsearch/script/AnalysisPredicateScript.java @@ -19,8 +19,14 @@ package org.elasticsearch.script; -public abstract class AnalysisScript { +/** + * A predicate based on the current token in a TokenStream + */ +public abstract class AnalysisPredicateScript { + /** + * Encapsulation of the state of the current token + */ public static class Term { public CharSequence term; public int posInc; @@ -31,10 +37,13 @@ public static class Term { public boolean isKeyword; } + /** + * Returns {@code true} if the current term matches the predicate + */ public abstract boolean execute(Term term); public interface Factory { - AnalysisScript newInstance(); + AnalysisPredicateScript newInstance(); } public static final String[] PARAMETERS = new String[]{ "term" }; diff --git a/server/src/main/java/org/elasticsearch/script/ScriptModule.java b/server/src/main/java/org/elasticsearch/script/ScriptModule.java index d746f5ba20e03..1acf441bc0349 100644 --- a/server/src/main/java/org/elasticsearch/script/ScriptModule.java +++ b/server/src/main/java/org/elasticsearch/script/ScriptModule.java @@ -54,7 +54,7 @@ public class ScriptModule { SimilarityWeightScript.CONTEXT, TemplateScript.CONTEXT, MovingFunctionScript.CONTEXT, - AnalysisScript.CONTEXT, + AnalysisPredicateScript.CONTEXT, ScriptedMetricAggContexts.InitScript.CONTEXT, ScriptedMetricAggContexts.MapScript.CONTEXT, ScriptedMetricAggContexts.CombineScript.CONTEXT, From d8f01704e20c6fb7fafb1a83c58f501ad9a7d69d Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Wed, 11 Jul 2018 12:04:32 +0100 Subject: [PATCH 006/107] docs --- 
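[Note, not part of the patch: it sits after the `---` fold, which `git am` ignores.]
The docs below configure the predicate through index settings. For a quick end-to-end
check, the same filter can be defined inline in an _analyze request. This is a minimal
sketch only, assuming the `condition` filter registered in PATCH 003, its
`filters`/`script` settings as of this patch, and the `term.term` field whitelisted in
PATCH 004:

POST /_analyze
{
  "tokenizer" : "standard",
  "filter" : [
    {
      "type" : "condition",
      "filters" : [ "lowercase" ],
      "script" : {
        "source" : "return term.term.length() < 5" <1>
      }
    }
  ],
  "text" : "What Flapdoodle"
}

<1> only tokens shorter than 5 characters are lowercased, so this should return
`what` and `Flapdoodle`, matching the TESTRESPONSE in the new docs page.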
docs/reference/analysis/tokenfilters.asciidoc |  2 +
 .../condition-tokenfilter.asciidoc            | 90 +++++++++++++++++++
 2 files changed, 92 insertions(+)
 create mode 100644 docs/reference/analysis/tokenfilters/condition-tokenfilter.asciidoc

diff --git a/docs/reference/analysis/tokenfilters.asciidoc b/docs/reference/analysis/tokenfilters.asciidoc
index ee891fdd09aa7..5899744247899 100644
--- a/docs/reference/analysis/tokenfilters.asciidoc
+++ b/docs/reference/analysis/tokenfilters.asciidoc
@@ -37,6 +37,8 @@ include::tokenfilters/word-delimiter-graph-tokenfilter.asciidoc[]
 
 include::tokenfilters/multiplexer-tokenfilter.asciidoc[]
 
+include::tokenfilters/condition-tokenfilter.asciidoc[]
+
 include::tokenfilters/stemmer-tokenfilter.asciidoc[]
 
 include::tokenfilters/stemmer-override-tokenfilter.asciidoc[]
diff --git a/docs/reference/analysis/tokenfilters/condition-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/condition-tokenfilter.asciidoc
new file mode 100644
index 0000000000000..48382cf8056bf
--- /dev/null
+++ b/docs/reference/analysis/tokenfilters/condition-tokenfilter.asciidoc
@@ -0,0 +1,90 @@
+[[analysis-condition-tokenfilter]]
+=== Conditional Token Filter
+
+The conditional token filter takes a predicate script and a list of subfilters, and
+only applies the subfilters to the current token if it matches the predicate.
+
+[float]
+=== Options
+[horizontal]
+filters:: a list of token filters to apply to the current token if the predicate
+    matches. These can be any token filters defined elsewhere in the index mappings.
+
+script:: a predicate script that determines whether or not the filters will be applied
+    to the current token. Note that only inline scripts are supported
+
+[float]
+=== Settings example
+
+You can set it up like:
+
+[source,js]
+--------------------------------------------------
+PUT /multiplexer_example
+{
+  "settings" : {
+    "analysis" : {
+      "analyzer" : {
+        "my_analyzer" : {
+          "tokenizer" : "standard",
+          "filter" : [ "my_condition" ]
+        }
+      },
+      "filter" : {
+        "my_multiplexer" : {
+          "type" : "condition",
+          "filters" : [ "lowercase" ],
+          "script" : {
+            "source" : "return term.term.length() < 5" <1>
+          }
+        }
+      }
+    }
+  }
+}
+--------------------------------------------------
+// CONSOLE
+
+<1> This will only apply the lowercase filter to terms that are less than 5
+characters in length
+
+And test it like:
+
+[source,js]
+--------------------------------------------------
+POST /multiplexer_example/_analyze
+{
+  "analyzer" : "my_analyzer",
+  "text" : "What Flapdoodle"
+}
+--------------------------------------------------
+// CONSOLE
+// TEST[continued]
+
+And it'd respond:
+
+[source,js]
+--------------------------------------------------
+{
+  "tokens": [
+    {
+      "token": "what", <1>
+      "start_offset": 0,
+      "end_offset": 4,
+      "type": "<ALPHANUM>",
+      "position": 0
+    },
+    {
+      "token": "Flapdoodle", <2>
+      "start_offset": 5,
+      "end_offset": 15,
+      "type": "<ALPHANUM>",
+      "position": 1
+    }
+  ]
+}
+--------------------------------------------------
+// TESTRESPONSE
+<1> The term `What` has been lowercased, because it is only 4 characters long
+<2> The term `Flapdoodle` has been left in its original case, because it doesn't pass
+    the predicate
\ No newline at end of file

From bcee3f034d6fee106559a06dd94f8c0d940a161f Mon Sep 17 00:00:00 2001
From: Alan Woodward
Date: Wed, 11 Jul 2018 12:48:06 +0100
Subject: [PATCH 007/107] tests

---
 .../org/elasticsearch/script/ScriptServiceTests.java |  2 ++
 .../org/elasticsearch/script/MockScriptEngine.java   | 12 ++++++++++++
 2 files changed, 14 insertions(+)
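[Another am-ignored note, between the diffstat and the diff.] The MockScriptEngine
hook added in this patch always returns true, so the unit tests exercise the wiring
rather than the predicate itself. For reference, a real inline Painless predicate
compiled against this context reads the token state through the `term` parameter; a
hedged sketch using only the fields whitelisted in PATCH 004 (`posInc`, `term`), with
an illustrative length threshold:

  return term.posInc > 0 && term.term.length() < 5

i.e. apply the wrapped filters only to tokens that are not stacked on the previous
position (posInc == 0) and are shorter than five characters.

diff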
--git a/server/src/test/java/org/elasticsearch/script/ScriptServiceTests.java b/server/src/test/java/org/elasticsearch/script/ScriptServiceTests.java index b35fcbcc03c17..2bfdf8b3bb8b0 100644 --- a/server/src/test/java/org/elasticsearch/script/ScriptServiceTests.java +++ b/server/src/test/java/org/elasticsearch/script/ScriptServiceTests.java @@ -18,6 +18,7 @@ */ package org.elasticsearch.script; +import com.carrotsearch.randomizedtesting.annotations.Seed; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest; import org.elasticsearch.cluster.ClusterName; @@ -47,6 +48,7 @@ import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.sameInstance; +@Seed("BB1473C5C751C4FB") public class ScriptServiceTests extends ESTestCase { private ScriptEngine scriptEngine; diff --git a/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java b/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java index e608bd13d2559..2bd29dca840f0 100644 --- a/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java +++ b/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java @@ -128,6 +128,9 @@ public String execute() { } else if (context.instanceClazz.equals(ScriptedMetricAggContexts.ReduceScript.class)) { ScriptedMetricAggContexts.ReduceScript.Factory factory = mockCompiled::createMetricAggReduceScript; return context.factoryClazz.cast(factory); + } else if (context.instanceClazz.equals(AnalysisPredicateScript.class)) { + AnalysisPredicateScript.Factory factory = mockCompiled::createAnalysisPredicateScript; + return context.factoryClazz.cast(factory); } throw new IllegalArgumentException("mock script engine does not know how to handle context [" + context.name + "]"); } @@ -209,6 +212,15 @@ public ScriptedMetricAggContexts.CombineScript createMetricAggCombineScript(Map< public ScriptedMetricAggContexts.ReduceScript createMetricAggReduceScript(Map params, List states) { return new MockMetricAggReduceScript(params, states, script != null ? 
script : ctx -> 42d); } + + public AnalysisPredicateScript createAnalysisPredicateScript() { + return new AnalysisPredicateScript() { + @Override + public boolean execute(Term term) { + return true; + } + }; + } } public class MockExecutableScript implements ExecutableScript { From bba59395cbffa2f539cb784fabd5b0b8aa2141dd Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Wed, 11 Jul 2018 12:57:59 +0100 Subject: [PATCH 008/107] d'oh --- .../test/java/org/elasticsearch/script/ScriptServiceTests.java | 2 -- 1 file changed, 2 deletions(-) diff --git a/server/src/test/java/org/elasticsearch/script/ScriptServiceTests.java b/server/src/test/java/org/elasticsearch/script/ScriptServiceTests.java index 2bfdf8b3bb8b0..b35fcbcc03c17 100644 --- a/server/src/test/java/org/elasticsearch/script/ScriptServiceTests.java +++ b/server/src/test/java/org/elasticsearch/script/ScriptServiceTests.java @@ -18,7 +18,6 @@ */ package org.elasticsearch.script; -import com.carrotsearch.randomizedtesting.annotations.Seed; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.action.admin.cluster.storedscripts.GetStoredScriptRequest; import org.elasticsearch.cluster.ClusterName; @@ -48,7 +47,6 @@ import static org.hamcrest.Matchers.notNullValue; import static org.hamcrest.Matchers.sameInstance; -@Seed("BB1473C5C751C4FB") public class ScriptServiceTests extends ESTestCase { private ScriptEngine scriptEngine; From 21cd02f2ff9ccdddb37884edd1f24530dcf13748 Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Wed, 11 Jul 2018 13:32:03 +0100 Subject: [PATCH 009/107] class name change in SPI --- .../org/elasticsearch/painless/spi/org.elasticsearch.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/org.elasticsearch.txt b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/org.elasticsearch.txt index d06741bf27efe..7d995a1732476 100644 --- a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/org.elasticsearch.txt +++ b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/org.elasticsearch.txt @@ -176,7 +176,7 @@ class org.elasticsearch.index.similarity.ScriptedSimilarity$Doc { float getFreq() } -class org.elasticsearch.script.AnalysisScript$Term { +class org.elasticsearch.script.AnalysisPredicateScript$Term { CharSequence term int posInc int posLen From 4315682c31f59d6d44b53f491df5ac79d972d450 Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Wed, 11 Jul 2018 14:08:57 +0100 Subject: [PATCH 010/107] docs --- .../analysis/tokenfilters/condition-tokenfilter.asciidoc | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/reference/analysis/tokenfilters/condition-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/condition-tokenfilter.asciidoc index 48382cf8056bf..496c5e5f1b003 100644 --- a/docs/reference/analysis/tokenfilters/condition-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/condition-tokenfilter.asciidoc @@ -20,7 +20,7 @@ You can set it up like: [source,js] -------------------------------------------------- -PUT /multiplexer_example +PUT /condition_example { "settings" : { "analysis" : { @@ -31,7 +31,7 @@ PUT /multiplexer_example } }, "filter" : { - "my_multiplexer" : { + "my_condition" : { "type" : "condition", "filters" : [ "lowercase" ], "script" : { @@ -52,7 +52,7 @@ And test it like: [source,js] -------------------------------------------------- -POST /multiplexer_example/_analyze +POST 
/condition_example/_analyze
{
  "analyzer" : "my_analyzer",
  "text" : "What Flapdoodle"
}
--------------------------------------------------
// CONSOLE
// TEST[continued]

From 52955df8d904fdeba93745f851b80bc6da2d2d98 Mon Sep 17 00:00:00 2001
From: Alan Woodward
Date: Fri, 13 Jul 2018 13:13:54 +0100
Subject: [PATCH 011/107] Broken

---
 modules/analysis-common/build.gradle          |  4 +++
 .../common/script/AnalysisScriptTests.java    | 32 +++++++++++++++++++
 2 files changed, 36 insertions(+)
 create mode 100644 modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/script/AnalysisScriptTests.java

diff --git a/modules/analysis-common/build.gradle b/modules/analysis-common/build.gradle
index 391b74934c97d..a2160c58658a8 100644
--- a/modules/analysis-common/build.gradle
+++ b/modules/analysis-common/build.gradle
@@ -21,3 +21,7 @@ esplugin {
   description 'Adds "built in" analyzers to Elasticsearch.'
   classname 'org.elasticsearch.analysis.common.CommonAnalysisPlugin'
 }
+
+dependencies {
+  compileOnly project(':modules:lang-painless')
+}
diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/script/AnalysisScriptTests.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/script/AnalysisScriptTests.java
new file mode 100644
index 0000000000000..611c7e30a45b6
--- /dev/null
+++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/script/AnalysisScriptTests.java
@@ -0,0 +1,32 @@
+package org.elasticsearch.analysis.common.script;
+
+import org.elasticsearch.painless.spi.Whitelist;
+import org.elasticsearch.script.AnalysisPredicateScript;
+import org.elasticsearch.script.ScriptContext;
+
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+public class AnalysisScriptTests extends ScriptTestCase {
+
+    @Override
+    protected Map, List> scriptContexts() {
+        Map, List> contexts = new HashMap<>();
+        contexts.put(AnalysisPredicateScript.CONTEXT, Whitelist.BASE_WHITELISTS);
+        return contexts;
+    }
+
+    public void testAnalysisScript() {
+        AnalysisPredicateScript.Factory factory = scriptEngine.compile("test", "return \"one\".contentEquals(term.term)",
+            AnalysisPredicateScript.CONTEXT, Collections.emptyMap());
+
+        AnalysisPredicateScript script = factory.newInstance();
+        AnalysisPredicateScript.Term term = new AnalysisPredicateScript.Term();
+        term.term = "one";
+        Assert.assertTrue(script.execute(term));
+        term.term = "two";
+        Assert.assertFalse(script.execute(term));
+    }
+}

From dd139c74112ca1a54cf8477fc32c1bb797d3592a Mon Sep 17 00:00:00 2001
From: Alan Woodward
Date: Tue, 17 Jul 2018 09:49:37 +0100
Subject: [PATCH 012/107] nuke unit test

---
 .../common/script/AnalysisScriptTests.java | 32 -------------------
 1 file changed, 32 deletions(-)
 delete mode 100644 modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/script/AnalysisScriptTests.java

diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/script/AnalysisScriptTests.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/script/AnalysisScriptTests.java
deleted file mode 100644
index 611c7e30a45b6..0000000000000
--- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/script/AnalysisScriptTests.java
+++ /dev/null
@@ -1,32 +0,0 @@
-package org.elasticsearch.analysis.common.script;
-
-import org.elasticsearch.painless.spi.Whitelist;
-import org.elasticsearch.script.AnalysisPredicateScript;
-import org.elasticsearch.script.ScriptContext;
-
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map; - -public class AnalysisScriptTests extends ScriptTestCase { - - @Override - protected Map, List> scriptContexts() { - Map, List> contexts = new HashMap<>(); - contexts.put(AnalysisPredicateScript.CONTEXT, Whitelist.BASE_WHITELISTS); - return contexts; - } - - public void testAnalysisScript() { - AnalysisPredicateScript.Factory factory = scriptEngine.compile("test", "return \"one\".contentEquals(term.term)", - AnalysisPredicateScript.CONTEXT, Collections.emptyMap()); - - AnalysisPredicateScript script = factory.newInstance(); - AnalysisPredicateScript.Term term = new AnalysisPredicateScript.Term(); - term.term = "one"; - Assert.assertTrue(script.execute(term)); - term.term = "two"; - Assert.assertFalse(script.execute(term)); - } -} From 57a73f24e50a52f7f1e22d0bca90c7a4b6a46d19 Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Tue, 17 Jul 2018 10:16:42 +0100 Subject: [PATCH 013/107] feedback --- .../condition-tokenfilter.asciidoc | 4 +-- .../common/AnalysisPainlessExtension.java | 22 +++++++++++++ .../ScriptedConditionTokenFilterFactory.java | 3 +- ...asticsearch.painless.spi.PainlessExtension | 1 + .../analysis/common/painless_whitelist.txt | 29 ++++++++++++++++ .../ScriptedConditionTokenFilterTests.java | 2 +- .../analysis-common/60_analysis_scripting.yml | 17 ++++++++++ .../painless/spi/org.elasticsearch.txt | 10 ------ .../script/AnalysisPredicateScript.java | 33 +++++++++++++++++++ 9 files changed, 107 insertions(+), 14 deletions(-) create mode 100644 modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/AnalysisPainlessExtension.java create mode 100644 modules/analysis-common/src/main/resources/META-INF/services/org.elasticsearch.painless.spi.PainlessExtension create mode 100644 modules/analysis-common/src/main/resources/org/elasticsearch/analysis/common/painless_whitelist.txt create mode 100644 modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/60_analysis_scripting.yml diff --git a/docs/reference/analysis/tokenfilters/condition-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/condition-tokenfilter.asciidoc index 496c5e5f1b003..ab997adf2bd6a 100644 --- a/docs/reference/analysis/tokenfilters/condition-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/condition-tokenfilter.asciidoc @@ -33,9 +33,9 @@ PUT /condition_example "filter" : { "my_condition" : { "type" : "condition", - "filters" : [ "lowercase" ], + "filter" : [ "lowercase" ], "script" : { - "source" : "return term.term.length() < 5" <1> + "source" : "term.term().length() < 5" <1> } } } diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/AnalysisPainlessExtension.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/AnalysisPainlessExtension.java new file mode 100644 index 0000000000000..22ea0842a9c54 --- /dev/null +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/AnalysisPainlessExtension.java @@ -0,0 +1,22 @@ +package org.elasticsearch.analysis.common; + +import org.elasticsearch.painless.spi.PainlessExtension; +import org.elasticsearch.painless.spi.Whitelist; +import org.elasticsearch.painless.spi.WhitelistLoader; +import org.elasticsearch.script.AnalysisPredicateScript; +import org.elasticsearch.script.ScriptContext; + +import java.util.Collections; +import java.util.List; +import java.util.Map; + +public class AnalysisPainlessExtension implements PainlessExtension { + + private static final Whitelist WHITELIST = + 
WhitelistLoader.loadFromResourceFiles(AnalysisPainlessExtension.class, "painless_whitelist.txt"); + + @Override + public Map, List> getContextWhitelists() { + return Collections.singletonMap(AnalysisPredicateScript.CONTEXT, Collections.singletonList(WHITELIST)); + } +} diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterFactory.java index 59c70faaed90f..5855d93461495 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterFactory.java @@ -44,7 +44,7 @@ public class ScriptedConditionTokenFilterFactory extends AbstractTokenFilterFact } this.factory = scriptService.compile(script, AnalysisPredicateScript.CONTEXT); - this.filterNames = settings.getAsList("filters"); + this.filterNames = settings.getAsList("filter"); } @Override @@ -70,6 +70,7 @@ public TokenStream create(TokenStream tokenStream) { protected boolean shouldFilter() { term.term = termAtt; term.posInc = posIncAtt.getPositionIncrement(); + term.pos += term.posInc; term.posLen = posLenAtt.getPositionLength(); term.startOffset = offsetAtt.startOffset(); term.endOffset = offsetAtt.endOffset(); diff --git a/modules/analysis-common/src/main/resources/META-INF/services/org.elasticsearch.painless.spi.PainlessExtension b/modules/analysis-common/src/main/resources/META-INF/services/org.elasticsearch.painless.spi.PainlessExtension new file mode 100644 index 0000000000000..44e98a3dd9c68 --- /dev/null +++ b/modules/analysis-common/src/main/resources/META-INF/services/org.elasticsearch.painless.spi.PainlessExtension @@ -0,0 +1 @@ +org.elasticsearch.analysis.common.AnalysisPainlessExtension \ No newline at end of file diff --git a/modules/analysis-common/src/main/resources/org/elasticsearch/analysis/common/painless_whitelist.txt b/modules/analysis-common/src/main/resources/org/elasticsearch/analysis/common/painless_whitelist.txt new file mode 100644 index 0000000000000..26368c7e527a5 --- /dev/null +++ b/modules/analysis-common/src/main/resources/org/elasticsearch/analysis/common/painless_whitelist.txt @@ -0,0 +1,29 @@ +# +# Licensed to Elasticsearch under one or more contributor +# license agreements. See the NOTICE file distributed with +# this work for additional information regarding copyright +# ownership. Elasticsearch licenses this file to you under +# the Apache License, Version 2.0 (the "License"); you may +# not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+# + +class org.elasticsearch.script.AnalysisPredicateScript$Term { + CharSequence getTerm() + int getPosition() + int getPositionIncrement() + int getPositionLength() + int getStartOffset() + int getEndOffset() + String getType() + boolean isKeyword() +} \ No newline at end of file diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterTests.java index 6dd3aec8b664c..bb3371b00f224 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterTests.java @@ -28,7 +28,7 @@ public void testSimpleCondition() throws Exception { .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .put("index.analysis.filter.cond.type", "condition") .put("index.analysis.filter.cond.script.source", "return \"two\".equals(term.term)") - .putList("index.analysis.filter.cond.filters", "uppercase") + .putList("index.analysis.filter.cond.filter", "uppercase") .put("index.analysis.analyzer.myAnalyzer.type", "custom") .put("index.analysis.analyzer.myAnalyzer.tokenizer", "standard") .putList("index.analysis.analyzer.myAnalyzer.filter", "cond") diff --git a/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/60_analysis_scripting.yml b/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/60_analysis_scripting.yml new file mode 100644 index 0000000000000..85d51b0b89a68 --- /dev/null +++ b/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/60_analysis_scripting.yml @@ -0,0 +1,17 @@ +## Test analysis scripts + +"condition": + - do: + indices.analyze: + body: "Vorsprung Durch Technik" + tokenizer: "whitespace" + filter: + - type: condition + script: + source: "token.term().length() > 5" + filter: [ lowercase ] + + - length: { tokens: 3 } + - match: { tokens.0.token: "vorsprung" } + - match: { tokens.1.token: "Durch" } + - match: { tokens.2.token: "technik" } \ No newline at end of file diff --git a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/org.elasticsearch.txt b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/org.elasticsearch.txt index 7d995a1732476..6495659d9cdc0 100644 --- a/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/org.elasticsearch.txt +++ b/modules/lang-painless/src/main/resources/org/elasticsearch/painless/spi/org.elasticsearch.txt @@ -175,13 +175,3 @@ class org.elasticsearch.index.similarity.ScriptedSimilarity$Doc { int getLength() float getFreq() } - -class org.elasticsearch.script.AnalysisPredicateScript$Term { - CharSequence term - int posInc - int posLen - int startOffset - int endOffset - String type - boolean isKeyword -} diff --git a/server/src/main/java/org/elasticsearch/script/AnalysisPredicateScript.java b/server/src/main/java/org/elasticsearch/script/AnalysisPredicateScript.java index 2d4ef769892c5..0ff6708c992e6 100644 --- a/server/src/main/java/org/elasticsearch/script/AnalysisPredicateScript.java +++ b/server/src/main/java/org/elasticsearch/script/AnalysisPredicateScript.java @@ -29,12 +29,45 @@ public abstract class AnalysisPredicateScript { */ public static class Term { public CharSequence term; + public int pos; public int posInc; public int posLen; public int startOffset; public int endOffset; public 
String type; public boolean isKeyword; + + public CharSequence getTerm() { + return term; + } + + public int getPositionIncrement() { + return posInc; + } + + public int getPosition() { + return pos; + } + + public int getPositionLength() { + return posLen; + } + + public int getStartOffset() { + return startOffset; + } + + public int getEndOffset() { + return endOffset; + } + + public String getType() { + return type; + } + + public boolean isKeyword() { + return isKeyword; + } } /** From 609951b228a1b435932ab7899d86490dc1881cfe Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Tue, 17 Jul 2018 14:25:36 +0100 Subject: [PATCH 014/107] Term -> Token; move ScriptContext into module --- modules/analysis-common/build.gradle | 5 +++ .../common/AnalysisPainlessExtension.java | 1 - .../common}/AnalysisPredicateScript.java | 10 +++--- .../analysis/common/CommonAnalysisPlugin.java | 10 +++++- .../ScriptedConditionTokenFilterFactory.java | 21 ++++++------ .../analysis/common/painless_whitelist.txt | 3 +- .../ScriptedConditionTokenFilterTests.java | 4 +-- .../analysis-common/60_analysis_scripting.yml | 15 +++++---- .../painless/AnalysisScriptTests.java | 32 ------------------- .../elasticsearch/script/ScriptModule.java | 1 - .../script/MockScriptEngine.java | 12 ------- 11 files changed, 41 insertions(+), 73 deletions(-) rename {server/src/main/java/org/elasticsearch/script => modules/analysis-common/src/main/java/org/elasticsearch/analysis/common}/AnalysisPredicateScript.java (89%) delete mode 100644 modules/lang-painless/src/test/java/org/elasticsearch/painless/AnalysisScriptTests.java diff --git a/modules/analysis-common/build.gradle b/modules/analysis-common/build.gradle index a2160c58658a8..e5193ab3c8451 100644 --- a/modules/analysis-common/build.gradle +++ b/modules/analysis-common/build.gradle @@ -20,8 +20,13 @@ esplugin { description 'Adds "built in" analyzers to Elasticsearch.' 
classname 'org.elasticsearch.analysis.common.CommonAnalysisPlugin' + extendedPlugins = ['lang-painless'] } dependencies { compileOnly project(':modules:lang-painless') } + +integTestCluster { + module project(':modules:lang-painless') +} diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/AnalysisPainlessExtension.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/AnalysisPainlessExtension.java index 22ea0842a9c54..ab2a9a4263391 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/AnalysisPainlessExtension.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/AnalysisPainlessExtension.java @@ -3,7 +3,6 @@ import org.elasticsearch.painless.spi.PainlessExtension; import org.elasticsearch.painless.spi.Whitelist; import org.elasticsearch.painless.spi.WhitelistLoader; -import org.elasticsearch.script.AnalysisPredicateScript; import org.elasticsearch.script.ScriptContext; import java.util.Collections; diff --git a/server/src/main/java/org/elasticsearch/script/AnalysisPredicateScript.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/AnalysisPredicateScript.java similarity index 89% rename from server/src/main/java/org/elasticsearch/script/AnalysisPredicateScript.java rename to modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/AnalysisPredicateScript.java index 0ff6708c992e6..7de588a958c77 100644 --- a/server/src/main/java/org/elasticsearch/script/AnalysisPredicateScript.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/AnalysisPredicateScript.java @@ -17,7 +17,9 @@ * under the License. */ -package org.elasticsearch.script; +package org.elasticsearch.analysis.common; + +import org.elasticsearch.script.ScriptContext; /** * A predicate based on the current token in a TokenStream @@ -27,7 +29,7 @@ public abstract class AnalysisPredicateScript { /** * Encapsulation of the state of the current token */ - public static class Term { + public static class Token { public CharSequence term; public int pos; public int posInc; @@ -73,13 +75,13 @@ public boolean isKeyword() { /** * Returns {@code true} if the current term matches the predicate */ - public abstract boolean execute(Term term); + public abstract boolean execute(Token token); public interface Factory { AnalysisPredicateScript newInstance(); } - public static final String[] PARAMETERS = new String[]{ "term" }; + public static final String[] PARAMETERS = new String[]{ "token" }; public static final ScriptContext CONTEXT = new ScriptContext<>("analysis", Factory.class); } diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java index dc2ae18b1f52d..f43b2c03d82f1 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java @@ -134,6 +134,8 @@ import org.elasticsearch.indices.analysis.PreBuiltCacheFactory.CachingStrategy; import org.elasticsearch.plugins.AnalysisPlugin; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.plugins.ScriptPlugin; +import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptService; import org.elasticsearch.threadpool.ThreadPool; import 
org.elasticsearch.watcher.ResourceWatcherService; @@ -149,7 +151,7 @@ import static org.elasticsearch.plugins.AnalysisPlugin.requiresAnalysisSettings; -public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin { +public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin, ScriptPlugin { private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(Loggers.getLogger(CommonAnalysisPlugin.class)); @@ -161,6 +163,12 @@ public Collection createComponents(Client client, ClusterService cluster return Collections.emptyList(); } + @Override + @SuppressWarnings("rawtypes") // TODO ScriptPlugin needs to change this to pass precommit? + public List getContexts() { + return Collections.singletonList(AnalysisPredicateScript.CONTEXT); + } + @Override public Map>> getAnalyzers() { Map>> analyzers = new TreeMap<>(); diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterFactory.java index 5855d93461495..6ceebf07389b6 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterFactory.java @@ -13,7 +13,6 @@ import org.elasticsearch.index.analysis.AbstractTokenFilterFactory; import org.elasticsearch.index.analysis.ReferringFilterFactory; import org.elasticsearch.index.analysis.TokenFilterFactory; -import org.elasticsearch.script.AnalysisPredicateScript; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.ScriptType; @@ -56,7 +55,7 @@ public TokenStream create(TokenStream tokenStream) { return in; }; AnalysisPredicateScript script = factory.newInstance(); - final AnalysisPredicateScript.Term term = new AnalysisPredicateScript.Term(); + final AnalysisPredicateScript.Token token = new AnalysisPredicateScript.Token(); return new ConditionalTokenFilter(tokenStream, filter) { CharTermAttribute termAtt = addAttribute(CharTermAttribute.class); @@ -68,15 +67,15 @@ public TokenStream create(TokenStream tokenStream) { @Override protected boolean shouldFilter() { - term.term = termAtt; - term.posInc = posIncAtt.getPositionIncrement(); - term.pos += term.posInc; - term.posLen = posLenAtt.getPositionLength(); - term.startOffset = offsetAtt.startOffset(); - term.endOffset = offsetAtt.endOffset(); - term.type = typeAtt.type(); - term.isKeyword = keywordAtt.isKeyword(); - return script.execute(term); + token.term = termAtt; + token.posInc = posIncAtt.getPositionIncrement(); + token.pos += token.posInc; + token.posLen = posLenAtt.getPositionLength(); + token.startOffset = offsetAtt.startOffset(); + token.endOffset = offsetAtt.endOffset(); + token.type = typeAtt.type(); + token.isKeyword = keywordAtt.isKeyword(); + return script.execute(token); } }; } diff --git a/modules/analysis-common/src/main/resources/org/elasticsearch/analysis/common/painless_whitelist.txt b/modules/analysis-common/src/main/resources/org/elasticsearch/analysis/common/painless_whitelist.txt index 26368c7e527a5..83b70be58774e 100644 --- a/modules/analysis-common/src/main/resources/org/elasticsearch/analysis/common/painless_whitelist.txt +++ b/modules/analysis-common/src/main/resources/org/elasticsearch/analysis/common/painless_whitelist.txt @@ -16,8 +16,7 @@ # specific language governing permissions 
and limitations # under the License. # - -class org.elasticsearch.script.AnalysisPredicateScript$Term { +class org.elasticsearch.analysis.common.AnalysisPredicateScript$Token { CharSequence getTerm() int getPosition() int getPositionIncrement() diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterTests.java index bb3371b00f224..c7e47959bc67c 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterTests.java @@ -9,7 +9,6 @@ import org.elasticsearch.index.analysis.IndexAnalyzers; import org.elasticsearch.index.analysis.NamedAnalyzer; import org.elasticsearch.indices.analysis.AnalysisModule; -import org.elasticsearch.script.AnalysisPredicateScript; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptService; @@ -37,11 +36,12 @@ public void testSimpleCondition() throws Exception { AnalysisPredicateScript.Factory factory = () -> new AnalysisPredicateScript() { @Override - public boolean execute(Term term) { + public boolean execute(Token term) { return "two".contentEquals(term.term); } }; + @SuppressWarnings("unchecked") ScriptService scriptService = new ScriptService(indexSettings, Collections.emptyMap(), Collections.emptyMap()){ @Override public FactoryType compile(Script script, ScriptContext context) { diff --git a/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/60_analysis_scripting.yml b/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/60_analysis_scripting.yml index 85d51b0b89a68..2a632007c7890 100644 --- a/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/60_analysis_scripting.yml +++ b/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/60_analysis_scripting.yml @@ -3,15 +3,16 @@ "condition": - do: indices.analyze: - body: "Vorsprung Durch Technik" - tokenizer: "whitespace" - filter: - - type: condition - script: - source: "token.term().length() > 5" + body: + text: "Vorsprung Durch Technik" + tokenizer: "whitespace" + filter: + - type: condition + script: + source: "token.getTerm().length() > 5" filter: [ lowercase ] - length: { tokens: 3 } - match: { tokens.0.token: "vorsprung" } - match: { tokens.1.token: "Durch" } - - match: { tokens.2.token: "technik" } \ No newline at end of file + - match: { tokens.2.token: "technik" } diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/AnalysisScriptTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/AnalysisScriptTests.java deleted file mode 100644 index 43d31258c69f1..0000000000000 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/AnalysisScriptTests.java +++ /dev/null @@ -1,32 +0,0 @@ -package org.elasticsearch.painless; - -import org.elasticsearch.painless.spi.Whitelist; -import org.elasticsearch.script.AnalysisPredicateScript; -import org.elasticsearch.script.ScriptContext; - -import java.util.Collections; -import java.util.HashMap; -import java.util.List; -import java.util.Map; - -public class AnalysisScriptTests extends ScriptTestCase { - - @Override - protected Map, List> scriptContexts() { - Map, List> contexts = new HashMap<>(); - 
contexts.put(AnalysisPredicateScript.CONTEXT, Whitelist.BASE_WHITELISTS); - return contexts; - } - - public void testAnalysisScript() { - AnalysisPredicateScript.Factory factory = scriptEngine.compile("test", "return \"one\".contentEquals(term.term)", - AnalysisPredicateScript.CONTEXT, Collections.emptyMap()); - - AnalysisPredicateScript script = factory.newInstance(); - AnalysisPredicateScript.Term term = new AnalysisPredicateScript.Term(); - term.term = "one"; - assertTrue(script.execute(term)); - term.term = "two"; - assertFalse(script.execute(term)); - } -} diff --git a/server/src/main/java/org/elasticsearch/script/ScriptModule.java b/server/src/main/java/org/elasticsearch/script/ScriptModule.java index fe8de816e6906..042953117c5a5 100644 --- a/server/src/main/java/org/elasticsearch/script/ScriptModule.java +++ b/server/src/main/java/org/elasticsearch/script/ScriptModule.java @@ -57,7 +57,6 @@ public class ScriptModule { SimilarityWeightScript.CONTEXT, TemplateScript.CONTEXT, MovingFunctionScript.CONTEXT, - AnalysisPredicateScript.CONTEXT, ScriptedMetricAggContexts.InitScript.CONTEXT, ScriptedMetricAggContexts.MapScript.CONTEXT, ScriptedMetricAggContexts.CombineScript.CONTEXT, diff --git a/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java b/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java index 2bd29dca840f0..e608bd13d2559 100644 --- a/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java +++ b/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java @@ -128,9 +128,6 @@ public String execute() { } else if (context.instanceClazz.equals(ScriptedMetricAggContexts.ReduceScript.class)) { ScriptedMetricAggContexts.ReduceScript.Factory factory = mockCompiled::createMetricAggReduceScript; return context.factoryClazz.cast(factory); - } else if (context.instanceClazz.equals(AnalysisPredicateScript.class)) { - AnalysisPredicateScript.Factory factory = mockCompiled::createAnalysisPredicateScript; - return context.factoryClazz.cast(factory); } throw new IllegalArgumentException("mock script engine does not know how to handle context [" + context.name + "]"); } @@ -212,15 +209,6 @@ public ScriptedMetricAggContexts.CombineScript createMetricAggCombineScript(Map< public ScriptedMetricAggContexts.ReduceScript createMetricAggReduceScript(Map params, List states) { return new MockMetricAggReduceScript(params, states, script != null ? script : ctx -> 42d); } - - public AnalysisPredicateScript createAnalysisPredicateScript() { - return new AnalysisPredicateScript() { - @Override - public boolean execute(Term term) { - return true; - } - }; - } } public class MockExecutableScript implements ExecutableScript { From 801a704918e59e42e1978dd6de0163336bb5cd9d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Fri, 13 Jul 2018 14:34:02 +0200 Subject: [PATCH 015/107] Re-instate link in StringFunctionUtils javadocs The previous errors in compileJava were not caused by the brackets but by the content of the @link section. Corrected this so it's a working javadoc link again.
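For reference, a minimal sketch of the corrected javadoc pattern; the class and method below are illustrative stand-ins for the patched StringFunctionUtils helpers, not the actual source:

    // Illustrative sketch only: a bare "@link Class.method" reference in a
    // javadoc comment is rendered as plain text (and can trip strict doclint
    // checks), while the braced "{@link Class#method(args)}" form resolves
    // to a real link.
    final class JavadocLinkSketch {

        /**
         * Trims trailing whitespace from the given String. Uses
         * {@link Character#isWhitespace(char)} to decide what counts as whitespace.
         */
        static String trimTrailing(String s) {
            int end = s.length();
            while (end > 0 && Character.isWhitespace(s.charAt(end - 1))) {
                end--;
            }
            return s.substring(0, end);
        }

        private JavadocLinkSketch() {}
    }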
--- .../function/scalar/string/StringFunctionUtils.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/StringFunctionUtils.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/StringFunctionUtils.java index 75db52ed09968..cef826d37ed5c 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/StringFunctionUtils.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/expression/function/scalar/string/StringFunctionUtils.java @@ -8,7 +8,7 @@ abstract class StringFunctionUtils { /** - * Trims the trailing whitespace characters from the given String. Uses @link java.lang.Character.isWhitespace(char) + * Trims the trailing whitespace characters from the given String. Uses {@link Character#isWhitespace(char)} * to determine if a character is whitespace or not. * * @param s the original String @@ -27,7 +27,7 @@ static String trimTrailingWhitespaces(String s) { } /** - * Trims the leading whitespace characters from the given String. Uses @link java.lang.Character.isWhitespace(char) + * Trims the leading whitespace characters from the given String. Uses {@link Character#isWhitespace(char)} * to determine if a character is whitespace or not. * * @param s the original String From f923d9cd8fa36f8d73e363171db979ab400d4837 Mon Sep 17 00:00:00 2001 From: Clinton Gormley Date: Fri, 13 Jul 2018 15:40:38 +0200 Subject: [PATCH 016/107] Docs: Change formatting of Cloud options --- docs/reference/setup/install.asciidoc | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/docs/reference/setup/install.asciidoc b/docs/reference/setup/install.asciidoc index 7675e5ad14659..4433ffb8c3876 100644 --- a/docs/reference/setup/install.asciidoc +++ b/docs/reference/setup/install.asciidoc @@ -1,11 +1,16 @@ [[install-elasticsearch]] == Installing Elasticsearch +[float] +=== Hosted Elasticsearch Elasticsearch can be run on your own hardware or using our hosted Elasticsearch Service on https://www.elastic.co/cloud[Elastic Cloud], which is available on AWS and GCP. You can https://www.elastic.co/cloud/elasticsearch-service/signup[try out the hosted service] for free. +[float] +=== Installing Elasticsearch Yourself + Elasticsearch is provided in the following package formats: [horizontal] From a21fb82b1ba27f9be4d01f424c94db80f263e336 Mon Sep 17 00:00:00 2001 From: Clinton Gormley Date: Fri, 13 Jul 2018 15:48:14 +0200 Subject: [PATCH 017/107] Docs: Restyled cloud link in getting started --- docs/reference/getting-started.asciidoc | 3 +++ 1 file changed, 3 insertions(+) diff --git a/docs/reference/getting-started.asciidoc b/docs/reference/getting-started.asciidoc index a29a743fed82f..ff00c310a4308 100755 --- a/docs/reference/getting-started.asciidoc +++ b/docs/reference/getting-started.asciidoc @@ -104,10 +104,13 @@ With that out of the way, let's get started with the fun part... == Installation +[TIP] +============== You can skip installation completely by using our hosted Elasticsearch Service on https://www.elastic.co/cloud[Elastic Cloud], which is available on AWS and GCP. You can https://www.elastic.co/cloud/elasticsearch-service/signup[try out the hosted service] for free. +============== Elasticsearch requires at least Java 8. Specifically as of this writing, it is recommended that you use the Oracle JDK version {jdk}. 
Java installation varies from platform to platform so we won't go into those details here. Oracle's recommended installation documentation can be found on http://docs.oracle.com/javase/8/docs/technotes/guides/install/install_overview.html[Oracle's website]. Suffice to say, before you install Elasticsearch, please check your Java version first by running (and then install/upgrade accordingly if needed): From 62fea58c1a9a59924ce0ee17a49a20dcd6bc22c5 Mon Sep 17 00:00:00 2001 From: Zachary Tong Date: Fri, 13 Jul 2018 10:07:42 -0400 Subject: [PATCH 018/107] [Rollup] Use composite's missing_bucket (#31402) We can leverage the composite agg's new `missing_bucket` feature on terms groupings. This means the aggregation criteria used in the indexer will now return null buckets for missing keys. Because all buckets are now returned (even if a key is null), we can guarantee correct doc counts with "combined" jobs (where a job rolls up multiple schemas). This was previously impossible since composite would ignore documents that didn't have _all_ the keys, meaning non-overlapping schemas would cause composite to return no buckets. Note: date_histo does not use `missing_bucket`, since a timestamp is always required. The docs have been adjusted to recommend a single, combined job. It also makes reference to the previous issue to help users that are upgrading (rather than just deleting the sections). --- .../en/rollup/understanding-groups.asciidoc | 237 +----------------- .../core/rollup/job/DateHistoGroupConfig.java | 1 - .../core/rollup/job/HistoGroupConfig.java | 1 + .../core/rollup/job/TermsGroupConfig.java | 1 + .../rollup/RollupRestTestStateCleaner.java | 24 +- .../rollup/RollupResponseTranslator.java | 1 + .../action/TransportPutRollupJobAction.java | 8 + .../xpack/rollup/job/IndexerUtils.java | 15 +- .../xpack/rollup/job/RollupIndexer.java | 1 + .../RollupResponseTranslationTests.java | 48 ++++ .../action/PutJobStateMachineTests.java | 55 +++- .../xpack/rollup/job/IndexerUtilsTests.java | 106 +++++++- .../xpack/test/rest/XPackRestIT.java | 2 +- .../xpack/restart/FullClusterRestartIT.java | 61 ++++- 14 files changed, 298 insertions(+), 263 deletions(-) diff --git a/x-pack/docs/en/rollup/understanding-groups.asciidoc b/x-pack/docs/en/rollup/understanding-groups.asciidoc index f57f905ae04c8..803555b2d73f7 100644 --- a/x-pack/docs/en/rollup/understanding-groups.asciidoc +++ b/x-pack/docs/en/rollup/understanding-groups.asciidoc @@ -121,16 +121,15 @@ if a field is useful for aggregating later, and how you might wish to use it (te === Grouping Limitations with heterogeneous indices -There is a known limitation to Rollup groups, due to some internal implementation details at this time. The Rollup feature leverages -the `composite` aggregation from Elasticsearch. At the moment, the composite agg only returns buckets when all keys in the tuple are non-null. -Put another way, if the you request keys `[A,B,C]` in the composite aggregation, the only documents that are aggregated are those that have -_all_ of the keys `A, B` and `C`. +There was previously a limitation in how Rollup could handle indices that had heterogeneous mappings (multiple, unrelated/non-overlapping +mappings). The recommendation at the time was to configure a separate job per data "type". For example, you might configure a separate +job for each Beats module that you had enabled (one for `process`, another for `filesystem`, etc). -Because Rollup uses the composite agg during the indexing process, it inherits this behavior. 
Practically speaking, if all of the documents -in your index are homogeneous (they have the same mapping), you can ignore this limitation and stop reading now. +This recommendation was driven by internal implementation details that caused document counts to be potentially incorrect if a single "merged" +job was used. -However, if you have a heterogeneous collection of documents that you wish to roll up, you may need to configure two or more jobs to -accurately cover the original data. +This limitation has since been alleviated. As of 6.4.0, it is now considered best practice to combine all rollup configurations +into a single job. As an example, if your index has two types of documents: @@ -157,7 +156,7 @@ and -------------------------------------------------- // NOTCONSOLE -it may be tempting to create a single, combined rollup job which covers both of these document types, something like this: +the best practice is to combine them into a single rollup job which covers both of these document types, like this: [source,js] -------------------------------------------------- @@ -191,222 +190,10 @@ PUT _xpack/rollup/job/combined -------------------------------------------------- // NOTCONSOLE -You can see that it includes a `terms` grouping on both "node" and "title", fields that are mutually exclusive in the document types. -*This will not work.* Because the `composite` aggregation (and by extension, Rollup) only returns buckets when all keys are non-null, -and there are no documents that have both a "node" field and a "title" field, this rollup job will not produce any rollups. - -Instead, you should configure two independent jobs (sharing the same index, or going to separate indices): - -[source,js] --------------------------------------------------- -PUT _xpack/rollup/job/sensor -{ - "index_pattern": "data-*", - "rollup_index": "data_rollup", - "cron": "*/30 * * * * ?", - "page_size" :1000, - "groups" : { - "date_histogram": { - "field": "timestamp", - "interval": "1h", - "delay": "7d" - }, - "terms": { - "fields": ["node"] - } - }, - "metrics": [ - { - "field": "temperature", - "metrics": ["min", "max", "sum"] - } - ] -} --------------------------------------------------- -// NOTCONSOLE - -[source,js] --------------------------------------------------- -PUT _xpack/rollup/job/purchases -{ - "index_pattern": "data-*", - "rollup_index": "data_rollup", - "cron": "*/30 * * * * ?", - "page_size" :1000, - "groups" : { - "date_histogram": { - "field": "timestamp", - "interval": "1h", - "delay": "7d" - }, - "terms": { - "fields": ["title"] - } - }, - "metrics": [ - { - "field": "price", - "metrics": ["avg"] - } - ] -} --------------------------------------------------- -// NOTCONSOLE - -Notice that each job now deals with a single "document type", and will not run into the limitations described above. We are working on changes -in core Elasticsearch to remove this limitation from the `composite` aggregation, and the documentation will be updated accordingly -when this particular scenario is fixed. - === Doc counts and overlapping jobs -There is an issue with doc counts, related to the above grouping limitation. Imagine you have two Rollup jobs saving to the same index, where -one job is a "subset" of another job. 
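The mechanism that lifts this old limitation is visible in the code changes further down in this patch: each terms and histogram values source now opts into the composite aggregation's missing_bucket mode. A minimal sketch, assuming the 6.4 composite Java API and illustrative field names, of the kind of source the indexer now builds:

    import org.elasticsearch.search.aggregations.bucket.composite.CompositeAggregationBuilder;
    import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder;
    import org.elasticsearch.search.aggregations.bucket.composite.TermsValuesSourceBuilder;

    import java.util.Arrays;
    import java.util.List;

    // Sketch: with missingBucket(true), a document that lacks "node" or
    // "title" still yields a bucket (with a null key for the absent field)
    // instead of being dropped, which is what makes a single combined
    // rollup job safe. Field names here are illustrative.
    class MissingBucketSketch {
        static CompositeAggregationBuilder combinedSources() {
            TermsValuesSourceBuilder node = new TermsValuesSourceBuilder("node.terms")
                    .field("node").missingBucket(true);
            TermsValuesSourceBuilder title = new TermsValuesSourceBuilder("title.terms")
                    .field("title").missingBucket(true);
            List<CompositeValuesSourceBuilder<?>> sources = Arrays.asList(node, title);
            return new CompositeAggregationBuilder("rollups", sources).size(1000);
        }
    }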
- -For example, you might have jobs with these two groupings: - -[source,js] --------------------------------------------------- -PUT _xpack/rollup/job/sensor-all -{ - "groups" : { - "date_histogram": { - "field": "timestamp", - "interval": "1h", - "delay": "7d" - }, - "terms": { - "fields": ["node"] - } - }, - "metrics": [ - { - "field": "price", - "metrics": ["avg"] - } - ] - ... -} --------------------------------------------------- -// NOTCONSOLE - -and - -[source,js] --------------------------------------------------- -PUT _xpack/rollup/job/sensor-building -{ - "groups" : { - "date_histogram": { - "field": "timestamp", - "interval": "1h", - "delay": "7d" - }, - "terms": { - "fields": ["node", "building"] - } - } - ... -} --------------------------------------------------- -// NOTCONSOLE - - -The first job `sensor-all` contains the groupings and metrics that apply to all data in the index. The second job is rolling up a subset -of data (in different buildings) which also include a building identifier. You did this because combining them would run into the limitation -described in the previous section. - -This _mostly_ works, but can sometimes return incorrect `doc_counts` when you search. All metrics will be valid however. - -The issue arises from the composite agg limitation described before, combined with search-time optimization. Imagine you try to run the -following aggregation: - -[source,js] --------------------------------------------------- -"aggs" : { - "nodes": { - "terms": { - "field": "node" - } - } -} --------------------------------------------------- -// NOTCONSOLE - -This aggregation could be serviced by either `sensor-all` or `sensor-building` job, since they both group on the node field. So the RollupSearch -API will search both of them and merge results. This will result in *correct* doc_counts and *correct* metrics. No problem here. - -The issue arises from an aggregation that can _only_ be serviced by `sensor-building`, like this one: - -[source,js] --------------------------------------------------- -"aggs" : { - "nodes": { - "terms": { - "field": "node" - }, - "aggs": { - "building": { - "terms": { - "field": "building" - } - } - } - } -} --------------------------------------------------- -// NOTCONSOLE - -Now we run into a problem. The RollupSearch API will correctly identify that only `sensor-building` job has all the required components -to answer the aggregation, and will search it exclusively. Unfortunately, due to the composite aggregation limitation, that job only -rolled up documents that have both a "node" and a "building" field. Meaning that the doc_counts for the `"nodes"` aggregation will not -include counts for any document that doesn't have `[node, building]` fields. - -- The `doc_count` for `"nodes"` aggregation will be incorrect because it only contains counts for `nodes` that also have buildings -- The `doc_count` for `"buildings"` aggregation will be correct -- Any metrics, on any level, will be correct - -==== Workarounds - -There are two main workarounds if you find yourself with a schema like the above. - -Easiest and most robust method: use separate indices to store your rollups. The limitations arise because you have several document -schemas co-habitating in a single index, which makes it difficult for rollups to correctly summarize. If you make several rollup -jobs and store them in separate indices, these sorts of difficulties do not arise. 
It does, however, keep you from searching across several -different rollup indices at the same time. - -The other workaround is to include an "off-target" aggregation in the query, which pulls in the "superset" job and corrects the doc counts. -The RollupSearch API determines the best job to search for each "leaf node" in the aggregation tree. So if we include a metric agg on `price`, -which was only defined in the `sensor-all` job, that will "pull in" the other job: - -[source,js] --------------------------------------------------- -"aggs" : { - "nodes": { - "terms": { - "field": "node" - }, - "aggs": { - "building": { - "terms": { - "field": "building" - } - }, - "avg_price": { - "avg": { "field": "price" } <1> - } - } - } -} --------------------------------------------------- -// NOTCONSOLE -<1> Adding an avg aggregation here will fix the doc counts - -Because only `sensor-all` job had an `avg` on the price field, the RollupSearch API is forced to pull in that additional job for searching, -and will merge/correct the doc_counts as appropriate. This sort of workaround applies to any additional aggregation -- metric or bucketing -- -although it can be tedious to look through the jobs and determine the right one to add. - -==== Status +There was previously an issue with document counts on "overlapping" job configurations, driven by the same internal implementation detail. +If there were two Rollup jobs saving to the same index, where one job is a "subset" of another job, it was possible that document counts +could be incorrect for certain aggregation arrangements. -We realize this is an onerous limitation, and somewhat breaks the rollup contract of "pick the fields to rollup, we do the rest". We are -actively working to get the limitation to `composite` agg fixed, and the related issues in Rollup. The documentation will be updated when -the fix is implemented. \ No newline at end of file +This issue has also since been eliminated in 6.4.0. 
\ No newline at end of file diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/DateHistoGroupConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/DateHistoGroupConfig.java index 4b4e4cf7b7c81..4a9fbde61d6be 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/DateHistoGroupConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/DateHistoGroupConfig.java @@ -159,7 +159,6 @@ public List> toBuilders() { vsBuilder.dateHistogramInterval(interval); vsBuilder.field(field); vsBuilder.timeZone(timeZone); - return Collections.singletonList(vsBuilder); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/HistoGroupConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/HistoGroupConfig.java index 8b8d53b4ce9af..2b1511077d955 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/HistoGroupConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/HistoGroupConfig.java @@ -96,6 +96,7 @@ public List> toBuilders() { = new HistogramValuesSourceBuilder(RollupField.formatIndexerAggName(f, HistogramAggregationBuilder.NAME)); vsBuilder.interval(interval); vsBuilder.field(f); + vsBuilder.missingBucket(true); return vsBuilder; }).collect(Collectors.toList()); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/TermsGroupConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/TermsGroupConfig.java index 2f1c35a73edb4..da73020f0087f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/TermsGroupConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/TermsGroupConfig.java @@ -80,6 +80,7 @@ public List> toBuilders() { TermsValuesSourceBuilder vsBuilder = new TermsValuesSourceBuilder(RollupField.formatIndexerAggName(f, TermsAggregationBuilder.NAME)); vsBuilder.field(f); + vsBuilder.missingBucket(true); return vsBuilder; }).collect(Collectors.toList()); } diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/RollupRestTestStateCleaner.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/RollupRestTestStateCleaner.java index 9938f3a41962b..ae171f138cf46 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/RollupRestTestStateCleaner.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/RollupRestTestStateCleaner.java @@ -6,7 +6,6 @@ package org.elasticsearch.xpack.core.rollup; import org.apache.http.HttpStatus; -import org.apache.logging.log4j.Logger; import org.elasticsearch.client.Response; import org.elasticsearch.client.RestClient; import org.elasticsearch.common.xcontent.support.XContentMapValues; @@ -27,21 +26,13 @@ public class RollupRestTestStateCleaner { - private final Logger logger; - private final RestClient adminClient; - - public RollupRestTestStateCleaner(Logger logger, RestClient adminClient) { - this.logger = logger; - this.adminClient = adminClient; - } - - public void clearRollupMetadata() throws Exception { - deleteAllJobs(); - waitForPendingTasks(); + public static void clearRollupMetadata(RestClient adminClient) throws Exception { + deleteAllJobs(adminClient); + waitForPendingTasks(adminClient); // indices will be deleted by the ESRestTestCase class } - private void waitForPendingTasks() throws 
Exception { + private static void waitForPendingTasks(RestClient adminClient) throws Exception { ESTestCase.assertBusy(() -> { try { Response response = adminClient.performRequest("GET", "/_cat/tasks", @@ -71,7 +62,7 @@ private void waitForPendingTasks() throws Exception { } @SuppressWarnings("unchecked") - private void deleteAllJobs() throws Exception { + private static void deleteAllJobs(RestClient adminClient) throws Exception { Response response = adminClient.performRequest("GET", "/_xpack/rollup/job/_all"); Map jobs = ESRestTestCase.entityAsMap(response); @SuppressWarnings("unchecked") @@ -83,9 +74,7 @@ private void deleteAllJobs() throws Exception { } for (Map jobConfig : jobConfigs) { - logger.debug(jobConfig); String jobId = (String) ((Map) jobConfig.get("config")).get("id"); - logger.debug("Deleting job " + jobId); try { response = adminClient.performRequest("DELETE", "/_xpack/rollup/job/" + jobId); } catch (Exception e) { @@ -95,7 +84,8 @@ private void deleteAllJobs() throws Exception { } private static String responseEntityToString(Response response) throws Exception { - try (BufferedReader reader = new BufferedReader(new InputStreamReader(response.getEntity().getContent(), StandardCharsets.UTF_8))) { + try (BufferedReader reader = new BufferedReader(new InputStreamReader(response.getEntity().getContent(), + StandardCharsets.UTF_8))) { return reader.lines().collect(Collectors.joining("\n")); } } diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupResponseTranslator.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupResponseTranslator.java index ba1002896c041..4042e98ef93fb 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupResponseTranslator.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/RollupResponseTranslator.java @@ -382,6 +382,7 @@ private static InternalAggregation unrollMultiBucket(InternalMultiBucketAggregat }); } else if (rolled instanceof StringTerms) { return unrollMultiBucket(rolled, original, currentTree, (bucket, bucketCount, subAggs) -> { + BytesRef key = new BytesRef(bucket.getKeyAsString().getBytes(StandardCharsets.UTF_8)); assert bucketCount >= 0; //TODO expose getFormatter(), keyed upstream in Core diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportPutRollupJobAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportPutRollupJobAction.java index 081b97b4ee777..889dfa3ac8efc 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportPutRollupJobAction.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportPutRollupJobAction.java @@ -173,6 +173,14 @@ static void updateMapping(RollupJob job, ActionListener rollupMeta = (Map)((Map) m).get(RollupField.ROLLUP_META); + + String stringVersion = (String)((Map) m).get(Rollup.ROLLUP_TEMPLATE_VERSION_FIELD); + if (stringVersion == null) { + listener.onFailure(new IllegalStateException("Could not determine version of existing rollup metadata for index [" + + indexName + "]")); + return; + } + if (rollupMeta.get(job.getConfig().getId()) != null) { String msg = "Cannot create rollup job [" + job.getConfig().getId() + "] because job was previously created (existing metadata)."; diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/IndexerUtils.java 
b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/IndexerUtils.java index e180e34c4cc26..efac4c2d61b98 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/IndexerUtils.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/IndexerUtils.java @@ -45,7 +45,7 @@ class IndexerUtils { * @param rollupIndex The index that holds rollups for this job * @return A list of rolled documents derived from the response */ - static List processBuckets(CompositeAggregation agg, String rollupIndex, RollupJobStats stats, + static List processBuckets(CompositeAggregation agg, String rollupIndex, RollupJobStats stats, GroupConfig groupConfig, String jobId) { logger.debug("Buckets: [" + agg.getBuckets().size() + "][" + jobId + "]"); @@ -80,6 +80,7 @@ private static CRC32 processKeys(Map keys, Map d doc.put(k + "." + RollupField.COUNT_FIELD, count); if (k.endsWith("." + DateHistogramAggregationBuilder.NAME)) { + assert v != null; doc.put(k + "." + RollupField.TIMESTAMP, v); doc.put(k + "." + RollupField.INTERVAL, groupConfig.getDateHisto().getInterval()); doc.put(k + "." + DateHistoGroupConfig.TIME_ZONE, groupConfig.getDateHisto().getTimeZone().toString()); @@ -87,10 +88,18 @@ private static CRC32 processKeys(Map keys, Map d } else if (k.endsWith("." + HistogramAggregationBuilder.NAME)) { doc.put(k + "." + RollupField.VALUE, v); doc.put(k + "." + RollupField.INTERVAL, groupConfig.getHisto().getInterval()); - docID.update(Numbers.doubleToBytes((Double)v), 0, 8); + if (v == null) { + // Arbitrary value to update the doc ID with for nulls + docID.update(19); + } else { + docID.update(Numbers.doubleToBytes((Double) v), 0, 8); + } } else if (k.endsWith("." + TermsAggregationBuilder.NAME)) { doc.put(k + "." 
+ RollupField.VALUE, v); - if (v instanceof String) { + if (v == null) { + // Arbitrary value to update the doc ID with for nulls + docID.update(19); + } else if (v instanceof String) { byte[] vs = ((String) v).getBytes(StandardCharsets.UTF_8); docID.update(vs, 0, vs.length); } else if (v instanceof Long) { diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java index a07f1e7d32e7c..1711c0e34eb1e 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java @@ -401,6 +401,7 @@ private CompositeAggregationBuilder createCompositeBuilder(RollupJobConfig confi composite.setMetaData(metadata); } composite.size(config.getPageSize()); + return composite; } diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupResponseTranslationTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupResponseTranslationTests.java index 7b03d8e8d038d..98e3ad8197a51 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupResponseTranslationTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupResponseTranslationTests.java @@ -1082,6 +1082,54 @@ public void testStringTerms() throws IOException { assertThat(unrolled.toString(), not(equalTo(responses.get(1).toString()))); } + public void testStringTermsNullValue() throws IOException { + TermsAggregationBuilder nonRollupTerms = new TermsAggregationBuilder("terms", ValueType.STRING) + .field("stringField"); + + TermsAggregationBuilder rollupTerms = new TermsAggregationBuilder("terms", ValueType.STRING) + .field("stringfield.terms." + RollupField.VALUE) + .subAggregation(new SumAggregationBuilder("terms." + COUNT_FIELD) + .field("stringfield.terms." + RollupField.COUNT_FIELD)); + + KeywordFieldMapper.Builder nrBuilder = new KeywordFieldMapper.Builder("terms"); + KeywordFieldMapper.KeywordFieldType nrFTterm = nrBuilder.fieldType(); + nrFTterm.setHasDocValues(true); + nrFTterm.setName(nonRollupTerms.field()); + + KeywordFieldMapper.Builder rBuilder = new KeywordFieldMapper.Builder("terms"); + KeywordFieldMapper.KeywordFieldType rFTterm = rBuilder.fieldType(); + rFTterm.setHasDocValues(true); + rFTterm.setName(rollupTerms.field()); + + NumberFieldMapper.Builder valueBuilder = new NumberFieldMapper.Builder("terms." + RollupField.COUNT_FIELD, + NumberFieldMapper.NumberType.LONG); + MappedFieldType rFTvalue = valueBuilder.fieldType(); + rFTvalue.setHasDocValues(true); + rFTvalue.setName("stringfield.terms." 
+ RollupField.COUNT_FIELD); + + List responses = doQueries(new MatchAllDocsQuery(), + iw -> { + iw.addDocument(stringValueDoc("abc")); + iw.addDocument(stringValueDoc("abc")); + iw.addDocument(stringValueDoc("abc")); + + // off target + Document doc = new Document(); + doc.add(new SortedSetDocValuesField("otherField", new BytesRef("other"))); + iw.addDocument(doc); + }, nonRollupTerms, + iw -> { + iw.addDocument(stringValueRollupDoc("abc", 3)); + }, rollupTerms, + new MappedFieldType[]{nrFTterm}, new MappedFieldType[]{rFTterm, rFTvalue}); + + InternalAggregation unrolled = RollupResponseTranslator.unrollAgg(responses.get(1), null, null, 0); + + // The null_value placeholder should be removed from the response and not visible here + assertThat(unrolled.toString(), equalTo(responses.get(0).toString())); + assertThat(unrolled.toString(), not(equalTo(responses.get(1).toString()))); + } + public void testLongTerms() throws IOException { TermsAggregationBuilder nonRollupTerms = new TermsAggregationBuilder("terms", ValueType.LONG) .field("longField"); diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/PutJobStateMachineTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/PutJobStateMachineTests.java index 64cf9d2e3fe21..58fa9d4533bc3 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/PutJobStateMachineTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/PutJobStateMachineTests.java @@ -28,9 +28,12 @@ import org.elasticsearch.xpack.core.rollup.job.RollupJob; import org.elasticsearch.xpack.core.rollup.job.RollupJobConfig; import org.elasticsearch.xpack.core.rollup.ConfigTestHelpers; +import org.elasticsearch.xpack.rollup.Rollup; import org.mockito.ArgumentCaptor; import java.util.Collections; +import java.util.HashMap; +import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; @@ -203,6 +206,43 @@ public void testNoMetadataInMapping() { verify(client).execute(eq(GetMappingsAction.INSTANCE), any(GetMappingsRequest.class), any()); } + @SuppressWarnings("unchecked") + public void testNoMappingVersion() { + RollupJob job = new RollupJob(ConfigTestHelpers.getRollupJob("foo").build(), Collections.emptyMap()); + + ActionListener testListener = ActionListener.wrap(response -> { + fail("Listener success should not have been triggered."); + }, e -> { + assertThat(e.getMessage(), equalTo("Could not determine version of existing rollup metadata for index [" + + job.getConfig().getRollupIndex() + "]")); + }); + + Logger logger = mock(Logger.class); + Client client = mock(Client.class); + + ArgumentCaptor requestCaptor = ArgumentCaptor.forClass(ActionListener.class); + doAnswer(invocation -> { + GetMappingsResponse response = mock(GetMappingsResponse.class); + Map m = new HashMap<>(2); + m.put(RollupField.ROLLUP_META, + Collections.singletonMap(job.getConfig().getId(), job.getConfig())); + MappingMetaData meta = new MappingMetaData(RollupField.TYPE_NAME, + Collections.singletonMap("_meta", m)); + ImmutableOpenMap.Builder builder = ImmutableOpenMap.builder(1); + builder.put(RollupField.TYPE_NAME, meta); + + ImmutableOpenMap.Builder> builder2 = ImmutableOpenMap.builder(1); + builder2.put(job.getConfig().getRollupIndex(), builder.build()); + + when(response.getMappings()).thenReturn(builder2.build()); + requestCaptor.getValue().onResponse(response); + return null; + }).when(client).execute(eq(GetMappingsAction.INSTANCE), 
any(GetMappingsRequest.class), requestCaptor.capture()); + + TransportPutRollupJobAction.updateMapping(job, testListener, mock(PersistentTasksService.class), client, logger); + verify(client).execute(eq(GetMappingsAction.INSTANCE), any(GetMappingsRequest.class), any()); + } + @SuppressWarnings("unchecked") public void testJobAlreadyInMapping() { RollupJob job = new RollupJob(ConfigTestHelpers.getRollupJob("foo").build(), Collections.emptyMap()); @@ -219,10 +259,12 @@ public void testJobAlreadyInMapping() { ArgumentCaptor requestCaptor = ArgumentCaptor.forClass(ActionListener.class); doAnswer(invocation -> { GetMappingsResponse response = mock(GetMappingsResponse.class); + Map m = new HashMap<>(2); + m.put(Rollup.ROLLUP_TEMPLATE_VERSION_FIELD, Version.V_6_4_0); + m.put(RollupField.ROLLUP_META, + Collections.singletonMap(job.getConfig().getId(), job.getConfig())); MappingMetaData meta = new MappingMetaData(RollupField.TYPE_NAME, - Collections.singletonMap("_meta", - Collections.singletonMap(RollupField.ROLLUP_META, - Collections.singletonMap(job.getConfig().getId(), job.getConfig())))); + Collections.singletonMap("_meta", m)); ImmutableOpenMap.Builder builder = ImmutableOpenMap.builder(1); builder.put(RollupField.TYPE_NAME, meta); @@ -258,9 +300,12 @@ public void testAddJobToMapping() { ArgumentCaptor requestCaptor = ArgumentCaptor.forClass(ActionListener.class); doAnswer(invocation -> { GetMappingsResponse response = mock(GetMappingsResponse.class); + Map m = new HashMap<>(2); + m.put(Rollup.ROLLUP_TEMPLATE_VERSION_FIELD, Version.V_6_4_0); + m.put(RollupField.ROLLUP_META, + Collections.singletonMap(unrelatedJob.getId(), unrelatedJob)); MappingMetaData meta = new MappingMetaData(RollupField.TYPE_NAME, - Collections.singletonMap("_meta", Collections.singletonMap(RollupField.ROLLUP_META, - Collections.singletonMap(unrelatedJob.getId(), unrelatedJob)))); + Collections.singletonMap("_meta", m)); ImmutableOpenMap.Builder builder = ImmutableOpenMap.builder(1); builder.put(RollupField.TYPE_NAME, meta); diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/IndexerUtilsTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/IndexerUtilsTests.java index 53421faa9bc38..07ad0af7f1c38 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/IndexerUtilsTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/IndexerUtilsTests.java @@ -15,6 +15,7 @@ import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.store.Directory; import org.elasticsearch.action.index.IndexRequest; +import org.elasticsearch.common.Strings; import org.elasticsearch.index.mapper.DateFieldMapper; import org.elasticsearch.index.mapper.MappedFieldType; import org.elasticsearch.index.mapper.NumberFieldMapper; @@ -33,12 +34,13 @@ import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder; import org.elasticsearch.search.aggregations.metrics.sum.SumAggregationBuilder; +import org.elasticsearch.xpack.core.rollup.ConfigTestHelpers; import org.elasticsearch.xpack.core.rollup.RollupField; import org.elasticsearch.xpack.core.rollup.job.DateHistoGroupConfig; import org.elasticsearch.xpack.core.rollup.job.GroupConfig; import org.elasticsearch.xpack.core.rollup.job.MetricConfig; import org.elasticsearch.xpack.core.rollup.job.RollupJobStats; -import org.elasticsearch.xpack.core.rollup.ConfigTestHelpers; +import 
org.elasticsearch.xpack.core.rollup.job.TermsGroupConfig; import org.joda.time.DateTime; import org.mockito.stubbing.Answer; @@ -50,8 +52,8 @@ import java.util.List; import java.util.Map; -import static org.mockito.Mockito.mock; import static org.hamcrest.Matchers.equalTo; +import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; public class IndexerUtilsTests extends AggregatorTestCase { @@ -359,6 +361,106 @@ public void testKeyOrdering() { assertThat(docs.get(0).id(), equalTo("1237859798")); } + public void testNullKeys() { + CompositeAggregation composite = mock(CompositeAggregation.class); + + when(composite.getBuckets()).thenAnswer((Answer>) invocationOnMock -> { + List foos = new ArrayList<>(); + + CompositeAggregation.Bucket bucket = mock(CompositeAggregation.Bucket.class); + LinkedHashMap keys = new LinkedHashMap<>(3); + keys.put("bar.terms", null); + keys.put("abc.histogram", null); + when(bucket.getKey()).thenReturn(keys); + + Aggregations aggs = new Aggregations(Collections.emptyList()); + when(bucket.getAggregations()).thenReturn(aggs); + when(bucket.getDocCount()).thenReturn(1L); + + foos.add(bucket); + + return foos; + }); + + GroupConfig.Builder groupConfig = ConfigTestHelpers.getGroupConfig(); + groupConfig.setHisto(ConfigTestHelpers.getHisto().setFields(Collections.singletonList("abc")).build()); + + List docs = IndexerUtils.processBuckets(composite, "foo", new RollupJobStats(), groupConfig.build(), "foo"); + assertThat(docs.size(), equalTo(1)); + assertFalse(Strings.isNullOrEmpty(docs.get(0).id())); + } + + public void testMissingBuckets() throws IOException { + String indexName = randomAlphaOfLengthBetween(1, 10); + RollupJobStats stats= new RollupJobStats(0, 0, 0, 0); + + String metricField = "metric_field"; + String valueField = "value_field"; + + Directory directory = newDirectory(); + RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory); + + int numDocs = 10; + + for (int i = 0; i < numDocs; i++) { + Document document = new Document(); + + // Every other doc omit the valueField, so that we get some null buckets + if (i % 2 == 0) { + document.add(new SortedNumericDocValuesField(valueField, i)); + document.add(new LongPoint(valueField, i)); + } + document.add(new SortedNumericDocValuesField(metricField, i)); + document.add(new LongPoint(metricField, i)); + indexWriter.addDocument(document); + } + + indexWriter.close(); + + IndexReader indexReader = DirectoryReader.open(directory); + IndexSearcher indexSearcher = newIndexSearcher(indexReader); + + MappedFieldType valueFieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG); + valueFieldType.setName(valueField); + valueFieldType.setHasDocValues(true); + valueFieldType.setName(valueField); + + MappedFieldType metricFieldType = new NumberFieldMapper.NumberFieldType(NumberFieldMapper.NumberType.LONG); + metricFieldType.setName(metricField); + metricFieldType.setHasDocValues(true); + metricFieldType.setName(metricField); + + // Setup the composite agg + TermsGroupConfig termsGroupConfig = new TermsGroupConfig.Builder().setFields(Collections.singletonList(valueField)).build(); + CompositeAggregationBuilder compositeBuilder = new CompositeAggregationBuilder(RollupIndexer.AGGREGATION_NAME, + termsGroupConfig.toBuilders()).size(numDocs*2); + + MetricConfig metricConfig = new MetricConfig.Builder().setField(metricField).setMetrics(Collections.singletonList("max")).build(); + metricConfig.toBuilders().forEach(compositeBuilder::subAggregation); + + Aggregator 
aggregator = createAggregator(compositeBuilder, indexSearcher, valueFieldType, metricFieldType); + aggregator.preCollection(); + indexSearcher.search(new MatchAllDocsQuery(), aggregator); + aggregator.postCollection(); + CompositeAggregation composite = (CompositeAggregation) aggregator.buildAggregation(0L); + indexReader.close(); + directory.close(); + + List docs = IndexerUtils.processBuckets(composite, indexName, stats, + ConfigTestHelpers.getGroupConfig().build(), "foo"); + + assertThat(docs.size(), equalTo(6)); + for (IndexRequest doc : docs) { + Map map = doc.sourceAsMap(); + Object value = map.get(valueField + "." + TermsAggregationBuilder.NAME + "." + RollupField.VALUE); + if (value == null) { + assertThat(map.get(valueField + "." + TermsAggregationBuilder.NAME + "." + RollupField.COUNT_FIELD), equalTo(5)); + } else { + assertThat(map.get(valueField + "." + TermsAggregationBuilder.NAME + "." + RollupField.COUNT_FIELD), equalTo(1)); + } + } + } + interface Mock { List getBuckets(); } diff --git a/x-pack/plugin/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java b/x-pack/plugin/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java index 412c75f0e639c..f1d9eb1fb3f24 100644 --- a/x-pack/plugin/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java +++ b/x-pack/plugin/src/test/java/org/elasticsearch/xpack/test/rest/XPackRestIT.java @@ -263,7 +263,7 @@ private void clearMlState() throws Exception { */ private void clearRollupState() throws Exception { if (isRollupTest()) { - new RollupRestTestStateCleaner(logger, adminClient()).clearRollupMetadata(); + RollupRestTestStateCleaner.clearRollupMetadata(adminClient()); } } diff --git a/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java b/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java index 5276abdbfb1d8..ba6f9e9167821 100644 --- a/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java +++ b/x-pack/qa/full-cluster-restart/src/test/java/org/elasticsearch/xpack/restart/FullClusterRestartIT.java @@ -18,6 +18,7 @@ import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContent; +import org.elasticsearch.common.xcontent.support.XContentMapValues; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.test.StreamsUtils; import org.elasticsearch.test.rest.ESRestTestCase; @@ -531,7 +532,10 @@ private void assertRollUpJob(final String rollupJob) throws Exception { // check that the rollup job is started using the RollUp API final Request getRollupJobRequest = new Request("GET", "_xpack/rollup/job/" + rollupJob); Map getRollupJobResponse = toMap(client().performRequest(getRollupJobRequest)); - assertThat(ObjectPath.eval("jobs.0.status.job_state", getRollupJobResponse), expectedStates); + Map job = getJob(getRollupJobResponse, rollupJob); + if (job != null) { + assertThat(ObjectPath.eval("status.job_state", job), expectedStates); + } // check that the rollup job is started using the Tasks API final Request taskRequest = new Request("GET", "_tasks"); @@ -547,15 +551,27 @@ private void assertRollUpJob(final String rollupJob) throws Exception { // check that the rollup job is started using the Cluster State API final Request clusterStateRequest = new Request("GET", "_cluster/state/metadata"); Map clusterStateResponse = 
toMap(client().performRequest(clusterStateRequest)); - Map rollupJobTask = ObjectPath.eval("metadata.persistent_tasks.tasks.0", clusterStateResponse); - assertThat(ObjectPath.eval("id", rollupJobTask), equalTo("rollup-job-test")); + List> rollupJobTasks = ObjectPath.eval("metadata.persistent_tasks.tasks", clusterStateResponse); + + boolean hasRollupTask = false; + for (Map task : rollupJobTasks) { + if (ObjectPath.eval("id", task).equals(rollupJob)) { + hasRollupTask = true; + + // Persistent task state field has been renamed in 6.4.0 from "status" to "state" + final String stateFieldName + = (runningAgainstOldCluster && oldClusterVersion.before(Version.V_6_4_0)) ? "status" : "state"; - // Persistent task state field has been renamed in 6.4.0 from "status" to "state" - final String stateFieldName = (runningAgainstOldCluster && oldClusterVersion.before(Version.V_6_4_0)) ? "status" : "state"; + final String jobStateField = "task.xpack/rollup/job." + stateFieldName + ".job_state"; + assertThat("Expected field [" + jobStateField + "] to be started or indexing in " + task.get("id"), + ObjectPath.eval(jobStateField, task), expectedStates); + break; + } + } + if (hasRollupTask == false) { + fail("Expected persistent task for [" + rollupJob + "] but none found."); + } - final String jobStateField = "task.xpack/rollup/job." + stateFieldName + ".job_state"; - assertThat("Expected field [" + jobStateField + "] to be started or indexing in " + rollupJobTask, - ObjectPath.eval(jobStateField, rollupJobTask), expectedStates); } private void waitForRollUpJob(final String rollupJob, final Matcher expectedStates) throws Exception { @@ -563,7 +579,34 @@ private void waitForRollUpJob(final String rollupJob, final Matcher expectedS final Request getRollupJobRequest = new Request("GET", "_xpack/rollup/job/" + rollupJob); Response getRollupJobResponse = client().performRequest(getRollupJobRequest); assertThat(getRollupJobResponse.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus())); - assertThat(ObjectPath.eval("jobs.0.status.job_state", toMap(getRollupJobResponse)), expectedStates); + + Map job = getJob(getRollupJobResponse, rollupJob); + if (job != null) { + assertThat(ObjectPath.eval("status.job_state", job), expectedStates); + } }, 30L, TimeUnit.SECONDS); } + + private Map getJob(Response response, String targetJobId) throws IOException { + return getJob(ESRestTestCase.entityAsMap(response), targetJobId); + } + + @SuppressWarnings("unchecked") + private Map getJob(Map jobsMap, String targetJobId) throws IOException { + + List> jobs = + (List>) XContentMapValues.extractValue("jobs", jobsMap); + + if (jobs == null) { + return null; + } + + for (Map job : jobs) { + String jobId = (String) ((Map) job.get("config")).get("id"); + if (jobId.equals(targetJobId)) { + return job; + } + } + return null; + } } From 35a6774b5d6c5a667b77b25ab9d4e296b1b12a66 Mon Sep 17 00:00:00 2001 From: Michael Basnight Date: Fri, 13 Jul 2018 09:33:29 -0500 Subject: [PATCH 019/107] Test: Fix a second case of bad watch creation There was still a case with a null text that allowed for 0 attachments to be created. This commit ensures that greater than zero are created if the text is null. Otherwise, it uses the same logic to create 0 to 3 random attachments. 
Closes #31948 --- .../notification/slack/message/SlackMessageTests.java | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/slack/message/SlackMessageTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/slack/message/SlackMessageTests.java index 14d732064e5e0..10544e464ace5 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/slack/message/SlackMessageTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/slack/message/SlackMessageTests.java @@ -461,7 +461,6 @@ public void testTemplateParseSelfGenerated() throws Exception { assertThat(parsed, equalTo(template)); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/31948") public void testTemplateRender() throws Exception { Settings settings = SlackMessageDefaultsTests.randomSettings(); SlackMessageDefaults defaults = new SlackMessageDefaults(settings); @@ -483,7 +482,9 @@ public void testTemplateRender() throws Exception { templateBuilder.setText(randomAlphaOfLength(10)); } if (templateBuilder.text == null || randomBoolean()) { - int count = randomIntBetween(0, 3); + // ensure at least one attachment in the event the text is null + int minimumAttachments = templateBuilder.text == null ? 1 : 0; + int count = randomIntBetween(minimumAttachments, 3); for (int i = 0; i < count; i++) { Attachment.Template.Builder attachmentBuilder = createRandomAttachmentTemplateBuilder(); templateBuilder.addAttachments(attachmentBuilder); From 9203e50960a13c16abae053b5a57196cf5c1b57f Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Fri, 13 Jul 2018 15:49:26 +0100 Subject: [PATCH 020/107] Remove deprecated AnalysisPlugin#requriesAnalysisSettings method (#32037) --- .../java/org/elasticsearch/plugins/AnalysisPlugin.java | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/plugins/AnalysisPlugin.java b/server/src/main/java/org/elasticsearch/plugins/AnalysisPlugin.java index 27b5667d3bb20..c85981f8dcb91 100644 --- a/server/src/main/java/org/elasticsearch/plugins/AnalysisPlugin.java +++ b/server/src/main/java/org/elasticsearch/plugins/AnalysisPlugin.java @@ -128,16 +128,6 @@ default Map getHunspellD return emptyMap(); } - /** - * Mark an {@link AnalysisProvider} as requiring the index's settings. - * - * @deprecated use {@link #requiresAnalysisSettings(AnalysisProvider)} - */ - @Deprecated - static AnalysisProvider requriesAnalysisSettings(AnalysisProvider provider) { - return requiresAnalysisSettings(provider); - } - /** * Mark an {@link AnalysisProvider} as requiring the index's settings. 
*/ From 849e6909b260e27f2d8d48446f7fd3847d9a5afa Mon Sep 17 00:00:00 2001 From: Mayya Sharipova Date: Fri, 13 Jul 2018 11:40:03 -0400 Subject: [PATCH 021/107] Add second level of field collapsing (#31808) * Put second level collapse under inner_hits Closes #24855 --- .../search/request/collapse.asciidoc | 102 +++++++++++++ .../search/115_multiple_field_collapsing.yml | 141 ++++++++++++++++++ .../action/search/ExpandSearchPhase.java | 8 +- .../index/query/InnerHitBuilder.java | 49 +++++- 4 files changed, 296 insertions(+), 4 deletions(-) create mode 100644 rest-api-spec/src/main/resources/rest-api-spec/test/search/115_multiple_field_collapsing.yml diff --git a/docs/reference/search/request/collapse.asciidoc b/docs/reference/search/request/collapse.asciidoc index 97d8532933027..192495e5d6d0d 100644 --- a/docs/reference/search/request/collapse.asciidoc +++ b/docs/reference/search/request/collapse.asciidoc @@ -116,3 +116,105 @@ The default is based on the number of data nodes and the default search thread p WARNING: `collapse` cannot be used in conjunction with <>, <> or <>. + +==== Second level of collapsing + +A second level of collapsing is also supported; it is applied to `inner_hits`. +For example, the following request finds the top-scored tweets for +each country, and within each country finds the top-scored tweets +for each user. + +[source,js] +-------------------------------------------------- +GET /twitter/_search +{ + "query": { + "match": { + "message": "elasticsearch" + } + }, + "collapse" : { + "field" : "country", + "inner_hits" : { + "name": "by_location", + "collapse" : {"field" : "user"}, + "size": 3 + } + } +} +-------------------------------------------------- +// NOTCONSOLE + + +Response: +[source,js] +-------------------------------------------------- +{ + ... + "hits": [ + { + "_index": "twitter", + "_type": "_doc", + "_id": "9", + "_score": ..., + "_source": {...}, + "fields": {"country": ["UK"]}, + "inner_hits":{ + "by_location": { + "hits": { + ..., + "hits": [ + { + ... + "fields": {"user" : ["user124"]} + }, + { + ... + "fields": {"user" : ["user589"]} + }, + { + ... + "fields": {"user" : ["user001"]} + } + ] + } + } + } + }, + { + "_index": "twitter", + "_type": "_doc", + "_id": "1", + "_score": .., + "_source": {...}, + "fields": {"country": ["Canada"]}, + "inner_hits":{ + "by_location": { + "hits": { + ..., + "hits": [ + { + ... + "fields": {"user" : ["user444"]} + }, + { + ... + "fields": {"user" : ["user1111"]} + }, + { + ... + "fields": {"user" : ["user999"]} + } + ] + } + } + } + + }, + .... + ] +} +-------------------------------------------------- +// NOTCONSOLE + +NOTE: The second level of collapsing doesn't allow `inner_hits`.
\ No newline at end of file diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/115_multiple_field_collapsing.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/115_multiple_field_collapsing.yml new file mode 100644 index 0000000000000..212ce6785a1ba --- /dev/null +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/115_multiple_field_collapsing.yml @@ -0,0 +1,141 @@ +--- +"two levels fields collapsing": + - skip: + version: " - 6.99.99" + reason: using multiple field collapsing from 7.0 on + - do: + indices.create: + index: addresses + body: + settings: + number_of_shards: 1 + number_of_replicas: 1 + mappings: + _doc: + properties: + country: {"type": "keyword"} + city: {"type": "keyword"} + address: {"type": "text"} + + - do: + bulk: + refresh: true + body: + - '{ "index" : { "_index" : "addresses", "_type" : "_doc", "_id" : "1" } }' + - '{"country" : "Canada", "city" : "Saskatoon", "address" : "701 Victoria Avenue" }' + - '{ "index" : { "_index" : "addresses", "_type" : "_doc", "_id" : "2" } }' + - '{"country" : "Canada", "city" : "Toronto", "address" : "74 Victoria Street, Suite, 74 Victoria Street, Suite 300" }' + - '{ "index" : { "_index" : "addresses", "_type" : "_doc", "_id" : "3" } }' + - '{"country" : "Canada", "city" : "Toronto", "address" : "350 Victoria St" }' + - '{ "index" : { "_index" : "addresses", "_type" : "_doc", "_id" : "4" } }' + - '{"country" : "Canada", "city" : "Toronto", "address" : "20 Victoria Street" }' + - '{ "index" : { "_index" : "addresses", "_type" : "_doc", "_id" : "5" } }' + - '{"country" : "UK", "city" : "London", "address" : "58 Victoria Street" }' + - '{ "index" : { "_index" : "addresses", "_type" : "_doc", "_id" : "6" } }' + - '{"country" : "UK", "city" : "London", "address" : "Victoria Street Victoria Palace Theatre" }' + - '{ "index" : { "_index" : "addresses", "_type" : "_doc", "_id" : "7" } }' + - '{"country" : "UK", "city" : "Manchester", "address" : "75 Victoria street Westminster" }' + - '{ "index" : { "_index" : "addresses", "_type" : "_doc", "_id" : "8" } }' + - '{"country" : "UK", "city" : "London", "address" : "Victoria Station Victoria Arcade" }' + + + # ************* error if internal collapse contains inner_hits + - do: + catch: /parse_exception/ + search: + index: addresses + body: + query: { "match" : { "address" : "victoria" }} + collapse: + field: country + inner_hits: + collapse: + field : city + inner_hits: {} + + + # ************* error if internal collapse contains another collapse + - do: + catch: /parse_exception/ + search: + index: addresses + body: + query: { "match" : { "address" : "victoria" }} + collapse: + field: country + inner_hits: + collapse: + field : city + collapse: { field: city } + + + + # ************* top scored + - do: + search: + index: addresses + body: + query: { "match" : { "address" : "victoria" }} + collapse: + field: country + inner_hits: + name: by_location + size: 3 + collapse: + field : city + + - match: { hits.total: 8 } + - length: { hits.hits: 2 } + - match: { hits.hits.0.fields.country: ["UK"] } + - match: { hits.hits.0.inner_hits.by_location.hits.total: 4 } + # 2 inner hits returned instead of requested 3 as they are collapsed by city + - length: { hits.hits.0.inner_hits.by_location.hits.hits : 2} + - match: { hits.hits.0.inner_hits.by_location.hits.hits.0._id: "8" } + - match: { hits.hits.0.inner_hits.by_location.hits.hits.0.fields.city: ["London"] } + - match: { hits.hits.0.inner_hits.by_location.hits.hits.1._id: "7" } + - match: { 
hits.hits.0.inner_hits.by_location.hits.hits.1.fields.city: ["Manchester"] } + + - match: { hits.hits.1.fields.country: ["Canada"] } + - match: { hits.hits.1.inner_hits.by_location.hits.total: 4 } + # 2 inner hits returned instead of requested 3 as they are collapsed by city + - length: { hits.hits.1.inner_hits.by_location.hits.hits : 2 } + - match: { hits.hits.1.inner_hits.by_location.hits.hits.0._id: "1" } + - match: { hits.hits.1.inner_hits.by_location.hits.hits.0.fields.city: ["Saskatoon"] } + - match: { hits.hits.1.inner_hits.by_location.hits.hits.1._id: "3" } + - match: { hits.hits.1.inner_hits.by_location.hits.hits.1.fields.city: ["Toronto"] } + + + # ************* sorted + - do: + search: + index: addresses + body: + query: { "match" : { "address" : "victoria" }} + collapse: + field: country + inner_hits: + name: by_location + size: 3 + sort: [{ "city": "desc" }] + collapse: + field : city + + - match: { hits.total: 8 } + - length: { hits.hits: 2 } + - match: { hits.hits.0.fields.country: ["UK"] } + - match: { hits.hits.0.inner_hits.by_location.hits.total: 4 } + # 2 inner hits returned instead of requested 3 as they are collapsed by city + - length: { hits.hits.0.inner_hits.by_location.hits.hits : 2} + - match: { hits.hits.0.inner_hits.by_location.hits.hits.0._id: "7" } + - match: { hits.hits.0.inner_hits.by_location.hits.hits.0.fields.city: ["Manchester"] } + - match: { hits.hits.0.inner_hits.by_location.hits.hits.1._id: "5" } + - match: { hits.hits.0.inner_hits.by_location.hits.hits.1.fields.city: ["London"] } + + - match: { hits.hits.1.fields.country: ["Canada"] } + - match: { hits.hits.1.inner_hits.by_location.hits.total: 4 } + # 2 inner hits returned instead of requested 3 as they are collapsed by city + - length: { hits.hits.1.inner_hits.by_location.hits.hits : 2 } + - match: { hits.hits.1.inner_hits.by_location.hits.hits.0._id: "2" } + - match: { hits.hits.1.inner_hits.by_location.hits.hits.0.fields.city: ["Toronto"] } + - match: { hits.hits.1.inner_hits.by_location.hits.hits.1._id: "1" } + - match: { hits.hits.1.inner_hits.by_location.hits.hits.1.fields.city: ["Saskatoon"] } diff --git a/server/src/main/java/org/elasticsearch/action/search/ExpandSearchPhase.java b/server/src/main/java/org/elasticsearch/action/search/ExpandSearchPhase.java index a6a99137dc945..917ff06c5737c 100644 --- a/server/src/main/java/org/elasticsearch/action/search/ExpandSearchPhase.java +++ b/server/src/main/java/org/elasticsearch/action/search/ExpandSearchPhase.java @@ -87,7 +87,8 @@ public void run() throws IOException { groupQuery.must(origQuery); } for (InnerHitBuilder innerHitBuilder : innerHitBuilders) { - SearchSourceBuilder sourceBuilder = buildExpandSearchSourceBuilder(innerHitBuilder) + CollapseBuilder innerCollapseBuilder = innerHitBuilder.getInnerCollapseBuilder(); + SearchSourceBuilder sourceBuilder = buildExpandSearchSourceBuilder(innerHitBuilder, innerCollapseBuilder) .query(groupQuery) .postFilter(searchRequest.source().postFilter()); SearchRequest groupRequest = buildExpandSearchRequest(searchRequest, sourceBuilder); @@ -135,7 +136,7 @@ private SearchRequest buildExpandSearchRequest(SearchRequest orig, SearchSourceB return groupRequest; } - private SearchSourceBuilder buildExpandSearchSourceBuilder(InnerHitBuilder options) { + private SearchSourceBuilder buildExpandSearchSourceBuilder(InnerHitBuilder options, CollapseBuilder innerCollapseBuilder) { SearchSourceBuilder groupSource = new SearchSourceBuilder(); groupSource.from(options.getFrom()); groupSource.size(options.getSize()); @@ 
-167,6 +168,9 @@ private SearchSourceBuilder buildExpandSearchSourceBuilder(InnerHitBuilder optio groupSource.explain(options.isExplain()); groupSource.trackScores(options.isTrackScores()); groupSource.version(options.isVersion()); + if (innerCollapseBuilder != null) { + groupSource.collapse(innerCollapseBuilder); + } return groupSource; } } diff --git a/server/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java b/server/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java index 92da1bc3b65f9..6bdc55d31cdc9 100644 --- a/server/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java @@ -37,6 +37,7 @@ import org.elasticsearch.search.fetch.subphase.FetchSourceContext; import org.elasticsearch.search.fetch.subphase.highlight.HighlightBuilder; import org.elasticsearch.search.sort.SortBuilder; +import org.elasticsearch.search.collapse.CollapseBuilder; import java.io.IOException; import java.util.ArrayList; @@ -55,6 +56,8 @@ public final class InnerHitBuilder implements Writeable, ToXContentObject { public static final ParseField NAME_FIELD = new ParseField("name"); public static final ParseField IGNORE_UNMAPPED = new ParseField("ignore_unmapped"); public static final QueryBuilder DEFAULT_INNER_HIT_QUERY = new MatchAllQueryBuilder(); + public static final ParseField COLLAPSE_FIELD = new ParseField("collapse"); + public static final ParseField FIELD_FIELD = new ParseField("field"); private static final ObjectParser PARSER = new ObjectParser<>("inner_hits", InnerHitBuilder::new); @@ -91,6 +94,28 @@ public final class InnerHitBuilder implements Writeable, ToXContentObject { }, SearchSourceBuilder._SOURCE_FIELD, ObjectParser.ValueType.OBJECT_ARRAY_BOOLEAN_OR_STRING); PARSER.declareObject(InnerHitBuilder::setHighlightBuilder, (p, c) -> HighlightBuilder.fromXContent(p), SearchSourceBuilder.HIGHLIGHT_FIELD); + PARSER.declareField((parser, builder, context) -> { + Boolean isParsedCorrectly = false; + String field; + if (parser.currentToken() == XContentParser.Token.START_OBJECT) { + if (parser.nextToken() == XContentParser.Token.FIELD_NAME) { + if (FIELD_FIELD.match(parser.currentName(), parser.getDeprecationHandler())) { + if (parser.nextToken() == XContentParser.Token.VALUE_STRING){ + field = parser.text(); + if (parser.nextToken() == XContentParser.Token.END_OBJECT){ + isParsedCorrectly = true; + CollapseBuilder cb = new CollapseBuilder(field); + builder.setInnerCollapse(cb); + } + } + } + } + } + if (isParsedCorrectly == false) { + throw new ParsingException(parser.getTokenLocation(), "Invalid token in the inner collapse"); + } + + }, COLLAPSE_FIELD, ObjectParser.ValueType.OBJECT); } private String name; @@ -109,6 +134,7 @@ public final class InnerHitBuilder implements Writeable, ToXContentObject { private Set scriptFields; private HighlightBuilder highlightBuilder; private FetchSourceContext fetchSourceContext; + private CollapseBuilder innerCollapseBuilder = null; public InnerHitBuilder() { this.name = null; @@ -173,6 +199,9 @@ public InnerHitBuilder(StreamInput in) throws IOException { boolean hasChildren = in.readBoolean(); assert hasChildren == false; } + if (in.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + this.innerCollapseBuilder = in.readOptionalWriteable(CollapseBuilder::new); + } } @Override @@ -218,6 +247,9 @@ public void writeTo(StreamOutput out) throws IOException { } } out.writeOptionalWriteable(highlightBuilder); + if 
(out.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { + out.writeOptionalWriteable(innerCollapseBuilder); + } } /** @@ -501,6 +533,15 @@ QueryBuilder getQuery() { return query; } + public InnerHitBuilder setInnerCollapse(CollapseBuilder innerCollapseBuilder) { + this.innerCollapseBuilder = innerCollapseBuilder; + return this; + } + + public CollapseBuilder getInnerCollapseBuilder() { + return innerCollapseBuilder; + } + @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { builder.startObject(); @@ -550,6 +591,9 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws if (highlightBuilder != null) { builder.field(SearchSourceBuilder.HIGHLIGHT_FIELD.getPreferredName(), highlightBuilder, params); } + if (innerCollapseBuilder != null) { + builder.field(COLLAPSE_FIELD.getPreferredName(), innerCollapseBuilder); + } builder.endObject(); return builder; } @@ -572,13 +616,14 @@ public boolean equals(Object o) { Objects.equals(scriptFields, that.scriptFields) && Objects.equals(fetchSourceContext, that.fetchSourceContext) && Objects.equals(sorts, that.sorts) && - Objects.equals(highlightBuilder, that.highlightBuilder); + Objects.equals(highlightBuilder, that.highlightBuilder) && + Objects.equals(innerCollapseBuilder, that.innerCollapseBuilder); } @Override public int hashCode() { return Objects.hash(name, ignoreUnmapped, from, size, explain, version, trackScores, - storedFieldsContext, docValueFields, scriptFields, fetchSourceContext, sorts, highlightBuilder); + storedFieldsContext, docValueFields, scriptFields, fetchSourceContext, sorts, highlightBuilder, innerCollapseBuilder); } public static InnerHitBuilder fromXContent(XContentParser parser) throws IOException { From de213a890d3bb4ed0fea361ed4f451501cb83559 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Fri, 13 Jul 2018 17:41:28 +0200 Subject: [PATCH 022/107] Mute ML AutodetectMemoryLimitIT#testTooManyPartitions on Windows (#32044) Adding assumption to not run this test on Windows temporarily. 
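For context, the muting pattern used here is just a runtime JUnit assumption keyed off Lucene's OS constants; a minimal hypothetical illustration follows (the class and method names are invented, only `Constants.WINDOWS` and `assumeFalse` come from the change itself):

import org.apache.lucene.util.Constants;
import org.elasticsearch.test.ESTestCase;

// Hypothetical test showing the OS-based mute: on Windows the test is skipped, not failed
public class WindowsMuteSketchTests extends ESTestCase {

    public void testFlakyOnWindows() {
        assumeFalse("muted on Windows until the linked issue is fixed", Constants.WINDOWS);
        // ... the actual assertions would follow here
    }
}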
Relates to #32033 --- .../xpack/ml/integration/AutodetectMemoryLimitIT.java | 2 ++ 1 file changed, 2 insertions(+) diff --git a/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectMemoryLimitIT.java b/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectMemoryLimitIT.java index 2a332fb05d555..03860ea9ae044 100644 --- a/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectMemoryLimitIT.java +++ b/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/AutodetectMemoryLimitIT.java @@ -5,6 +5,7 @@ */ package org.elasticsearch.xpack.ml.integration; +import org.apache.lucene.util.Constants; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.xpack.core.ml.action.GetJobsStatsAction; import org.elasticsearch.xpack.core.ml.job.config.AnalysisConfig; @@ -38,6 +39,7 @@ public void cleanUpTest() { } public void testTooManyPartitions() throws Exception { + assumeFalse("AwaitsFix(bugUrl = \"https://github.com/elastic/elasticsearch/issues/32033\")", Constants.WINDOWS); Detector.Builder detector = new Detector.Builder("count", null); detector.setPartitionFieldName("user"); From 3bbc8c621b0fcc54e8f4d1ed383916342dddce3a Mon Sep 17 00:00:00 2001 From: Michael Basnight Date: Fri, 13 Jul 2018 11:12:03 -0500 Subject: [PATCH 023/107] Watcher: cleanup ensureWatchExists use (#31926) Previously, the ensureWatchExists was overridable. This commit makes it final so that it cannot be overridden, and cleans up some redundant code in the process. --- .../execution/WatchExecutionContext.java | 2 +- .../execution/ManualExecutionContext.java | 21 +++++-------------- 2 files changed, 6 insertions(+), 17 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/execution/WatchExecutionContext.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/execution/WatchExecutionContext.java index 4cdd4bb0e3575..62216ff681e82 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/execution/WatchExecutionContext.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/execution/WatchExecutionContext.java @@ -82,7 +82,7 @@ public Watch watch() { return watch; } - public void ensureWatchExists(CheckedSupplier supplier) throws Exception { + public final void ensureWatchExists(CheckedSupplier supplier) throws Exception { if (watch == null) { watch = supplier.get(); } diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/ManualExecutionContext.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/ManualExecutionContext.java index c161b24e85619..abf1e5aec0da4 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/ManualExecutionContext.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/execution/ManualExecutionContext.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.xpack.watcher.execution; -import org.elasticsearch.common.CheckedSupplier; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.xpack.core.watcher.actions.Action; import org.elasticsearch.xpack.core.watcher.actions.ActionWrapper; @@ -29,18 +28,19 @@ public class ManualExecutionContext extends WatchExecutionContext { private final Map actionModes; private final boolean recordExecution; private final boolean knownWatch; - 
private final Watch watch; ManualExecutionContext(Watch watch, boolean knownWatch, DateTime executionTime, ManualTriggerEvent triggerEvent, TimeValue defaultThrottlePeriod, Input.Result inputResult, Condition.Result conditionResult, - Map actionModes, boolean recordExecution) { + Map actionModes, boolean recordExecution) throws Exception { super(watch.id(), executionTime, triggerEvent, defaultThrottlePeriod); this.actionModes = actionModes; this.recordExecution = recordExecution; this.knownWatch = knownWatch; - this.watch = watch; + + // set the watch early to ensure calls to watch() below succeed. + super.ensureWatchExists(() -> watch); if (inputResult != null) { onInputResult(inputResult); @@ -66,12 +66,6 @@ public class ManualExecutionContext extends WatchExecutionContext { } } - // a noop operation, as the watch is already loaded via ctor - @Override - public void ensureWatchExists(CheckedSupplier supplier) throws Exception { - super.ensureWatchExists(() -> watch); - } - @Override public boolean knownWatch() { return knownWatch; @@ -107,11 +101,6 @@ public final boolean recordExecution() { return recordExecution; } - @Override - public Watch watch() { - return watch; - } - public static Builder builder(Watch watch, boolean knownWatch, ManualTriggerEvent event, TimeValue defaultThrottlePeriod) { return new Builder(watch, knownWatch, event, defaultThrottlePeriod); } @@ -173,7 +162,7 @@ public Builder withCondition(Condition.Result conditionResult) { return this; } - public ManualExecutionContext build() { + public ManualExecutionContext build() throws Exception { if (executionTime == null) { executionTime = DateTime.now(DateTimeZone.UTC); } From 4440df56d04385c48201c58c72cec8844d328135 Mon Sep 17 00:00:00 2001 From: Michael Basnight Date: Fri, 13 Jul 2018 11:13:10 -0500 Subject: [PATCH 024/107] Add secure setting for watcher email password (#31620) Other watcher actions already account for secure settings in their sensitive settings, whereas the email sending action did not. This adds the ability to optionally set a secure_password for email accounts. 
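For readers unfamiliar with the pattern, the fallback behaviour this change introduces can be sketched roughly as follows. The standalone class and method names are invented for illustration; only the `SecureSetting`/`SecureString` APIs mirror the diff below:

import org.elasticsearch.common.settings.SecureSetting;
import org.elasticsearch.common.settings.SecureString;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;

// Hypothetical sketch of the "plain-text setting first, keystore fallback" resolution
public class SecurePasswordFallbackSketch {

    private static final Setting<SecureString> SECURE_PASSWORD =
        SecureSetting.secureString("secure_password", null);

    static SecureString resolvePassword(Settings settings) {
        String insecure = settings.get("password");
        if (insecure != null) {
            // wrap the legacy plain-text value; the underlying chars remain insecure
            return new SecureString(insecure.toCharArray());
        }
        SecureString secure = SECURE_PASSWORD.get(settings);
        return secure != null && secure.length() > 0 ? secure : null;
    }
}

The point of the fallback is that existing plain-text `smtp.password` configurations keep working while users migrate to the keystore-backed `smtp.secure_password`.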
--- .../watcher/notification/email/Account.java | 40 ++++++++++++++++--- .../notification/email/EmailService.java | 10 ++++- .../notification/email/AccountTests.java | 30 +++++++++++++- 3 files changed, 72 insertions(+), 8 deletions(-) diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/Account.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/Account.java index 8ba8d030524e1..02c0e1167dd95 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/Account.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/Account.java @@ -7,6 +7,9 @@ import org.apache.logging.log4j.Logger; import org.elasticsearch.SpecialPermission; +import org.elasticsearch.common.settings.SecureSetting; +import org.elasticsearch.common.settings.SecureString; +import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.settings.SettingsException; import org.elasticsearch.common.unit.TimeValue; @@ -24,10 +27,13 @@ import java.security.PrivilegedActionException; import java.security.PrivilegedExceptionAction; import java.util.Properties; +import java.util.Set; public class Account { static final String SMTP_PROTOCOL = "smtp"; + private static final String SMTP_PASSWORD = "password"; + private static final Setting<SecureString> SECURE_PASSWORD_SETTING = SecureSetting.secureString("secure_" + SMTP_PASSWORD, null); static { SecurityManager sm = System.getSecurityManager(); @@ -101,7 +107,7 @@ public Email send(Email email, Authentication auth, Profile profile) throws Mess if (auth != null && auth.password() != null) { password = new String(auth.password().text(cryptoService)); } else if (config.smtp.password != null) { - password = new String(config.smtp.password); + password = new String(config.smtp.password.getChars()); } if (profile == null) { @@ -199,18 +205,40 @@ static class Smtp { final String host; final int port; final String user; - final char[] password; + final SecureString password; final Properties properties; Smtp(Settings settings) { host = settings.get("host", settings.get("localaddress", settings.get("local_address"))); + port = settings.getAsInt("port", settings.getAsInt("localport", settings.getAsInt("local_port", 25))); user = settings.get("user", settings.get("from", null)); - String passStr = settings.get("password", null); - password = passStr != null ? passStr.toCharArray() : null; + password = getSecureSetting(SMTP_PASSWORD, settings, SECURE_PASSWORD_SETTING); + properties = loadSmtpProperties(settings); } + /** + * Finds a setting, and then a secure setting if the setting is null, or returns null if one does not exist. This differs + * from other getSetting calls in that it allows for null whereas the other methods throw an exception. + * + * Note: if your setting was not previously secure, then the string reference that is in the setting object is still + * insecure. This is only constructing a new SecureString with the char[] of the insecure setting.
+ */ + private static SecureString getSecureSetting(String settingName, Settings settings, Setting<SecureString> secureSetting) { + String value = settings.get(settingName); + if (value == null) { + SecureString secureString = secureSetting.get(settings); + if (secureString != null && secureString.length() > 0) { + return secureString; + } else { + return null; + } + } else { + return new SecureString(value.toCharArray()); + } + } + /** * loads the standard Java Mail properties as settings from the given account settings. * The standard settings are not that readable, therefore we enabled the user to configure @@ -231,7 +259,9 @@ static Properties loadSmtpProperties(Settings settings) { settings = builder.build(); Properties props = new Properties(); - for (String key : settings.keySet()) { + // Secure strings cannot be retrieved from a settings object and should be handled differently + Set<String> insecureSettings = settings.filter(s -> s.startsWith("secure_") == false).keySet(); + for (String key : insecureSettings) { props.setProperty(SMTP_SETTINGS_PREFIX + key, settings.get(key)); } return props; diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/EmailService.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/EmailService.java index 3d2ea583eddf2..15859a5e044c5 100644 --- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/EmailService.java +++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/EmailService.java @@ -7,6 +7,8 @@ import org.elasticsearch.common.Nullable; import org.elasticsearch.common.settings.ClusterSettings; +import org.elasticsearch.common.settings.SecureSetting; +import org.elasticsearch.common.settings.SecureString; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.settings.Setting.Property; import org.elasticsearch.common.settings.Settings; @@ -63,6 +65,10 @@ public class EmailService extends NotificationService { Setting.affixKeySetting("xpack.notification.email.account.", "smtp.password", (key) -> Setting.simpleString(key, Property.Dynamic, Property.NodeScope, Property.Filtered)); + private static final Setting.AffixSetting<SecureString> SETTING_SECURE_PASSWORD = + Setting.affixKeySetting("xpack.notification.email.account.", "smtp.secure_password", + (key) -> SecureSetting.secureString(key, null)); + private static final Setting.AffixSetting<TimeValue> SETTING_SMTP_TIMEOUT = Setting.affixKeySetting("xpack.notification.email.account.", "smtp.timeout", (key) -> Setting.timeSetting(key, TimeValue.timeValueMinutes(2), Property.Dynamic, Property.NodeScope)); @@ -111,6 +117,7 @@ public EmailService(Settings settings, @Nullable CryptoService cryptoService, Cl clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_PORT, (s, o) -> {}, (s, o) -> {}); clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_USER, (s, o) -> {}, (s, o) -> {}); clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_PASSWORD, (s, o) -> {}, (s, o) -> {}); + clusterSettings.addAffixUpdateConsumer(SETTING_SECURE_PASSWORD, (s, o) -> {}, (s, o) -> {}); clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_TIMEOUT, (s, o) -> {}, (s, o) -> {}); clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_CONNECTION_TIMEOUT, (s, o) -> {}, (s, o) -> {}); clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_WRITE_TIMEOUT, (s, o) -> {}, (s, o) -> {}); @@ -172,7 +179,8 @@ public static List<Setting<?>> getSettings() { return Arrays.asList(SETTING_DEFAULT_ACCOUNT, SETTING_PROFILE,
SETTING_EMAIL_DEFAULTS, SETTING_SMTP_AUTH, SETTING_SMTP_HOST, SETTING_SMTP_PASSWORD, SETTING_SMTP_PORT, SETTING_SMTP_STARTTLS_ENABLE, SETTING_SMTP_USER, SETTING_SMTP_STARTTLS_REQUIRED, SETTING_SMTP_TIMEOUT, SETTING_SMTP_CONNECTION_TIMEOUT, SETTING_SMTP_WRITE_TIMEOUT, SETTING_SMTP_LOCAL_ADDRESS, - SETTING_SMTP_LOCAL_PORT, SETTING_SMTP_SEND_PARTIAL, SETTING_SMTP_WAIT_ON_QUIT, SETTING_SMTP_SSL_TRUST_ADDRESS); + SETTING_SMTP_LOCAL_PORT, SETTING_SMTP_SEND_PARTIAL, SETTING_SMTP_WAIT_ON_QUIT, SETTING_SMTP_SSL_TRUST_ADDRESS, + SETTING_SECURE_PASSWORD); } } diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/email/AccountTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/email/AccountTests.java index 8e83d30ffa5b6..1cbaecef8fec5 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/email/AccountTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/email/AccountTests.java @@ -5,6 +5,7 @@ */ package org.elasticsearch.xpack.watcher.notification.email; +import org.elasticsearch.common.settings.MockSecureSettings; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.TimeValue; import org.elasticsearch.test.ESTestCase; @@ -16,7 +17,6 @@ import javax.mail.Address; import javax.mail.Message; import javax.mail.internet.InternetAddress; - import java.util.Properties; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; @@ -149,7 +149,7 @@ public void testConfig() throws Exception { assertThat(config.smtp.host, is(host)); assertThat(config.smtp.user, is(user)); if (password != null) { - assertThat(config.smtp.password, is(password.toCharArray())); + assertThat(config.smtp.password.getChars(), is(password.toCharArray())); } else { assertThat(config.smtp.password, nullValue()); } @@ -292,4 +292,30 @@ public void testAccountTimeoutsConfiguredAsNumberAreRejected() { .build()), null, logger); }); } + + public void testEnsurePasswordSetAsSecureSetting() { + String password = "password"; + MockSecureSettings secureSettings = new MockSecureSettings(); + secureSettings.setString("smtp.secure_password", password); + + Settings settings = Settings.builder() + .put("smtp.host", "localhost") + .put("smtp.port", server.port()) + .put("smtp.connection_timeout", TimeValue.timeValueMinutes(4)) + .setSecureSettings(secureSettings) + .build(); + + Account.Config config = new Account.Config("default", settings); + assertThat(config.smtp.password.getChars(), equalTo(password.toCharArray())); + + settings = Settings.builder() + .put("smtp.host", "localhost") + .put("smtp.port", server.port()) + .put("smtp.connection_timeout", TimeValue.timeValueMinutes(4)) + .put("smtp.password", password) + .build(); + + config = new Account.Config("default", settings); + assertThat(config.smtp.password.getChars(), equalTo(password.toCharArray())); + } } From 2ab7db30ae9e0d177584e399ade7ee29dc36bfad Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Fri, 13 Jul 2018 09:33:27 -0700 Subject: [PATCH 025/107] HLRC: Add xpack usage api (#31975) This commit adds the _xpack/usage api to the high level rest client. Currently in the transport api, the usage data is exposed in a limited fashion, at most giving one level of helper methods for the inner keys of data, but then exposing those subobjects as maps of objects.
Rather than making parsers for every set of usage data from each feature, this PR exposes the entire set of usage data as a map of maps. --- .../client/RequestConverters.java | 8 +++ .../org/elasticsearch/client/XPackClient.java | 23 ++++++++ .../MiscellaneousDocumentationIT.java | 49 ++++++++++++++++ .../miscellaneous/x-pack-usage.asciidoc | 54 ++++++++++++++++++ .../action/TransportXPackUsageAction.java | 1 + .../xpack/core/action/XPackUsageRequest.java | 18 ------ .../core/action/XPackUsageRequestBuilder.java | 1 + .../cluster/ClusterStatsCollectorTests.java | 2 +- .../watcher/WatcherXpackUsageStatsTests.java | 2 +- .../protocol/xpack/XPackUsageRequest.java | 31 ++++++++++ .../protocol/xpack/XPackUsageResponse.java | 57 +++++++++++++++++++ 11 files changed, 226 insertions(+), 20 deletions(-) create mode 100644 docs/java-rest/high-level/miscellaneous/x-pack-usage.asciidoc delete mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageRequest.java create mode 100644 x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/XPackUsageRequest.java create mode 100644 x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/XPackUsageResponse.java diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java index 126a9c7d4b4ec..9dbd4916c774b 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RequestConverters.java @@ -106,6 +106,7 @@ import org.elasticsearch.index.VersionType; import org.elasticsearch.index.rankeval.RankEvalRequest; import org.elasticsearch.protocol.xpack.XPackInfoRequest; +import org.elasticsearch.protocol.xpack.XPackUsageRequest; import org.elasticsearch.rest.action.search.RestSearchAction; import org.elasticsearch.script.mustache.MultiSearchTemplateRequest; import org.elasticsearch.script.mustache.SearchTemplateRequest; @@ -1096,6 +1097,13 @@ static Request xPackInfo(XPackInfoRequest infoRequest) { return request; } + static Request xpackUsage(XPackUsageRequest usageRequest) { + Request request = new Request(HttpGet.METHOD_NAME, "/_xpack/usage"); + Params parameters = new Params(request); + parameters.withMasterTimeout(usageRequest.masterNodeTimeout()); + return request; + } + private static HttpEntity createEntity(ToXContent toXContent, XContentType xContentType) throws IOException { BytesRef source = XContentHelper.toXContent(toXContent, xContentType, false).toBytesRef(); return new ByteArrayEntity(source.bytes, source.offset, source.length, createContentType(xContentType)); diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/XPackClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/XPackClient.java index 5942bfa35a477..a497619b987bd 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/XPackClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/XPackClient.java @@ -22,6 +22,8 @@ import org.elasticsearch.action.ActionListener; import org.elasticsearch.protocol.xpack.XPackInfoRequest; import org.elasticsearch.protocol.xpack.XPackInfoResponse; +import org.elasticsearch.protocol.xpack.XPackUsageRequest; +import org.elasticsearch.protocol.xpack.XPackUsageResponse; import java.io.IOException; @@ -70,4 +72,25 @@ public void infoAsync(XPackInfoRequest request, RequestOptions options, 
restHighLevelClient.performRequestAsyncAndParseEntity(request, RequestConverters::xPackInfo, options, XPackInfoResponse::fromXContent, listener, emptySet()); } + + /** + * Fetch usage information about X-Pack features from the cluster. + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return the response + * @throws IOException in case there is a problem sending the request or parsing back the response + */ + public XPackUsageResponse usage(XPackUsageRequest request, RequestOptions options) throws IOException { + return restHighLevelClient.performRequestAndParseEntity(request, RequestConverters::xpackUsage, options, + XPackUsageResponse::fromXContent, emptySet()); + } + + /** + * Asynchronously fetch usage information about X-Pack features from the cluster. + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener the listener to be notified upon request completion + */ + public void usageAsync(XPackUsageRequest request, RequestOptions options, ActionListener listener) { + restHighLevelClient.performRequestAsyncAndParseEntity(request, RequestConverters::xpackUsage, options, + XPackUsageResponse::fromXContent, listener, emptySet()); + } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MiscellaneousDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MiscellaneousDocumentationIT.java index 75c14097c4581..a99b991620a25 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MiscellaneousDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/MiscellaneousDocumentationIT.java @@ -35,12 +35,17 @@ import org.elasticsearch.protocol.xpack.XPackInfoResponse.BuildInfo; import org.elasticsearch.protocol.xpack.XPackInfoResponse.FeatureSetsInfo; import org.elasticsearch.protocol.xpack.XPackInfoResponse.LicenseInfo; +import org.elasticsearch.protocol.xpack.XPackUsageRequest; +import org.elasticsearch.protocol.xpack.XPackUsageResponse; import java.io.IOException; import java.util.EnumSet; +import java.util.Map; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; +import static org.hamcrest.Matchers.is; + /** * Documentation for miscellaneous APIs in the high level java client. * Code wrapped in {@code tag} and {@code end} tags is included in the docs. 
@@ -129,6 +134,50 @@ public void onFailure(Exception e) { } } + public void testXPackUsage() throws Exception { + RestHighLevelClient client = highLevelClient(); + { + //tag::x-pack-usage-execute + XPackUsageRequest request = new XPackUsageRequest(); + XPackUsageResponse response = client.xpack().usage(request, RequestOptions.DEFAULT); + //end::x-pack-usage-execute + + //tag::x-pack-usage-response + Map> usages = response.getUsages(); + Map monitoringUsage = usages.get("monitoring"); + assertThat(monitoringUsage.get("available"), is(true)); + assertThat(monitoringUsage.get("enabled"), is(true)); + assertThat(monitoringUsage.get("collection_enabled"), is(false)); + //end::x-pack-usage-response + } + { + XPackUsageRequest request = new XPackUsageRequest(); + // tag::x-pack-usage-execute-listener + ActionListener listener = new ActionListener() { + @Override + public void onResponse(XPackUsageResponse response) { + // <1> + } + + @Override + public void onFailure(Exception e) { + // <2> + } + }; + // end::x-pack-usage-execute-listener + + // Replace the empty listener by a blocking listener in test + final CountDownLatch latch = new CountDownLatch(1); + listener = new LatchedActionListener<>(listener, latch); + + // tag::x-pack-usage-execute-async + client.xpack().usageAsync(request, RequestOptions.DEFAULT, listener); // <1> + // end::x-pack-usage-execute-async + + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + } + } + public void testInitializationFromClientBuilder() throws IOException { //tag::rest-high-level-client-init RestHighLevelClient client = new RestHighLevelClient( diff --git a/docs/java-rest/high-level/miscellaneous/x-pack-usage.asciidoc b/docs/java-rest/high-level/miscellaneous/x-pack-usage.asciidoc new file mode 100644 index 0000000000000..0927ae71c0bf5 --- /dev/null +++ b/docs/java-rest/high-level/miscellaneous/x-pack-usage.asciidoc @@ -0,0 +1,54 @@ +[[java-rest-high-x-pack-usage]] +=== X-Pack Usage API + +[[java-rest-high-x-pack-usage-execution]] +==== Execution + +Detailed information about the usage of features from {xpack} can be +retrieved using the `usage()` method: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MiscellaneousDocumentationIT.java[x-pack-usage-execute] +-------------------------------------------------- + +[[java-rest-high-x-pack-info-response]] +==== Response + +The returned `XPackUsageResponse` contains a `Map` keyed by feature name. +Every feature map has an `available` key, indicating whether that +feature is available given the current license, and an `enabled` key, +indicating whether that feature is currently enabled. Other keys +are specific to each feature. + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MiscellaneousDocumentationIT.java[x-pack-usage-response] +-------------------------------------------------- + +[[java-rest-high-x-pack-usage-async]] +==== Asynchronous Execution + +This request can be executed asynchronously: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MiscellaneousDocumentationIT.java[x-pack-usage-execute-async] +-------------------------------------------------- +<1> The call to execute the usage api and the `ActionListener` to use when +the execution completes + +The asynchronous method does not block and returns immediately. 
Once it is +completed the `ActionListener` is called back using the `onResponse` method +if the execution successfully completed or using the `onFailure` method if +it failed. + +A typical listener for `XPackUsageResponse` looks like: + +["source","java",subs="attributes,callouts,macros"] +-------------------------------------------------- +include-tagged::{doc-tests}/MiscellaneousDocumentationIT.java[x-pack-usage-execute-listener] +-------------------------------------------------- +<1> Called when the execution is successfully completed. The response is +provided as an argument +<2> Called in case of failure. The raised exception is provided as an argument diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackUsageAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackUsageAction.java index f3abad5e68bb3..6b7d5b96d2024 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackUsageAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/TransportXPackUsageAction.java @@ -14,6 +14,7 @@ import org.elasticsearch.cluster.service.ClusterService; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.protocol.xpack.XPackUsageRequest; import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import org.elasticsearch.xpack.core.XPackFeatureSet; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageRequest.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageRequest.java deleted file mode 100644 index d578249c147c3..0000000000000 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageRequest.java +++ /dev/null @@ -1,18 +0,0 @@ -/* - * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one - * or more contributor license agreements. Licensed under the Elastic License; - * you may not use this file except in compliance with the Elastic License. 
- */ -package org.elasticsearch.xpack.core.action; - -import org.elasticsearch.action.ActionRequestValidationException; -import org.elasticsearch.action.support.master.MasterNodeRequest; - -public class XPackUsageRequest extends MasterNodeRequest { - - @Override - public ActionRequestValidationException validate() { - return null; - } - -} diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageRequestBuilder.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageRequestBuilder.java index 789460f133969..92c2ba75ec170 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageRequestBuilder.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/action/XPackUsageRequestBuilder.java @@ -7,6 +7,7 @@ import org.elasticsearch.action.support.master.MasterNodeOperationRequestBuilder; import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.protocol.xpack.XPackUsageRequest; public class XPackUsageRequestBuilder extends MasterNodeOperationRequestBuilder { diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsCollectorTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsCollectorTests.java index 6784b00361bc1..49355d51495ec 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsCollectorTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/cluster/ClusterStatsCollectorTests.java @@ -26,7 +26,7 @@ import org.elasticsearch.xpack.core.XPackField; import org.elasticsearch.xpack.core.XPackSettings; import org.elasticsearch.xpack.core.action.XPackUsageAction; -import org.elasticsearch.xpack.core.action.XPackUsageRequest; +import org.elasticsearch.protocol.xpack.XPackUsageRequest; import org.elasticsearch.xpack.core.action.XPackUsageResponse; import org.elasticsearch.xpack.core.monitoring.MonitoredSystem; import org.elasticsearch.xpack.core.monitoring.MonitoringFeatureSetUsage; diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherXpackUsageStatsTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherXpackUsageStatsTests.java index 3a314640d742a..7c07c98eb4725 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherXpackUsageStatsTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherXpackUsageStatsTests.java @@ -7,7 +7,7 @@ import org.elasticsearch.xpack.core.XPackFeatureSet; import org.elasticsearch.xpack.core.action.XPackUsageAction; -import org.elasticsearch.xpack.core.action.XPackUsageRequest; +import org.elasticsearch.protocol.xpack.XPackUsageRequest; import org.elasticsearch.xpack.core.action.XPackUsageResponse; import org.elasticsearch.xpack.core.watcher.WatcherFeatureSetUsage; import org.elasticsearch.xpack.watcher.test.AbstractWatcherIntegrationTestCase; diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/XPackUsageRequest.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/XPackUsageRequest.java new file mode 100644 index 0000000000000..f5f6d9d949b7f --- /dev/null +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/XPackUsageRequest.java @@ -0,0 +1,31 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license 
agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.elasticsearch.protocol.xpack; + +import org.elasticsearch.action.ActionRequestValidationException; +import org.elasticsearch.action.support.master.MasterNodeRequest; + +public class XPackUsageRequest extends MasterNodeRequest { + + @Override + public ActionRequestValidationException validate() { + return null; + } + +} diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/XPackUsageResponse.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/XPackUsageResponse.java new file mode 100644 index 0000000000000..3459403bd6124 --- /dev/null +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/XPackUsageResponse.java @@ -0,0 +1,57 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.protocol.xpack; + +import org.elasticsearch.common.xcontent.XContentParser; + +import java.io.IOException; +import java.util.Map; +import java.util.stream.Collectors; + +/** + * Response object from calling the xpack usage api. + * + * Usage information for each feature is accessible through {@link #getUsages()}. + */ +public class XPackUsageResponse { + + private final Map> usages; + + private XPackUsageResponse(Map> usages) throws IOException { + this.usages = usages; + } + + @SuppressWarnings("unchecked") + private static Map castMap(Object value) { + return (Map)value; + } + + /** Return a map from feature name to usage information for that feature. 
*/ + public Map<String, Map<String, Object>> getUsages() { + return usages; + } + + public static XPackUsageResponse fromXContent(XContentParser parser) throws IOException { + Map<String, Object> rawMap = parser.map(); + Map<String, Map<String, Object>> usages = rawMap.entrySet().stream().collect( + Collectors.toMap(Map.Entry::getKey, e -> castMap(e.getValue()))); + return new XPackUsageResponse(usages); + } +} From 2183fffaead000abbde264de8d17279d2c296712 Mon Sep 17 00:00:00 2001 From: Colin Goodheart-Smithe Date: Fri, 13 Jul 2018 18:08:35 +0100 Subject: [PATCH 026/107] Adds a new auto-interval date histogram (#28993) * Adds a new auto-interval date histogram This change adds a new type of histogram aggregation called `auto_date_histogram` where you can specify the target number of buckets you require and it will find an appropriate interval for the returned buckets. The aggregation works by first collecting documents in buckets at second interval; when it has created more than the target number of buckets it merges these buckets into minute interval buckets and continues collecting until it reaches the target number of buckets again. It will keep merging buckets when it exceeds the target until either collection is finished or the highest interval (currently years) is reached. A similar process happens at reduce time. This aggregation intentionally does not support min_doc_count, offset and extended_bounds to keep the already complex logic from becoming more complex. The aggregation accepts sub-aggregations but will always operate in `breadth_first` mode deferring the computation of sub-aggregations until the final buckets from the shard are known. min_doc_count is effectively hard-coded to zero meaning that we will insert empty buckets where necessary. Closes #9572 * Adds documentation * Added sub aggregator test * Fixes failing docs test * Brings branch up to date with master changes * trying to get tests to pass again * Fixes multiBucketConsumer accounting * Collects more buckets than needed on shards This gives us more options at reduce time in terms of how we do the final merge of the buckets to produce the final result * Revert "Collects more buckets than needed on shards" This reverts commit 993c782d117892af9a3c86a51921cdee630a3ac5. * Adds ability to merge within a rounding * Fixes non-timezone doc test failure * Fix time zone tests * iterates on tests * Adds test case and documentation changes Added some notes in the documentation about the intervals that can be returned. Also added a test case that utilises the merging of consecutive buckets * Fixes performance bug The bug meant that getAppropriateRounding took a huge amount of time if the range of the data was large but also sparsely populated. In these situations the rounding would be very low, so iterating through the rounding values from the min key to the max key took a long time (~120 seconds in one test). The solution is to add a rough estimate first which chooses the rounding based just on the long values of the min and max keys alone, but selects the rounding one lower than the one it thinks is appropriate, so that the accurate method can choose the final rounding taking into account the fact that intervals are not always fixed length.
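The merge-on-overflow idea described above can be pictured with a small standalone sketch. It uses plain fixed-length millisecond intervals and invented names; the real aggregator works with calendar-aware Rounding objects and defers sub-aggregations, so treat this purely as an illustration:

import java.util.Map;
import java.util.TreeMap;

// Illustrative only: collect at the finest interval, escalate when the target is exceeded
public class BucketEscalationSketch {

    static Map<Long, Long> collect(long[] timestamps, long[] intervalsMillis, int targetBuckets) {
        int level = 0; // start at the finest interval (e.g. seconds)
        TreeMap<Long, Long> buckets = new TreeMap<>();
        for (long ts : timestamps) {
            buckets.merge(ts - ts % intervalsMillis[level], 1L, Long::sum);
            // too many buckets: re-round every key at the next coarser interval and merge counts
            while (buckets.size() > targetBuckets && level < intervalsMillis.length - 1) {
                level++;
                long interval = intervalsMillis[level];
                TreeMap<Long, Long> merged = new TreeMap<>();
                buckets.forEach((key, count) -> merged.merge(key - key % interval, count, Long::sum));
                buckets = merged;
            }
        }
        return buckets;
    }
}

With, say, intervalsMillis = {1_000, 60_000, 3_600_000} and targetBuckets = 10, the map is trimmed back under the target after every insertion until the coarsest interval is reached, which mirrors the collection behaviour described in the message; a similar merge is then repeated at reduce time.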
---
 .../client/RestHighLevelClient.java           |    3 +
 docs/reference/aggregations/bucket.asciidoc   |    2 +
 .../autodatehistogram-aggregation.asciidoc    |  283 ++
 .../elasticsearch/search/SearchModule.java    |    4 +
 .../bucket/BucketsAggregator.java             |   13 +
 .../MergingBucketsDeferringCollector.java     |  236 +++
 .../AutoDateHistogramAggregationBuilder.java  |  218 +++
 .../AutoDateHistogramAggregator.java          |  199 +++
 .../AutoDateHistogramAggregatorFactory.java   |   72 +
 .../histogram/DateHistogramAggregator.java    |    4 +-
 .../histogram/InternalAutoDateHistogram.java  |  601 ++++++++
 .../histogram/InternalDateHistogram.java      |    2 +-
 .../histogram/ParsedAutoDateHistogram.java    |   91 ++
 .../aggregations/AggregationsTests.java       |    2 +
 .../bucket/AutoDateHistogramTests.java        |   44 +
 .../AutoDateHistogramAggregatorTests.java     | 1332 +++++++++++++++++
 .../InternalAutoDateHistogramTests.java       |  154 ++
 .../aggregations/AggregatorTestCase.java      |    3 +-
 .../test/InternalAggregationTestCase.java     |    3 +
 ...nternalMultiBucketAggregationTestCase.java |    3 +-
 20 files changed, 3263 insertions(+), 6 deletions(-)
 create mode 100644 docs/reference/aggregations/bucket/autodatehistogram-aggregation.asciidoc
 create mode 100644 server/src/main/java/org/elasticsearch/search/aggregations/bucket/MergingBucketsDeferringCollector.java
 create mode 100644 server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregationBuilder.java
 create mode 100644 server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregator.java
 create mode 100644 server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregatorFactory.java
 create mode 100644 server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogram.java
 create mode 100644 server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ParsedAutoDateHistogram.java
 create mode 100644 server/src/test/java/org/elasticsearch/search/aggregations/bucket/AutoDateHistogramTests.java
 create mode 100644 server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregatorTests.java
 create mode 100644 server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java

diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java
index df674ea898ed1..b9e41b879328f 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java
@@ -85,8 +85,10 @@
 import org.elasticsearch.search.aggregations.bucket.geogrid.ParsedGeoHashGrid;
 import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.global.ParsedGlobal;
+import org.elasticsearch.search.aggregations.bucket.histogram.AutoDateHistogramAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder;
+import org.elasticsearch.search.aggregations.bucket.histogram.ParsedAutoDateHistogram;
 import org.elasticsearch.search.aggregations.bucket.histogram.ParsedDateHistogram;
 import org.elasticsearch.search.aggregations.bucket.histogram.ParsedHistogram;
 import org.elasticsearch.search.aggregations.bucket.missing.MissingAggregationBuilder;
@@ -1004,6 +1006,7 @@ static List<NamedXContentRegistry.Entry> getDefaultNamedXContents() {
         map.put(GeoCentroidAggregationBuilder.NAME, (p, c) -> ParsedGeoCentroid.fromXContent(p, (String) c));
         map.put(HistogramAggregationBuilder.NAME, (p, c) -> ParsedHistogram.fromXContent(p, (String) c));
         map.put(DateHistogramAggregationBuilder.NAME, (p, c) -> ParsedDateHistogram.fromXContent(p, (String) c));
+        map.put(AutoDateHistogramAggregationBuilder.NAME, (p, c) -> ParsedAutoDateHistogram.fromXContent(p, (String) c));
         map.put(StringTerms.NAME, (p, c) -> ParsedStringTerms.fromXContent(p, (String) c));
         map.put(LongTerms.NAME, (p, c) -> ParsedLongTerms.fromXContent(p, (String) c));
         map.put(DoubleTerms.NAME, (p, c) -> ParsedDoubleTerms.fromXContent(p, (String) c));
diff --git a/docs/reference/aggregations/bucket.asciidoc b/docs/reference/aggregations/bucket.asciidoc
index 1233e0d9b7398..ddb55e8d34c8e 100644
--- a/docs/reference/aggregations/bucket.asciidoc
+++ b/docs/reference/aggregations/bucket.asciidoc
@@ -19,6 +19,8 @@ the limit will fail with an exception.
 
 include::bucket/adjacency-matrix-aggregation.asciidoc[]
 
+include::bucket/autodatehistogram-aggregation.asciidoc[]
+
 include::bucket/children-aggregation.asciidoc[]
 
 include::bucket/composite-aggregation.asciidoc[]
diff --git a/docs/reference/aggregations/bucket/autodatehistogram-aggregation.asciidoc b/docs/reference/aggregations/bucket/autodatehistogram-aggregation.asciidoc
new file mode 100644
index 0000000000000..28cb65ce6cc48
--- /dev/null
+++ b/docs/reference/aggregations/bucket/autodatehistogram-aggregation.asciidoc
@@ -0,0 +1,283 @@
+[[search-aggregations-bucket-autodatehistogram-aggregation]]
+=== Auto-interval Date Histogram Aggregation
+
+A multi-bucket aggregation similar to the <<search-aggregations-bucket-datehistogram-aggregation,date histogram>>
+except instead of providing an interval to use as the width of each bucket, a
+target number of buckets is provided, and the interval of the buckets is
+automatically chosen to best achieve that target. The number of buckets
+returned will always be less than or equal to this target number.
+
+The `buckets` field is optional, and will default to 10 buckets if not specified.
+
+Requesting a target of 10 buckets:
+
+[source,js]
+--------------------------------------------------
+POST /sales/_search?size=0
+{
+    "aggs" : {
+        "sales_over_time" : {
+            "auto_date_histogram" : {
+                "field" : "date",
+                "buckets" : 10
+            }
+        }
+    }
+}
+--------------------------------------------------
+// CONSOLE
+// TEST[setup:sales]
+
+==== Keys
+
+Internally, a date is represented as a 64 bit number representing a timestamp
+in milliseconds-since-the-epoch. These timestamps are returned as the bucket
+++key++s. The `key_as_string` is the same timestamp converted to a formatted
+date string using the format specified with the `format` parameter:
+
+TIP: If no `format` is specified, then it will use the first date
+<<mapping-date-format,format>> specified in the field mapping.
+
+[source,js]
+--------------------------------------------------
+POST /sales/_search?size=0
+{
+    "aggs" : {
+        "sales_over_time" : {
+            "auto_date_histogram" : {
+                "field" : "date",
+                "buckets" : 5,
+                "format" : "yyyy-MM-dd" <1>
+            }
+        }
+    }
+}
+--------------------------------------------------
+// CONSOLE
+// TEST[setup:sales]
+
+<1> Supports expressive date <<date-format-pattern,format pattern>>
+
+Response:
+
+[source,js]
+--------------------------------------------------
+{
+    ...
+    "aggregations": {
+        "sales_over_time": {
+            "buckets": [
+                {
+                    "key_as_string": "2015-01-01",
+                    "key": 1420070400000,
+                    "doc_count": 3
+                },
+                {
+                    "key_as_string": "2015-02-01",
+                    "key": 1422748800000,
+                    "doc_count": 2
+                },
+                {
+                    "key_as_string": "2015-03-01",
+                    "key": 1425168000000,
+                    "doc_count": 2
+                }
+            ]
+        }
+    }
+}
+--------------------------------------------------
+// TESTRESPONSE[s/\.\.\./"took": $body.took,"timed_out": false,"_shards": $body._shards,"hits": $body.hits,/]
+
+==== Intervals
+
+The interval of the returned buckets is selected based on the data collected by the
+aggregation so that the number of buckets returned is less than or equal to the number
+requested. The possible intervals returned are:
+
+[horizontal]
+seconds:: In multiples of 1, 5, 10 and 30
+minutes:: In multiples of 1, 5, 10 and 30
+hours:: In multiples of 1, 3 and 12
+days:: In multiples of 1 and 7
+months:: In multiples of 1 and 3
+years:: In multiples of 1, 5, 10, 20, 50 and 100
+
+In the worst case, where the number of daily buckets is too large for the requested
+number of buckets, the number of buckets returned will be 1/7th of the number of
+buckets requested. For example, with a target of 10 buckets, data spanning 11 days
+produces 11 daily buckets, one too many, so the aggregation falls back to 7-day
+buckets and returns only 2.
+
+==== Time Zone
+
+Date-times are stored in Elasticsearch in UTC. By default, all bucketing and
+rounding is also done in UTC. The `time_zone` parameter can be used to indicate
+that bucketing should use a different time zone.
+
+Time zones may either be specified as an ISO 8601 UTC offset (e.g. `+01:00` or
+`-08:00`) or as a timezone id, an identifier used in the TZ database like
+`America/Los_Angeles`.
+
+Consider the following example:
+
+[source,js]
+---------------------------------
+PUT my_index/log/1?refresh
+{
+  "date": "2015-10-01T00:30:00Z"
+}
+
+PUT my_index/log/2?refresh
+{
+  "date": "2015-10-01T01:30:00Z"
+}
+
+PUT my_index/log/3?refresh
+{
+  "date": "2015-10-01T02:30:00Z"
+}
+
+GET my_index/_search?size=0
+{
+  "aggs": {
+    "by_day": {
+      "auto_date_histogram": {
+        "field": "date",
+        "buckets" : 3
+      }
+    }
+  }
+}
+---------------------------------
+// CONSOLE
+
+If no time zone is specified, UTC is used, and three 1-hour buckets are returned
+starting at midnight UTC on 1 October 2015:
+
+[source,js]
+---------------------------------
+{
+  ...
+ "aggregations": { + "by_day": { + "buckets": [ + { + "key_as_string": "2015-10-01T00:00:00.000Z", + "key": 1443657600000, + "doc_count": 1 + }, + { + "key_as_string": "2015-10-01T01:00:00.000Z", + "key": 1443661200000, + "doc_count": 1 + }, + { + "key_as_string": "2015-10-01T02:00:00.000Z", + "key": 1443664800000, + "doc_count": 1 + } + ] + } + } +} +--------------------------------- +// TESTRESPONSE[s/\.\.\./"took": $body.took,"timed_out": false,"_shards": $body._shards,"hits": $body.hits,/] + +If a `time_zone` of `-01:00` is specified, then midnight starts at one hour before +midnight UTC: + +[source,js] +--------------------------------- +GET my_index/_search?size=0 +{ + "aggs": { + "by_day": { + "auto_date_histogram": { + "field": "date", + "buckets" : 3, + "time_zone": "-01:00" + } + } + } +} +--------------------------------- +// CONSOLE +// TEST[continued] + + +Now three 1-hour buckets are still returned but the first bucket starts at +11:00pm on 30 September 2015 since that is the local time for the bucket in +the specified time zone. + +[source,js] +--------------------------------- +{ + ... + "aggregations": { + "by_day": { + "buckets": [ + { + "key_as_string": "2015-09-30T23:00:00.000-01:00", + "key": 1443657600000, + "doc_count": 1 + }, + { + "key_as_string": "2015-10-01T00:00:00.000-01:00", + "key": 1443661200000, + "doc_count": 1 + }, + { + "key_as_string": "2015-10-01T01:00:00.000-01:00", + "key": 1443664800000, + "doc_count": 1 + } + ] + } + } +} +--------------------------------- +// TESTRESPONSE[s/\.\.\./"took": $body.took,"timed_out": false,"_shards": $body._shards,"hits": $body.hits,/] + +<1> The `key_as_string` value represents midnight on each day + in the specified time zone. + +WARNING: When using time zones that follow DST (daylight savings time) changes, +buckets close to the moment when those changes happen can have slightly different +sizes than neighbouring buckets. +For example, consider a DST start in the `CET` time zone: on 27 March 2016 at 2am, +clocks were turned forward 1 hour to 3am local time. If the result of the aggregation +was daily buckets, the bucket covering that day will only hold data for 23 hours +instead of the usual 24 hours for other buckets. The same is true for shorter intervals +like e.g. 12h. Here, we will have only a 11h bucket on the morning of 27 March when the +DST shift happens. + +==== Scripts + +Like with the normal <>, both document level +scripts and value level scripts are supported. This aggregation does not however, support the `min_doc_count`, +`extended_bounds` and `order` parameters. + +==== Missing value + +The `missing` parameter defines how documents that are missing a value should be treated. +By default they will be ignored but it is also possible to treat them as if they +had a value. + +[source,js] +-------------------------------------------------- +POST /sales/_search?size=0 +{ + "aggs" : { + "sale_date" : { + "auto_date_histogram" : { + "field" : "date", + "buckets": 10, + "missing": "2000/01/01" <1> + } + } + } +} +-------------------------------------------------- +// CONSOLE +// TEST[setup:sales] + +<1> Documents without a value in the `publish_date` field will fall into the same bucket as documents that have the value `2000-01-01`. 
diff --git a/server/src/main/java/org/elasticsearch/search/SearchModule.java b/server/src/main/java/org/elasticsearch/search/SearchModule.java
index 199d2278bf76b..efef1aeb04f76 100644
--- a/server/src/main/java/org/elasticsearch/search/SearchModule.java
+++ b/server/src/main/java/org/elasticsearch/search/SearchModule.java
@@ -109,8 +109,10 @@
 import org.elasticsearch.search.aggregations.bucket.geogrid.InternalGeoHashGrid;
 import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.global.InternalGlobal;
+import org.elasticsearch.search.aggregations.bucket.histogram.AutoDateHistogramAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder;
 import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder;
+import org.elasticsearch.search.aggregations.bucket.histogram.InternalAutoDateHistogram;
 import org.elasticsearch.search.aggregations.bucket.histogram.InternalDateHistogram;
 import org.elasticsearch.search.aggregations.bucket.histogram.InternalHistogram;
 import org.elasticsearch.search.aggregations.bucket.missing.InternalMissing;
@@ -395,6 +397,8 @@ private void registerAggregations(List<SearchPlugin> plugins) {
             HistogramAggregationBuilder::parse).addResultReader(InternalHistogram::new));
         registerAggregation(new AggregationSpec(DateHistogramAggregationBuilder.NAME, DateHistogramAggregationBuilder::new,
             DateHistogramAggregationBuilder::parse).addResultReader(InternalDateHistogram::new));
+        registerAggregation(new AggregationSpec(AutoDateHistogramAggregationBuilder.NAME, AutoDateHistogramAggregationBuilder::new,
+            AutoDateHistogramAggregationBuilder::parse).addResultReader(InternalAutoDateHistogram::new));
         registerAggregation(new AggregationSpec(GeoDistanceAggregationBuilder.NAME, GeoDistanceAggregationBuilder::new,
             GeoDistanceAggregationBuilder::parse).addResultReader(InternalGeoDistance::new));
         registerAggregation(new AggregationSpec(GeoGridAggregationBuilder.NAME, GeoGridAggregationBuilder::new,
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java
index 504758e7a4ec2..7b09ac9d61895 100644
--- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/BucketsAggregator.java
@@ -84,6 +84,19 @@ public final void collectExistingBucket(LeafBucketCollector subCollector, int do
         subCollector.collect(doc, bucketOrd);
     }
 
+    public final void mergeBuckets(long[] mergeMap, long newNumBuckets) {
+        try (IntArray oldDocCounts = docCounts) {
+            docCounts = bigArrays.newIntArray(newNumBuckets, true);
+            docCounts.fill(0, newNumBuckets, 0);
+            for (int i = 0; i < oldDocCounts.size(); i++) {
+                int docCount = oldDocCounts.get(i);
+                if (docCount != 0) {
+                    docCounts.increment(mergeMap[i], docCount);
+                }
+            }
+        }
+    }
+
     public IntArray getDocCounts() {
         return docCounts;
     }
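The `mergeMap` contract used by `mergeBuckets` above, and by `MergingBucketsDeferringCollector` below, is worth spelling out: the array index is the old bucket ordinal, the value is the new bucket ordinal, and doc counts of old buckets that map to the same new ordinal are summed. A minimal illustration with plain arrays (the real code uses `BigArrays`):

[source,java]
--------------------------------------------------
public class MergeMapDemo {
    public static void main(String[] args) {
        // mergeMap[oldOrd] = newOrd: five old buckets collapse into three.
        long[] mergeMap = {0, 0, 1, 1, 2};
        int[] oldDocCounts = {3, 1, 4, 1, 5};

        int[] newDocCounts = new int[3];
        for (int i = 0; i < oldDocCounts.length; i++) {
            newDocCounts[(int) mergeMap[i]] += oldDocCounts[i];
        }

        // Prints [4, 5, 5]: ordinals 0+1 and 2+3 merged, 4 kept as-is.
        System.out.println(java.util.Arrays.toString(newDocCounts));
    }
}
--------------------------------------------------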
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/MergingBucketsDeferringCollector.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/MergingBucketsDeferringCollector.java
new file mode 100644
index 0000000000000..f357e9d286f54
--- /dev/null
+++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/MergingBucketsDeferringCollector.java
@@ -0,0 +1,236 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied. See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.search.aggregations.bucket;
+
+import org.apache.lucene.index.LeafReaderContext;
+import org.apache.lucene.search.DocIdSetIterator;
+import org.apache.lucene.search.Scorer;
+import org.apache.lucene.search.Weight;
+import org.apache.lucene.util.packed.PackedInts;
+import org.apache.lucene.util.packed.PackedLongValues;
+import org.elasticsearch.common.util.BigArrays;
+import org.elasticsearch.common.util.LongHash;
+import org.elasticsearch.search.aggregations.Aggregator;
+import org.elasticsearch.search.aggregations.BucketCollector;
+import org.elasticsearch.search.aggregations.InternalAggregation;
+import org.elasticsearch.search.aggregations.LeafBucketCollector;
+import org.elasticsearch.search.internal.SearchContext;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.List;
+
+/**
+ * A specialization of {@link DeferringBucketCollector} that collects all
+ * matches and then is able to replay a given subset of buckets. Exposes
+ * mergeBuckets, which can be invoked by the aggregator when increasing the
+ * rounding interval.
+ */ +public class MergingBucketsDeferringCollector extends DeferringBucketCollector { + + List entries = new ArrayList<>(); + BucketCollector collector; + final SearchContext searchContext; + LeafReaderContext context; + PackedLongValues.Builder docDeltas; + PackedLongValues.Builder buckets; + long maxBucket = -1; + boolean finished = false; + LongHash selectedBuckets; + + public MergingBucketsDeferringCollector(SearchContext context) { + this.searchContext = context; + } + + @Override + public void setDeferredCollector(Iterable deferredCollectors) { + this.collector = BucketCollector.wrap(deferredCollectors); + } + + @Override + public boolean needsScores() { + if (collector == null) { + throw new IllegalStateException(); + } + return collector.needsScores(); + } + + @Override + public void preCollection() throws IOException { + collector.preCollection(); + } + + private void finishLeaf() { + if (context != null) { + entries.add(new Entry(context, docDeltas.build(), buckets.build())); + } + context = null; + docDeltas = null; + buckets = null; + } + + @Override + public LeafBucketCollector getLeafCollector(LeafReaderContext ctx) throws IOException { + finishLeaf(); + + context = ctx; + docDeltas = PackedLongValues.packedBuilder(PackedInts.DEFAULT); + buckets = PackedLongValues.packedBuilder(PackedInts.DEFAULT); + + return new LeafBucketCollector() { + int lastDoc = 0; + + @Override + public void collect(int doc, long bucket) { + docDeltas.add(doc - lastDoc); + buckets.add(bucket); + lastDoc = doc; + maxBucket = Math.max(maxBucket, bucket); + } + }; + } + + public void mergeBuckets(long[] mergeMap) { + + List newEntries = new ArrayList<>(entries.size()); + for (Entry sourceEntry : entries) { + PackedLongValues.Builder newBuckets = PackedLongValues.packedBuilder(PackedInts.DEFAULT); + for (PackedLongValues.Iterator itr = sourceEntry.buckets.iterator(); itr.hasNext();) { + long bucket = itr.next(); + newBuckets.add(mergeMap[Math.toIntExact(bucket)]); + } + newEntries.add(new Entry(sourceEntry.context, sourceEntry.docDeltas, newBuckets.build())); + } + entries = newEntries; + + // if there are buckets that have been collected in the current segment + // we need to update the bucket ordinals there too + if (buckets.size() > 0) { + PackedLongValues currentBuckets = buckets.build(); + PackedLongValues.Builder newBuckets = PackedLongValues.packedBuilder(PackedInts.DEFAULT); + for (PackedLongValues.Iterator itr = currentBuckets.iterator(); itr.hasNext();) { + long bucket = itr.next(); + newBuckets.add(mergeMap[Math.toIntExact(bucket)]); + } + buckets = newBuckets; + } + } + + @Override + public void postCollection() { + finishLeaf(); + finished = true; + } + + /** + * Replay the wrapped collector, but only on a selection of buckets. + */ + @Override + public void prepareSelectedBuckets(long... 
selectedBuckets) throws IOException { + if (finished == false) { + throw new IllegalStateException("Cannot replay yet, collection is not finished: postCollect() has not been called"); + } + if (this.selectedBuckets != null) { + throw new IllegalStateException("Already been replayed"); + } + + final LongHash hash = new LongHash(selectedBuckets.length, BigArrays.NON_RECYCLING_INSTANCE); + for (long bucket : selectedBuckets) { + hash.add(bucket); + } + this.selectedBuckets = hash; + + boolean needsScores = collector.needsScores(); + Weight weight = null; + if (needsScores) { + weight = searchContext.searcher().createNormalizedWeight(searchContext.query(), true); + } + for (Entry entry : entries) { + final LeafBucketCollector leafCollector = collector.getLeafCollector(entry.context); + DocIdSetIterator docIt = null; + if (needsScores && entry.docDeltas.size() > 0) { + Scorer scorer = weight.scorer(entry.context); + // We don't need to check if the scorer is null + // since we are sure that there are documents to replay + // (entry.docDeltas it not empty). + docIt = scorer.iterator(); + leafCollector.setScorer(scorer); + } + final PackedLongValues.Iterator docDeltaIterator = entry.docDeltas.iterator(); + final PackedLongValues.Iterator buckets = entry.buckets.iterator(); + int doc = 0; + for (long i = 0, end = entry.docDeltas.size(); i < end; ++i) { + doc += docDeltaIterator.next(); + final long bucket = buckets.next(); + final long rebasedBucket = hash.find(bucket); + if (rebasedBucket != -1) { + if (needsScores) { + if (docIt.docID() < doc) { + docIt.advance(doc); + } + // aggregations should only be replayed on matching + // documents + assert docIt.docID() == doc; + } + leafCollector.collect(doc, rebasedBucket); + } + } + } + + collector.postCollection(); + } + + /** + * Wrap the provided aggregator so that it behaves (almost) as if it had + * been collected directly. + */ + @Override + public Aggregator wrap(final Aggregator in) { + + return new WrappedAggregator(in) { + + @Override + public InternalAggregation buildAggregation(long bucket) throws IOException { + if (selectedBuckets == null) { + throw new IllegalStateException("Collection has not been replayed yet."); + } + final long rebasedBucket = selectedBuckets.find(bucket); + if (rebasedBucket == -1) { + throw new IllegalStateException("Cannot build for a bucket which has not been collected [" + bucket + "]"); + } + return in.buildAggregation(rebasedBucket); + } + + }; + } + + private static class Entry { + final LeafReaderContext context; + final PackedLongValues docDeltas; + final PackedLongValues buckets; + + Entry(LeafReaderContext context, PackedLongValues docDeltas, PackedLongValues buckets) { + this.context = context; + this.docDeltas = docDeltas; + this.buckets = buckets; + } + } + +} diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregationBuilder.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregationBuilder.java new file mode 100644 index 0000000000000..366060835d891 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregationBuilder.java @@ -0,0 +1,218 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. 
Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.aggregations.bucket.histogram; + +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.rounding.DateTimeUnit; +import org.elasticsearch.common.rounding.Rounding; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.search.aggregations.AggregationBuilder; +import org.elasticsearch.search.aggregations.AggregatorFactories.Builder; +import org.elasticsearch.search.aggregations.AggregatorFactory; +import org.elasticsearch.search.aggregations.MultiBucketConsumerService; +import org.elasticsearch.search.aggregations.support.ValueType; +import org.elasticsearch.search.aggregations.support.ValuesSource; +import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric; +import org.elasticsearch.search.aggregations.support.ValuesSourceAggregationBuilder; +import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory; +import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; +import org.elasticsearch.search.aggregations.support.ValuesSourceParserHelper; +import org.elasticsearch.search.aggregations.support.ValuesSourceType; +import org.elasticsearch.search.internal.SearchContext; + +import java.io.IOException; +import java.util.Arrays; +import java.util.Map; +import java.util.Objects; + +public class AutoDateHistogramAggregationBuilder + extends ValuesSourceAggregationBuilder { + + public static final String NAME = "auto_date_histogram"; + + public static final ParseField NUM_BUCKETS_FIELD = new ParseField("buckets"); + + private static final ObjectParser PARSER; + static { + PARSER = new ObjectParser<>(AutoDateHistogramAggregationBuilder.NAME); + ValuesSourceParserHelper.declareNumericFields(PARSER, true, true, true); + + PARSER.declareInt(AutoDateHistogramAggregationBuilder::setNumBuckets, NUM_BUCKETS_FIELD); + } + + public static AutoDateHistogramAggregationBuilder parse(String aggregationName, XContentParser parser) throws IOException { + return PARSER.parse(parser, new AutoDateHistogramAggregationBuilder(aggregationName), null); + } + + private int numBuckets = 10; + + /** Create a new builder with the given name. */ + public AutoDateHistogramAggregationBuilder(String name) { + super(name, ValuesSourceType.NUMERIC, ValueType.DATE); + } + + /** Read from a stream, for internal use only. 
*/ + public AutoDateHistogramAggregationBuilder(StreamInput in) throws IOException { + super(in, ValuesSourceType.NUMERIC, ValueType.DATE); + numBuckets = in.readVInt(); + } + + protected AutoDateHistogramAggregationBuilder(AutoDateHistogramAggregationBuilder clone, Builder factoriesBuilder, + Map metaData) { + super(clone, factoriesBuilder, metaData); + this.numBuckets = clone.numBuckets; + } + + @Override + protected AggregationBuilder shallowCopy(Builder factoriesBuilder, Map metaData) { + return new AutoDateHistogramAggregationBuilder(this, factoriesBuilder, metaData); + } + + @Override + protected void innerWriteTo(StreamOutput out) throws IOException { + out.writeVInt(numBuckets); + } + + @Override + public String getType() { + return NAME; + } + + public AutoDateHistogramAggregationBuilder setNumBuckets(int numBuckets) { + if (numBuckets <= 0) { + throw new IllegalArgumentException(NUM_BUCKETS_FIELD.getPreferredName() + " must be greater than 0 for [" + name + "]"); + } + this.numBuckets = numBuckets; + return this; + } + + public int getNumBuckets() { + return numBuckets; + } + + @Override + protected ValuesSourceAggregatorFactory innerBuild(SearchContext context, ValuesSourceConfig config, + AggregatorFactory parent, Builder subFactoriesBuilder) throws IOException { + RoundingInfo[] roundings = new RoundingInfo[6]; + roundings[0] = new RoundingInfo(createRounding(DateTimeUnit.SECOND_OF_MINUTE), 1000L, 1, 5, 10, 30); + roundings[1] = new RoundingInfo(createRounding(DateTimeUnit.MINUTES_OF_HOUR), 60 * 1000L, 1, 5, 10, 30); + roundings[2] = new RoundingInfo(createRounding(DateTimeUnit.HOUR_OF_DAY), 60 * 60 * 1000L, 1, 3, 12); + roundings[3] = new RoundingInfo(createRounding(DateTimeUnit.DAY_OF_MONTH), 24 * 60 * 60 * 1000L, 1, 7); + roundings[4] = new RoundingInfo(createRounding(DateTimeUnit.MONTH_OF_YEAR), 30 * 24 * 60 * 60 * 1000L, 1, 3); + roundings[5] = new RoundingInfo(createRounding(DateTimeUnit.YEAR_OF_CENTURY), 365 * 24 * 60 * 60 * 1000L, 1, 5, 10, 20, 50, 100); + + int maxRoundingInterval = Arrays.stream(roundings,0, roundings.length-1) + .map(rounding -> rounding.innerIntervals) + .flatMapToInt(Arrays::stream) + .boxed() + .reduce(Integer::max).get(); + Settings settings = context.getQueryShardContext().getIndexSettings().getNodeSettings(); + int maxBuckets = MultiBucketConsumerService.MAX_BUCKET_SETTING.get(settings); + int bucketCeiling = maxBuckets / maxRoundingInterval; + if (numBuckets > bucketCeiling) { + throw new IllegalArgumentException(NUM_BUCKETS_FIELD.getPreferredName()+ + " must be less than " + bucketCeiling); + } + return new AutoDateHistogramAggregatorFactory(name, config, numBuckets, roundings, context, parent, subFactoriesBuilder, metaData); + } + + private Rounding createRounding(DateTimeUnit interval) { + Rounding.Builder tzRoundingBuilder = Rounding.builder(interval); + if (timeZone() != null) { + tzRoundingBuilder.timeZone(timeZone()); + } + Rounding rounding = tzRoundingBuilder.build(); + return rounding; + } + + @Override + protected XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { + builder.field(NUM_BUCKETS_FIELD.getPreferredName(), numBuckets); + return builder; + } + + @Override + protected int innerHashCode() { + return Objects.hash(numBuckets); + } + + @Override + protected boolean innerEquals(Object obj) { + AutoDateHistogramAggregationBuilder other = (AutoDateHistogramAggregationBuilder) obj; + return Objects.equals(numBuckets, other.numBuckets); + } + + public static class RoundingInfo implements 
Writeable { + final Rounding rounding; + final int[] innerIntervals; + final long roughEstimateDurationMillis; + + public RoundingInfo(Rounding rounding, long roughEstimateDurationMillis, int... innerIntervals) { + this.rounding = rounding; + this.roughEstimateDurationMillis = roughEstimateDurationMillis; + this.innerIntervals = innerIntervals; + } + + public RoundingInfo(StreamInput in) throws IOException { + rounding = Rounding.Streams.read(in); + roughEstimateDurationMillis = in.readVLong(); + innerIntervals = in.readIntArray(); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + Rounding.Streams.write(rounding, out); + out.writeVLong(roughEstimateDurationMillis); + out.writeIntArray(innerIntervals); + } + + public int getMaximumInnerInterval() { + return innerIntervals[innerIntervals.length - 1]; + } + + public long getRoughEstimateDurationMillis() { + return roughEstimateDurationMillis; + } + + @Override + public int hashCode() { + return Objects.hash(rounding, Arrays.hashCode(innerIntervals)); + } + + @Override + public boolean equals(Object obj) { + if (obj == null) { + return false; + } + if (obj.getClass() != getClass()) { + return false; + } + RoundingInfo other = (RoundingInfo) obj; + return Objects.equals(rounding, other.rounding) && + Objects.deepEquals(innerIntervals, other.innerIntervals); + } + } +} diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregator.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregator.java new file mode 100644 index 0000000000000..f86145386f1df --- /dev/null +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregator.java @@ -0,0 +1,199 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.search.aggregations.bucket.histogram; + +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.index.SortedNumericDocValues; +import org.apache.lucene.util.CollectionUtil; +import org.elasticsearch.common.Nullable; +import org.elasticsearch.common.lease.Releasables; +import org.elasticsearch.common.rounding.Rounding; +import org.elasticsearch.common.util.LongHash; +import org.elasticsearch.search.DocValueFormat; +import org.elasticsearch.search.aggregations.Aggregator; +import org.elasticsearch.search.aggregations.AggregatorFactories; +import org.elasticsearch.search.aggregations.BucketOrder; +import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.LeafBucketCollector; +import org.elasticsearch.search.aggregations.LeafBucketCollectorBase; +import org.elasticsearch.search.aggregations.bucket.DeferableBucketAggregator; +import org.elasticsearch.search.aggregations.bucket.DeferringBucketCollector; +import org.elasticsearch.search.aggregations.bucket.MergingBucketsDeferringCollector; +import org.elasticsearch.search.aggregations.bucket.histogram.AutoDateHistogramAggregationBuilder.RoundingInfo; +import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; +import org.elasticsearch.search.aggregations.support.ValuesSource; +import org.elasticsearch.search.internal.SearchContext; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Collections; +import java.util.List; +import java.util.Map; + +/** + * An aggregator for date values. Every date is rounded down using a configured + * {@link Rounding}. + * + * @see Rounding + */ +class AutoDateHistogramAggregator extends DeferableBucketAggregator { + + private final ValuesSource.Numeric valuesSource; + private final DocValueFormat formatter; + private final RoundingInfo[] roundingInfos; + private int roundingIdx = 0; + + private LongHash bucketOrds; + private int targetBuckets; + private MergingBucketsDeferringCollector deferringCollector; + + AutoDateHistogramAggregator(String name, AggregatorFactories factories, int numBuckets, RoundingInfo[] roundingInfos, + @Nullable ValuesSource.Numeric valuesSource, DocValueFormat formatter, SearchContext aggregationContext, Aggregator parent, + List pipelineAggregators, Map metaData) throws IOException { + + super(name, factories, aggregationContext, parent, pipelineAggregators, metaData); + this.targetBuckets = numBuckets; + this.valuesSource = valuesSource; + this.formatter = formatter; + this.roundingInfos = roundingInfos; + + bucketOrds = new LongHash(1, aggregationContext.bigArrays()); + + } + + @Override + public boolean needsScores() { + return (valuesSource != null && valuesSource.needsScores()) || super.needsScores(); + } + + @Override + protected boolean shouldDefer(Aggregator aggregator) { + return true; + } + + @Override + public DeferringBucketCollector getDeferringCollector() { + deferringCollector = new MergingBucketsDeferringCollector(context); + return deferringCollector; + } + + @Override + public LeafBucketCollector getLeafCollector(LeafReaderContext ctx, + final LeafBucketCollector sub) throws IOException { + if (valuesSource == null) { + return LeafBucketCollector.NO_OP_COLLECTOR; + } + final SortedNumericDocValues values = valuesSource.longValues(ctx); + return new LeafBucketCollectorBase(sub, values) { + @Override + public void collect(int doc, long bucket) throws IOException { + assert bucket == 0; + if (values.advanceExact(doc)) { + final 
int valuesCount = values.docValueCount(); + + long previousRounded = Long.MIN_VALUE; + for (int i = 0; i < valuesCount; ++i) { + long value = values.nextValue(); + long rounded = roundingInfos[roundingIdx].rounding.round(value); + assert rounded >= previousRounded; + if (rounded == previousRounded) { + continue; + } + long bucketOrd = bucketOrds.add(rounded); + if (bucketOrd < 0) { // already seen + bucketOrd = -1 - bucketOrd; + collectExistingBucket(sub, doc, bucketOrd); + } else { + collectBucket(sub, doc, bucketOrd); + while (roundingIdx < roundingInfos.length - 1 + && bucketOrds.size() > (targetBuckets * roundingInfos[roundingIdx].getMaximumInnerInterval())) { + increaseRounding(); + } + } + previousRounded = rounded; + } + } + } + + private void increaseRounding() { + try (LongHash oldBucketOrds = bucketOrds) { + LongHash newBucketOrds = new LongHash(1, context.bigArrays()); + long[] mergeMap = new long[(int) oldBucketOrds.size()]; + Rounding newRounding = roundingInfos[++roundingIdx].rounding; + for (int i = 0; i < oldBucketOrds.size(); i++) { + long oldKey = oldBucketOrds.get(i); + long newKey = newRounding.round(oldKey); + long newBucketOrd = newBucketOrds.add(newKey); + if (newBucketOrd >= 0) { + mergeMap[i] = newBucketOrd; + } else { + mergeMap[i] = -1 - newBucketOrd; + } + } + mergeBuckets(mergeMap, newBucketOrds.size()); + if (deferringCollector != null) { + deferringCollector.mergeBuckets(mergeMap); + } + bucketOrds = newBucketOrds; + } + } + }; + } + + @Override + public InternalAggregation buildAggregation(long owningBucketOrdinal) throws IOException { + assert owningBucketOrdinal == 0; + consumeBucketsAndMaybeBreak((int) bucketOrds.size()); + + long[] bucketOrdArray = new long[(int) bucketOrds.size()]; + for (int i = 0; i < bucketOrds.size(); i++) { + bucketOrdArray[i] = i; + } + + runDeferredCollections(bucketOrdArray); + + List buckets = new ArrayList<>((int) bucketOrds.size()); + for (long i = 0; i < bucketOrds.size(); i++) { + buckets.add(new InternalAutoDateHistogram.Bucket(bucketOrds.get(i), bucketDocCount(i), formatter, bucketAggregations(i))); + } + + // the contract of the histogram aggregation is that shards must return + // buckets ordered by key in ascending order + CollectionUtil.introSort(buckets, BucketOrder.key(true).comparator(this)); + + // value source will be null for unmapped fields + InternalAutoDateHistogram.BucketInfo emptyBucketInfo = new InternalAutoDateHistogram.BucketInfo(roundingInfos, roundingIdx, + buildEmptySubAggregations()); + + return new InternalAutoDateHistogram(name, buckets, targetBuckets, emptyBucketInfo, formatter, pipelineAggregators(), metaData()); + } + + @Override + public InternalAggregation buildEmptyAggregation() { + InternalAutoDateHistogram.BucketInfo emptyBucketInfo = new InternalAutoDateHistogram.BucketInfo(roundingInfos, roundingIdx, + buildEmptySubAggregations()); + return new InternalAutoDateHistogram(name, Collections.emptyList(), targetBuckets, emptyBucketInfo, formatter, + pipelineAggregators(), metaData()); + } + + @Override + public void doClose() { + Releasables.close(bucketOrds); + } +} diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregatorFactory.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregatorFactory.java new file mode 100644 index 0000000000000..051f2f9f6e7c7 --- /dev/null +++ 
b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregatorFactory.java @@ -0,0 +1,72 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.aggregations.bucket.histogram; + +import org.elasticsearch.search.aggregations.Aggregator; +import org.elasticsearch.search.aggregations.AggregatorFactories; +import org.elasticsearch.search.aggregations.AggregatorFactory; +import org.elasticsearch.search.aggregations.bucket.histogram.AutoDateHistogramAggregationBuilder.RoundingInfo; +import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; +import org.elasticsearch.search.aggregations.support.ValuesSource; +import org.elasticsearch.search.aggregations.support.ValuesSource.Numeric; +import org.elasticsearch.search.aggregations.support.ValuesSourceAggregatorFactory; +import org.elasticsearch.search.aggregations.support.ValuesSourceConfig; +import org.elasticsearch.search.internal.SearchContext; + +import java.io.IOException; +import java.util.List; +import java.util.Map; + +public final class AutoDateHistogramAggregatorFactory + extends ValuesSourceAggregatorFactory { + + private final int numBuckets; + private RoundingInfo[] roundingInfos; + + public AutoDateHistogramAggregatorFactory(String name, ValuesSourceConfig config, int numBuckets, RoundingInfo[] roundingInfos, + SearchContext context, AggregatorFactory parent, AggregatorFactories.Builder subFactoriesBuilder, + Map metaData) throws IOException { + super(name, config, context, parent, subFactoriesBuilder, metaData); + this.numBuckets = numBuckets; + this.roundingInfos = roundingInfos; + } + + @Override + protected Aggregator doCreateInternal(ValuesSource.Numeric valuesSource, Aggregator parent, boolean collectsFromSingleBucket, + List pipelineAggregators, Map metaData) throws IOException { + if (collectsFromSingleBucket == false) { + return asMultiBucketAggregator(this, context, parent); + } + return createAggregator(valuesSource, parent, pipelineAggregators, metaData); + } + + private Aggregator createAggregator(ValuesSource.Numeric valuesSource, Aggregator parent, List pipelineAggregators, + Map metaData) throws IOException { + return new AutoDateHistogramAggregator(name, factories, numBuckets, roundingInfos, valuesSource, config.format(), context, parent, + pipelineAggregators, + metaData); + } + + @Override + protected Aggregator createUnmapped(Aggregator parent, List pipelineAggregators, Map metaData) + throws IOException { + return createAggregator(null, parent, pipelineAggregators, metaData); + } +} diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java 
b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java index e0b64d2cd5b9e..8b1f0c4642160 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/DateHistogramAggregator.java @@ -28,13 +28,13 @@ import org.elasticsearch.search.DocValueFormat; import org.elasticsearch.search.aggregations.Aggregator; import org.elasticsearch.search.aggregations.AggregatorFactories; +import org.elasticsearch.search.aggregations.BucketOrder; import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.InternalOrder; import org.elasticsearch.search.aggregations.LeafBucketCollector; import org.elasticsearch.search.aggregations.LeafBucketCollectorBase; import org.elasticsearch.search.aggregations.bucket.BucketsAggregator; import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; -import org.elasticsearch.search.aggregations.BucketOrder; -import org.elasticsearch.search.aggregations.InternalOrder; import org.elasticsearch.search.aggregations.support.ValuesSource; import org.elasticsearch.search.internal.SearchContext; diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogram.java new file mode 100644 index 0000000000000..27c195cbdae75 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogram.java @@ -0,0 +1,601 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.search.aggregations.bucket.histogram; + +import org.apache.lucene.util.PriorityQueue; +import org.elasticsearch.common.io.stream.StreamInput; +import org.elasticsearch.common.io.stream.StreamOutput; +import org.elasticsearch.common.rounding.Rounding; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.search.DocValueFormat; +import org.elasticsearch.search.aggregations.Aggregations; +import org.elasticsearch.search.aggregations.InternalAggregation; +import org.elasticsearch.search.aggregations.InternalAggregations; +import org.elasticsearch.search.aggregations.InternalMultiBucketAggregation; +import org.elasticsearch.search.aggregations.KeyComparable; +import org.elasticsearch.search.aggregations.bucket.MultiBucketsAggregation; +import org.elasticsearch.search.aggregations.bucket.histogram.AutoDateHistogramAggregationBuilder.RoundingInfo; +import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; +import org.joda.time.DateTime; +import org.joda.time.DateTimeZone; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.Iterator; +import java.util.List; +import java.util.ListIterator; +import java.util.Map; +import java.util.Objects; + +/** + * Implementation of {@link Histogram}. + */ +public final class InternalAutoDateHistogram extends + InternalMultiBucketAggregation implements Histogram, HistogramFactory { + + public static class Bucket extends InternalMultiBucketAggregation.InternalBucket implements Histogram.Bucket, KeyComparable { + + final long key; + final long docCount; + final InternalAggregations aggregations; + protected final transient DocValueFormat format; + + public Bucket(long key, long docCount, DocValueFormat format, + InternalAggregations aggregations) { + this.format = format; + this.key = key; + this.docCount = docCount; + this.aggregations = aggregations; + } + + /** + * Read from a stream. 
+ */ + public Bucket(StreamInput in, DocValueFormat format) throws IOException { + this.format = format; + key = in.readLong(); + docCount = in.readVLong(); + aggregations = InternalAggregations.readAggregations(in); + } + + @Override + public boolean equals(Object obj) { + if (obj == null || obj.getClass() != InternalAutoDateHistogram.Bucket.class) { + return false; + } + InternalAutoDateHistogram.Bucket that = (InternalAutoDateHistogram.Bucket) obj; + // No need to take the keyed and format parameters into account, + // they are already stored and tested on the InternalDateHistogram object + return key == that.key + && docCount == that.docCount + && Objects.equals(aggregations, that.aggregations); + } + + @Override + public int hashCode() { + return Objects.hash(getClass(), key, docCount, aggregations); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeLong(key); + out.writeVLong(docCount); + aggregations.writeTo(out); + } + + @Override + public String getKeyAsString() { + return format.format(key).toString(); + } + + @Override + public Object getKey() { + return new DateTime(key, DateTimeZone.UTC); + } + + @Override + public long getDocCount() { + return docCount; + } + + @Override + public Aggregations getAggregations() { + return aggregations; + } + + Bucket reduce(List buckets, Rounding rounding, ReduceContext context) { + List aggregations = new ArrayList<>(buckets.size()); + long docCount = 0; + for (Bucket bucket : buckets) { + docCount += bucket.docCount; + aggregations.add((InternalAggregations) bucket.getAggregations()); + } + InternalAggregations aggs = InternalAggregations.reduce(aggregations, context); + return new InternalAutoDateHistogram.Bucket(rounding.round(key), docCount, format, aggs); + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + String keyAsString = format.format(key).toString(); + builder.startObject(); + if (format != DocValueFormat.RAW) { + builder.field(CommonFields.KEY_AS_STRING.getPreferredName(), keyAsString); + } + builder.field(CommonFields.KEY.getPreferredName(), key); + builder.field(CommonFields.DOC_COUNT.getPreferredName(), docCount); + aggregations.toXContentInternal(builder, params); + builder.endObject(); + return builder; + } + + @Override + public int compareKey(Bucket other) { + return Long.compare(key, other.key); + } + + public DocValueFormat getFormatter() { + return format; + } + } + + static class BucketInfo { + + final RoundingInfo[] roundingInfos; + final int roundingIdx; + final InternalAggregations emptySubAggregations; + + BucketInfo(RoundingInfo[] roundings, int roundingIdx, InternalAggregations subAggregations) { + this.roundingInfos = roundings; + this.roundingIdx = roundingIdx; + this.emptySubAggregations = subAggregations; + } + + BucketInfo(StreamInput in) throws IOException { + int size = in.readVInt(); + roundingInfos = new RoundingInfo[size]; + for (int i = 0; i < size; i++) { + roundingInfos[i] = new RoundingInfo(in); + } + roundingIdx = in.readVInt(); + emptySubAggregations = InternalAggregations.readAggregations(in); + } + + void writeTo(StreamOutput out) throws IOException { + out.writeVInt(roundingInfos.length); + for (RoundingInfo roundingInfo : roundingInfos) { + roundingInfo.writeTo(out); + } + out.writeVInt(roundingIdx); + emptySubAggregations.writeTo(out); + } + + @Override + public boolean equals(Object obj) { + if (obj == null || getClass() != obj.getClass()) { + return false; + } + BucketInfo that = 
(BucketInfo) obj; + return Objects.deepEquals(roundingInfos, that.roundingInfos) + && Objects.equals(roundingIdx, that.roundingIdx) + && Objects.equals(emptySubAggregations, that.emptySubAggregations); + } + + @Override + public int hashCode() { + return Objects.hash(getClass(), Arrays.hashCode(roundingInfos), roundingIdx, emptySubAggregations); + } + } + + private final List buckets; + private final DocValueFormat format; + private final BucketInfo bucketInfo; + private final int targetBuckets; + + + InternalAutoDateHistogram(String name, List buckets, int targetBuckets, BucketInfo emptyBucketInfo, DocValueFormat formatter, + List pipelineAggregators, Map metaData) { + super(name, pipelineAggregators, metaData); + this.buckets = buckets; + this.bucketInfo = emptyBucketInfo; + this.format = formatter; + this.targetBuckets = targetBuckets; + } + + /** + * Stream from a stream. + */ + public InternalAutoDateHistogram(StreamInput in) throws IOException { + super(in); + bucketInfo = new BucketInfo(in); + format = in.readNamedWriteable(DocValueFormat.class); + buckets = in.readList(stream -> new Bucket(stream, format)); + this.targetBuckets = in.readVInt(); + } + + @Override + protected void doWriteTo(StreamOutput out) throws IOException { + bucketInfo.writeTo(out); + out.writeNamedWriteable(format); + out.writeList(buckets); + out.writeVInt(targetBuckets); + } + + @Override + public String getWriteableName() { + return AutoDateHistogramAggregationBuilder.NAME; + } + + @Override + public List getBuckets() { + return Collections.unmodifiableList(buckets); + } + + DocValueFormat getFormatter() { + return format; + } + + public int getTargetBuckets() { + return targetBuckets; + } + + public BucketInfo getBucketInfo() { + return bucketInfo; + } + + @Override + public InternalAutoDateHistogram create(List buckets) { + return new InternalAutoDateHistogram(name, buckets, targetBuckets, bucketInfo, format, pipelineAggregators(), metaData); + } + + @Override + public Bucket createBucket(InternalAggregations aggregations, Bucket prototype) { + return new Bucket(prototype.key, prototype.docCount, prototype.format, aggregations); + } + + private static class IteratorAndCurrent { + + private final Iterator iterator; + private Bucket current; + + IteratorAndCurrent(Iterator iterator) { + this.iterator = iterator; + current = iterator.next(); + } + + } + + /** + * This method works almost exactly the same as + * InternalDateHistogram#reduceBuckets(List, ReduceContext), the different + * here is that we need to round all the keys we see using the highest level + * rounding returned across all the shards so the resolution of the buckets + * is the same and they can be reduced together. 
+ */ + private BucketReduceResult reduceBuckets(List aggregations, ReduceContext reduceContext) { + + // First we need to find the highest level rounding used across all the + // shards + int reduceRoundingIdx = 0; + for (InternalAggregation aggregation : aggregations) { + int aggRoundingIdx = ((InternalAutoDateHistogram) aggregation).bucketInfo.roundingIdx; + if (aggRoundingIdx > reduceRoundingIdx) { + reduceRoundingIdx = aggRoundingIdx; + } + } + // This rounding will be used to reduce all the buckets + RoundingInfo reduceRoundingInfo = bucketInfo.roundingInfos[reduceRoundingIdx]; + Rounding reduceRounding = reduceRoundingInfo.rounding; + + final PriorityQueue pq = new PriorityQueue(aggregations.size()) { + @Override + protected boolean lessThan(IteratorAndCurrent a, IteratorAndCurrent b) { + return a.current.key < b.current.key; + } + }; + for (InternalAggregation aggregation : aggregations) { + InternalAutoDateHistogram histogram = (InternalAutoDateHistogram) aggregation; + if (histogram.buckets.isEmpty() == false) { + pq.add(new IteratorAndCurrent(histogram.buckets.iterator())); + } + } + + List reducedBuckets = new ArrayList<>(); + if (pq.size() > 0) { + // list of buckets coming from different shards that have the same key + List currentBuckets = new ArrayList<>(); + double key = reduceRounding.round(pq.top().current.key); + + do { + final IteratorAndCurrent top = pq.top(); + + if (reduceRounding.round(top.current.key) != key) { + // the key changes, reduce what we already buffered and reset the buffer for current buckets + final Bucket reduced = currentBuckets.get(0).reduce(currentBuckets, reduceRounding, reduceContext); + reduceContext.consumeBucketsAndMaybeBreak(1); + reducedBuckets.add(reduced); + currentBuckets.clear(); + key = reduceRounding.round(top.current.key); + } + + currentBuckets.add(top.current); + + if (top.iterator.hasNext()) { + final Bucket next = top.iterator.next(); + assert next.key > top.current.key : "shards must return data sorted by key"; + top.current = next; + pq.updateTop(); + } else { + pq.pop(); + } + } while (pq.size() > 0); + + if (currentBuckets.isEmpty() == false) { + final Bucket reduced = currentBuckets.get(0).reduce(currentBuckets, reduceRounding, reduceContext); + reduceContext.consumeBucketsAndMaybeBreak(1); + reducedBuckets.add(reduced); + } + } + + return mergeBucketsIfNeeded(reducedBuckets, reduceRoundingIdx, reduceRoundingInfo, reduceContext); + } + + private BucketReduceResult mergeBucketsIfNeeded(List reducedBuckets, int reduceRoundingIdx, RoundingInfo reduceRoundingInfo, + ReduceContext reduceContext) { + while (reducedBuckets.size() > (targetBuckets * reduceRoundingInfo.getMaximumInnerInterval()) + && reduceRoundingIdx < bucketInfo.roundingInfos.length - 1) { + reduceRoundingIdx++; + reduceRoundingInfo = bucketInfo.roundingInfos[reduceRoundingIdx]; + reducedBuckets = mergeBuckets(reducedBuckets, reduceRoundingInfo.rounding, reduceContext); + } + return new BucketReduceResult(reducedBuckets, reduceRoundingInfo, reduceRoundingIdx); + } + + private List mergeBuckets(List reducedBuckets, Rounding reduceRounding, ReduceContext reduceContext) { + List mergedBuckets = new ArrayList<>(); + + List sameKeyedBuckets = new ArrayList<>(); + double key = Double.NaN; + for (Bucket bucket : reducedBuckets) { + long roundedBucketKey = reduceRounding.round(bucket.key); + if (Double.isNaN(key)) { + key = roundedBucketKey; + reduceContext.consumeBucketsAndMaybeBreak(-countInnerBucket(bucket) - 1); + sameKeyedBuckets.add(createBucket(key, bucket.docCount, 
+ + private List&lt;Bucket&gt; mergeBuckets(List&lt;Bucket&gt; reducedBuckets, Rounding reduceRounding, ReduceContext reduceContext) { + List&lt;Bucket&gt; mergedBuckets = new ArrayList<>(); + + List&lt;Bucket&gt; sameKeyedBuckets = new ArrayList<>(); + double key = Double.NaN; + for (Bucket bucket : reducedBuckets) { + long roundedBucketKey = reduceRounding.round(bucket.key); + if (Double.isNaN(key)) { + key = roundedBucketKey; + reduceContext.consumeBucketsAndMaybeBreak(-countInnerBucket(bucket) - 1); + sameKeyedBuckets.add(createBucket(key, bucket.docCount, bucket.aggregations)); + } else if (roundedBucketKey == key) { + reduceContext.consumeBucketsAndMaybeBreak(-countInnerBucket(bucket) - 1); + sameKeyedBuckets.add(createBucket(key, bucket.docCount, bucket.aggregations)); + } else { + reduceContext.consumeBucketsAndMaybeBreak(1); + mergedBuckets.add(sameKeyedBuckets.get(0).reduce(sameKeyedBuckets, reduceRounding, reduceContext)); + sameKeyedBuckets.clear(); + key = roundedBucketKey; + reduceContext.consumeBucketsAndMaybeBreak(-countInnerBucket(bucket) - 1); + sameKeyedBuckets.add(createBucket(key, bucket.docCount, bucket.aggregations)); + } + } + if (sameKeyedBuckets.isEmpty() == false) { + reduceContext.consumeBucketsAndMaybeBreak(1); + mergedBuckets.add(sameKeyedBuckets.get(0).reduce(sameKeyedBuckets, reduceRounding, reduceContext)); + } + reducedBuckets = mergedBuckets; + return reducedBuckets; + } + + private static class BucketReduceResult { + List&lt;Bucket&gt; buckets; + RoundingInfo roundingInfo; + int roundingIdx; + + BucketReduceResult(List&lt;Bucket&gt; buckets, RoundingInfo roundingInfo, int roundingIdx) { + this.buckets = buckets; + this.roundingInfo = roundingInfo; + this.roundingIdx = roundingIdx; + } + }
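An aside on the circuit-breaker bookkeeping (again an editorial sketch, not patch code): the consumeBucketsAndMaybeBreak() calls in mergeBuckets() first release the buckets being collapsed (a negative delta that also gives back their inner sub-buckets via countInnerBucket) and only then charge 1 for each merged result. Assuming the real MultiBucketConsumer behaves like a simple budget, the contract looks like this:

    public class BucketBudgetSketch {
        private final int limit;
        private int live;

        BucketBudgetSketch(int limit) {
            this.limit = limit;
        }

        // negative deltas release previously charged buckets; positive deltas may trip the breaker
        void consumeBucketsAndMaybeBreak(int delta) {
            live += delta;
            if (delta > 0 && live > limit) {
                throw new IllegalStateException("too many buckets: " + live + " > " + limit);
            }
        }

        public static void main(String[] args) {
            BucketBudgetSketch breaker = new BucketBudgetSketch(10);
            breaker.consumeBucketsAndMaybeBreak(8);  // eight buckets arrive from the shards
            breaker.consumeBucketsAndMaybeBreak(-8); // they are merged away...
            breaker.consumeBucketsAndMaybeBreak(2);  // ...and replaced by two coarser buckets
        }
    }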
+ + private BucketReduceResult addEmptyBuckets(BucketReduceResult currentResult, ReduceContext reduceContext) { + List&lt;Bucket&gt; list = currentResult.buckets; + if (list.isEmpty()) { + return currentResult; + } + int roundingIdx = getAppropriateRounding(list.get(0).key, list.get(list.size() - 1).key, currentResult.roundingIdx, + bucketInfo.roundingInfos); + RoundingInfo roundingInfo = bucketInfo.roundingInfos[roundingIdx]; + Rounding rounding = roundingInfo.rounding; + // merge buckets using the new rounding + list = mergeBuckets(list, rounding, reduceContext); + + Bucket lastBucket = null; + ListIterator&lt;Bucket&gt; iter = list.listIterator(); + InternalAggregations reducedEmptySubAggs = InternalAggregations.reduce(Collections.singletonList(bucketInfo.emptySubAggregations), + reduceContext); + + // Add the empty buckets within the data, + // e.g. if the data series is [1,2,3,7] there are 3 empty buckets that will be created for 4,5,6 + while (iter.hasNext()) { + Bucket nextBucket = list.get(iter.nextIndex()); + if (lastBucket != null) { + long key = rounding.nextRoundingValue(lastBucket.key); + while (key < nextBucket.key) { + reduceContext.consumeBucketsAndMaybeBreak(1); + iter.add(new InternalAutoDateHistogram.Bucket(key, 0, format, reducedEmptySubAggs)); + key = rounding.nextRoundingValue(key); + } + assert key == nextBucket.key : "key: " + key + ", nextBucket.key: " + nextBucket.key; + } + lastBucket = iter.next(); + } + return new BucketReduceResult(list, roundingInfo, roundingIdx); + } + + private int getAppropriateRounding(long minKey, long maxKey, int roundingIdx, RoundingInfo[] roundings) { + if (roundingIdx == roundings.length - 1) { + return roundingIdx; + } + int currentRoundingIdx = roundingIdx; + + // Getting the accurate number of required buckets can be slow for large + // ranges at low roundings so get a rough estimate of the rounding first + // so we are at most 1 away from the correct rounding and then get the + // accurate rounding value + for (int i = currentRoundingIdx + 1; i < roundings.length; i++) { + long dataDuration = maxKey - minKey; + long roughEstimateRequiredBuckets = dataDuration / roundings[i].getRoughEstimateDurationMillis(); + if (roughEstimateRequiredBuckets < targetBuckets * roundings[i].getMaximumInnerInterval()) { + currentRoundingIdx = i - 1; + break; + } else if (i == roundingIdx - 1) { + currentRoundingIdx = i; + break; + } + } + + int requiredBuckets = 0; + do { + Rounding currentRounding = roundings[currentRoundingIdx].rounding; + long currentKey = minKey; + requiredBuckets = 0; + while (currentKey < maxKey) { + requiredBuckets++; + currentKey = currentRounding.nextRoundingValue(currentKey); + } + currentRoundingIdx++; + } while (requiredBuckets > (targetBuckets * roundings[roundingIdx].getMaximumInnerInterval()) + && currentRoundingIdx < roundings.length); + // The loop will increase past the correct rounding index here so we + // need to subtract one to get the rounding index we need + return currentRoundingIdx - 1; + }
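The gap-filling loop in addEmptyBuckets() is easiest to see with the [1,2,3,7] example from the comment above. In this editorial sketch (not patch code) nextRoundingValue is modelled as key + 1, so the keys 4, 5 and 6 are inserted as empty buckets:

    import java.util.ArrayList;
    import java.util.List;

    public class GapFillSketch {
        // insert the missing keys between consecutive bucket keys
        static List<Long> fillGaps(List<Long> keys) {
            List<Long> out = new ArrayList<>();
            Long last = null;
            for (long next : keys) {
                if (last != null) {
                    for (long key = last + 1; key < next; key++) {
                        out.add(key); // empty bucket created between the two real ones
                    }
                }
                out.add(next);
                last = next;
            }
            return out;
        }

        public static void main(String[] args) {
            System.out.println(fillGaps(List.of(1L, 2L, 3L, 7L))); // prints [1, 2, 3, 4, 5, 6, 7]
        }
    }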
+ + @Override + public InternalAggregation doReduce(List&lt;InternalAggregation&gt; aggregations, ReduceContext reduceContext) { + BucketReduceResult reducedBucketsResult = reduceBuckets(aggregations, reduceContext); + + if (reduceContext.isFinalReduce()) { + // adding empty buckets if needed + reducedBucketsResult = addEmptyBuckets(reducedBucketsResult, reduceContext); + + // Adding empty buckets may have tipped us over the target so merge the buckets again if needed + reducedBucketsResult = mergeBucketsIfNeeded(reducedBucketsResult.buckets, reducedBucketsResult.roundingIdx, + reducedBucketsResult.roundingInfo, reduceContext); + + // Now finally see if we need to merge consecutive buckets together to make a coarser interval at the same rounding + reducedBucketsResult = maybeMergeConsecutiveBuckets(reducedBucketsResult, reduceContext); + } + + BucketInfo bucketInfo = new BucketInfo(this.bucketInfo.roundingInfos, reducedBucketsResult.roundingIdx, + this.bucketInfo.emptySubAggregations); + + return new InternalAutoDateHistogram(getName(), reducedBucketsResult.buckets, targetBuckets, bucketInfo, format, + pipelineAggregators(), getMetaData()); + } + + private BucketReduceResult maybeMergeConsecutiveBuckets(BucketReduceResult reducedBucketsResult, ReduceContext reduceContext) { + List&lt;Bucket&gt; buckets = reducedBucketsResult.buckets; + RoundingInfo roundingInfo = reducedBucketsResult.roundingInfo; + int roundingIdx = reducedBucketsResult.roundingIdx; + if (buckets.size() > targetBuckets) { + for (int interval : roundingInfo.innerIntervals) { + int resultingBuckets = buckets.size() / interval; + if (resultingBuckets <= targetBuckets) { + return mergeConsecutiveBuckets(buckets, interval, roundingIdx, roundingInfo, reduceContext); + } + } + } + return reducedBucketsResult; + } + + private BucketReduceResult mergeConsecutiveBuckets(List&lt;Bucket&gt; reducedBuckets, int mergeInterval, int roundingIdx, + RoundingInfo roundingInfo, ReduceContext reduceContext) { + List&lt;Bucket&gt; mergedBuckets = new ArrayList<>(); + List&lt;Bucket&gt; sameKeyedBuckets = new ArrayList<>(); + + double key = roundingInfo.rounding.round(reducedBuckets.get(0).key); + for (int i = 0; i < reducedBuckets.size(); i++) { + Bucket bucket = reducedBuckets.get(i); + if (i % mergeInterval == 0 && sameKeyedBuckets.isEmpty() == false) { + reduceContext.consumeBucketsAndMaybeBreak(1); + mergedBuckets.add(sameKeyedBuckets.get(0).reduce(sameKeyedBuckets, roundingInfo.rounding, reduceContext)); + sameKeyedBuckets.clear(); + key = roundingInfo.rounding.round(bucket.key); + } + reduceContext.consumeBucketsAndMaybeBreak(-countInnerBucket(bucket) - 1); + sameKeyedBuckets.add(createBucket(key, bucket.docCount, bucket.aggregations)); + } + if (sameKeyedBuckets.isEmpty() == false) { + reduceContext.consumeBucketsAndMaybeBreak(1); + mergedBuckets.add(sameKeyedBuckets.get(0).reduce(sameKeyedBuckets, roundingInfo.rounding, reduceContext)); + } + return new BucketReduceResult(mergedBuckets, roundingInfo, roundingIdx); + } + + @Override + public XContentBuilder doXContentBody(XContentBuilder builder, Params params) throws IOException { + builder.startArray(CommonFields.BUCKETS.getPreferredName()); + for (Bucket bucket : buckets) { + bucket.toXContent(builder, params); + } + builder.endArray(); + return builder; + } + + // HistogramFactory method impls + + @Override + public Number getKey(MultiBucketsAggregation.Bucket bucket) { + return ((Bucket) bucket).key; + } + + @Override + public Number nextKey(Number key) { + return bucketInfo.roundingInfos[bucketInfo.roundingIdx].rounding.nextRoundingValue(key.longValue()); + } + + @Override + public InternalAggregation createAggregation(List&lt;MultiBucketsAggregation.Bucket&gt; buckets) { + // convert buckets to the right type + List&lt;Bucket&gt; buckets2 = new ArrayList<>(buckets.size()); + for (Object b : buckets) { + buckets2.add((Bucket) b); + } + buckets2 = Collections.unmodifiableList(buckets2); + return new InternalAutoDateHistogram(name, buckets2, targetBuckets, bucketInfo, format, pipelineAggregators(), getMetaData()); + } + + @Override + public Bucket createBucket(Number key, long docCount, InternalAggregations aggregations) { + return new Bucket(key.longValue(), docCount, format, aggregations); + } + + @Override + protected boolean doEquals(Object obj) { + InternalAutoDateHistogram that = (InternalAutoDateHistogram) obj; + return Objects.equals(buckets, that.buckets) + && Objects.equals(format, that.format) + && Objects.equals(bucketInfo, that.bucketInfo); + } + + @Override + protected int doHashCode() { + return Objects.hash(buckets, format, bucketInfo); + } +}
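An editorial sketch (not patch code): maybeMergeConsecutiveBuckets() above keeps the current rounding but widens the interval, say from 1s to 5s buckets, by summing each run of interval-many adjacent buckets. Assuming a contiguous, gap-free bucket list, the core of that merge is just:

    import java.util.Arrays;

    public class InnerIntervalSketch {
        // sum each run of `interval` adjacent doc counts into one coarser bucket
        static long[] mergeConsecutive(long[] docCounts, int interval) {
            long[] merged = new long[(docCounts.length + interval - 1) / interval];
            for (int i = 0; i < docCounts.length; i++) {
                merged[i / interval] += docCounts[i];
            }
            return merged;
        }

        public static void main(String[] args) {
            long[] perSecond = {1, 1, 1, 1, 1, 1, 1, 1, 1, 1}; // ten 1s buckets
            System.out.println(Arrays.toString(mergeConsecutive(perSecond, 5))); // prints [5, 5]
        }
    }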
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java index 84dec2c983e28..669bda5574d31 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalDateHistogram.java @@ -424,7 +424,7 @@ private void addEmptyBuckets(List&lt;Bucket&gt; list, ReduceContext reduceContext) { iter.add(new InternalDateHistogram.Bucket(key, 0, keyed, format, reducedEmptySubAggs)); key = nextKey(key).longValue(); } - assert key == nextBucket.key; + assert key == nextBucket.key : "key: " + key + ", nextBucket.key: " + nextBucket.key; } lastBucket = iter.next(); }
diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ParsedAutoDateHistogram.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ParsedAutoDateHistogram.java new file mode 100644 index 0000000000000..caca44f9f2ea7 --- /dev/null +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/histogram/ParsedAutoDateHistogram.java @@ -0,0 +1,91 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.aggregations.bucket.histogram; + +import org.elasticsearch.common.xcontent.ObjectParser; +import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation; +import org.joda.time.DateTime; +import org.joda.time.DateTimeZone; + +import java.io.IOException; +import java.util.List; + +public class ParsedAutoDateHistogram extends ParsedMultiBucketAggregation&lt;ParsedAutoDateHistogram.ParsedBucket&gt; implements Histogram { + + @Override + public String getType() { + return AutoDateHistogramAggregationBuilder.NAME; + } + + @Override + public List&lt;? extends Histogram.Bucket&gt; getBuckets() { + return buckets; + } + + private static ObjectParser&lt;ParsedAutoDateHistogram, Void&gt; PARSER = + new ObjectParser<>(ParsedAutoDateHistogram.class.getSimpleName(), true, ParsedAutoDateHistogram::new); + static { + declareMultiBucketAggregationFields(PARSER, + parser -> ParsedBucket.fromXContent(parser, false), + parser -> ParsedBucket.fromXContent(parser, true)); + } + + public static ParsedAutoDateHistogram fromXContent(XContentParser parser, String name) throws IOException { + ParsedAutoDateHistogram aggregation = PARSER.parse(parser, null); + aggregation.setName(name); + return aggregation; + } + + public static class ParsedBucket extends ParsedMultiBucketAggregation.ParsedBucket implements Histogram.Bucket { + + private Long key; + + @Override + public Object getKey() { + if (key != null) { + return new DateTime(key, DateTimeZone.UTC); + } + return null; + } + + @Override + public String getKeyAsString() { + String keyAsString = super.getKeyAsString(); + if (keyAsString != null) { + return keyAsString; + } + if (key != null) { + return Long.toString(key); + } + return null; + } + + @Override + protected XContentBuilder keyToXContent(XContentBuilder builder) throws IOException { + return builder.field(CommonFields.KEY.getPreferredName(), key); + } + + static ParsedBucket fromXContent(XContentParser parser, boolean keyed) throws IOException { + return parseXContent(parser, keyed, ParsedBucket::new, (p, bucket) -> bucket.key = p.longValue()); + } + } +} diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/AggregationsTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/AggregationsTests.java index 79984f5894904..fcafce3936e90 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/AggregationsTests.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/AggregationsTests.java @@ -37,6 +37,7 @@ import org.elasticsearch.search.aggregations.bucket.filter.InternalFiltersTests; import org.elasticsearch.search.aggregations.bucket.geogrid.InternalGeoHashGridTests; import org.elasticsearch.search.aggregations.bucket.global.InternalGlobalTests; +import org.elasticsearch.search.aggregations.bucket.histogram.InternalAutoDateHistogramTests; import org.elasticsearch.search.aggregations.bucket.histogram.InternalDateHistogramTests; import org.elasticsearch.search.aggregations.bucket.histogram.InternalHistogramTests; import org.elasticsearch.search.aggregations.bucket.missing.InternalMissingTests; @@ -125,6 +126,7 @@ private static List&lt;InternalAggregationTestCase&lt;?&gt;&gt; getAggsTests() { aggsTests.add(new InternalGeoCentroidTests()); aggsTests.add(new InternalHistogramTests()); aggsTests.add(new InternalDateHistogramTests()); + aggsTests.add(new InternalAutoDateHistogramTests()); aggsTests.add(new LongTermsTests()); aggsTests.add(new DoubleTermsTests()); aggsTests.add(new StringTermsTests());
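ParsedAutoDateHistogram.ParsedBucket above surfaces the epoch-millis key either as a UTC date object or, when the response carried no key_as_string, as the raw long rendered as text. An editorial sketch of that accessor logic, using java.time as a stand-in for the Joda-Time DateTime the patch uses:

    import java.time.Instant;
    import java.time.ZoneOffset;

    public class ParsedBucketSketch {
        private final Long key;           // epoch millis; may be absent
        private final String keyAsString; // pre-formatted by the server; may be absent

        ParsedBucketSketch(Long key, String keyAsString) {
            this.key = key;
            this.keyAsString = keyAsString;
        }

        Object getKey() {
            // a date in UTC when the key is present, mirroring the Joda DateTime above
            return key == null ? null : Instant.ofEpochMilli(key).atZone(ZoneOffset.UTC);
        }

        String getKeyAsString() {
            if (keyAsString != null) {
                return keyAsString; // prefer the server-side formatted key
            }
            return key == null ? null : Long.toString(key);
        }

        public static void main(String[] args) {
            ParsedBucketSketch bucket = new ParsedBucketSketch(0L, null);
            System.out.println(bucket.getKeyAsString()); // prints 0
            System.out.println(bucket.getKey());         // prints 1970-01-01T00:00Z
        }
    }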
diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/AutoDateHistogramTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/AutoDateHistogramTests.java new file mode 100644 index 0000000000000..3a10edf183376 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/AutoDateHistogramTests.java @@ -0,0 +1,44 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.elasticsearch.search.aggregations.bucket; + +import org.elasticsearch.search.aggregations.BaseAggregationTestCase; +import org.elasticsearch.search.aggregations.bucket.histogram.AutoDateHistogramAggregationBuilder; + +public class AutoDateHistogramTests extends BaseAggregationTestCase&lt;AutoDateHistogramAggregationBuilder&gt; { + + @Override + protected AutoDateHistogramAggregationBuilder createTestAggregatorBuilder() { + AutoDateHistogramAggregationBuilder builder = new AutoDateHistogramAggregationBuilder(randomAlphaOfLengthBetween(1, 10)); + builder.field(INT_FIELD_NAME); + builder.setNumBuckets(randomIntBetween(1, 100000)); + if (randomBoolean()) { + builder.format("###.##"); + } + if (randomBoolean()) { + builder.missing(randomIntBetween(0, 10)); + } + if (randomBoolean()) { + builder.timeZone(randomDateTimeZone()); + } + return builder; + } + +} diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregatorTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregatorTests.java new file mode 100644 index 0000000000000..7cf29e3aa9cc5 --- /dev/null +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/AutoDateHistogramAggregatorTests.java @@ -0,0 +1,1332 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License.
+ */ + +package org.elasticsearch.search.aggregations.bucket.histogram; + +import org.apache.lucene.document.Document; +import org.apache.lucene.document.LongPoint; +import org.apache.lucene.document.SortedNumericDocValuesField; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.RandomIndexWriter; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.MatchAllDocsQuery; +import org.apache.lucene.search.MatchNoDocsQuery; +import org.apache.lucene.search.Query; +import org.apache.lucene.store.Directory; +import org.elasticsearch.Version; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.index.IndexSettings; +import org.elasticsearch.index.mapper.DateFieldMapper; +import org.elasticsearch.search.aggregations.AggregationBuilders; +import org.elasticsearch.search.aggregations.AggregatorTestCase; +import org.elasticsearch.search.aggregations.MultiBucketConsumerService; +import org.elasticsearch.search.aggregations.metrics.stats.Stats; +import org.hamcrest.Matchers; +import org.joda.time.DateTime; +import org.joda.time.DateTimeZone; +import org.joda.time.chrono.ISOChronology; +import org.joda.time.format.DateTimeFormat; +import org.joda.time.format.DateTimeFormatter; + +import java.io.IOException; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; +import java.util.function.Consumer; + +import static org.hamcrest.Matchers.containsString; + +public class AutoDateHistogramAggregatorTests extends AggregatorTestCase { + + private static final String DATE_FIELD = "date"; + private static final String INSTANT_FIELD = "instant"; + + private static final List dataset = Arrays.asList( + "2010-03-12T01:07:45", + "2010-04-27T03:43:34", + "2012-05-18T04:11:00", + "2013-05-29T05:11:31", + "2013-10-31T08:24:05", + "2015-02-13T13:09:32", + "2015-06-24T13:47:43", + "2015-11-13T16:14:34", + "2016-03-04T17:09:50", + "2017-12-12T22:55:46"); + + public void testMatchNoDocs() throws IOException { + testBothCases(new MatchNoDocsQuery(), dataset, + aggregation -> aggregation.setNumBuckets(10).field(DATE_FIELD), + histogram -> assertEquals(0, histogram.getBuckets().size()) + ); + } + + public void testMatchAllDocs() throws IOException { + Query query = new MatchAllDocsQuery(); + + testSearchCase(query, dataset, + aggregation -> aggregation.setNumBuckets(6).field(DATE_FIELD), + histogram -> assertEquals(10, histogram.getBuckets().size()) + ); + testSearchAndReduceCase(query, dataset, + aggregation -> aggregation.setNumBuckets(8).field(DATE_FIELD), + histogram -> assertEquals(8, histogram.getBuckets().size()) + ); + } + + public void testSubAggregations() throws IOException { + Query query = new MatchAllDocsQuery(); + testSearchAndReduceCase(query, dataset, + aggregation -> aggregation.setNumBuckets(8).field(DATE_FIELD) + .subAggregation(AggregationBuilders.stats("stats").field(DATE_FIELD)), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(8, buckets.size()); + + Histogram.Bucket bucket = buckets.get(0); + assertEquals("2010-01-01T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(2, bucket.getDocCount()); + Stats stats = bucket.getAggregations().get("stats"); + assertEquals("2010-03-12T01:07:45.000Z", stats.getMinAsString()); + assertEquals("2010-04-27T03:43:34.000Z", stats.getMaxAsString()); + assertEquals(2L, stats.getCount()); + + bucket = 
buckets.get(1); + assertEquals("2011-01-01T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(0, bucket.getDocCount()); + stats = bucket.getAggregations().get("stats"); + assertTrue(Double.isInfinite(stats.getMin())); + assertTrue(Double.isInfinite(stats.getMax())); + assertEquals(0L, stats.getCount()); + + bucket = buckets.get(2); + assertEquals("2012-01-01T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + stats = bucket.getAggregations().get("stats"); + assertEquals("2012-05-18T04:11:00.000Z", stats.getMinAsString()); + assertEquals("2012-05-18T04:11:00.000Z", stats.getMaxAsString()); + assertEquals(1L, stats.getCount()); + + bucket = buckets.get(3); + assertEquals("2013-01-01T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(2, bucket.getDocCount()); + stats = bucket.getAggregations().get("stats"); + assertEquals("2013-05-29T05:11:31.000Z", stats.getMinAsString()); + assertEquals("2013-10-31T08:24:05.000Z", stats.getMaxAsString()); + assertEquals(2L, stats.getCount()); + + bucket = buckets.get(4); + assertEquals("2014-01-01T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(0, bucket.getDocCount()); + stats = bucket.getAggregations().get("stats"); + assertTrue(Double.isInfinite(stats.getMin())); + assertTrue(Double.isInfinite(stats.getMax())); + assertEquals(0L, stats.getCount()); + + bucket = buckets.get(5); + assertEquals("2015-01-01T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(3, bucket.getDocCount()); + stats = bucket.getAggregations().get("stats"); + assertEquals("2015-02-13T13:09:32.000Z", stats.getMinAsString()); + assertEquals("2015-11-13T16:14:34.000Z", stats.getMaxAsString()); + assertEquals(3L, stats.getCount()); + + bucket = buckets.get(6); + assertEquals("2016-01-01T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + stats = bucket.getAggregations().get("stats"); + assertEquals("2016-03-04T17:09:50.000Z", stats.getMinAsString()); + assertEquals("2016-03-04T17:09:50.000Z", stats.getMaxAsString()); + assertEquals(1L, stats.getCount()); + + bucket = buckets.get(7); + assertEquals("2017-01-01T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + stats = bucket.getAggregations().get("stats"); + assertEquals("2017-12-12T22:55:46.000Z", stats.getMinAsString()); + assertEquals("2017-12-12T22:55:46.000Z", stats.getMaxAsString()); + assertEquals(1L, stats.getCount()); + }); + } + + public void testNoDocs() throws IOException { + Query query = new MatchNoDocsQuery(); + List dates = Collections.emptyList(); + Consumer aggregation = agg -> agg.setNumBuckets(10).field(DATE_FIELD); + + testSearchCase(query, dates, aggregation, + histogram -> assertEquals(0, histogram.getBuckets().size()) + ); + testSearchAndReduceCase(query, dates, aggregation, + histogram -> assertNull(histogram) + ); + } + + public void testAggregateWrongField() throws IOException { + testBothCases(new MatchAllDocsQuery(), dataset, + aggregation -> aggregation.setNumBuckets(10).field("wrong_field"), + histogram -> assertEquals(0, histogram.getBuckets().size()) + ); + } + + public void testIntervalYear() throws IOException { + testSearchCase(LongPoint.newRangeQuery(INSTANT_FIELD, asLong("2015-01-01"), asLong("2017-12-31")), dataset, + aggregation -> aggregation.setNumBuckets(4).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(5, buckets.size()); + + Histogram.Bucket bucket = buckets.get(0); + assertEquals("2015-02-13T13:09:32.000Z", 
bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(1); + assertEquals("2015-06-24T13:47:43.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(2); + assertEquals("2015-11-13T16:14:34.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(3); + assertEquals("2016-03-04T17:09:50.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(4); + assertEquals("2017-12-12T22:55:46.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + } + ); + testSearchAndReduceCase(LongPoint.newRangeQuery(INSTANT_FIELD, asLong("2015-01-01"), asLong("2017-12-31")), dataset, + aggregation -> aggregation.setNumBuckets(4).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(3, buckets.size()); + + Histogram.Bucket bucket = buckets.get(0); + assertEquals("2015-01-01T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(3, bucket.getDocCount()); + + bucket = buckets.get(1); + assertEquals("2016-01-01T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(2); + assertEquals("2017-01-01T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + } + ); + } + + public void testIntervalMonth() throws IOException { + testSearchCase(new MatchAllDocsQuery(), + Arrays.asList("2017-01-01", "2017-02-02", "2017-02-03", "2017-03-04", "2017-03-05", "2017-03-06"), + aggregation -> aggregation.setNumBuckets(4).field(DATE_FIELD), histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(6, buckets.size()); + + Histogram.Bucket bucket = buckets.get(0); + assertEquals("2017-01-01T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(1); + assertEquals("2017-02-02T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(2); + assertEquals("2017-02-03T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(3); + assertEquals("2017-03-04T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(4); + assertEquals("2017-03-05T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(5); + assertEquals("2017-03-06T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + }); + testSearchAndReduceCase(new MatchAllDocsQuery(), + Arrays.asList("2017-01-01", "2017-02-02", "2017-02-03", "2017-03-04", "2017-03-05", "2017-03-06"), + aggregation -> aggregation.setNumBuckets(4).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(3, buckets.size()); + + Histogram.Bucket bucket = buckets.get(0); + assertEquals("2017-01-01T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(1); + assertEquals("2017-02-01T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(2, bucket.getDocCount()); + + bucket = buckets.get(2); + assertEquals("2017-03-01T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(3, bucket.getDocCount()); + } + ); + } + + public void testWithLargeNumberOfBuckets() { + Query query = new MatchAllDocsQuery(); + IllegalArgumentException exception = expectThrows(IllegalArgumentException.class, + () -> testSearchCase(query, dataset, + aggregation 
-> aggregation.setNumBuckets(MultiBucketConsumerService.DEFAULT_MAX_BUCKETS+1).field(DATE_FIELD), + // since an exception is thrown, this assertion won't be invoked. + histogram -> assertTrue(false) + )); + assertThat(exception.getMessage(), containsString("must be less than")); + } + + public void testIntervalDay() throws IOException { + testSearchCase(new MatchAllDocsQuery(), + Arrays.asList("2017-02-01", "2017-02-02", "2017-02-02", "2017-02-03", "2017-02-03", "2017-02-03", "2017-02-05"), + aggregation -> aggregation.setNumBuckets(5).field(DATE_FIELD), histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(4, buckets.size()); + + Histogram.Bucket bucket = buckets.get(0); + assertEquals("2017-02-01T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(1); + assertEquals("2017-02-02T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(2, bucket.getDocCount()); + + bucket = buckets.get(2); + assertEquals("2017-02-03T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(3, bucket.getDocCount()); + + bucket = buckets.get(3); + assertEquals("2017-02-05T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + }); + testSearchAndReduceCase(new MatchAllDocsQuery(), + Arrays.asList( + "2017-02-01", + "2017-02-02", + "2017-02-02", + "2017-02-03", + "2017-02-03", + "2017-02-03", + "2017-02-05" + ), + aggregation -> aggregation.setNumBuckets(5).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(5, buckets.size()); + + Histogram.Bucket bucket = buckets.get(0); + assertEquals("2017-02-01T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(1); + assertEquals("2017-02-02T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(2, bucket.getDocCount()); + + bucket = buckets.get(2); + assertEquals("2017-02-03T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(3, bucket.getDocCount()); + + bucket = buckets.get(3); + assertEquals("2017-02-04T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(0, bucket.getDocCount()); + + bucket = buckets.get(4); + assertEquals("2017-02-05T00:00:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + } + ); + } + + public void testIntervalDayWithTZ() throws IOException { + testSearchCase(new MatchAllDocsQuery(), + Arrays.asList("2017-02-01", "2017-02-02", "2017-02-02", "2017-02-03", "2017-02-03", "2017-02-03", "2017-02-05"), + aggregation -> aggregation.setNumBuckets(5).field(DATE_FIELD).timeZone(DateTimeZone.forOffsetHours(-1)), histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(4, buckets.size()); + + Histogram.Bucket bucket = buckets.get(0); + assertEquals("2017-01-31T23:00:00.000-01:00", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(1); + assertEquals("2017-02-01T23:00:00.000-01:00", bucket.getKeyAsString()); + assertEquals(2, bucket.getDocCount()); + + bucket = buckets.get(2); + assertEquals("2017-02-02T23:00:00.000-01:00", bucket.getKeyAsString()); + assertEquals(3, bucket.getDocCount()); + + bucket = buckets.get(3); + assertEquals("2017-02-04T23:00:00.000-01:00", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + }); + testSearchAndReduceCase(new MatchAllDocsQuery(), + Arrays.asList("2017-02-01", "2017-02-02", "2017-02-02", "2017-02-03", "2017-02-03", "2017-02-03", "2017-02-05"), + aggregation -> 
aggregation.setNumBuckets(5).field(DATE_FIELD).timeZone(DateTimeZone.forOffsetHours(-1)), histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(5, buckets.size()); + + Histogram.Bucket bucket = buckets.get(0); + assertEquals("2017-01-31T00:00:00.000-01:00", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(1); + assertEquals("2017-02-01T00:00:00.000-01:00", bucket.getKeyAsString()); + assertEquals(2, bucket.getDocCount()); + + bucket = buckets.get(2); + assertEquals("2017-02-02T00:00:00.000-01:00", bucket.getKeyAsString()); + assertEquals(3, bucket.getDocCount()); + + bucket = buckets.get(3); + assertEquals("2017-02-03T00:00:00.000-01:00", bucket.getKeyAsString()); + assertEquals(0, bucket.getDocCount()); + + bucket = buckets.get(4); + assertEquals("2017-02-04T00:00:00.000-01:00", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + }); + } + + public void testIntervalHour() throws IOException { + testSearchCase(new MatchAllDocsQuery(), + Arrays.asList( + "2017-02-01T09:02:00.000Z", + "2017-02-01T09:35:00.000Z", + "2017-02-01T10:15:00.000Z", + "2017-02-01T13:06:00.000Z", + "2017-02-01T14:04:00.000Z", + "2017-02-01T14:05:00.000Z", + "2017-02-01T15:59:00.000Z", + "2017-02-01T16:06:00.000Z", + "2017-02-01T16:48:00.000Z", + "2017-02-01T16:59:00.000Z" + ), + aggregation -> aggregation.setNumBuckets(8).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(10, buckets.size()); + + Histogram.Bucket bucket = buckets.get(0); + assertEquals("2017-02-01T09:02:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(1); + assertEquals("2017-02-01T09:35:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(2); + assertEquals("2017-02-01T10:15:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(3); + assertEquals("2017-02-01T13:06:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(4); + assertEquals("2017-02-01T14:04:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(5); + assertEquals("2017-02-01T14:05:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(6); + assertEquals("2017-02-01T15:59:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(7); + assertEquals("2017-02-01T16:06:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(8); + assertEquals("2017-02-01T16:48:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(9); + assertEquals("2017-02-01T16:59:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + } + ); + testSearchAndReduceCase(new MatchAllDocsQuery(), + Arrays.asList( + "2017-02-01T09:02:00.000Z", + "2017-02-01T09:35:00.000Z", + "2017-02-01T10:15:00.000Z", + "2017-02-01T13:06:00.000Z", + "2017-02-01T14:04:00.000Z", + "2017-02-01T14:05:00.000Z", + "2017-02-01T15:59:00.000Z", + "2017-02-01T16:06:00.000Z", + "2017-02-01T16:48:00.000Z", + "2017-02-01T16:59:00.000Z" + ), + aggregation -> aggregation.setNumBuckets(10).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(8, buckets.size()); + + Histogram.Bucket bucket = buckets.get(0); + assertEquals("2017-02-01T09:00:00.000Z", 
bucket.getKeyAsString()); + assertEquals(2, bucket.getDocCount()); + + bucket = buckets.get(1); + assertEquals("2017-02-01T10:00:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(2); + assertEquals("2017-02-01T11:00:00.000Z", bucket.getKeyAsString()); + assertEquals(0, bucket.getDocCount()); + + bucket = buckets.get(3); + assertEquals("2017-02-01T12:00:00.000Z", bucket.getKeyAsString()); + assertEquals(0, bucket.getDocCount()); + + bucket = buckets.get(4); + assertEquals("2017-02-01T13:00:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(5); + assertEquals("2017-02-01T14:00:00.000Z", bucket.getKeyAsString()); + assertEquals(2, bucket.getDocCount()); + + bucket = buckets.get(6); + assertEquals("2017-02-01T15:00:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(7); + assertEquals("2017-02-01T16:00:00.000Z", bucket.getKeyAsString()); + assertEquals(3, bucket.getDocCount()); + } + ); + } + + public void testIntervalHourWithTZ() throws IOException { + testSearchCase(new MatchAllDocsQuery(), + Arrays.asList( + "2017-02-01T09:02:00.000Z", + "2017-02-01T09:35:00.000Z", + "2017-02-01T10:15:00.000Z", + "2017-02-01T13:06:00.000Z", + "2017-02-01T14:04:00.000Z", + "2017-02-01T14:05:00.000Z", + "2017-02-01T15:59:00.000Z", + "2017-02-01T16:06:00.000Z", + "2017-02-01T16:48:00.000Z", + "2017-02-01T16:59:00.000Z" + ), + aggregation -> aggregation.setNumBuckets(8).field(DATE_FIELD).timeZone(DateTimeZone.forOffsetHours(-1)), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(10, buckets.size()); + + Histogram.Bucket bucket = buckets.get(0); + assertEquals("2017-02-01T08:02:00.000-01:00", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(1); + assertEquals("2017-02-01T08:35:00.000-01:00", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(2); + assertEquals("2017-02-01T09:15:00.000-01:00", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(3); + assertEquals("2017-02-01T12:06:00.000-01:00", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(4); + assertEquals("2017-02-01T13:04:00.000-01:00", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(5); + assertEquals("2017-02-01T13:05:00.000-01:00", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(6); + assertEquals("2017-02-01T14:59:00.000-01:00", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(7); + assertEquals("2017-02-01T15:06:00.000-01:00", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(8); + assertEquals("2017-02-01T15:48:00.000-01:00", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(9); + assertEquals("2017-02-01T15:59:00.000-01:00", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + } + ); + testSearchAndReduceCase(new MatchAllDocsQuery(), + Arrays.asList( + "2017-02-01T09:02:00.000Z", + "2017-02-01T09:35:00.000Z", + "2017-02-01T10:15:00.000Z", + "2017-02-01T13:06:00.000Z", + "2017-02-01T14:04:00.000Z", + "2017-02-01T14:05:00.000Z", + "2017-02-01T15:59:00.000Z", + "2017-02-01T16:06:00.000Z", + "2017-02-01T16:48:00.000Z", + "2017-02-01T16:59:00.000Z" + ), + aggregation -> 
aggregation.setNumBuckets(10).field(DATE_FIELD).timeZone(DateTimeZone.forOffsetHours(-1)), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(8, buckets.size()); + + Histogram.Bucket bucket = buckets.get(0); + assertEquals("2017-02-01T08:00:00.000-01:00", bucket.getKeyAsString()); + assertEquals(2, bucket.getDocCount()); + + bucket = buckets.get(1); + assertEquals("2017-02-01T09:00:00.000-01:00", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(2); + assertEquals("2017-02-01T10:00:00.000-01:00", bucket.getKeyAsString()); + assertEquals(0, bucket.getDocCount()); + + bucket = buckets.get(3); + assertEquals("2017-02-01T11:00:00.000-01:00", bucket.getKeyAsString()); + assertEquals(0, bucket.getDocCount()); + + bucket = buckets.get(4); + assertEquals("2017-02-01T12:00:00.000-01:00", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(5); + assertEquals("2017-02-01T13:00:00.000-01:00", bucket.getKeyAsString()); + assertEquals(2, bucket.getDocCount()); + + bucket = buckets.get(6); + assertEquals("2017-02-01T14:00:00.000-01:00", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(7); + assertEquals("2017-02-01T15:00:00.000-01:00", bucket.getKeyAsString()); + assertEquals(3, bucket.getDocCount()); + } + ); + } + + public void testAllSecondIntervals() throws IOException { + DateTimeFormatter format = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss.SSSZ"); + List dataset = new ArrayList<>(); + DateTime startDate = new DateTime(2017, 01, 01, 00, 00, 00, ISOChronology.getInstanceUTC()); + for (int i = 0; i < 600; i++) { + DateTime date = startDate.plusSeconds(i); + dataset.add(format.print(date)); + } + + testSearchAndReduceCase(new MatchAllDocsQuery(), dataset, + aggregation -> aggregation.setNumBuckets(600).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(600, buckets.size()); + for (int i = 0; i < 600; i++) { + Histogram.Bucket bucket = buckets.get(i); + assertEquals(startDate.plusSeconds(i), bucket.getKey()); + assertEquals(1, bucket.getDocCount()); + } + }); + + testSearchAndReduceCase(new MatchAllDocsQuery(), dataset, + aggregation -> aggregation.setNumBuckets(300).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(120, buckets.size()); + for (int i = 0; i < 120; i++) { + Histogram.Bucket bucket = buckets.get(i); + assertEquals(startDate.plusSeconds(i * 5), bucket.getKey()); + assertEquals(5, bucket.getDocCount()); + } + }); + testSearchAndReduceCase(new MatchAllDocsQuery(), dataset, + aggregation -> aggregation.setNumBuckets(100).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(60, buckets.size()); + for (int i = 0; i < 60; i++) { + Histogram.Bucket bucket = buckets.get(i); + assertEquals(startDate.plusSeconds(i * 10), bucket.getKey()); + assertEquals(10, bucket.getDocCount()); + } + }); + testSearchAndReduceCase(new MatchAllDocsQuery(), dataset, + aggregation -> aggregation.setNumBuckets(50).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(20, buckets.size()); + for (int i = 0; i < 20; i++) { + Histogram.Bucket bucket = buckets.get(i); + assertEquals(startDate.plusSeconds(i * 30), bucket.getKey()); + assertEquals(30, bucket.getDocCount()); + } + }); + testSearchAndReduceCase(new MatchAllDocsQuery(), dataset, + aggregation -> aggregation.setNumBuckets(15).field(DATE_FIELD), + histogram 
-> { + List buckets = histogram.getBuckets(); + assertEquals(10, buckets.size()); + for (int i = 0; i < 10; i++) { + Histogram.Bucket bucket = buckets.get(i); + assertEquals(startDate.plusMinutes(i), bucket.getKey()); + assertEquals(60, bucket.getDocCount()); + } + }); + } + + public void testAllMinuteIntervals() throws IOException { + DateTimeFormatter format = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss.SSSZ"); + List dataset = new ArrayList<>(); + DateTime startDate = new DateTime(2017, 01, 01, 00, 00, 00, ISOChronology.getInstanceUTC()); + for (int i = 0; i < 600; i++) { + DateTime date = startDate.plusMinutes(i); + dataset.add(format.print(date)); + } + testSearchAndReduceCase(new MatchAllDocsQuery(), dataset, + aggregation -> aggregation.setNumBuckets(600).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(600, buckets.size()); + for (int i = 0; i < 600; i++) { + Histogram.Bucket bucket = buckets.get(i); + assertEquals(startDate.plusMinutes(i), bucket.getKey()); + assertEquals(1, bucket.getDocCount()); + } + }); + testSearchAndReduceCase(new MatchAllDocsQuery(), dataset, + aggregation -> aggregation.setNumBuckets(300).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(120, buckets.size()); + for (int i = 0; i < 120; i++) { + Histogram.Bucket bucket = buckets.get(i); + assertEquals(startDate.plusMinutes(i * 5), bucket.getKey()); + assertEquals(5, bucket.getDocCount()); + } + }); + testSearchAndReduceCase(new MatchAllDocsQuery(), dataset, + aggregation -> aggregation.setNumBuckets(100).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(60, buckets.size()); + for (int i = 0; i < 60; i++) { + Histogram.Bucket bucket = buckets.get(i); + assertEquals(startDate.plusMinutes(i * 10), bucket.getKey()); + assertEquals(10, bucket.getDocCount()); + } + }); + testSearchAndReduceCase(new MatchAllDocsQuery(), dataset, + aggregation -> aggregation.setNumBuckets(50).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(20, buckets.size()); + for (int i = 0; i < 20; i++) { + Histogram.Bucket bucket = buckets.get(i); + assertEquals(startDate.plusMinutes(i * 30), bucket.getKey()); + assertEquals(30, bucket.getDocCount()); + } + }); + testSearchAndReduceCase(new MatchAllDocsQuery(), dataset, + aggregation -> aggregation.setNumBuckets(15).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(10, buckets.size()); + for (int i = 0; i < 10; i++) { + Histogram.Bucket bucket = buckets.get(i); + assertEquals(startDate.plusHours(i), bucket.getKey()); + assertEquals(60, bucket.getDocCount()); + } + }); + } + + public void testAllHourIntervals() throws IOException { + DateTimeFormatter format = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss.SSSZ"); + List dataset = new ArrayList<>(); + DateTime startDate = new DateTime(2017, 01, 01, 00, 00, 00, ISOChronology.getInstanceUTC()); + for (int i = 0; i < 600; i++) { + DateTime date = startDate.plusHours(i); + dataset.add(format.print(date)); + } + testSearchAndReduceCase(new MatchAllDocsQuery(), dataset, + aggregation -> aggregation.setNumBuckets(600).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(600, buckets.size()); + for (int i = 0; i < 600; i++) { + Histogram.Bucket bucket = buckets.get(i); + assertEquals(startDate.plusHours(i), bucket.getKey()); + assertEquals(1, bucket.getDocCount()); + } + }); + 
testSearchAndReduceCase(new MatchAllDocsQuery(), dataset, + aggregation -> aggregation.setNumBuckets(300).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(200, buckets.size()); + for (int i = 0; i < 200; i++) { + Histogram.Bucket bucket = buckets.get(i); + assertEquals(startDate.plusHours(i * 3), bucket.getKey()); + assertEquals(3, bucket.getDocCount()); + } + }); + testSearchAndReduceCase(new MatchAllDocsQuery(), dataset, + aggregation -> aggregation.setNumBuckets(100).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(50, buckets.size()); + for (int i = 0; i < 50; i++) { + Histogram.Bucket bucket = buckets.get(i); + assertEquals(startDate.plusHours(i * 12), bucket.getKey()); + assertEquals(12, bucket.getDocCount()); + } + }); + testSearchAndReduceCase(new MatchAllDocsQuery(), dataset, + aggregation -> aggregation.setNumBuckets(30).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(25, buckets.size()); + for (int i = 0; i < 25; i++) { + Histogram.Bucket bucket = buckets.get(i); + assertEquals(startDate.plusDays(i), bucket.getKey()); + assertEquals(24, bucket.getDocCount()); + } + }); + } + + public void testAllDayIntervals() throws IOException { + DateTimeFormatter format = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss.SSSZ"); + List dataset = new ArrayList<>(); + DateTime startDate = new DateTime(2017, 01, 01, 00, 00, 00, ISOChronology.getInstanceUTC()); + for (int i = 0; i < 700; i++) { + DateTime date = startDate.plusDays(i); + dataset.add(format.print(date)); + } + testSearchAndReduceCase(new MatchAllDocsQuery(), dataset, + aggregation -> aggregation.setNumBuckets(700).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(700, buckets.size()); + for (int i = 0; i < 700; i++) { + Histogram.Bucket bucket = buckets.get(i); + assertEquals(startDate.plusDays(i), bucket.getKey()); + assertEquals(1, bucket.getDocCount()); + } + }); + testSearchAndReduceCase(new MatchAllDocsQuery(), dataset, + aggregation -> aggregation.setNumBuckets(300).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(100, buckets.size()); + for (int i = 0; i < 100; i++) { + Histogram.Bucket bucket = buckets.get(i); + assertEquals(startDate.plusDays(i * 7), bucket.getKey()); + assertEquals(7, bucket.getDocCount()); + } + }); + testSearchAndReduceCase(new MatchAllDocsQuery(), dataset, + aggregation -> aggregation.setNumBuckets(30).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(24, buckets.size()); + for (int i = 0; i < 24; i++) { + Histogram.Bucket bucket = buckets.get(i); + assertEquals(startDate.plusMonths(i), bucket.getKey()); + assertThat(bucket.getDocCount(), Matchers.lessThanOrEqualTo(31L)); + } + }); + } + + public void testAllMonthIntervals() throws IOException { + DateTimeFormatter format = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss.SSSZ"); + List dataset = new ArrayList<>(); + DateTime startDate = new DateTime(2017, 01, 01, 00, 00, 00, ISOChronology.getInstanceUTC()); + for (int i = 0; i < 600; i++) { + DateTime date = startDate.plusMonths(i); + dataset.add(format.print(date)); + } + testSearchAndReduceCase(new MatchAllDocsQuery(), dataset, + aggregation -> aggregation.setNumBuckets(600).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(600, buckets.size()); + for (int i = 0; i < 600; i++) { + Histogram.Bucket bucket = 
buckets.get(i); + assertEquals(startDate.plusMonths(i), bucket.getKey()); + assertEquals(1, bucket.getDocCount()); + } + }); + testSearchAndReduceCase(new MatchAllDocsQuery(), dataset, + aggregation -> aggregation.setNumBuckets(300).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(200, buckets.size()); + for (int i = 0; i < 200; i++) { + Histogram.Bucket bucket = buckets.get(i); + assertEquals(startDate.plusMonths(i * 3), bucket.getKey()); + assertEquals(3, bucket.getDocCount()); + } + }); + testSearchAndReduceCase(new MatchAllDocsQuery(), dataset, + aggregation -> aggregation.setNumBuckets(60).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(50, buckets.size()); + for (int i = 0; i < 50; i++) { + Histogram.Bucket bucket = buckets.get(i); + assertEquals(startDate.plusYears(i), bucket.getKey()); + assertEquals(12, bucket.getDocCount()); + } + }); + } + + public void testAllYearIntervals() throws IOException { + DateTimeFormatter format = DateTimeFormat.forPattern("yyyy-MM-dd'T'HH:mm:ss.SSSZ"); + List dataset = new ArrayList<>(); + DateTime startDate = new DateTime(2017, 01, 01, 00, 00, 00, ISOChronology.getInstanceUTC()); + for (int i = 0; i < 600; i++) { + DateTime date = startDate.plusYears(i); + dataset.add(format.print(date)); + } + testSearchAndReduceCase(new MatchAllDocsQuery(), dataset, aggregation -> aggregation.setNumBuckets(600).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(600, buckets.size()); + for (int i = 0; i < 600; i++) { + Histogram.Bucket bucket = buckets.get(i); + assertEquals(startDate.plusYears(i), bucket.getKey()); + assertEquals(1, bucket.getDocCount()); + } + }); + testSearchAndReduceCase(new MatchAllDocsQuery(), dataset, aggregation -> aggregation.setNumBuckets(300).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(120, buckets.size()); + for (int i = 0; i < 120; i++) { + Histogram.Bucket bucket = buckets.get(i); + assertEquals(startDate.plusYears(i * 5), bucket.getKey()); + assertEquals(5, bucket.getDocCount()); + } + }); + testSearchAndReduceCase(new MatchAllDocsQuery(), dataset, aggregation -> aggregation.setNumBuckets(100).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(60, buckets.size()); + for (int i = 0; i < 60; i++) { + Histogram.Bucket bucket = buckets.get(i); + assertEquals(startDate.plusYears(i * 10), bucket.getKey()); + assertEquals(10, bucket.getDocCount()); + } + }); + testSearchAndReduceCase(new MatchAllDocsQuery(), dataset, aggregation -> aggregation.setNumBuckets(50).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(30, buckets.size()); + for (int i = 0; i < 30; i++) { + Histogram.Bucket bucket = buckets.get(i); + assertEquals(startDate.plusYears(i * 20), bucket.getKey()); + assertEquals(20, bucket.getDocCount()); + } + }); + testSearchAndReduceCase(new MatchAllDocsQuery(), dataset, aggregation -> aggregation.setNumBuckets(20).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(12, buckets.size()); + for (int i = 0; i < 12; i++) { + Histogram.Bucket bucket = buckets.get(i); + assertEquals(startDate.plusYears(i * 50), bucket.getKey()); + assertEquals(50, bucket.getDocCount()); + } + }); + testSearchAndReduceCase(new MatchAllDocsQuery(), dataset, aggregation -> aggregation.setNumBuckets(10).field(DATE_FIELD), + histogram -> { + List buckets = 
histogram.getBuckets(); + assertEquals(6, buckets.size()); + for (int i = 0; i < 6; i++) { + Histogram.Bucket bucket = buckets.get(i); + assertEquals(startDate.plusYears(i * 100), bucket.getKey()); + assertEquals(100, bucket.getDocCount()); + } + }); + } + + public void testInterval3Hour() throws IOException { + testSearchCase(new MatchAllDocsQuery(), + Arrays.asList( + "2017-02-01T09:02:00.000Z", + "2017-02-01T09:35:00.000Z", + "2017-02-01T10:15:00.000Z", + "2017-02-01T13:06:00.000Z", + "2017-02-01T14:04:00.000Z", + "2017-02-01T14:05:00.000Z", + "2017-02-01T15:59:00.000Z", + "2017-02-01T16:06:00.000Z", + "2017-02-01T16:48:00.000Z", + "2017-02-01T16:59:00.000Z" + ), + aggregation -> aggregation.setNumBuckets(8).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(10, buckets.size()); + + Histogram.Bucket bucket = buckets.get(0); + assertEquals("2017-02-01T09:02:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(1); + assertEquals("2017-02-01T09:35:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(2); + assertEquals("2017-02-01T10:15:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(3); + assertEquals("2017-02-01T13:06:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(4); + assertEquals("2017-02-01T14:04:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(5); + assertEquals("2017-02-01T14:05:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(6); + assertEquals("2017-02-01T15:59:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(7); + assertEquals("2017-02-01T16:06:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(8); + assertEquals("2017-02-01T16:48:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(9); + assertEquals("2017-02-01T16:59:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + } + ); + testSearchAndReduceCase(new MatchAllDocsQuery(), + Arrays.asList( + "2017-02-01T09:02:00.000Z", + "2017-02-01T09:35:00.000Z", + "2017-02-01T10:15:00.000Z", + "2017-02-01T13:06:00.000Z", + "2017-02-01T14:04:00.000Z", + "2017-02-01T14:05:00.000Z", + "2017-02-01T15:59:00.000Z", + "2017-02-01T16:06:00.000Z", + "2017-02-01T16:48:00.000Z", + "2017-02-01T16:59:00.000Z" + ), + aggregation -> aggregation.setNumBuckets(6).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(3, buckets.size()); + + Histogram.Bucket bucket = buckets.get(0); + assertEquals("2017-02-01T09:00:00.000Z", bucket.getKeyAsString()); + assertEquals(3, bucket.getDocCount()); + + bucket = buckets.get(1); + assertEquals("2017-02-01T12:00:00.000Z", bucket.getKeyAsString()); + assertEquals(3, bucket.getDocCount()); + + bucket = buckets.get(2); + assertEquals("2017-02-01T15:00:00.000Z", bucket.getKeyAsString()); + assertEquals(4, bucket.getDocCount()); + } + ); + } + + public void testIntervalMinute() throws IOException { + testSearchCase(new MatchAllDocsQuery(), + Arrays.asList( + "2017-02-01T09:02:35.000Z", + "2017-02-01T09:02:59.000Z", + "2017-02-01T09:15:37.000Z", + "2017-02-01T09:16:04.000Z", + "2017-02-01T09:16:42.000Z" + ), + aggregation -> 
aggregation.setNumBuckets(4).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(5, buckets.size()); + + Histogram.Bucket bucket = buckets.get(0); + assertEquals("2017-02-01T09:02:35.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(1); + assertEquals("2017-02-01T09:02:59.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(2); + assertEquals("2017-02-01T09:15:37.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(3); + assertEquals("2017-02-01T09:16:04.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(4); + assertEquals("2017-02-01T09:16:42.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + } + ); + testSearchAndReduceCase(new MatchAllDocsQuery(), + Arrays.asList( + "2017-02-01T09:02:35.000Z", + "2017-02-01T09:02:59.000Z", + "2017-02-01T09:15:37.000Z", + "2017-02-01T09:16:04.000Z", + "2017-02-01T09:16:42.000Z" + ), + aggregation -> aggregation.setNumBuckets(15).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(15, buckets.size()); + + Histogram.Bucket bucket = buckets.get(0); + assertEquals("2017-02-01T09:02:00.000Z", bucket.getKeyAsString()); + assertEquals(2, bucket.getDocCount()); + + bucket = buckets.get(1); + assertEquals("2017-02-01T09:03:00.000Z", bucket.getKeyAsString()); + assertEquals(0, bucket.getDocCount()); + + bucket = buckets.get(2); + assertEquals("2017-02-01T09:04:00.000Z", bucket.getKeyAsString()); + assertEquals(0, bucket.getDocCount()); + + bucket = buckets.get(3); + assertEquals("2017-02-01T09:05:00.000Z", bucket.getKeyAsString()); + assertEquals(0, bucket.getDocCount()); + + bucket = buckets.get(4); + assertEquals("2017-02-01T09:06:00.000Z", bucket.getKeyAsString()); + assertEquals(0, bucket.getDocCount()); + + bucket = buckets.get(5); + assertEquals("2017-02-01T09:07:00.000Z", bucket.getKeyAsString()); + assertEquals(0, bucket.getDocCount()); + + bucket = buckets.get(6); + assertEquals("2017-02-01T09:08:00.000Z", bucket.getKeyAsString()); + assertEquals(0, bucket.getDocCount()); + + bucket = buckets.get(7); + assertEquals("2017-02-01T09:09:00.000Z", bucket.getKeyAsString()); + assertEquals(0, bucket.getDocCount()); + + bucket = buckets.get(8); + assertEquals("2017-02-01T09:10:00.000Z", bucket.getKeyAsString()); + assertEquals(0, bucket.getDocCount()); + + bucket = buckets.get(9); + assertEquals("2017-02-01T09:11:00.000Z", bucket.getKeyAsString()); + assertEquals(0, bucket.getDocCount()); + + bucket = buckets.get(10); + assertEquals("2017-02-01T09:12:00.000Z", bucket.getKeyAsString()); + assertEquals(0, bucket.getDocCount()); + + bucket = buckets.get(11); + assertEquals("2017-02-01T09:13:00.000Z", bucket.getKeyAsString()); + assertEquals(0, bucket.getDocCount()); + + bucket = buckets.get(12); + assertEquals("2017-02-01T09:14:00.000Z", bucket.getKeyAsString()); + assertEquals(0, bucket.getDocCount()); + + bucket = buckets.get(13); + assertEquals("2017-02-01T09:15:00.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(14); + assertEquals("2017-02-01T09:16:00.000Z", bucket.getKeyAsString()); + assertEquals(2, bucket.getDocCount()); + } + ); + } + + public void testIntervalSecond() throws IOException { + testSearchCase(new MatchAllDocsQuery(), + Arrays.asList("2017-02-01T00:00:05.015Z", "2017-02-01T00:00:07.299Z", 
"2017-02-01T00:00:07.074Z", + "2017-02-01T00:00:11.688Z", "2017-02-01T00:00:11.210Z", "2017-02-01T00:00:11.380Z"), + aggregation -> aggregation.setNumBuckets(7).field(DATE_FIELD), histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(3, buckets.size()); + + Histogram.Bucket bucket = buckets.get(0); + assertEquals("2017-02-01T00:00:05.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(1); + assertEquals("2017-02-01T00:00:07.000Z", bucket.getKeyAsString()); + assertEquals(2, bucket.getDocCount()); + + bucket = buckets.get(2); + assertEquals("2017-02-01T00:00:11.000Z", bucket.getKeyAsString()); + assertEquals(3, bucket.getDocCount()); + }); + testSearchAndReduceCase(new MatchAllDocsQuery(), + Arrays.asList( + "2017-02-01T00:00:05.015Z", + "2017-02-01T00:00:07.299Z", + "2017-02-01T00:00:07.074Z", + "2017-02-01T00:00:11.688Z", + "2017-02-01T00:00:11.210Z", + "2017-02-01T00:00:11.380Z" + ), + aggregation -> aggregation.setNumBuckets(7).field(DATE_FIELD), + histogram -> { + List buckets = histogram.getBuckets(); + assertEquals(7, buckets.size()); + + Histogram.Bucket bucket = buckets.get(0); + assertEquals("2017-02-01T00:00:05.000Z", bucket.getKeyAsString()); + assertEquals(1, bucket.getDocCount()); + + bucket = buckets.get(1); + assertEquals("2017-02-01T00:00:06.000Z", bucket.getKeyAsString()); + assertEquals(0, bucket.getDocCount()); + + bucket = buckets.get(2); + assertEquals("2017-02-01T00:00:07.000Z", bucket.getKeyAsString()); + assertEquals(2, bucket.getDocCount()); + + bucket = buckets.get(3); + assertEquals("2017-02-01T00:00:08.000Z", bucket.getKeyAsString()); + assertEquals(0, bucket.getDocCount()); + + bucket = buckets.get(4); + assertEquals("2017-02-01T00:00:09.000Z", bucket.getKeyAsString()); + assertEquals(0, bucket.getDocCount()); + + bucket = buckets.get(5); + assertEquals("2017-02-01T00:00:10.000Z", bucket.getKeyAsString()); + assertEquals(0, bucket.getDocCount()); + + bucket = buckets.get(6); + assertEquals("2017-02-01T00:00:11.000Z", bucket.getKeyAsString()); + assertEquals(3, bucket.getDocCount()); + } + ); + } + + private void testSearchCase(Query query, List dataset, + Consumer configure, + Consumer verify) throws IOException { + executeTestCase(false, query, dataset, configure, verify); + } + + private void testSearchAndReduceCase(Query query, List dataset, + Consumer configure, + Consumer verify) throws IOException { + executeTestCase(true, query, dataset, configure, verify); + } + + private void testBothCases(Query query, List dataset, + Consumer configure, + Consumer verify) throws IOException { + testSearchCase(query, dataset, configure, verify); + testSearchAndReduceCase(query, dataset, configure, verify); + } + + @Override + protected IndexSettings createIndexSettings() { + Settings nodeSettings = Settings.builder() + .put("search.max_buckets", 100000).build(); + return new IndexSettings( + IndexMetaData.builder("_index").settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)) + .numberOfShards(1) + .numberOfReplicas(0) + .creationDate(System.currentTimeMillis()) + .build(), + nodeSettings + ); + } + + private void executeTestCase(boolean reduced, Query query, List dataset, + Consumer configure, + Consumer verify) throws IOException { + + try (Directory directory = newDirectory()) { + try (RandomIndexWriter indexWriter = new RandomIndexWriter(random(), directory)) { + Document document = new Document(); + for (String date : dataset) { + if (frequently()) { + 
indexWriter.commit(); + } + + long instant = asLong(date); + document.add(new SortedNumericDocValuesField(DATE_FIELD, instant)); + document.add(new LongPoint(INSTANT_FIELD, instant)); + indexWriter.addDocument(document); + document.clear(); + } + } + + try (IndexReader indexReader = DirectoryReader.open(directory)) { + IndexSearcher indexSearcher = newSearcher(indexReader, true, true); + + AutoDateHistogramAggregationBuilder aggregationBuilder = new AutoDateHistogramAggregationBuilder("_name"); + if (configure != null) { + configure.accept(aggregationBuilder); + } + + DateFieldMapper.Builder builder = new DateFieldMapper.Builder("_name"); + DateFieldMapper.DateFieldType fieldType = builder.fieldType(); + fieldType.setHasDocValues(true); + fieldType.setName(aggregationBuilder.field()); + + InternalAutoDateHistogram histogram; + if (reduced) { + histogram = searchAndReduce(indexSearcher, query, aggregationBuilder, fieldType); + } else { + histogram = search(indexSearcher, query, aggregationBuilder, fieldType); + } + verify.accept(histogram); + } + } + } + + private static long asLong(String dateTime) { + return DateFieldMapper.DEFAULT_DATE_TIME_FORMATTER.parser().parseDateTime(dateTime).getMillis(); + } +} diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java new file mode 100644 index 0000000000000..389371efd79aa --- /dev/null +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/histogram/InternalAutoDateHistogramTests.java @@ -0,0 +1,154 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.search.aggregations.bucket.histogram; + +import org.elasticsearch.common.io.stream.Writeable; +import org.elasticsearch.common.rounding.DateTimeUnit; +import org.elasticsearch.common.rounding.Rounding; +import org.elasticsearch.search.DocValueFormat; +import org.elasticsearch.search.aggregations.InternalAggregations; +import org.elasticsearch.search.aggregations.ParsedMultiBucketAggregation; +import org.elasticsearch.search.aggregations.bucket.histogram.AutoDateHistogramAggregationBuilder.RoundingInfo; +import org.elasticsearch.search.aggregations.bucket.histogram.InternalAutoDateHistogram.BucketInfo; +import org.elasticsearch.search.aggregations.pipeline.PipelineAggregator; +import org.elasticsearch.test.InternalMultiBucketAggregationTestCase; +import org.joda.time.DateTime; + +import java.util.ArrayList; +import java.util.Collections; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.TreeMap; + +import static org.elasticsearch.common.unit.TimeValue.timeValueHours; +import static org.elasticsearch.common.unit.TimeValue.timeValueMinutes; +import static org.elasticsearch.common.unit.TimeValue.timeValueSeconds; + +public class InternalAutoDateHistogramTests extends InternalMultiBucketAggregationTestCase { + + private DocValueFormat format; + private RoundingInfo[] roundingInfos; + + @Override + public void setUp() throws Exception { + super.setUp(); + format = randomNumericDocValueFormat(); + + roundingInfos = new RoundingInfo[6]; + roundingInfos[0] = new RoundingInfo(Rounding.builder(DateTimeUnit.SECOND_OF_MINUTE).build(), 1, 5, 10, 30); + roundingInfos[1] = new RoundingInfo(Rounding.builder(DateTimeUnit.MINUTES_OF_HOUR).build(), 1, 5, 10, 30); + roundingInfos[2] = new RoundingInfo(Rounding.builder(DateTimeUnit.HOUR_OF_DAY).build(), 1, 3, 12); + roundingInfos[3] = new RoundingInfo(Rounding.builder(DateTimeUnit.DAY_OF_MONTH).build(), 1, 7); + roundingInfos[4] = new RoundingInfo(Rounding.builder(DateTimeUnit.MONTH_OF_YEAR).build(), 1, 3); + roundingInfos[5] = new RoundingInfo(Rounding.builder(DateTimeUnit.YEAR_OF_CENTURY).build(), 1, 10, 20, 50, 100); + } + + @Override + protected InternalAutoDateHistogram createTestInstance(String name, + List pipelineAggregators, + Map metaData, + InternalAggregations aggregations) { + int nbBuckets = randomNumberOfBuckets(); + int targetBuckets = randomIntBetween(1, nbBuckets * 2 + 1); + List buckets = new ArrayList<>(nbBuckets); + long startingDate = System.currentTimeMillis(); + + long interval = randomIntBetween(1, 3); + long intervalMillis = randomFrom(timeValueSeconds(interval), timeValueMinutes(interval), timeValueHours(interval)).getMillis(); + + for (int i = 0; i < nbBuckets; i++) { + long key = startingDate + (intervalMillis * i); + buckets.add(i, new InternalAutoDateHistogram.Bucket(key, randomIntBetween(1, 100), format, aggregations)); + } + InternalAggregations subAggregations = new InternalAggregations(Collections.emptyList()); + BucketInfo bucketInfo = new BucketInfo(roundingInfos, randomIntBetween(0, roundingInfos.length - 1), subAggregations); + + return new InternalAutoDateHistogram(name, buckets, targetBuckets, bucketInfo, format, pipelineAggregators, metaData); + } + + @Override + protected void assertReduced(InternalAutoDateHistogram reduced, List inputs) { + int roundingIdx = 0; + for (InternalAutoDateHistogram histogram : inputs) { + if (histogram.getBucketInfo().roundingIdx > roundingIdx) { + roundingIdx = histogram.getBucketInfo().roundingIdx; + } + } + Map 
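// The reduced histogram must use the coarsest rounding any input arrived at
// (roundingIdx above): re-round every input bucket key with it, sum the doc
// counts per key, and the result has to match the actual reduced buckets.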
expectedCounts = new TreeMap<>(); + for (Histogram histogram : inputs) { + for (Histogram.Bucket bucket : histogram.getBuckets()) { + expectedCounts.compute(roundingInfos[roundingIdx].rounding.round(((DateTime) bucket.getKey()).getMillis()), + (key, oldValue) -> (oldValue == null ? 0 : oldValue) + bucket.getDocCount()); + } + } + Map actualCounts = new TreeMap<>(); + for (Histogram.Bucket bucket : reduced.getBuckets()) { + actualCounts.compute(((DateTime) bucket.getKey()).getMillis(), + (key, oldValue) -> (oldValue == null ? 0 : oldValue) + bucket.getDocCount()); + } + assertEquals(expectedCounts, actualCounts); + } + + @Override + protected Writeable.Reader instanceReader() { + return InternalAutoDateHistogram::new; + } + + @Override + protected Class implementationClass() { + return ParsedAutoDateHistogram.class; + } + + @Override + protected InternalAutoDateHistogram mutateInstance(InternalAutoDateHistogram instance) { + String name = instance.getName(); + List buckets = instance.getBuckets(); + int targetBuckets = instance.getTargetBuckets(); + BucketInfo bucketInfo = instance.getBucketInfo(); + List pipelineAggregators = instance.pipelineAggregators(); + Map metaData = instance.getMetaData(); + switch (between(0, 3)) { + case 0: + name += randomAlphaOfLength(5); + break; + case 1: + buckets = new ArrayList<>(buckets); + buckets.add(new InternalAutoDateHistogram.Bucket(randomNonNegativeLong(), randomIntBetween(1, 100), format, + InternalAggregations.EMPTY)); + break; + case 2: + int roundingIdx = bucketInfo.roundingIdx == bucketInfo.roundingInfos.length - 1 ? 0 : bucketInfo.roundingIdx + 1; + bucketInfo = new BucketInfo(bucketInfo.roundingInfos, roundingIdx, bucketInfo.emptySubAggregations); + break; + case 3: + if (metaData == null) { + metaData = new HashMap<>(1); + } else { + metaData = new HashMap<>(instance.getMetaData()); + } + metaData.put(randomAlphaOfLength(15), randomInt()); + break; + default: + throw new AssertionError("Illegal randomisation branch"); + } + return new InternalAutoDateHistogram(name, buckets, targetBuckets, bucketInfo, format, pipelineAggregators, metaData); + } +} diff --git a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java index 07f25986655d7..e84f2a99a115d 100644 --- a/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/search/aggregations/AggregatorTestCase.java @@ -87,7 +87,6 @@ import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; -import static org.elasticsearch.test.InternalAggregationTestCase.DEFAULT_MAX_BUCKETS; /** * Base class for testing {@link Aggregator} implementations. @@ -229,7 +228,7 @@ public boolean shouldCache(Query query) throws IOException { }); when(searchContext.bitsetFilterCache()).thenReturn(new BitsetFilterCache(indexSettings, mock(Listener.class))); doAnswer(invocation -> { - /* Store the releasables so we can release them at the end of the test case. This is important because aggregations don't + /* Store the release-ables so we can release them at the end of the test case. This is important because aggregations don't * close their sub-aggregations. This is fairly similar to what the production code does. 
*/ releasables.add((Releasable) invocation.getArguments()[0]); return null; diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalAggregationTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/InternalAggregationTestCase.java index 838b0e315ea0e..15e44853a97ba 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalAggregationTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalAggregationTestCase.java @@ -53,8 +53,10 @@ import org.elasticsearch.search.aggregations.bucket.geogrid.ParsedGeoHashGrid; import org.elasticsearch.search.aggregations.bucket.global.GlobalAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.global.ParsedGlobal; +import org.elasticsearch.search.aggregations.bucket.histogram.AutoDateHistogramAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramAggregationBuilder; import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder; +import org.elasticsearch.search.aggregations.bucket.histogram.ParsedAutoDateHistogram; import org.elasticsearch.search.aggregations.bucket.histogram.ParsedDateHistogram; import org.elasticsearch.search.aggregations.bucket.histogram.ParsedHistogram; import org.elasticsearch.search.aggregations.bucket.missing.MissingAggregationBuilder; @@ -181,6 +183,7 @@ public abstract class InternalAggregationTestCase map.put(GeoCentroidAggregationBuilder.NAME, (p, c) -> ParsedGeoCentroid.fromXContent(p, (String) c)); map.put(HistogramAggregationBuilder.NAME, (p, c) -> ParsedHistogram.fromXContent(p, (String) c)); map.put(DateHistogramAggregationBuilder.NAME, (p, c) -> ParsedDateHistogram.fromXContent(p, (String) c)); + map.put(AutoDateHistogramAggregationBuilder.NAME, (p, c) -> ParsedAutoDateHistogram.fromXContent(p, (String) c)); map.put(StringTerms.NAME, (p, c) -> ParsedStringTerms.fromXContent(p, (String) c)); map.put(LongTerms.NAME, (p, c) -> ParsedLongTerms.fromXContent(p, (String) c)); map.put(DoubleTerms.NAME, (p, c) -> ParsedDoubleTerms.fromXContent(p, (String) c)); diff --git a/test/framework/src/main/java/org/elasticsearch/test/InternalMultiBucketAggregationTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/InternalMultiBucketAggregationTestCase.java index 952b6c027945b..6f0aebe23966b 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/InternalMultiBucketAggregationTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/InternalMultiBucketAggregationTestCase.java @@ -149,7 +149,8 @@ private void assertMultiBucketsAggregations(Aggregation expected, Aggregation ac protected void assertMultiBucketsAggregation(MultiBucketsAggregation expected, MultiBucketsAggregation actual, boolean checkOrder) { Class parsedClass = implementationClass(); assertNotNull("Parsed aggregation class must not be null", parsedClass); - assertTrue(parsedClass.isInstance(actual)); + assertTrue("Unexpected parsed class, expected instance of: " + parsedClass + ", but was: " + actual, + parsedClass.isInstance(actual)); assertTrue(expected instanceof InternalAggregation); assertEquals(expected.getName(), actual.getName()); From 0e7a6b4e2975983b7d2ad6a1e0df6399d542b3b2 Mon Sep 17 00:00:00 2001 From: Vladimir Dolzhenko Date: Fri, 13 Jul 2018 20:05:49 +0200 Subject: [PATCH 027/107] lazy snapshot repository initialization (#31606) lazy snapshot repository initialization --- .../repositories/url/URLRepository.java | 31 ++-- .../repositories/url/URLRepositoryTests.java | 51
++++++- .../repositories/azure/AzureRepository.java | 42 ++++-- .../azure/AzureRepositorySettingsTests.java | 29 ++-- .../gcs/GoogleCloudStorageRepository.java | 24 ++-- ...eCloudStorageBlobStoreRepositoryTests.java | 10 +- .../repositories/hdfs/HdfsRepository.java | 26 ++-- .../repositories/s3/S3Repository.java | 65 ++++++--- .../repositories/s3/S3RepositoryPlugin.java | 4 +- .../s3/RepositoryCredentialsTests.java | 26 +++- .../s3/S3BlobStoreRepositoryTests.java | 15 +- .../repositories/s3/S3RepositoryTests.java | 38 +++-- .../repositories/RepositoriesService.java | 73 ++++++---- .../VerifyNodeRepositoryAction.java | 2 +- .../blobstore/BlobStoreRepository.java | 136 ++++++++++++++---- .../repositories/fs/FsRepository.java | 32 +++-- ...ClusterStateServiceRandomUpdatesTests.java | 2 +- .../BlobStoreRepositoryRestoreTests.java | 11 +- .../blobstore/BlobStoreRepositoryTests.java | 35 ++++- .../fs}/FsBlobStoreRepositoryIT.java | 11 +- .../SharedClusterSnapshotRestoreIT.java | 23 ++- .../snapshots/mockstore/MockRepository.java | 13 +- .../ESBlobStoreRepositoryIntegTestCase.java | 56 ++++++-- 23 files changed, 539 insertions(+), 216 deletions(-) rename server/src/test/java/org/elasticsearch/{snapshots => repositories/fs}/FsBlobStoreRepositoryIT.java (79%) diff --git a/modules/repository-url/src/main/java/org/elasticsearch/repositories/url/URLRepository.java b/modules/repository-url/src/main/java/org/elasticsearch/repositories/url/URLRepository.java index c1128fd683a70..98b8c0a1945a5 100644 --- a/modules/repository-url/src/main/java/org/elasticsearch/repositories/url/URLRepository.java +++ b/modules/repository-url/src/main/java/org/elasticsearch/repositories/url/URLRepository.java @@ -20,6 +20,7 @@ package org.elasticsearch.repositories.url; import org.elasticsearch.cluster.metadata.RepositoryMetaData; +import org.elasticsearch.common.blobstore.BlobContainer; import org.elasticsearch.common.blobstore.BlobPath; import org.elasticsearch.common.blobstore.BlobStore; import org.elasticsearch.common.blobstore.url.URLBlobStore; @@ -31,7 +32,6 @@ import org.elasticsearch.repositories.RepositoryException; import org.elasticsearch.repositories.blobstore.BlobStoreRepository; -import java.io.IOException; import java.net.MalformedURLException; import java.net.URISyntaxException; import java.net.URL; @@ -71,33 +71,44 @@ public class URLRepository extends BlobStoreRepository { private final Environment environment; - private final URLBlobStore blobStore; - private final BlobPath basePath; + private final URL url; + /** * Constructs a read-only URL-based repository */ public URLRepository(RepositoryMetaData metadata, Environment environment, - NamedXContentRegistry namedXContentRegistry) throws IOException { + NamedXContentRegistry namedXContentRegistry) { super(metadata, environment.settings(), namedXContentRegistry); if (URL_SETTING.exists(metadata.settings()) == false && REPOSITORIES_URL_SETTING.exists(settings) == false) { throw new RepositoryException(metadata.name(), "missing url"); } + this.environment = environment; supportedProtocols = SUPPORTED_PROTOCOLS_SETTING.get(settings); urlWhiteList = ALLOWED_URLS_SETTING.get(settings).toArray(new URIPattern[]{}); - this.environment = environment; + basePath = BlobPath.cleanPath(); + url = URL_SETTING.exists(metadata.settings()) + ? URL_SETTING.get(metadata.settings()) : REPOSITORIES_URL_SETTING.get(settings); + } - URL url = URL_SETTING.exists(metadata.settings()) ? 
URL_SETTING.get(metadata.settings()) : REPOSITORIES_URL_SETTING.get(settings); + @Override + protected BlobStore createBlobStore() { URL normalizedURL = checkURL(url); - blobStore = new URLBlobStore(settings, normalizedURL); - basePath = BlobPath.cleanPath(); + return new URLBlobStore(settings, normalizedURL); + } + + // only use for testing + @Override + protected BlobContainer blobContainer() { + return super.blobContainer(); } + // only use for testing @Override - protected BlobStore blobStore() { - return blobStore; + protected BlobStore getBlobStore() { + return super.getBlobStore(); } @Override diff --git a/modules/repository-url/src/test/java/org/elasticsearch/repositories/url/URLRepositoryTests.java b/modules/repository-url/src/test/java/org/elasticsearch/repositories/url/URLRepositoryTests.java index 1af4c1eaba9ad..2de4c132673db 100644 --- a/modules/repository-url/src/test/java/org/elasticsearch/repositories/url/URLRepositoryTests.java +++ b/modules/repository-url/src/test/java/org/elasticsearch/repositories/url/URLRepositoryTests.java @@ -31,8 +31,22 @@ import java.nio.file.Path; import java.util.Collections; +import static org.hamcrest.CoreMatchers.is; +import static org.hamcrest.CoreMatchers.not; +import static org.hamcrest.CoreMatchers.nullValue; + public class URLRepositoryTests extends ESTestCase { + private URLRepository createRepository(Settings baseSettings, RepositoryMetaData repositoryMetaData) { + return new URLRepository(repositoryMetaData, TestEnvironment.newEnvironment(baseSettings), + new NamedXContentRegistry(Collections.emptyList())) { + @Override + protected void assertSnapshotOrGenericThread() { + // eliminate thread name check as we create repo manually on test/main threads + } + }; + } + public void testWhiteListingRepoURL() throws IOException { String repoPath = createTempDir().resolve("repository").toUri().toURL().toString(); Settings baseSettings = Settings.builder() @@ -41,8 +55,12 @@ public void testWhiteListingRepoURL() throws IOException { .put(URLRepository.REPOSITORIES_URL_SETTING.getKey(), repoPath) .build(); RepositoryMetaData repositoryMetaData = new RepositoryMetaData("url", URLRepository.TYPE, baseSettings); - new URLRepository(repositoryMetaData, TestEnvironment.newEnvironment(baseSettings), - new NamedXContentRegistry(Collections.emptyList())); + final URLRepository repository = createRepository(baseSettings, repositoryMetaData); + repository.start(); + + assertThat("blob store has to be lazy initialized", repository.getBlobStore(), is(nullValue())); + repository.blobContainer(); + assertThat("blobContainer has to initialize blob store", repository.getBlobStore(), not(nullValue())); } public void testIfNotWhiteListedMustSetRepoURL() throws IOException { @@ -52,9 +70,10 @@ public void testIfNotWhiteListedMustSetRepoURL() throws IOException { .put(URLRepository.REPOSITORIES_URL_SETTING.getKey(), repoPath) .build(); RepositoryMetaData repositoryMetaData = new RepositoryMetaData("url", URLRepository.TYPE, baseSettings); + final URLRepository repository = createRepository(baseSettings, repositoryMetaData); + repository.start(); try { - new URLRepository(repositoryMetaData, TestEnvironment.newEnvironment(baseSettings), - new NamedXContentRegistry(Collections.emptyList())); + repository.blobContainer(); fail("RepositoryException should have been thrown."); } catch (RepositoryException e) { String msg = "[url] file url [" + repoPath @@ -73,13 +92,33 @@ public void testMustBeSupportedProtocol() throws IOException { 
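// Pattern used throughout these tests: construct the repository, start() it,
// then call blobContainer(); with lazy initialization the validation failures
// only surface on that first blobContainer() call, not in the constructor.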
.put(URLRepository.SUPPORTED_PROTOCOLS_SETTING.getKey(), "http,https") .build(); RepositoryMetaData repositoryMetaData = new RepositoryMetaData("url", URLRepository.TYPE, baseSettings); + final URLRepository repository = createRepository(baseSettings, repositoryMetaData); + repository.start(); try { - new URLRepository(repositoryMetaData, TestEnvironment.newEnvironment(baseSettings), - new NamedXContentRegistry(Collections.emptyList())); + repository.blobContainer(); fail("RepositoryException should have been thrown."); } catch (RepositoryException e) { assertEquals("[url] unsupported url protocol [file] from URL [" + repoPath +"]", e.getMessage()); } } + public void testNonNormalizedUrl() throws IOException { + Settings baseSettings = Settings.builder() + .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put(URLRepository.ALLOWED_URLS_SETTING.getKey(), "file:/tmp/") + .put(URLRepository.REPOSITORIES_URL_SETTING.getKey(), "file:/var/" ) + .build(); + RepositoryMetaData repositoryMetaData = new RepositoryMetaData("url", URLRepository.TYPE, baseSettings); + final URLRepository repository = createRepository(baseSettings, repositoryMetaData); + repository.start(); + try { + repository.blobContainer(); + fail("RepositoryException should have been thrown."); + } catch (RepositoryException e) { + assertEquals("[url] file url [file:/var/] doesn't match any of the locations " + + "specified by path.repo or repositories.url.allowed_urls", + e.getMessage()); + } + } + } diff --git a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java index 47b398a4c2fd3..0797c78af33bb 100644 --- a/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java +++ b/plugins/repository-azure/src/main/java/org/elasticsearch/repositories/azure/AzureRepository.java @@ -38,7 +38,6 @@ import org.elasticsearch.snapshots.SnapshotCreationException; import org.elasticsearch.snapshots.SnapshotId; -import java.io.IOException; import java.net.URISyntaxException; import java.util.List; import java.util.Locale; @@ -78,25 +77,21 @@ public static final class Repository { public static final Setting READONLY_SETTING = Setting.boolSetting("readonly", false, Property.NodeScope); } - private final AzureBlobStore blobStore; private final BlobPath basePath; private final ByteSizeValue chunkSize; private final boolean compress; + private final Environment environment; + private final AzureStorageService storageService; private final boolean readonly; public AzureRepository(RepositoryMetaData metadata, Environment environment, NamedXContentRegistry namedXContentRegistry, - AzureStorageService storageService) throws IOException, URISyntaxException, StorageException { + AzureStorageService storageService) { super(metadata, environment.settings(), namedXContentRegistry); - this.blobStore = new AzureBlobStore(metadata, environment.settings(), storageService); this.chunkSize = Repository.CHUNK_SIZE_SETTING.get(metadata.settings()); this.compress = Repository.COMPRESS_SETTING.get(metadata.settings()); - // If the user explicitly did not define a readonly value, we set it by ourselves depending on the location mode setting. 
- // For secondary_only setting, the repository should be read only - if (Repository.READONLY_SETTING.exists(metadata.settings())) { - this.readonly = Repository.READONLY_SETTING.get(metadata.settings()); - } else { - this.readonly = this.blobStore.getLocationMode() == LocationMode.SECONDARY_ONLY; - } + this.environment = environment; + this.storageService = storageService; + final String basePath = Strings.trimLeadingCharacter(Repository.BASE_PATH_SETTING.get(metadata.settings()), '/'); if (Strings.hasLength(basePath)) { // Remove starting / if any @@ -108,15 +103,33 @@ public AzureRepository(RepositoryMetaData metadata, Environment environment, Nam } else { this.basePath = BlobPath.cleanPath(); } - logger.debug((org.apache.logging.log4j.util.Supplier) () -> new ParameterizedMessage( - "using container [{}], chunk_size [{}], compress [{}], base_path [{}]", blobStore, chunkSize, compress, basePath)); + + // If the user explicitly did not define a readonly value, we set it by ourselves depending on the location mode setting. + // For secondary_only setting, the repository should be read only + final LocationMode locationMode = Repository.LOCATION_MODE_SETTING.get(metadata.settings()); + if (Repository.READONLY_SETTING.exists(metadata.settings())) { + this.readonly = Repository.READONLY_SETTING.get(metadata.settings()); + } else { + this.readonly = locationMode == LocationMode.SECONDARY_ONLY; + } + } + + // only use for testing + @Override + protected BlobStore getBlobStore() { + return super.getBlobStore(); } /** * {@inheritDoc} */ @Override - protected BlobStore blobStore() { + protected AzureBlobStore createBlobStore() throws URISyntaxException, StorageException { + final AzureBlobStore blobStore = new AzureBlobStore(metadata, environment.settings(), storageService); + + logger.debug((org.apache.logging.log4j.util.Supplier) () -> new ParameterizedMessage( + "using container [{}], chunk_size [{}], compress [{}], base_path [{}]", + blobStore, chunkSize, compress, basePath)); return blobStore; } @@ -144,6 +157,7 @@ protected ByteSizeValue chunkSize() { @Override public void initializeSnapshot(SnapshotId snapshotId, List indices, MetaData clusterMetadata) { try { + final AzureBlobStore blobStore = (AzureBlobStore) blobStore(); if (blobStore.containerExist() == false) { throw new IllegalArgumentException("The bucket [" + blobStore + "] does not exist. 
Please create it before " + " creating an azure snapshot repository backed by it."); diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureRepositorySettingsTests.java b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureRepositorySettingsTests.java index 639905042cf87..b4b71577cbcdc 100644 --- a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureRepositorySettingsTests.java +++ b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureRepositorySettingsTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.repositories.azure; import com.microsoft.azure.storage.LocationMode; -import com.microsoft.azure.storage.StorageException; import org.elasticsearch.cluster.metadata.RepositoryMetaData; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; @@ -30,76 +29,76 @@ import org.elasticsearch.env.TestEnvironment; import org.elasticsearch.test.ESTestCase; -import java.io.IOException; -import java.net.URISyntaxException; - import static org.hamcrest.Matchers.is; +import static org.hamcrest.Matchers.nullValue; import static org.mockito.Mockito.mock; public class AzureRepositorySettingsTests extends ESTestCase { - private AzureRepository azureRepository(Settings settings) throws StorageException, IOException, URISyntaxException { + private AzureRepository azureRepository(Settings settings) { Settings internalSettings = Settings.builder() .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath()) .putList(Environment.PATH_DATA_SETTING.getKey(), tmpPaths()) .put(settings) .build(); - return new AzureRepository(new RepositoryMetaData("foo", "azure", internalSettings), + final AzureRepository azureRepository = new AzureRepository(new RepositoryMetaData("foo", "azure", internalSettings), TestEnvironment.newEnvironment(internalSettings), NamedXContentRegistry.EMPTY, mock(AzureStorageService.class)); + assertThat(azureRepository.getBlobStore(), is(nullValue())); + return azureRepository; } - public void testReadonlyDefault() throws StorageException, IOException, URISyntaxException { + public void testReadonlyDefault() { assertThat(azureRepository(Settings.EMPTY).isReadOnly(), is(false)); } - public void testReadonlyDefaultAndReadonlyOn() throws StorageException, IOException, URISyntaxException { + public void testReadonlyDefaultAndReadonlyOn() { assertThat(azureRepository(Settings.builder() .put("readonly", true) .build()).isReadOnly(), is(true)); } - public void testReadonlyWithPrimaryOnly() throws StorageException, IOException, URISyntaxException { + public void testReadonlyWithPrimaryOnly() { assertThat(azureRepository(Settings.builder() .put(AzureRepository.Repository.LOCATION_MODE_SETTING.getKey(), LocationMode.PRIMARY_ONLY.name()) .build()).isReadOnly(), is(false)); } - public void testReadonlyWithPrimaryOnlyAndReadonlyOn() throws StorageException, IOException, URISyntaxException { + public void testReadonlyWithPrimaryOnlyAndReadonlyOn() { assertThat(azureRepository(Settings.builder() .put(AzureRepository.Repository.LOCATION_MODE_SETTING.getKey(), LocationMode.PRIMARY_ONLY.name()) .put("readonly", true) .build()).isReadOnly(), is(true)); } - public void testReadonlyWithSecondaryOnlyAndReadonlyOn() throws StorageException, IOException, URISyntaxException { + public void testReadonlyWithSecondaryOnlyAndReadonlyOn() { assertThat(azureRepository(Settings.builder() 
.put(AzureRepository.Repository.LOCATION_MODE_SETTING.getKey(), LocationMode.SECONDARY_ONLY.name()) .put("readonly", true) .build()).isReadOnly(), is(true)); } - public void testReadonlyWithSecondaryOnlyAndReadonlyOff() throws StorageException, IOException, URISyntaxException { + public void testReadonlyWithSecondaryOnlyAndReadonlyOff() { assertThat(azureRepository(Settings.builder() .put(AzureRepository.Repository.LOCATION_MODE_SETTING.getKey(), LocationMode.SECONDARY_ONLY.name()) .put("readonly", false) .build()).isReadOnly(), is(false)); } - public void testReadonlyWithPrimaryAndSecondaryOnlyAndReadonlyOn() throws StorageException, IOException, URISyntaxException { + public void testReadonlyWithPrimaryAndSecondaryOnlyAndReadonlyOn() { assertThat(azureRepository(Settings.builder() .put(AzureRepository.Repository.LOCATION_MODE_SETTING.getKey(), LocationMode.PRIMARY_THEN_SECONDARY.name()) .put("readonly", true) .build()).isReadOnly(), is(true)); } - public void testReadonlyWithPrimaryAndSecondaryOnlyAndReadonlyOff() throws StorageException, IOException, URISyntaxException { + public void testReadonlyWithPrimaryAndSecondaryOnlyAndReadonlyOff() { assertThat(azureRepository(Settings.builder() .put(AzureRepository.Repository.LOCATION_MODE_SETTING.getKey(), LocationMode.PRIMARY_THEN_SECONDARY.name()) .put("readonly", false) .build()).isReadOnly(), is(false)); } - public void testChunkSize() throws StorageException, IOException, URISyntaxException { + public void testChunkSize() { // default chunk size AzureRepository azureRepository = azureRepository(Settings.EMPTY); assertEquals(AzureStorageService.MAX_CHUNK_SIZE, azureRepository.chunkSize()); diff --git a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepository.java b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepository.java index 83d48eeda20aa..fe6c8889bd238 100644 --- a/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepository.java +++ b/plugins/repository-gcs/src/main/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageRepository.java @@ -22,7 +22,6 @@ import org.elasticsearch.cluster.metadata.RepositoryMetaData; import org.elasticsearch.common.Strings; import org.elasticsearch.common.blobstore.BlobPath; -import org.elasticsearch.common.blobstore.BlobStore; import org.elasticsearch.common.settings.Setting; import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; @@ -56,18 +55,19 @@ class GoogleCloudStorageRepository extends BlobStoreRepository { byteSizeSetting("chunk_size", MAX_CHUNK_SIZE, MIN_CHUNK_SIZE, MAX_CHUNK_SIZE, Property.NodeScope, Property.Dynamic); static final Setting CLIENT_NAME = new Setting<>("client", "default", Function.identity()); - private final ByteSizeValue chunkSize; - private final boolean compress; + private final GoogleCloudStorageService storageService; private final BlobPath basePath; - private final GoogleCloudStorageBlobStore blobStore; + private final boolean compress; + private final ByteSizeValue chunkSize; + private final String bucket; + private final String clientName; GoogleCloudStorageRepository(RepositoryMetaData metadata, Environment environment, NamedXContentRegistry namedXContentRegistry, - GoogleCloudStorageService storageService) throws Exception { + GoogleCloudStorageService storageService) { super(metadata, environment.settings(), namedXContentRegistry); + this.storageService = storageService; - String bucket 
= getSetting(BUCKET, metadata); - String clientName = CLIENT_NAME.get(metadata.settings()); String basePath = BASE_PATH.get(metadata.settings()); if (Strings.hasLength(basePath)) { BlobPath path = new BlobPath(); @@ -81,16 +81,14 @@ class GoogleCloudStorageRepository extends BlobStoreRepository { this.compress = getSetting(COMPRESS, metadata); this.chunkSize = getSetting(CHUNK_SIZE, metadata); - + this.bucket = getSetting(BUCKET, metadata); + this.clientName = CLIENT_NAME.get(metadata.settings()); logger.debug("using bucket [{}], base_path [{}], chunk_size [{}], compress [{}]", bucket, basePath, chunkSize, compress); - - this.blobStore = new GoogleCloudStorageBlobStore(settings, bucket, clientName, storageService); } - @Override - protected BlobStore blobStore() { - return blobStore; + protected GoogleCloudStorageBlobStore createBlobStore() { + return new GoogleCloudStorageBlobStore(settings, bucket, clientName, storageService); } @Override diff --git a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java index 3692b26f2bbb7..6d5c1bbf85310 100644 --- a/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java +++ b/plugins/repository-gcs/src/test/java/org/elasticsearch/repositories/gcs/GoogleCloudStorageBlobStoreRepositoryTests.java @@ -25,6 +25,7 @@ import org.elasticsearch.common.unit.ByteSizeUnit; import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.repositories.Repository; import org.elasticsearch.repositories.blobstore.ESBlobStoreRepositoryIntegTestCase; import org.junit.AfterClass; @@ -34,6 +35,7 @@ import java.util.concurrent.ConcurrentMap; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; +import static org.hamcrest.Matchers.instanceOf; public class GoogleCloudStorageBlobStoreRepositoryTests extends ESBlobStoreRepositoryIntegTestCase { @@ -49,9 +51,10 @@ protected Collection> nodePlugins() { } @Override - protected void createTestRepository(String name) { + protected void createTestRepository(String name, boolean verify) { assertAcked(client().admin().cluster().preparePutRepository(name) .setType(GoogleCloudStorageRepository.TYPE) + .setVerify(verify) .setSettings(Settings.builder() .put("bucket", BUCKET) .put("base_path", GoogleCloudStorageBlobStoreRepositoryTests.class.getSimpleName()) @@ -59,6 +62,11 @@ protected void createTestRepository(String name) { .put("chunk_size", randomIntBetween(100, 1000), ByteSizeUnit.BYTES))); } + @Override + protected void afterCreationCheck(Repository repository) { + assertThat(repository, instanceOf(GoogleCloudStorageRepository.class)); + } + @AfterClass public static void wipeRepository() { blobs.clear(); diff --git a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java index 5ef1c7d18d666..97285f9cecb0d 100644 --- a/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java +++ b/plugins/repository-hdfs/src/main/java/org/elasticsearch/repositories/hdfs/HdfsRepository.java @@ -42,7 +42,6 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.SuppressForbidden; import org.elasticsearch.common.blobstore.BlobPath; 
-import org.elasticsearch.common.blobstore.BlobStore; import org.elasticsearch.common.logging.Loggers; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.unit.ByteSizeUnit; @@ -61,29 +60,26 @@ public final class HdfsRepository extends BlobStoreRepository { private final ByteSizeValue chunkSize; private final boolean compress; private final BlobPath basePath = BlobPath.cleanPath(); - - private HdfsBlobStore blobStore; + private final URI uri; + private final String pathSetting; // buffer size passed to HDFS read/write methods // TODO: why 100KB? private static final ByteSizeValue DEFAULT_BUFFER_SIZE = new ByteSizeValue(100, ByteSizeUnit.KB); public HdfsRepository(RepositoryMetaData metadata, Environment environment, - NamedXContentRegistry namedXContentRegistry) throws IOException { + NamedXContentRegistry namedXContentRegistry) { super(metadata, environment.settings(), namedXContentRegistry); this.environment = environment; this.chunkSize = metadata.settings().getAsBytesSize("chunk_size", null); this.compress = metadata.settings().getAsBoolean("compress", false); - } - @Override - protected void doStart() { String uriSetting = getMetadata().settings().get("uri"); if (Strings.hasText(uriSetting) == false) { throw new IllegalArgumentException("No 'uri' defined for hdfs snapshot/restore"); } - URI uri = URI.create(uriSetting); + uri = URI.create(uriSetting); if ("hdfs".equalsIgnoreCase(uri.getScheme()) == false) { throw new IllegalArgumentException(String.format(Locale.ROOT, "Invalid scheme [%s] specified in uri [%s]; only 'hdfs' uri allowed for hdfs snapshot/restore", uri.getScheme(), uriSetting)); @@ -93,16 +89,11 @@ protected void doStart() { "Use 'path' option to specify a path [%s], not the uri [%s] for hdfs snapshot/restore", uri.getPath(), uriSetting)); } - String pathSetting = getMetadata().settings().get("path"); + pathSetting = getMetadata().settings().get("path"); // get configuration if (pathSetting == null) { throw new IllegalArgumentException("No 'path' defined for hdfs snapshot/restore"); } - - // initialize our blobstore using elevated privileges. - SpecialPermission.check(); - blobStore = AccessController.doPrivileged((PrivilegedAction) () -> createBlobstore(uri, pathSetting, getMetadata().settings())); - super.doStart(); } private HdfsBlobStore createBlobstore(URI uri, String path, Settings repositorySettings) { @@ -229,7 +220,12 @@ private static String getHostName() { } @Override - protected BlobStore blobStore() { + protected HdfsBlobStore createBlobStore() { + // initialize our blobstore using elevated privileges. 
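// SpecialPermission.check() rejects callers that lack the plugin permission
// before we enter doPrivileged below; Hadoop's FileSystem bootstrap needs the
// plugin's own privileges rather than those of the calling snapshot thread.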
+ SpecialPermission.check(); + final HdfsBlobStore blobStore = + AccessController.doPrivileged((PrivilegedAction) + () -> createBlobstore(uri, pathSetting, getMetadata().settings())); return blobStore; } diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java index f6f949aa4d012..ec60536f135b2 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3Repository.java @@ -35,7 +35,6 @@ import org.elasticsearch.repositories.RepositoryException; import org.elasticsearch.repositories.blobstore.BlobStoreRepository; -import java.io.IOException; import java.util.Map; import java.util.function.Function; @@ -144,30 +143,43 @@ class S3Repository extends BlobStoreRepository { */ static final Setting BASE_PATH_SETTING = Setting.simpleString("base_path"); - private final S3BlobStore blobStore; + private final S3Service service; - private final BlobPath basePath; + private final String bucket; + + private final ByteSizeValue bufferSize; private final ByteSizeValue chunkSize; private final boolean compress; + private final BlobPath basePath; + + private final boolean serverSideEncryption; + + private final String storageClass; + + private final String cannedACL; + + private final String clientName; + /** * Constructs an s3 backed repository */ S3Repository(final RepositoryMetaData metadata, final Settings settings, final NamedXContentRegistry namedXContentRegistry, - final S3Service service) throws IOException { + final S3Service service) { super(metadata, settings, namedXContentRegistry); + this.service = service; - final String bucket = BUCKET_SETTING.get(metadata.settings()); + // Parse and validate the user's S3 Storage Class setting + this.bucket = BUCKET_SETTING.get(metadata.settings()); if (bucket == null) { throw new RepositoryException(metadata.name(), "No bucket defined for s3 repository"); } - final boolean serverSideEncryption = SERVER_SIDE_ENCRYPTION_SETTING.get(metadata.settings()); - final ByteSizeValue bufferSize = BUFFER_SIZE_SETTING.get(metadata.settings()); + this.bufferSize = BUFFER_SIZE_SETTING.get(metadata.settings()); this.chunkSize = CHUNK_SIZE_SETTING.get(metadata.settings()); this.compress = COMPRESS_SETTING.get(metadata.settings()); @@ -177,33 +189,44 @@ class S3Repository extends BlobStoreRepository { ") can't be lower than " + BUFFER_SIZE_SETTING.getKey() + " (" + bufferSize + ")."); } - // Parse and validate the user's S3 Storage Class setting - final String storageClass = STORAGE_CLASS_SETTING.get(metadata.settings()); - final String cannedACL = CANNED_ACL_SETTING.get(metadata.settings()); - final String clientName = CLIENT_NAME.get(metadata.settings()); + final String basePath = BASE_PATH_SETTING.get(metadata.settings()); + if (Strings.hasLength(basePath)) { + this.basePath = new BlobPath().add(basePath); + } else { + this.basePath = BlobPath.cleanPath(); + } + + this.serverSideEncryption = SERVER_SIDE_ENCRYPTION_SETTING.get(metadata.settings()); + + this.storageClass = STORAGE_CLASS_SETTING.get(metadata.settings()); + this.cannedACL = CANNED_ACL_SETTING.get(metadata.settings()); + this.clientName = CLIENT_NAME.get(metadata.settings()); logger.debug("using bucket [{}], chunk_size [{}], server_side_encryption [{}], " + - "buffer_size [{}], cannedACL [{}], storageClass [{}]", + "buffer_size [{}], cannedACL [{}], storageClass 
[{}]", bucket, chunkSize, serverSideEncryption, bufferSize, cannedACL, storageClass); - // deprecated behavior: override client credentials from the cluster state // (repository settings) if (S3ClientSettings.checkDeprecatedCredentials(metadata.settings())) { overrideCredentialsFromClusterState(service); } - blobStore = new S3BlobStore(settings, service, clientName, bucket, serverSideEncryption, bufferSize, cannedACL, storageClass); + } - final String basePath = BASE_PATH_SETTING.get(metadata.settings()); - if (Strings.hasLength(basePath)) { - this.basePath = new BlobPath().add(basePath); - } else { - this.basePath = BlobPath.cleanPath(); - } + @Override + protected S3BlobStore createBlobStore() { + return new S3BlobStore(settings, service, clientName, bucket, serverSideEncryption, bufferSize, cannedACL, storageClass); } + // only use for testing @Override protected BlobStore blobStore() { - return blobStore; + return super.blobStore(); + } + + // only use for testing + @Override + protected BlobStore getBlobStore() { + return super.getBlobStore(); } @Override diff --git a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RepositoryPlugin.java b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RepositoryPlugin.java index 79a5187059f38..da3219f2aef08 100644 --- a/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RepositoryPlugin.java +++ b/plugins/repository-s3/src/main/java/org/elasticsearch/repositories/s3/S3RepositoryPlugin.java @@ -61,7 +61,7 @@ public class S3RepositoryPlugin extends Plugin implements RepositoryPlugin, Relo }); } - private final S3Service service; + protected final S3Service service; public S3RepositoryPlugin(final Settings settings) { this(settings, new S3Service(settings)); @@ -77,7 +77,7 @@ public S3RepositoryPlugin(final Settings settings) { // proxy method for testing protected S3Repository createRepository(final RepositoryMetaData metadata, final Settings settings, - final NamedXContentRegistry registry) throws IOException { + final NamedXContentRegistry registry) { return new S3Repository(metadata, settings, registry, service); } diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java index 744a27dc48e32..7eb603b4b78e5 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/RepositoryCredentialsTests.java @@ -80,6 +80,16 @@ AmazonS3 buildClient(AWSCredentialsProvider credentials, ClientConfiguration con ProxyS3RepositoryPlugin(Settings settings) { super(settings, new ProxyS3Service(settings)); } + + @Override + protected S3Repository createRepository(RepositoryMetaData metadata, Settings settings, NamedXContentRegistry registry) { + return new S3Repository(metadata, settings, registry, service){ + @Override + protected void assertSnapshotOrGenericThread() { + // eliminate thread name check as we create repo manually on test/main threads + } + }; + } } public void testRepositoryCredentialsOverrideSecureCredentials() throws IOException { @@ -102,8 +112,8 @@ public void testRepositoryCredentialsOverrideSecureCredentials() throws IOExcept .put(S3Repository.ACCESS_KEY_SETTING.getKey(), "insecure_aws_key") .put(S3Repository.SECRET_KEY_SETTING.getKey(), "insecure_aws_secret").build()); try (S3RepositoryPlugin 
s3Plugin = new ProxyS3RepositoryPlugin(settings); - S3Repository s3repo = s3Plugin.createRepository(metadata, Settings.EMPTY, NamedXContentRegistry.EMPTY); - AmazonS3Reference s3Ref = ((S3BlobStore) s3repo.blobStore()).clientReference()) { + S3Repository s3repo = createAndStartRepository(metadata, s3Plugin); + AmazonS3Reference s3Ref = ((S3BlobStore) s3repo.blobStore()).clientReference()) { final AWSCredentials credentials = ((ProxyS3RepositoryPlugin.ClientAndCredentials) s3Ref.client()).credentials.getCredentials(); assertThat(credentials.getAWSAccessKeyId(), is("insecure_aws_key")); assertThat(credentials.getAWSSecretKey(), is("insecure_aws_secret")); @@ -125,8 +135,8 @@ public void testRepositoryCredentialsOnly() throws IOException { .put(S3Repository.SECRET_KEY_SETTING.getKey(), "insecure_aws_secret") .build()); try (S3RepositoryPlugin s3Plugin = new ProxyS3RepositoryPlugin(Settings.EMPTY); - S3Repository s3repo = s3Plugin.createRepository(metadata, Settings.EMPTY, NamedXContentRegistry.EMPTY); - AmazonS3Reference s3Ref = ((S3BlobStore) s3repo.blobStore()).clientReference()) { + S3Repository s3repo = createAndStartRepository(metadata, s3Plugin); + AmazonS3Reference s3Ref = ((S3BlobStore) s3repo.blobStore()).clientReference()) { final AWSCredentials credentials = ((ProxyS3RepositoryPlugin.ClientAndCredentials) s3Ref.client()).credentials.getCredentials(); assertThat(credentials.getAWSAccessKeyId(), is("insecure_aws_key")); assertThat(credentials.getAWSSecretKey(), is("insecure_aws_secret")); @@ -140,6 +150,12 @@ public void testRepositoryCredentialsOnly() throws IOException { + " See the breaking changes documentation for the next major version."); } + private S3Repository createAndStartRepository(RepositoryMetaData metadata, S3RepositoryPlugin s3Plugin) { + final S3Repository repository = s3Plugin.createRepository(metadata, Settings.EMPTY, NamedXContentRegistry.EMPTY); + repository.start(); + return repository; + } + public void testReinitSecureCredentials() throws IOException { final String clientName = randomFrom("default", "some_client"); // initial client node settings @@ -156,7 +172,7 @@ public void testReinitSecureCredentials() throws IOException { } final RepositoryMetaData metadata = new RepositoryMetaData("dummy-repo", "mock", builder.build()); try (S3RepositoryPlugin s3Plugin = new ProxyS3RepositoryPlugin(settings); - S3Repository s3repo = s3Plugin.createRepository(metadata, Settings.EMPTY, NamedXContentRegistry.EMPTY)) { + S3Repository s3repo = createAndStartRepository(metadata, s3Plugin)) { try (AmazonS3Reference s3Ref = ((S3BlobStore) s3repo.blobStore()).clientReference()) { final AWSCredentials credentials = ((ProxyS3RepositoryPlugin.ClientAndCredentials) s3Ref.client()).credentials .getCredentials(); diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java index b061e8e45edee..51fc48dfb598c 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3BlobStoreRepositoryTests.java @@ -51,6 +51,7 @@ import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked; import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.instanceOf; import static org.hamcrest.Matchers.not; import static org.mockito.Mockito.mock; @@ -84,8 +85,11 @@ public 
     static void wipeRepository() {
     }
 
     @Override
-    protected void createTestRepository(final String name) {
-        assertAcked(client().admin().cluster().preparePutRepository(name).setType(S3Repository.TYPE).setSettings(Settings.builder()
+    protected void createTestRepository(final String name, boolean verify) {
+        assertAcked(client().admin().cluster().preparePutRepository(name)
+            .setType(S3Repository.TYPE)
+            .setVerify(verify)
+            .setSettings(Settings.builder()
             .put(S3Repository.BUCKET_SETTING.getKey(), bucket)
             .put(S3Repository.CLIENT_NAME.getKey(), client)
             .put(S3Repository.BUFFER_SIZE_SETTING.getKey(), bufferSize)
@@ -96,6 +100,11 @@ protected void createTestRepository(final String name) {
             .put(S3Repository.SECRET_KEY_SETTING.getKey(), "not_used_but_this_is_a_secret")));
     }
 
+    @Override
+    protected void afterCreationCheck(Repository repository) {
+        assertThat(repository, instanceOf(S3Repository.class));
+    }
+
     @Override
     protected Collection<Class<? extends Plugin>> nodePlugins() {
         return Collections.singletonList(TestS3RepositoryPlugin.class);
@@ -125,7 +134,7 @@ void overrideCredentialsFromClusterState(S3Service awsService) {
 
     public void testInsecureRepositoryCredentials() throws Exception {
         final String repositoryName = "testInsecureRepositoryCredentials";
-        createTestRepository(repositoryName);
+        createAndCheckTestRepository(repositoryName);
         final NodeClient nodeClient = internalCluster().getInstance(NodeClient.class);
         final RestGetRepositoriesAction getRepoAction = new RestGetRepositoriesAction(Settings.EMPTY, mock(RestController.class),
             internalCluster().getInstance(SettingsFilter.class));
diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java
index 14f53ae5d3397..dcc46661bef61 100644
--- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java
+++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/S3RepositoryTests.java
@@ -29,11 +29,13 @@
 import org.elasticsearch.test.ESTestCase;
 import org.hamcrest.Matchers;
 
-import java.io.IOException;
 import java.util.Collections;
 import java.util.Map;
 
 import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.not;
+import static org.hamcrest.Matchers.nullValue;
 
 public class S3RepositoryTests extends ESTestCase {
 
@@ -70,27 +72,27 @@ public void close() {
         }
     }
 
-    public void testInvalidChunkBufferSizeSettings() throws IOException {
+    public void testInvalidChunkBufferSizeSettings() {
         // chunk < buffer should fail
         final Settings s1 = bufferAndChunkSettings(10, 5);
         final Exception e1 = expectThrows(RepositoryException.class,
-                () -> new S3Repository(getRepositoryMetaData(s1), Settings.EMPTY, NamedXContentRegistry.EMPTY, new DummyS3Service()));
+                () -> createS3Repo(getRepositoryMetaData(s1)));
         assertThat(e1.getMessage(), containsString("chunk_size (5mb) can't be lower than buffer_size (10mb)"));
         // chunk > buffer should pass
         final Settings s2 = bufferAndChunkSettings(5, 10);
-        new S3Repository(getRepositoryMetaData(s2), Settings.EMPTY, NamedXContentRegistry.EMPTY, new DummyS3Service()).close();
+        createS3Repo(getRepositoryMetaData(s2)).close();
         // chunk = buffer should pass
         final Settings s3 = bufferAndChunkSettings(5, 5);
-        new S3Repository(getRepositoryMetaData(s3), Settings.EMPTY, NamedXContentRegistry.EMPTY, new DummyS3Service()).close();
+        createS3Repo(getRepositoryMetaData(s3)).close();
         // buffer < 5mb should fail
         final Settings s4 = bufferAndChunkSettings(4, 10);
         final IllegalArgumentException e2 = expectThrows(IllegalArgumentException.class,
-                () -> new S3Repository(getRepositoryMetaData(s4), Settings.EMPTY, NamedXContentRegistry.EMPTY, new DummyS3Service())
+                () -> createS3Repo(getRepositoryMetaData(s4))
                         .close());
         assertThat(e2.getMessage(), containsString("failed to parse value [4mb] for setting [buffer_size], must be >= [5mb]"));
         final Settings s5 = bufferAndChunkSettings(5, 6000000);
         final IllegalArgumentException e3 = expectThrows(IllegalArgumentException.class,
-                () -> new S3Repository(getRepositoryMetaData(s5), Settings.EMPTY, NamedXContentRegistry.EMPTY, new DummyS3Service())
+                () -> createS3Repo(getRepositoryMetaData(s5))
                         .close());
         assertThat(e3.getMessage(), containsString("failed to parse value [6000000mb] for setting [chunk_size], must be <= [5tb]"));
     }
@@ -106,20 +108,32 @@ private RepositoryMetaData getRepositoryMetaData(Settings settings) {
         return new RepositoryMetaData("dummy-repo", "mock", Settings.builder().put(settings).build());
     }
 
-    public void testBasePathSetting() throws IOException {
+    public void testBasePathSetting() {
         final RepositoryMetaData metadata = new RepositoryMetaData("dummy-repo", "mock", Settings.builder()
             .put(S3Repository.BASE_PATH_SETTING.getKey(), "foo/bar").build());
-        try (S3Repository s3repo = new S3Repository(metadata, Settings.EMPTY, NamedXContentRegistry.EMPTY, new DummyS3Service())) {
+        try (S3Repository s3repo = createS3Repo(metadata)) {
             assertEquals("foo/bar/", s3repo.basePath().buildAsString());
         }
     }
 
-    public void testDefaultBufferSize() throws IOException {
+    public void testDefaultBufferSize() {
         final RepositoryMetaData metadata = new RepositoryMetaData("dummy-repo", "mock", Settings.EMPTY);
-        try (S3Repository s3repo = new S3Repository(metadata, Settings.EMPTY, NamedXContentRegistry.EMPTY, new DummyS3Service())) {
-            final long defaultBufferSize = ((S3BlobStore) s3repo.blobStore()).bufferSizeInBytes();
+        try (S3Repository s3repo = createS3Repo(metadata)) {
+            assertThat(s3repo.getBlobStore(), is(nullValue()));
+            s3repo.start();
+            final long defaultBufferSize = ((S3BlobStore) s3repo.blobStore()).bufferSizeInBytes();
+            assertThat(s3repo.getBlobStore(), not(nullValue()));
             assertThat(defaultBufferSize, Matchers.lessThanOrEqualTo(100L * 1024 * 1024));
             assertThat(defaultBufferSize, Matchers.greaterThanOrEqualTo(5L * 1024 * 1024));
         }
     }
+
+    private S3Repository createS3Repo(RepositoryMetaData metadata) {
+        return new S3Repository(metadata, Settings.EMPTY, NamedXContentRegistry.EMPTY, new DummyS3Service()) {
+            @Override
+            protected void assertSnapshotOrGenericThread() {
+                // eliminate thread name check as we create repo manually on test/main threads
+            }
+        };
+    }
 }
diff --git a/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java b/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java
index d5b2a6413e9a9..c6cbaa50cdf02 100644
--- a/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java
+++ b/server/src/main/java/org/elasticsearch/repositories/RepositoriesService.java
@@ -38,6 +38,7 @@
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.snapshots.RestoreService;
 import org.elasticsearch.snapshots.SnapshotsService;
+import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
 
 import java.io.IOException;
@@ -58,16 +59,20 @@ public class RepositoriesService extends AbstractComponent implements ClusterSta
 
     private final ClusterService clusterService;
 
+    private final ThreadPool threadPool;
+
     private final VerifyNodeRepositoryAction verifyAction;
 
     private volatile Map<String, Repository> repositories = Collections.emptyMap();
 
     @Inject
     public RepositoriesService(Settings settings, ClusterService clusterService, TransportService transportService,
-                               Map<String, Repository.Factory> typesRegistry) {
+                               Map<String, Repository.Factory> typesRegistry,
+                               ThreadPool threadPool) {
         super(settings);
         this.typesRegistry = typesRegistry;
         this.clusterService = clusterService;
+        this.threadPool = threadPool;
         // Doesn't make sense to maintain repositories on non-master and non-data nodes
         // Nothing happens there anyway
         if (DiscoveryNode.isDataNode(settings) || DiscoveryNode.isMasterNode(settings)) {
@@ -208,39 +213,51 @@ public boolean mustAck(DiscoveryNode discoveryNode) {
 
     public void verifyRepository(final String repositoryName, final ActionListener<VerifyResponse> listener) {
         final Repository repository = repository(repositoryName);
         try {
-            final String verificationToken = repository.startVerification();
-            if (verificationToken != null) {
+            threadPool.executor(ThreadPool.Names.SNAPSHOT).execute(() -> {
                 try {
-                    verifyAction.verify(repositoryName, verificationToken, new ActionListener<VerifyResponse>() {
-                        @Override
-                        public void onResponse(VerifyResponse verifyResponse) {
-                            try {
-                                repository.endVerification(verificationToken);
-                            } catch (Exception e) {
-                                logger.warn(() -> new ParameterizedMessage("[{}] failed to finish repository verification", repositoryName), e);
+                    final String verificationToken = repository.startVerification();
+                    if (verificationToken != null) {
+                        try {
+                            verifyAction.verify(repositoryName, verificationToken, new ActionListener<VerifyResponse>() {
+                                @Override
+                                public void onResponse(VerifyResponse verifyResponse) {
+                                    threadPool.executor(ThreadPool.Names.SNAPSHOT).execute(() -> {
+                                        try {
+                                            repository.endVerification(verificationToken);
+                                        } catch (Exception e) {
+                                            logger.warn(() -> new ParameterizedMessage(
+                                                "[{}] failed to finish repository verification", repositoryName), e);
+                                            listener.onFailure(e);
+                                            return;
+                                        }
+                                        listener.onResponse(verifyResponse);
+                                    });
+                                }
+
+                                @Override
+                                public void onFailure(Exception e) {
+                                    listener.onFailure(e);
+                                }
+                            });
+                        } catch (Exception e) {
+                            threadPool.executor(ThreadPool.Names.SNAPSHOT).execute(() -> {
+                                try {
+                                    repository.endVerification(verificationToken);
+                                } catch (Exception inner) {
+                                    inner.addSuppressed(e);
+                                    logger.warn(() -> new ParameterizedMessage(
+                                        "[{}] failed to finish repository verification", repositoryName), inner);
+                                }
                                 listener.onFailure(e);
-                                return;
-                            }
-                            listener.onResponse(verifyResponse);
-                        }
-
-                        @Override
-                        public void onFailure(Exception e) {
-                            listener.onFailure(e);
+                            });
                         }
-                    });
-                } catch (Exception e) {
-                    try {
-                        repository.endVerification(verificationToken);
-                    } catch (Exception inner) {
-                        inner.addSuppressed(e);
-                        logger.warn(() -> new ParameterizedMessage("[{}] failed to finish repository verification", repositoryName), inner);
+                    } else {
+                        listener.onResponse(new VerifyResponse(new DiscoveryNode[0], new VerificationFailure[0]));
                     }
+                } catch (Exception e) {
                     listener.onFailure(e);
                 }
-            } else {
-                listener.onResponse(new VerifyResponse(new DiscoveryNode[0], new VerificationFailure[0]));
-            }
+            });
         } catch (Exception e) {
             listener.onFailure(e);
         }
diff --git a/server/src/main/java/org/elasticsearch/repositories/VerifyNodeRepositoryAction.java b/server/src/main/java/org/elasticsearch/repositories/VerifyNodeRepositoryAction.java
index 380ae97408016..fbaf369912e8a 100644
--- a/server/src/main/java/org/elasticsearch/repositories/VerifyNodeRepositoryAction.java
+++ b/server/src/main/java/org/elasticsearch/repositories/VerifyNodeRepositoryAction.java
@@ -61,7 +61,7 @@ public VerifyNodeRepositoryAction(Settings settings, TransportService transportS
         this.transportService = transportService;
         this.clusterService = clusterService;
         this.repositoriesService = repositoriesService;
-        transportService.registerRequestHandler(ACTION_NAME, VerifyNodeRepositoryRequest::new, ThreadPool.Names.SAME, new VerifyNodeRepositoryRequestHandler());
+        transportService.registerRequestHandler(ACTION_NAME, VerifyNodeRepositoryRequest::new, ThreadPool.Names.SNAPSHOT, new VerifyNodeRepositoryRequestHandler());
     }
 
     public void verify(String repository, String verificationToken, final ActionListener<VerifyResponse> listener) {
diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java
index 86131fe468d28..22743e38839d5 100644
--- a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java
+++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java
@@ -34,6 +34,7 @@
 import org.apache.lucene.store.RateLimiter;
 import org.apache.lucene.util.BytesRef;
 import org.apache.lucene.util.BytesRefBuilder;
+import org.apache.lucene.util.SetOnce;
 import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.ResourceNotFoundException;
@@ -102,6 +103,7 @@
 import org.elasticsearch.snapshots.SnapshotInfo;
 import org.elasticsearch.snapshots.SnapshotMissingException;
 import org.elasticsearch.snapshots.SnapshotShardFailure;
+import org.elasticsearch.threadpool.ThreadPool;
 
 import java.io.FilterInputStream;
 import java.io.IOException;
@@ -126,8 +128,8 @@
 /**
  * BlobStore - based implementation of Snapshot Repository
  * <p>
- * This repository works with any {@link BlobStore} implementation. The blobStore should be initialized in the derived
- * class before {@link #doStart()} is called.
+ * This repository works with any {@link BlobStore} implementation. The blobStore could be (and preferred) lazy initialized in
+ * {@link #createBlobStore()}.
 * <p>
 * BlobStoreRepository maintains the following structure in the blob store
 * <pre>
@@ -169,8 +171,6 @@
  */
 public abstract class BlobStoreRepository extends AbstractLifecycleComponent implements Repository {
 
-    private BlobContainer snapshotsBlobContainer;
-
     protected final RepositoryMetaData metadata;
 
     protected final NamedXContentRegistry namedXContentRegistry;
@@ -225,6 +225,12 @@ public abstract class BlobStoreRepository extends AbstractLifecycleComponent imp
 
     private final ChecksumBlobStoreFormat<BlobStoreIndexShardSnapshots> indexShardSnapshotsFormat;
 
+    private final Object lock = new Object();
+
+    private final SetOnce<BlobContainer> blobContainer = new SetOnce<>();
+
+    private final SetOnce<BlobStore> blobStore = new SetOnce<>();
+
     /**
      * Constructs new BlobStoreRepository
      *
@@ -251,7 +257,6 @@ protected BlobStoreRepository(RepositoryMetaData metadata, Settings globalSettin
 
     @Override
     protected void doStart() {
-        this.snapshotsBlobContainer = blobStore().blobContainer(basePath());
         globalMetaDataFormat = new ChecksumBlobStoreFormat<>(METADATA_CODEC, METADATA_NAME_FORMAT,
             MetaData::fromXContent, namedXContentRegistry, isCompress());
         indexMetaDataFormat = new ChecksumBlobStoreFormat<>(INDEX_METADATA_CODEC, METADATA_NAME_FORMAT,
@@ -265,17 +270,82 @@ protected void doStop() {}
 
     @Override
     protected void doClose() {
-        try {
-            blobStore().close();
-        } catch (Exception t) {
-            logger.warn("cannot close blob store", t);
+        BlobStore store;
+        // read the blob store under the lock so that a store whose initialization races with this close is still closed
+        synchronized (lock) {
+            store = blobStore.get();
+        }
+        if (store != null) {
+            try {
+                store.close();
+            } catch (Exception t) {
+                logger.warn("cannot close blob store", t);
+            }
         }
     }
 
+    // package private, only used for testing
+    BlobContainer getBlobContainer() {
+        return blobContainer.get();
+    }
+
+    // for test purposes only
+    protected BlobStore getBlobStore() {
+        return blobStore.get();
+    }
+
     /**
-     * Returns the BlobStore to read and write data.
+     * Maintains a single lazy instance of {@link BlobContainer}.
      */
-    protected abstract BlobStore blobStore();
+    protected BlobContainer blobContainer() {
+        assertSnapshotOrGenericThread();
+
+        BlobContainer blobContainer = this.blobContainer.get();
+        if (blobContainer == null) {
+            synchronized (lock) {
+                blobContainer = this.blobContainer.get();
+                if (blobContainer == null) {
+                    blobContainer = blobStore().blobContainer(basePath());
+                    this.blobContainer.set(blobContainer);
+                }
+            }
+        }
+
+        return blobContainer;
+    }
+
+    /**
+     * Maintains a single lazy instance of {@link BlobStore}.
+     */
+    protected BlobStore blobStore() {
+        assertSnapshotOrGenericThread();
+
+        BlobStore store = blobStore.get();
+        if (store == null) {
+            synchronized (lock) {
+                store = blobStore.get();
+                if (store == null) {
+                    if (lifecycle.started() == false) {
+                        throw new RepositoryException(metadata.name(), "repository is not in started state");
+                    }
+                    try {
+                        store = createBlobStore();
+                    } catch (RepositoryException e) {
+                        throw e;
+                    } catch (Exception e) {
+                        throw new RepositoryException(metadata.name(), "cannot create blob store", e);
+                    }
+                    blobStore.set(store);
+                }
+            }
+        }
+        return store;
+    }
+
+    /**
+     * Creates a new BlobStore to read and write data.
+     */
+    protected abstract BlobStore createBlobStore() throws Exception;
 
     /**
      * Returns base path of the repository
@@ -319,12 +389,12 @@ public void initializeSnapshot(SnapshotId snapshotId, List<IndexId> indices, Met
             if (repositoryData.getAllSnapshotIds().stream().anyMatch(s -> s.getName().equals(snapshotName))) {
                 throw new InvalidSnapshotNameException(metadata.name(), snapshotId.getName(), "snapshot with the same name already exists");
             }
-            if (snapshotFormat.exists(snapshotsBlobContainer, snapshotId.getUUID())) {
+            if (snapshotFormat.exists(blobContainer(), snapshotId.getUUID())) {
                 throw new InvalidSnapshotNameException(metadata.name(), snapshotId.getName(), "snapshot with the same name already exists");
             }
 
             // Write Global MetaData
-            globalMetaDataFormat.write(clusterMetaData, snapshotsBlobContainer, snapshotId.getUUID());
+            globalMetaDataFormat.write(clusterMetaData, blobContainer(), snapshotId.getUUID());
 
             // write the index metadata for each index in the snapshot
             for (IndexId index : indices) {
@@ -421,7 +491,7 @@ public void deleteSnapshot(SnapshotId snapshotId, long repositoryStateId) {
 
     private void deleteSnapshotBlobIgnoringErrors(final SnapshotInfo snapshotInfo, final String blobId) {
         try {
-            snapshotFormat.delete(snapshotsBlobContainer, blobId);
+            snapshotFormat.delete(blobContainer(), blobId);
         } catch (IOException e) {
             if (snapshotInfo != null) {
                 logger.warn(() -> new ParameterizedMessage("[{}] Unable to delete snapshot file [{}]",
@@ -434,7 +504,7 @@ private void deleteSnapshotBlobIgnoringErrors(final SnapshotInfo snapshotInfo, f
 
     private void deleteGlobalMetaDataBlobIgnoringErrors(final SnapshotInfo snapshotInfo, final String blobId) {
         try {
-            globalMetaDataFormat.delete(snapshotsBlobContainer, blobId);
+            globalMetaDataFormat.delete(blobContainer(), blobId);
         } catch (IOException e) {
             if (snapshotInfo != null) {
                 logger.warn(() -> new ParameterizedMessage("[{}] Unable to delete global metadata file [{}]",
@@ -472,7 +542,7 @@ public SnapshotInfo finalizeSnapshot(final SnapshotId snapshotId,
             startTime, failure, System.currentTimeMillis(), totalShards, shardFailures,
             includeGlobalState);
         try {
-            snapshotFormat.write(blobStoreSnapshot, snapshotsBlobContainer, snapshotId.getUUID());
+            snapshotFormat.write(blobStoreSnapshot, blobContainer(), snapshotId.getUUID());
             final RepositoryData repositoryData = getRepositoryData();
             writeIndexGen(repositoryData.addSnapshot(snapshotId, blobStoreSnapshot.state(), indices), repositoryStateId);
         } catch (FileAlreadyExistsException ex) {
@@ -490,7 +560,7 @@ public SnapshotInfo finalizeSnapshot(final SnapshotId snapshotId,
     @Override
     public SnapshotInfo getSnapshotInfo(final SnapshotId snapshotId) {
         try {
-            return snapshotFormat.read(snapshotsBlobContainer, snapshotId.getUUID());
+            return snapshotFormat.read(blobContainer(), snapshotId.getUUID());
         } catch (NoSuchFileException ex) {
             throw new SnapshotMissingException(metadata.name(), snapshotId, ex);
         } catch (IOException | NotXContentException ex) {
@@ -501,7 +571,7 @@ public SnapshotInfo getSnapshotInfo(final SnapshotId snapshotId) {
     @Override
     public MetaData getSnapshotGlobalMetaData(final SnapshotId snapshotId) {
         try {
-            return globalMetaDataFormat.read(snapshotsBlobContainer, snapshotId.getUUID());
+            return globalMetaDataFormat.read(blobContainer(), snapshotId.getUUID());
         } catch (NoSuchFileException ex) {
             throw new SnapshotMissingException(metadata.name(), snapshotId, ex);
         } catch (IOException ex) {
@@ -543,11 +613,21 @@ public long getRestoreThrottleTimeInNanos() {
         return restoreRateLimitingTimeInNanos.count();
     }
 
+    protected void assertSnapshotOrGenericThread() {
+        assert Thread.currentThread().getName().contains(ThreadPool.Names.SNAPSHOT)
+            || Thread.currentThread().getName().contains(ThreadPool.Names.GENERIC) :
+            "Expected current thread [" + Thread.currentThread() + "] to be the snapshot or generic thread.";
+    }
+
     @Override
     public String startVerification() {
         try {
             if (isReadOnly()) {
-                // It's readonly - so there is not much we can do here to verify it
+                // TODO: add repository verification for read-only repositories
+
+                // It's readonly - so there is not much we can do here to verify it apart from trying to create blobStore()
+                // and checking that it is accessible on the master
+                blobStore();
                 return null;
             } else {
                 String seed = UUIDs.randomBase64UUID();
@@ -584,7 +664,7 @@ public RepositoryData getRepositoryData() {
             final String snapshotsIndexBlobName = INDEX_FILE_PREFIX + Long.toString(indexGen);
 
             RepositoryData repositoryData;
-            try (InputStream blob = snapshotsBlobContainer.readBlob(snapshotsIndexBlobName)) {
+            try (InputStream blob = blobContainer().readBlob(snapshotsIndexBlobName)) {
                 BytesStreamOutput out = new BytesStreamOutput();
                 Streams.copy(blob, out);
                 // EMPTY is safe here because RepositoryData#fromXContent calls namedObject
@@ -598,7 +678,7 @@ public RepositoryData getRepositoryData() {
             }
 
             // now load the incompatible snapshot ids, if they exist
-            try (InputStream blob = snapshotsBlobContainer.readBlob(INCOMPATIBLE_SNAPSHOTS_BLOB)) {
+            try (InputStream blob = blobContainer().readBlob(INCOMPATIBLE_SNAPSHOTS_BLOB)) {
                 BytesStreamOutput out = new BytesStreamOutput();
                 Streams.copy(blob, out);
                 try (XContentParser parser = XContentHelper.createParser(NamedXContentRegistry.EMPTY,
@@ -636,11 +716,6 @@ public boolean isReadOnly() {
         return readOnly;
     }
 
-    // package private, only use for testing
-    BlobContainer blobContainer() {
-        return snapshotsBlobContainer;
-    }
-
     protected void writeIndexGen(final RepositoryData repositoryData, final long repositoryStateId) throws IOException {
         assert isReadOnly() == false; // can not write to a read only repository
         final long currentGen = latestIndexBlobId();
@@ -668,7 +743,7 @@ protected void writeIndexGen(final RepositoryData repositoryData, final long rep
         // delete the N-2 index file if it exists, keep the previous one around as a backup
         if (isReadOnly() == false && newGen - 2 >= 0) {
             final String oldSnapshotIndexFile = INDEX_FILE_PREFIX + Long.toString(newGen - 2);
-            snapshotsBlobContainer.deleteBlobIgnoringIfNotExists(oldSnapshotIndexFile);
+            blobContainer().deleteBlobIgnoringIfNotExists(oldSnapshotIndexFile);
         }
 
         // write the current generation to the index-latest file
@@ -736,7 +811,7 @@ long latestIndexBlobId() throws IOException {
 
     // package private for testing
     long readSnapshotIndexLatestBlob() throws IOException {
-        try (InputStream blob = snapshotsBlobContainer.readBlob(INDEX_LATEST_BLOB)) {
+        try (InputStream blob = blobContainer().readBlob(INDEX_LATEST_BLOB)) {
             BytesStreamOutput out = new BytesStreamOutput();
             Streams.copy(blob, out);
             return Numbers.bytesToLong(out.bytes().toBytesRef());
@@ -744,7 +819,7 @@ long readSnapshotIndexLatestBlob() throws IOException {
     }
 
     private long listBlobsToGetLatestIndexId() throws IOException {
-        Map<String, BlobMetaData> blobs = snapshotsBlobContainer.listBlobsByPrefix(INDEX_FILE_PREFIX);
+        Map<String, BlobMetaData> blobs = blobContainer().listBlobsByPrefix(INDEX_FILE_PREFIX);
         long latest = RepositoryData.EMPTY_REPO_GEN;
         if (blobs.isEmpty()) {
             // no snapshot index blobs have been written yet
@@ -766,7 +841,7 @@ private long listBlobsToGetLatestIndexId() throws IOException {
 
     private void writeAtomic(final String blobName, final BytesReference bytesRef, boolean failIfAlreadyExists) throws IOException {
         try (InputStream stream = bytesRef.streamInput()) {
-            snapshotsBlobContainer.writeBlobAtomic(blobName, stream, bytesRef.length(), failIfAlreadyExists);
+            blobContainer().writeBlobAtomic(blobName, stream, bytesRef.length(), failIfAlreadyExists);
         }
     }
 
@@ -806,6 +881,7 @@ public IndexShardSnapshotStatus getShardSnapshotStatus(SnapshotId snapshotId, Ve
 
     @Override
     public void verify(String seed, DiscoveryNode localNode) {
+        assertSnapshotOrGenericThread();
         BlobContainer testBlobContainer = blobStore().blobContainer(basePath().add(testBlobPrefix(seed)));
         if (testBlobContainer.blobExists("master.dat")) {
             try  {
diff --git a/server/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java b/server/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java
index 4d4ab60feef0f..643ff2bc93d3c 100644
--- a/server/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java
+++ b/server/src/main/java/org/elasticsearch/repositories/fs/FsRepository.java
@@ -31,7 +31,6 @@
 import org.elasticsearch.repositories.RepositoryException;
 import org.elasticsearch.repositories.blobstore.BlobStoreRepository;
 
-import java.io.IOException;
 import java.nio.file.Path;
 import java.util.function.Function;
 
@@ -61,8 +60,7 @@ public class FsRepository extends BlobStoreRepository {
     public static final Setting<Boolean> COMPRESS_SETTING = Setting.boolSetting("compress", false, Property.NodeScope);
     public static final Setting<Boolean> REPOSITORIES_COMPRESS_SETTING =
         Setting.boolSetting("repositories.fs.compress", false, Property.NodeScope);
-
-    private final FsBlobStore blobStore;
+    private final Environment environment;
 
     private ByteSizeValue chunkSize;
 
@@ -74,37 +72,45 @@ public class FsRepository extends BlobStoreRepository {
      * Constructs a shared file system repository.
      */
     public FsRepository(RepositoryMetaData metadata, Environment environment,
-                        NamedXContentRegistry namedXContentRegistry) throws IOException {
+                        NamedXContentRegistry namedXContentRegistry) {
         super(metadata, environment.settings(), namedXContentRegistry);
+        this.environment = environment;
         String location = REPOSITORIES_LOCATION_SETTING.get(metadata.settings());
         if (location.isEmpty()) {
-            logger.warn("the repository location is missing, it should point to a shared file system location that is available on all master and data nodes");
+            logger.warn("the repository location is missing, it should point to a shared file system location"
+                + " that is available on all master and data nodes");
             throw new RepositoryException(metadata.name(), "missing location");
         }
         Path locationFile = environment.resolveRepoFile(location);
         if (locationFile == null) {
             if (environment.repoFiles().length > 0) {
-                logger.warn("The specified location [{}] doesn't start with any repository paths specified by the path.repo setting: [{}] ", location, environment.repoFiles());
-                throw new RepositoryException(metadata.name(), "location [" + location + "] doesn't match any of the locations specified by path.repo");
+                logger.warn("The specified location [{}] doesn't start with any "
+                    + "repository paths specified by the path.repo setting: [{}] ", location, environment.repoFiles());
+                throw new RepositoryException(metadata.name(), "location [" + location
+                    + "] doesn't match any of the locations specified by path.repo");
             } else {
-                logger.warn("The specified location [{}] should start with a repository path specified by the path.repo setting, but the path.repo setting was not set on this node", location);
-                throw new RepositoryException(metadata.name(), "location [" + location + "] doesn't match any of the locations specified by path.repo because this setting is empty");
+                logger.warn("The specified location [{}] should start with a repository path specified by"
+                    + " the path.repo setting, but the path.repo setting was not set on this node", location);
+                throw new RepositoryException(metadata.name(), "location [" + location
+                    + "] doesn't match any of the locations specified by path.repo because this setting is empty");
             }
         }
 
-        blobStore = new FsBlobStore(settings, locationFile);
         if (CHUNK_SIZE_SETTING.exists(metadata.settings())) {
             this.chunkSize = CHUNK_SIZE_SETTING.get(metadata.settings());
         } else {
             this.chunkSize = REPOSITORIES_CHUNK_SIZE_SETTING.get(settings);
         }
-        this.compress = COMPRESS_SETTING.exists(metadata.settings()) ? COMPRESS_SETTING.get(metadata.settings()) : REPOSITORIES_COMPRESS_SETTING.get(settings);
+        this.compress = COMPRESS_SETTING.exists(metadata.settings())
+            ? COMPRESS_SETTING.get(metadata.settings()) : REPOSITORIES_COMPRESS_SETTING.get(settings);
         this.basePath = BlobPath.cleanPath();
     }
 
     @Override
-    protected BlobStore blobStore() {
-        return blobStore;
+    protected BlobStore createBlobStore() throws Exception {
+        final String location = REPOSITORIES_LOCATION_SETTING.get(metadata.settings());
+        final Path locationFile = environment.resolveRepoFile(location);
+        return new FsBlobStore(settings, locationFile);
     }
 
     @Override
diff --git a/server/src/test/java/org/elasticsearch/indices/cluster/IndicesClusterStateServiceRandomUpdatesTests.java b/server/src/test/java/org/elasticsearch/indices/cluster/IndicesClusterStateServiceRandomUpdatesTests.java
index cc971ed1b043a..3a5302bcec2e6 100644
--- a/server/src/test/java/org/elasticsearch/indices/cluster/IndicesClusterStateServiceRandomUpdatesTests.java
+++ b/server/src/test/java/org/elasticsearch/indices/cluster/IndicesClusterStateServiceRandomUpdatesTests.java
@@ -406,7 +406,7 @@ private IndicesClusterStateService createIndicesClusterStateService(DiscoveryNod
             Collections.emptySet());
         final ClusterService clusterService = mock(ClusterService.class);
         final RepositoriesService repositoriesService = new RepositoriesService(settings, clusterService,
-            transportService, null);
+            transportService, null, threadPool);
         final PeerRecoveryTargetService recoveryTargetService = new PeerRecoveryTargetService(settings, threadPool,
             transportService, null, clusterService);
         final ShardStateAction shardStateAction = mock(ShardStateAction.class);
diff --git a/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryRestoreTests.java b/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryRestoreTests.java
index 7a1d3a894204f..0eae9a1420068 100644
--- a/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryRestoreTests.java
+++ b/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryRestoreTests.java
@@ -173,10 +173,17 @@ public void testSnapshotWithConflictingName() throws IOException {
     }
 
     /** Create a {@link Repository} with a random name **/
-    private Repository createRepository() throws IOException {
+    private Repository createRepository() {
         Settings settings = Settings.builder().put("location", randomAlphaOfLength(10)).build();
         RepositoryMetaData repositoryMetaData = new RepositoryMetaData(randomAlphaOfLength(10), FsRepository.TYPE, settings);
-        return new FsRepository(repositoryMetaData, createEnvironment(), xContentRegistry());
+        final FsRepository repository = new FsRepository(repositoryMetaData, createEnvironment(), xContentRegistry()) {
+            @Override
+            protected void assertSnapshotOrGenericThread() {
+                // eliminate thread name check as we create repo manually
+            }
+        };
+        repository.start();
+        return repository;
     }
 
     /** Create an {@link Environment} with random path.home and path.repo **/
diff --git a/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryTests.java b/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryTests.java
index 7e4d5cc54a900..1abdb97f174b6 100644
--- a/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryTests.java
+++ b/server/src/test/java/org/elasticsearch/repositories/blobstore/BlobStoreRepositoryTests.java
@@ -24,10 +24,16 @@
 import org.elasticsearch.client.Client;
 import org.elasticsearch.common.UUIDs;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.xcontent.NamedXContentRegistry;
+import org.elasticsearch.env.Environment;
+import org.elasticsearch.plugins.Plugin;
+import org.elasticsearch.plugins.RepositoryPlugin;
 import org.elasticsearch.repositories.IndexId;
 import org.elasticsearch.repositories.RepositoriesService;
+import org.elasticsearch.repositories.Repository;
 import org.elasticsearch.repositories.RepositoryData;
 import org.elasticsearch.repositories.RepositoryException;
+import org.elasticsearch.repositories.fs.FsRepository;
 import org.elasticsearch.snapshots.SnapshotId;
 import org.elasticsearch.snapshots.SnapshotState;
 import org.elasticsearch.test.ESIntegTestCase;
@@ -37,18 +43,42 @@
 import java.nio.file.Path;
 import java.util.ArrayList;
 import java.util.Arrays;
+import java.util.Collection;
 import java.util.Collections;
 import java.util.List;
+import java.util.Map;
 import java.util.stream.Collectors;
 
 import static org.elasticsearch.repositories.RepositoryDataTests.generateRandomRepoData;
 import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.nullValue;
 
 /**
  * Tests for the {@link BlobStoreRepository} and its subclasses.
  */
 public class BlobStoreRepositoryTests extends ESSingleNodeTestCase {
 
+    static final String REPO_TYPE = "fsLike";
+
+    protected Collection<Class<? extends Plugin>> getPlugins() {
+        return Arrays.asList(FsLikeRepoPlugin.class);
+    }
+
+    // this plugin exists to drop the assertSnapshotOrGenericThread check, since almost all access in this test happens on test threads
+    public static class FsLikeRepoPlugin extends org.elasticsearch.plugins.Plugin implements RepositoryPlugin {
+
+        @Override
+        public Map<String, Repository.Factory> getRepositories(Environment env, NamedXContentRegistry namedXContentRegistry) {
+            return Collections.singletonMap(REPO_TYPE,
+                (metadata) -> new FsRepository(metadata, env, namedXContentRegistry) {
+                    @Override
+                    protected void assertSnapshotOrGenericThread() {
+                        // eliminate thread name check as we access blobStore on test/main threads
+                    }
+                });
+        }
+    }
+
     public void testRetrieveSnapshots() throws Exception {
         final Client client = client();
         final Path location = ESIntegTestCase.randomRepoPath(node().settings());
@@ -57,7 +87,7 @@ public void testRetrieveSnapshots() throws Exception {
         logger.info("-->  creating repository");
         PutRepositoryResponse putRepositoryResponse =
             client.admin().cluster().preparePutRepository(repositoryName)
-                                    .setType("fs")
+                                    .setType(REPO_TYPE)
                                     .setSettings(Settings.builder().put(node().settings()).put("location", location))
                                     .get();
         assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true));
@@ -209,7 +239,7 @@ private BlobStoreRepository setupRepo() {
 
         PutRepositoryResponse putRepositoryResponse =
             client.admin().cluster().preparePutRepository(repositoryName)
-                                    .setType("fs")
+                                    .setType(REPO_TYPE)
                                     .setSettings(Settings.builder().put(node().settings()).put("location", location))
                                     .get();
         assertThat(putRepositoryResponse.isAcknowledged(), equalTo(true));
@@ -217,6 +247,7 @@ private BlobStoreRepository setupRepo() {
         final RepositoriesService repositoriesService = getInstanceFromNode(RepositoriesService.class);
         @SuppressWarnings("unchecked") final BlobStoreRepository repository =
             (BlobStoreRepository) repositoriesService.repository(repositoryName);
+        assertThat("getBlobContainer has to be lazy initialized", repository.getBlobContainer(), nullValue());
         return repository;
     }
 
diff --git a/server/src/test/java/org/elasticsearch/snapshots/FsBlobStoreRepositoryIT.java b/server/src/test/java/org/elasticsearch/repositories/fs/FsBlobStoreRepositoryIT.java
similarity index 79%
rename from server/src/test/java/org/elasticsearch/snapshots/FsBlobStoreRepositoryIT.java
rename to server/src/test/java/org/elasticsearch/repositories/fs/FsBlobStoreRepositoryIT.java
index 792b1bdbdddb0..1ed42cb24746b 100644
--- a/server/src/test/java/org/elasticsearch/snapshots/FsBlobStoreRepositoryIT.java
+++ b/server/src/test/java/org/elasticsearch/repositories/fs/FsBlobStoreRepositoryIT.java
@@ -16,22 +16,29 @@
  * specific language governing permissions and limitations
  * under the License.
  */
-package org.elasticsearch.snapshots;
+package org.elasticsearch.repositories.fs;
 
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.ByteSizeUnit;
+import org.elasticsearch.repositories.Repository;
 import org.elasticsearch.repositories.blobstore.ESBlobStoreRepositoryIntegTestCase;
 
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
+import static org.hamcrest.Matchers.instanceOf;
 
 public class FsBlobStoreRepositoryIT extends ESBlobStoreRepositoryIntegTestCase {
     @Override
-    protected void createTestRepository(String name) {
+    protected void createTestRepository(String name, boolean verify) {
         assertAcked(client().admin().cluster().preparePutRepository(name)
+            .setVerify(verify)
             .setType("fs").setSettings(Settings.builder()
                 .put("location", randomRepoPath())
                 .put("compress", randomBoolean())
                 .put("chunk_size", randomIntBetween(100, 1000), ByteSizeUnit.BYTES)));
+    }
 
+    @Override
+    protected void afterCreationCheck(Repository repository) {
+        assertThat(repository, instanceOf(FsRepository.class));
     }
 }
diff --git a/server/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java b/server/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java
index c9ca1637b1ade..d2954a4c128ba 100644
--- a/server/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java
+++ b/server/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java
@@ -19,6 +19,7 @@
 
 package org.elasticsearch.snapshots;
 
+import org.apache.lucene.util.SetOnce;
 import org.elasticsearch.ExceptionsHelper;
 import org.elasticsearch.Version;
 import org.elasticsearch.action.ActionFuture;
@@ -93,6 +94,7 @@
 import org.elasticsearch.script.StoredScriptsIT;
 import org.elasticsearch.snapshots.mockstore.MockRepository;
 import org.elasticsearch.test.junit.annotations.TestLogging;
+import org.elasticsearch.threadpool.ThreadPool;
 
 import java.io.IOException;
 import java.nio.channels.SeekableByteChannel;
@@ -1262,7 +1264,7 @@ public void testDeleteSnapshotWithMissingIndexAndShardMetadata() throws Exceptio
         RepositoriesService service = internalCluster().getInstance(RepositoriesService.class, internalCluster().getMasterName());
         Repository repository = service.repository("test-repo");
 
-        final Map<String, IndexId> indexIds = repository.getRepositoryData().getIndices();
+        final Map<String, IndexId> indexIds = getRepositoryData(repository).getIndices();
         final Path indicesPath = repo.resolve("indices");
 
         logger.info("--> delete index metadata and shard metadata");
@@ -1739,6 +1741,7 @@ public void testDeleteRepositoryWhileSnapshotting() throws Exception {
 
         logger.info("--> trying to create a repository with different name");
         assertAcked(client.admin().cluster().preparePutRepository("test-repo-2")
+                .setVerify(false) // do not do verification itself as snapshot threads could be fully blocked
                 .setType("fs").setSettings(Settings.builder().put("location", repositoryLocation.resolve("test"))));
 
         logger.info("--> unblocking blocked node");
@@ -2563,7 +2566,7 @@ public void testDeleteOrphanSnapshot() throws Exception {
 
         logger.info("--> emulate an orphan snapshot");
         RepositoriesService repositoriesService = internalCluster().getInstance(RepositoriesService.class, internalCluster().getMasterName());
-        final RepositoryData repositoryData = repositoriesService.repository(repositoryName).getRepositoryData();
+        final RepositoryData repositoryData = getRepositoryData(repositoriesService.repository(repositoryName));
         final IndexId indexId = repositoryData.resolveIndexId(idxName);
 
         clusterService.submitStateUpdateTask("orphan snapshot test", new ClusterStateUpdateTask() {
@@ -2784,7 +2787,8 @@ public void testRestoreSnapshotWithCorruptedIndexMetadata() throws Exception {
         RepositoriesService service = internalCluster().getInstance(RepositoriesService.class, internalCluster().getMasterName());
         Repository repository = service.repository("test-repo");
 
-        final Map<String, IndexId> indexIds = repository.getRepositoryData().getIndices();
+        final RepositoryData repositoryData = getRepositoryData(repository);
+        final Map<String, IndexId> indexIds = repositoryData.getIndices();
         assertThat(indexIds.size(), equalTo(nbIndices));
 
         // Choose a random index from the snapshot
@@ -3445,6 +3449,19 @@ public void testAbortedSnapshotDuringInitDoesNotStart() throws Exception {
         }
     }
 
+    private RepositoryData getRepositoryData(Repository repository) throws InterruptedException {
+        ThreadPool threadPool = internalCluster().getInstance(ThreadPool.class, internalCluster().getMasterName());
+        final SetOnce<RepositoryData> repositoryData = new SetOnce<>();
+        final CountDownLatch latch = new CountDownLatch(1);
+        threadPool.executor(ThreadPool.Names.SNAPSHOT).execute(() -> {
+            repositoryData.set(repository.getRepositoryData());
+            latch.countDown();
+        });
+
+        latch.await();
+        return repositoryData.get();
+    }
+
     private void verifySnapshotInfo(final GetSnapshotsResponse response, final Map<String, List<String>> indicesPerSnapshot) {
         for (SnapshotInfo snapshotInfo : response.getSnapshots()) {
             final List<String> expected = snapshotInfo.indices();
diff --git a/server/src/test/java/org/elasticsearch/snapshots/mockstore/MockRepository.java b/server/src/test/java/org/elasticsearch/snapshots/mockstore/MockRepository.java
index d05a10905d858..75a86831bc554 100644
--- a/server/src/test/java/org/elasticsearch/snapshots/mockstore/MockRepository.java
+++ b/server/src/test/java/org/elasticsearch/snapshots/mockstore/MockRepository.java
@@ -92,8 +92,6 @@ public long getFailureCount() {
 
     private final long waitAfterUnblock;
 
-    private final MockBlobStore mockBlobStore;
-
     private final String randomPrefix;
 
     private volatile boolean blockOnInitialization;
@@ -128,7 +126,6 @@ public MockRepository(RepositoryMetaData metadata, Environment environment,
         waitAfterUnblock = metadata.settings().getAsLong("wait_after_unblock", 0L);
         allowAtomicOperations = metadata.settings().getAsBoolean("allow_atomic_operations", true);
         logger.info("starting mock repository with random prefix {}", randomPrefix);
-        mockBlobStore = new MockBlobStore(super.blobStore());
     }
 
     @Override
@@ -163,8 +160,8 @@ protected void doStop() {
     }
 
     @Override
-    protected BlobStore blobStore() {
-        return mockBlobStore;
+    protected BlobStore createBlobStore() throws Exception {
+        return new MockBlobStore(super.createBlobStore());
     }
 
     public synchronized void unblock() {
@@ -195,7 +192,7 @@ public boolean blocked() {
     }
 
     private synchronized boolean blockExecution() {
-        logger.debug("Blocking execution");
+        logger.debug("[{}] Blocking execution", metadata.name());
         boolean wasBlocked = false;
         try {
             while (blockOnDataFiles || blockOnControlFiles || blockOnInitialization || blockOnWriteIndexFile ||
@@ -207,7 +204,7 @@ private synchronized boolean blockExecution() {
         } catch (InterruptedException ex) {
             Thread.currentThread().interrupt();
         }
-        logger.debug("Unblocking execution");
+        logger.debug("[{}] Unblocking execution", metadata.name());
         return wasBlocked;
     }
 
@@ -285,7 +282,7 @@ private void maybeIOExceptionOrBlock(String blobName) throws IOException {
             }
 
             private void blockExecutionAndMaybeWait(final String blobName) {
-                logger.info("blocking I/O operation for file [{}] at path [{}]", blobName, path());
+                logger.info("[{}] blocking I/O operation for file [{}] at path [{}]", metadata.name(), blobName, path());
                 if (blockExecution() && waitAfterUnblock > 0) {
                     try {
                         // Delay operation after unblocking
diff --git a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java
index bf9c81932348d..439728bac9ea6 100644
--- a/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/repositories/blobstore/ESBlobStoreRepositoryIntegTestCase.java
@@ -18,6 +18,7 @@
  */
 package org.elasticsearch.repositories.blobstore;
 
+import org.apache.lucene.util.SetOnce;
 import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotRequestBuilder;
 import org.elasticsearch.action.admin.cluster.snapshots.create.CreateSnapshotResponse;
 import org.elasticsearch.action.admin.cluster.snapshots.restore.RestoreSnapshotRequestBuilder;
@@ -27,34 +28,61 @@
 import org.elasticsearch.common.blobstore.BlobContainer;
 import org.elasticsearch.repositories.IndexId;
 import org.elasticsearch.repositories.RepositoriesService;
+import org.elasticsearch.repositories.Repository;
 import org.elasticsearch.repositories.RepositoryData;
 import org.elasticsearch.snapshots.SnapshotMissingException;
 import org.elasticsearch.snapshots.SnapshotRestoreException;
 import org.elasticsearch.test.ESIntegTestCase;
+import org.elasticsearch.threadpool.ThreadPool;
 
 import java.util.Arrays;
 import java.util.HashSet;
 import java.util.List;
 import java.util.Locale;
 import java.util.Set;
+import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.ExecutionException;
 
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
 import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.greaterThan;
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.notNullValue;
+import static org.hamcrest.Matchers.nullValue;
 
 /**
  * Basic integration tests for blob-based repository validation.
  */
 public abstract class ESBlobStoreRepositoryIntegTestCase extends ESIntegTestCase {
 
-    protected abstract void createTestRepository(String name);
+    protected abstract void createTestRepository(String name, boolean verify);
+
+    protected void afterCreationCheck(Repository repository) {
+    }
+
+    protected void createAndCheckTestRepository(String name) {
+        final boolean verify = randomBoolean();
+        createTestRepository(name, verify);
+
+        final Iterable repositoriesServices =
+            internalCluster().getDataOrMasterNodeInstances(RepositoriesService.class);
+
+        for (RepositoriesService repositoriesService : repositoriesServices) {
+            final BlobStoreRepository repository = (BlobStoreRepository) repositoriesService.repository(name);
+
+            afterCreationCheck(repository);
+            assertThat("blob store has to be lazy initialized",
+                repository.getBlobStore(), verify ? is(notNullValue()) : is(nullValue()));
+        }
+    }
 
     public void testSnapshotAndRestore() throws Exception {
         final String repoName = randomAsciiName();
         logger.info("-->  creating repository {}", repoName);
-        createTestRepository(repoName);
+        createAndCheckTestRepository(repoName);
         int indexCount = randomIntBetween(1, 5);
         int[] docCounts = new int[indexCount];
         String[] indexNames = generateRandomNames(indexCount);
@@ -125,7 +153,7 @@ public void testSnapshotAndRestore() throws Exception {
     public void testMultipleSnapshotAndRollback() throws Exception {
         String repoName = randomAsciiName();
         logger.info("-->  creating repository {}", repoName);
-        createTestRepository(repoName);
+        createAndCheckTestRepository(repoName);
         int iterationCount = randomIntBetween(2, 5);
         int[] docCounts = new int[iterationCount];
         String indexName = randomAsciiName();
@@ -177,12 +205,12 @@ public void testMultipleSnapshotAndRollback() throws Exception {
         }
     }
 
-    public void testIndicesDeletedFromRepository() {
+    public void testIndicesDeletedFromRepository() throws Exception {
         Client client = client();
 
         logger.info("-->  creating repository");
         final String repoName = "test-repo";
-        createTestRepository(repoName);
+        createAndCheckTestRepository(repoName);
 
         createIndex("test-idx-1", "test-idx-2", "test-idx-3");
         ensureGreen();
@@ -219,12 +247,22 @@ public void testIndicesDeletedFromRepository() {
 
         logger.info("--> verify index folder deleted from blob container");
         RepositoriesService repositoriesSvc = internalCluster().getInstance(RepositoriesService.class, internalCluster().getMasterName());
+        ThreadPool threadPool = internalCluster().getInstance(ThreadPool.class, internalCluster().getMasterName());
         @SuppressWarnings("unchecked") BlobStoreRepository repository = (BlobStoreRepository) repositoriesSvc.repository(repoName);
-        BlobContainer indicesBlobContainer = repository.blobStore().blobContainer(repository.basePath().add("indices"));
-        RepositoryData repositoryData = repository.getRepositoryData();
-        for (IndexId indexId : repositoryData.getIndices().values()) {
+
+        final SetOnce<BlobContainer> indicesBlobContainer = new SetOnce<>();
+        final SetOnce<RepositoryData> repositoryData = new SetOnce<>();
+        final CountDownLatch latch = new CountDownLatch(1);
+        threadPool.executor(ThreadPool.Names.SNAPSHOT).execute(() -> {
+            indicesBlobContainer.set(repository.blobStore().blobContainer(repository.basePath().add("indices")));
+            repositoryData.set(repository.getRepositoryData());
+            latch.countDown();
+        });
+
+        latch.await();
+        for (IndexId indexId : repositoryData.get().getIndices().values()) {
             if (indexId.getName().equals("test-idx-3")) {
-                assertFalse(indicesBlobContainer.blobExists(indexId.getId())); // deleted index
+                assertFalse(indicesBlobContainer.get().blobExists(indexId.getId())); // deleted index
             }
         }
     }
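
The heart of this patch is the create-once, double-checked lazy initialization that blobStore()
and blobContainer() now perform, instead of building the blob store eagerly in doStart(). Below
is a minimal, self-contained sketch of that pattern using plain JDK types; the names (LazyHolder,
create) are illustrative stand-ins, not the repository's actual API:

    import java.util.concurrent.atomic.AtomicReference;

    // Holds an expensive resource that is created at most once, on first use.
    abstract class LazyHolder<T> {
        private final AtomicReference<T> resource = new AtomicReference<>();
        private final Object lock = new Object();

        // Creates the expensive resource; invoked at most once.
        protected abstract T create() throws Exception;

        final T get() {
            T value = resource.get();
            if (value == null) {                // fast path: no locking once initialized
                synchronized (lock) {
                    value = resource.get();     // re-check under the lock
                    if (value == null) {
                        try {
                            value = create();
                        } catch (Exception e) {
                            throw new IllegalStateException("cannot create resource", e);
                        }
                        resource.set(value);
                    }
                }
            }
            return value;
        }
    }

The same shape appears twice in the patch, once for the BlobStore and once for the BlobContainer
derived from it, which is also why the tests above can assert that getBlobStore() stays null until
something actually touches the repository.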

From 315999de54526214877ab30d58d92488482d164c Mon Sep 17 00:00:00 2001
From: Michael Basnight 
Date: Fri, 13 Jul 2018 13:49:47 -0500
Subject: [PATCH 028/107] Watcher: Make settings reloadable (#31746)

This commit allows for rebuilding watcher secure secrets via the
reload_secure_settings API call. The commit also renames a method in the
Notification Service to make it a bit more readable.
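
A rough sketch of the pattern this commit applies, using stand-in types (Reloadable and
ReloadingPlugin are illustrative, not Watcher's actual classes): the plugin gathers its
notification services into one list while creating components, and a single reload call fans
the fresh secure settings out to each service.

    import java.util.ArrayList;
    import java.util.List;
    import java.util.Map;

    // Stand-in for NotificationService#reload(Settings).
    interface Reloadable {
        void reload(Map<String, String> settings);
    }

    // Stand-in for the Watcher plugin: collects reloadable services once,
    // then re-reads (secure) settings in all of them on reload.
    class ReloadingPlugin {
        private final boolean enabled;
        private final List<Reloadable> reloadableServices = new ArrayList<>();

        ReloadingPlugin(boolean enabled, List<Reloadable> services) {
            this.enabled = enabled;
            this.reloadableServices.addAll(services);
        }

        void reload(Map<String, String> settings) {
            if (enabled == false) {
                return; // like Watcher#reload: no-op when the plugin is disabled
            }
            reloadableServices.forEach(s -> s.reload(settings));
        }
    }

WatcherPluginTests below exercises exactly this shape: reloading an enabled plugin reaches the
mocked service once, while a disabled plugin never touches it.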
---
 .../elasticsearch/xpack/watcher/Watcher.java  | 22 ++++++++++-
 .../notification/NotificationService.java     |  4 +-
 .../notification/email/EmailService.java      |  2 +-
 .../notification/hipchat/HipChatService.java  |  6 +--
 .../notification/jira/JiraService.java        |  2 +-
 .../pagerduty/PagerDutyService.java           |  2 +-
 .../notification/slack/SlackService.java      |  2 +-
 .../xpack/watcher/WatcherPluginTests.java     | 37 +++++++++++++++++++
 .../NotificationServiceTests.java             |  2 +-
 9 files changed, 68 insertions(+), 11 deletions(-)

diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java
index f7d51d328a797..78d1d37287f60 100644
--- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java
+++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/Watcher.java
@@ -38,6 +38,7 @@
 import org.elasticsearch.node.Node;
 import org.elasticsearch.plugins.ActionPlugin;
 import org.elasticsearch.plugins.Plugin;
+import org.elasticsearch.plugins.ReloadablePlugin;
 import org.elasticsearch.plugins.ScriptPlugin;
 import org.elasticsearch.rest.RestController;
 import org.elasticsearch.rest.RestHandler;
@@ -123,6 +124,7 @@
 import org.elasticsearch.xpack.watcher.input.simple.SimpleInputFactory;
 import org.elasticsearch.xpack.watcher.input.transform.TransformInput;
 import org.elasticsearch.xpack.watcher.input.transform.TransformInputFactory;
+import org.elasticsearch.xpack.watcher.notification.NotificationService;
 import org.elasticsearch.xpack.watcher.notification.email.Account;
 import org.elasticsearch.xpack.watcher.notification.email.EmailService;
 import org.elasticsearch.xpack.watcher.notification.email.HtmlSanitizer;
@@ -194,7 +196,7 @@
 
 import static java.util.Collections.emptyList;
 
-public class Watcher extends Plugin implements ActionPlugin, ScriptPlugin {
+public class Watcher extends Plugin implements ActionPlugin, ScriptPlugin, ReloadablePlugin {
 
     // This setting is only here for backward compatibility reasons as 6.x indices made use of it. It can be removed in 8.x.
     @Deprecated
@@ -221,6 +223,7 @@ public class Watcher extends Plugin implements ActionPlugin, ScriptPlugin {
     protected final boolean transportClient;
     protected final boolean enabled;
     protected final Environment env;
+    protected List<NotificationService> reloadableServices = new ArrayList<>();
 
     public Watcher(final Settings settings) {
         this.settings = settings;
@@ -275,6 +278,12 @@ public Collection<Object> createComponents(Client client, ClusterService cluster
         SlackService slackService = new SlackService(settings, httpClient, clusterService.getClusterSettings());
         PagerDutyService pagerDutyService = new PagerDutyService(settings, httpClient, clusterService.getClusterSettings());
 
+        reloadableServices.add(emailService);
+        reloadableServices.add(hipChatService);
+        reloadableServices.add(jiraService);
+        reloadableServices.add(slackService);
+        reloadableServices.add(pagerDutyService);
+
         TextTemplateEngine templateEngine = new TextTemplateEngine(settings, scriptService);
         Map<String, EmailAttachmentParser> emailAttachmentParsers = new HashMap<>();
         emailAttachmentParsers.put(HttpEmailAttachementParser.TYPE, new HttpEmailAttachementParser(httpClient, httpTemplateParser,
@@ -613,4 +622,15 @@ public List<ScriptContext<?>> getContexts() {
     public void close() throws IOException {
         IOUtils.closeWhileHandlingException(httpClient);
     }
+
+    /**
+     * Reloads all the reloadable services in watcher.
+     */
+    @Override
+    public void reload(Settings settings) {
+        if (enabled == false || transportClient) {
+            return;
+        }
+        reloadableServices.forEach(s -> s.reload(settings));
+    }
 }
diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/NotificationService.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/NotificationService.java
index 88399d3cb93d8..027825ab77871 100644
--- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/NotificationService.java
+++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/NotificationService.java
@@ -31,7 +31,7 @@ public abstract class NotificationService<Account> extends AbstractComponent {
     public NotificationService(Settings settings, String type,
                               ClusterSettings clusterSettings, List<Setting<?>> pluginSettings) {
         this(settings, type);
-        clusterSettings.addSettingsUpdateConsumer(this::setAccountSetting, pluginSettings);
+        clusterSettings.addSettingsUpdateConsumer(this::reload, pluginSettings);
     }
 
     // Used for testing only
@@ -40,7 +40,7 @@ public NotificationService(Settings settings, String type,
         this.type = type;
     }
 
-    protected synchronized void setAccountSetting(Settings settings) {
+    public synchronized void reload(Settings settings) {
         Tuple<Map<String, Account>, Account> accounts = buildAccounts(settings, this::createAccount);
         this.accounts = Collections.unmodifiableMap(accounts.v1());
         this.defaultAccount = accounts.v2();
diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/EmailService.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/EmailService.java
index 15859a5e044c5..e45ed55cee3ac 100644
--- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/EmailService.java
+++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/email/EmailService.java
@@ -127,7 +127,7 @@ public EmailService(Settings settings, @Nullable CryptoService cryptoService, Cl
         clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_SEND_PARTIAL, (s, o) -> {}, (s, o) -> {});
         clusterSettings.addAffixUpdateConsumer(SETTING_SMTP_WAIT_ON_QUIT, (s, o) -> {}, (s, o) -> {});
         // do an initial load
-        setAccountSetting(settings);
+        reload(settings);
     }
 
     @Override
diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/hipchat/HipChatService.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/hipchat/HipChatService.java
index ca970d5597ba1..2f21c2299a9a9 100644
--- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/hipchat/HipChatService.java
+++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/hipchat/HipChatService.java
@@ -79,13 +79,13 @@ public HipChatService(Settings settings, HttpClient httpClient, ClusterSettings
         clusterSettings.addAffixUpdateConsumer(SETTING_PORT, (s, o) -> {}, (s, o) -> {});
         clusterSettings.addAffixUpdateConsumer(SETTING_MESSAGE_DEFAULTS, (s, o) -> {}, (s, o) -> {});
 
-        setAccountSetting(settings);
+        reload(settings);
     }
 
     @Override
-    protected synchronized void setAccountSetting(Settings settings) {
+    public synchronized void reload(Settings settings) {
         defaultServer = new HipChatServer(settings.getByPrefix("xpack.notification.hipchat."));
-        super.setAccountSetting(settings);
+        super.reload(settings);
     }
 
     @Override
diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/jira/JiraService.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/jira/JiraService.java
index 3ccff775051a4..49c05f36b2445 100644
--- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/jira/JiraService.java
+++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/jira/JiraService.java
@@ -75,7 +75,7 @@ public JiraService(Settings settings, HttpClient httpClient, ClusterSettings clu
         clusterSettings.addAffixUpdateConsumer(SETTING_SECURE_PASSWORD, (s, o) -> {}, (s, o) -> {});
         clusterSettings.addAffixUpdateConsumer(SETTING_DEFAULTS, (s, o) -> {}, (s, o) -> {});
         // do an initial load
-        setAccountSetting(settings);
+        reload(settings);
     }
 
     @Override
diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/pagerduty/PagerDutyService.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/pagerduty/PagerDutyService.java
index 21c2f1fefb1a9..32a6dcb91aa51 100644
--- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/pagerduty/PagerDutyService.java
+++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/pagerduty/PagerDutyService.java
@@ -46,7 +46,7 @@ public PagerDutyService(Settings settings, HttpClient httpClient, ClusterSetting
         clusterSettings.addAffixUpdateConsumer(SETTING_SERVICE_API_KEY, (s, o) -> {}, (s, o) -> {});
         clusterSettings.addAffixUpdateConsumer(SETTING_SECURE_SERVICE_API_KEY, (s, o) -> {}, (s, o) -> {});
         clusterSettings.addAffixUpdateConsumer(SETTING_DEFAULTS, (s, o) -> {}, (s, o) -> {});
-        setAccountSetting(settings);
+        reload(settings);
     }
 
     @Override
diff --git a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/slack/SlackService.java b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/slack/SlackService.java
index d648501a5f8d6..2a38e08d59903 100644
--- a/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/slack/SlackService.java
+++ b/x-pack/plugin/watcher/src/main/java/org/elasticsearch/xpack/watcher/notification/slack/SlackService.java
@@ -46,7 +46,7 @@ public SlackService(Settings settings, HttpClient httpClient, ClusterSettings cl
         clusterSettings.addAffixUpdateConsumer(SETTING_URL, (s, o) -> {}, (s, o) -> {});
         clusterSettings.addAffixUpdateConsumer(SETTING_URL_SECURE, (s, o) -> {}, (s, o) -> {});
         clusterSettings.addAffixUpdateConsumer(SETTING_DEFAULTS, (s, o) -> {}, (s, o) -> {});
-        setAccountSetting(settings);
+        reload(settings);
     }
 
     @Override
diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherPluginTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherPluginTests.java
index abb981053e730..474f69c70edb3 100644
--- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherPluginTests.java
+++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/WatcherPluginTests.java
@@ -15,6 +15,7 @@
 import org.elasticsearch.test.IndexSettingsModule;
 import org.elasticsearch.threadpool.ExecutorBuilder;
 import org.elasticsearch.xpack.core.watcher.watch.Watch;
+import org.elasticsearch.xpack.watcher.notification.NotificationService;
 
 import java.util.List;
 
@@ -22,6 +23,10 @@
 import static org.hamcrest.Matchers.containsString;
 import static org.hamcrest.Matchers.hasSize;
 import static org.hamcrest.Matchers.is;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.times;
+import static org.mockito.Mockito.verify;
+import static org.mockito.Mockito.verifyNoMoreInteractions;
 
 public class WatcherPluginTests extends ESTestCase {
 
@@ -97,4 +102,36 @@ public void testThreadPoolSize() {
                 .build();
         assertThat(Watcher.getWatcherThreadPoolSize(noDataNodeSettings), is(1));
     }
+
+    public void testReload() {
+        Settings settings = Settings.builder()
+            .put("xpack.watcher.enabled", true)
+            .put("path.home", createTempDir())
+            .build();
+        NotificationService mockService = mock(NotificationService.class);
+        Watcher watcher = new TestWatcher(settings, mockService);
+
+        watcher.reload(settings);
+        verify(mockService, times(1)).reload(settings);
+    }
+
+    public void testReloadDisabled() {
+        Settings settings = Settings.builder()
+            .put("xpack.watcher.enabled", false)
+            .put("path.home", createTempDir())
+            .build();
+        NotificationService mockService = mock(NotificationService.class);
+        Watcher watcher = new TestWatcher(settings, mockService);
+
+        watcher.reload(settings);
+        verifyNoMoreInteractions(mockService);
+    }
+
+    private class TestWatcher extends Watcher {
+
+        TestWatcher(Settings settings, NotificationService service) {
+            super(settings);
+            reloadableServices.add(service);
+        }
+    }
 }
diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/NotificationServiceTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/NotificationServiceTests.java
index 829337e9acb7a..cb86913678a96 100644
--- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/NotificationServiceTests.java
+++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/notification/NotificationServiceTests.java
@@ -82,7 +82,7 @@ private static class TestNotificationService extends NotificationService<String>
 
         TestNotificationService(Settings settings) {
             super(settings, "test");
-            setAccountSetting(settings);
+            reload(settings);
         }
 
         @Override

From b959534c1ca35c40a84ecf7496498842b5f2396f Mon Sep 17 00:00:00 2001
From: Paul Sanwald 
Date: Fri, 13 Jul 2018 14:59:11 -0400
Subject: [PATCH 029/107] fix typo

---
 .../aggregations/bucket/autodatehistogram-aggregation.asciidoc  | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/reference/aggregations/bucket/autodatehistogram-aggregation.asciidoc b/docs/reference/aggregations/bucket/autodatehistogram-aggregation.asciidoc
index 28cb65ce6cc48..3bd430d03d5ac 100644
--- a/docs/reference/aggregations/bucket/autodatehistogram-aggregation.asciidoc
+++ b/docs/reference/aggregations/bucket/autodatehistogram-aggregation.asciidoc
@@ -215,7 +215,7 @@ the specified time zone.
     "by_day": {
       "buckets": [
         {
-          "key_as_string": "2015-09-30T23:00:00.000-01:00",
+          "key_as_string": "2015-09-30T23:00:00.000-01:00", <1>
           "key": 1443657600000,
           "doc_count": 1
         },

From 94d33111ef6451f43476a28770fb83fc109f3f55 Mon Sep 17 00:00:00 2001
From: Jack Conradson 
Date: Fri, 13 Jul 2018 13:07:26 -0700
Subject: [PATCH 030/107] Clean Up Snapshot Create Rest API (#31779)

Make SnapshotInfo and CreateSnapshotResponse parsers lenient for backwards compatibility.  Remove extraneous fields from CreateSnapshotRequest toXContent.
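
For context, the leniency comes from constructing the parsers with
ignoreUnknownFields set to true, as in the CreateSnapshotResponse hunk below.
A minimal sketch of the pattern (ExampleResponse and its "name" field are
hypothetical, not part of this change):

    import org.elasticsearch.common.ParseField;
    import org.elasticsearch.common.xcontent.ObjectParser;
    import org.elasticsearch.common.xcontent.XContentParser;

    public class ExampleResponse {
        // Passing "true" makes ObjectParser skip fields it does not recognize
        // instead of throwing, which keeps parsing backwards compatible.
        private static final ObjectParser<ExampleResponse, Void> PARSER =
            new ObjectParser<>(ExampleResponse.class.getName(), true, ExampleResponse::new);

        static {
            PARSER.declareString((response, value) -> response.name = value, new ParseField("name"));
        }

        private String name;

        public static ExampleResponse fromXContent(XContentParser parser) {
            return PARSER.apply(parser, null);
        }
    }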
---
 .../elasticsearch/client/SnapshotClient.java  |  4 +-
 .../org/elasticsearch/client/SnapshotIT.java  |  4 +-
 .../SnapshotClientDocumentationIT.java        | 10 +++-
 .../snapshot/create_snapshot.asciidoc         | 11 ++++
 .../create/CreateSnapshotRequest.java         |  4 +-
 .../create/CreateSnapshotResponse.java        | 50 ++++++-------------
 .../action/support/IndicesOptions.java        | 28 +++++------
 .../elasticsearch/snapshots/SnapshotInfo.java | 25 ----------
 .../create/CreateSnapshotRequestTests.java    |  4 +-
 .../create/CreateSnapshotResponseTests.java   |  6 +--
 .../action/support/IndicesOptionsTests.java   |  3 --
 11 files changed, 56 insertions(+), 93 deletions(-)

diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotClient.java
index bc0bbe95488f4..f75f6cdef2405 100644
--- a/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotClient.java
+++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotClient.java
@@ -176,7 +176,7 @@ public void verifyRepositoryAsync(VerifyRepositoryRequest verifyRepositoryReques
      * See  Snapshot and Restore
      * API on elastic.co
      */
-    public CreateSnapshotResponse createSnapshot(CreateSnapshotRequest createSnapshotRequest, RequestOptions options)
+    public CreateSnapshotResponse create(CreateSnapshotRequest createSnapshotRequest, RequestOptions options)
         throws IOException {
         return restHighLevelClient.performRequestAndParseEntity(createSnapshotRequest, RequestConverters::createSnapshot, options,
             CreateSnapshotResponse::fromXContent, emptySet());
@@ -188,7 +188,7 @@ public CreateSnapshotResponse createSnapshot(CreateSnapshotRequest createSnapsho
      * See  Snapshot and Restore
      * API on elastic.co
      */
-    public void createSnapshotAsync(CreateSnapshotRequest createSnapshotRequest, RequestOptions options,
+    public void createAsync(CreateSnapshotRequest createSnapshotRequest, RequestOptions options,
                                    ActionListener<CreateSnapshotResponse> listener) {
         restHighLevelClient.performRequestAsyncAndParseEntity(createSnapshotRequest, RequestConverters::createSnapshot, options,
             CreateSnapshotResponse::fromXContent, listener, emptySet());
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java
index 45f9b5bbb0b0a..6d035f5db654a 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java
@@ -61,8 +61,8 @@ private PutRepositoryResponse createTestRepository(String repository, String typ
     private CreateSnapshotResponse createTestSnapshot(CreateSnapshotRequest createSnapshotRequest) throws IOException {
         // assumes the repository already exists
 
-        return execute(createSnapshotRequest, highLevelClient().snapshot()::createSnapshot,
-            highLevelClient().snapshot()::createSnapshotAsync);
+        return execute(createSnapshotRequest, highLevelClient().snapshot()::create,
+            highLevelClient().snapshot()::createAsync);
     }
 
     public void testCreateRepository() throws IOException {
diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java
index 403ebc7d774a4..68a8113af6d38 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java
@@ -425,7 +425,7 @@ public void testSnapshotCreate() throws IOException {
         // end::create-snapshot-request-waitForCompletion
 
         // tag::create-snapshot-execute
-        CreateSnapshotResponse response = client.snapshot().createSnapshot(request, RequestOptions.DEFAULT);
+        CreateSnapshotResponse response = client.snapshot().create(request, RequestOptions.DEFAULT);
         // end::create-snapshot-execute
 
         // tag::create-snapshot-response
@@ -433,6 +433,12 @@ public void testSnapshotCreate() throws IOException {
         // end::create-snapshot-response
 
         assertEquals(RestStatus.OK, status);
+
+        // tag::create-snapshot-response-snapshot-info
+        SnapshotInfo snapshotInfo = response.getSnapshotInfo(); // <1>
+        // end::create-snapshot-response-snapshot-info
+
+        assertNotNull(snapshotInfo);
     }
 
     public void testSnapshotCreateAsync() throws InterruptedException {
@@ -460,7 +466,7 @@ public void onFailure(Exception exception) {
             listener = new LatchedActionListener<>(listener, latch);
 
             // tag::create-snapshot-execute-async
-            client.snapshot().createSnapshotAsync(request, RequestOptions.DEFAULT, listener); // <1>
+            client.snapshot().createAsync(request, RequestOptions.DEFAULT, listener); // <1>
             // end::create-snapshot-execute-async
 
             assertTrue(latch.await(30L, TimeUnit.SECONDS));
diff --git a/docs/java-rest/high-level/snapshot/create_snapshot.asciidoc b/docs/java-rest/high-level/snapshot/create_snapshot.asciidoc
index dbd31380a9b4b..971a6ee486711 100644
--- a/docs/java-rest/high-level/snapshot/create_snapshot.asciidoc
+++ b/docs/java-rest/high-level/snapshot/create_snapshot.asciidoc
@@ -73,11 +73,22 @@ include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-snapshot-r
 [[java-rest-high-snapshot-create-snapshot-sync]]
 ==== Synchronous Execution
 
+Execute a `CreateSnapshotRequest` synchronously to receive a `CreateSnapshotResponse`.
+
 ["source","java",subs="attributes,callouts,macros"]
 --------------------------------------------------
 include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-snapshot-execute]
 --------------------------------------------------
 
+Retrieve the `SnapshotInfo` from a `CreateSnapshotResponse` when the snapshot is fully
+created (that is, when the `waitForCompletion` parameter is set to `true`).
+
+["source","java",subs="attributes,callouts,macros"]
+--------------------------------------------------
+include-tagged::{doc-tests}/SnapshotClientDocumentationIT.java[create-snapshot-response-snapshot-info]
+--------------------------------------------------
+<1> The `SnapshotInfo` object.
+
 [[java-rest-high-snapshot-create-snapshot-async]]
 ==== Asynchronous Execution
 
diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java
index 2ff01ab01ed1f..15fbac35bffd2 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequest.java
@@ -42,9 +42,9 @@
 
 import static org.elasticsearch.action.ValidateActions.addValidationError;
 import static org.elasticsearch.common.Strings.EMPTY_ARRAY;
+import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS;
 import static org.elasticsearch.common.settings.Settings.readSettingsFromStream;
 import static org.elasticsearch.common.settings.Settings.writeSettingsToStream;
-import static org.elasticsearch.common.settings.Settings.Builder.EMPTY_SETTINGS;
 import static org.elasticsearch.common.xcontent.support.XContentMapValues.nodeBooleanValue;
 
 /**
@@ -433,8 +433,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
         if (indicesOptions != null) {
             indicesOptions.toXContent(builder, params);
         }
-        builder.field("wait_for_completion", waitForCompletion);
-        builder.field("master_node_timeout", masterNodeTimeout.toString());
         builder.endObject();
         return builder;
     }
diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponse.java
index a2dc02c5c8299..d0a82e36a97da 100644
--- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponse.java
+++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponse.java
@@ -21,14 +21,16 @@
 
 import org.elasticsearch.action.ActionResponse;
 import org.elasticsearch.common.Nullable;
+import org.elasticsearch.common.ParseField;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.xcontent.ObjectParser;
 import org.elasticsearch.common.xcontent.ToXContentObject;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.common.xcontent.XContentParser;
-import org.elasticsearch.common.xcontent.XContentParser.Token;
 import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.snapshots.SnapshotInfo;
+import org.elasticsearch.snapshots.SnapshotInfo.SnapshotInfoBuilder;
 
 import java.io.IOException;
 import java.util.Objects;
@@ -38,6 +40,14 @@
  */
 public class CreateSnapshotResponse extends ActionResponse implements ToXContentObject {
 
+    private static final ObjectParser<CreateSnapshotResponse, Void> PARSER =
+        new ObjectParser<>(CreateSnapshotResponse.class.getName(), true, CreateSnapshotResponse::new);
+
+    static {
+        PARSER.declareObject(CreateSnapshotResponse::setSnapshotInfoFromBuilder,
+            SnapshotInfo.SNAPSHOT_INFO_PARSER, new ParseField("snapshot"));
+    }
+
     @Nullable
     private SnapshotInfo snapshotInfo;
 
@@ -48,8 +58,8 @@ public class CreateSnapshotResponse extends ActionResponse implements ToXContent
     CreateSnapshotResponse() {
     }
 
-    void setSnapshotInfo(SnapshotInfo snapshotInfo) {
-        this.snapshotInfo = snapshotInfo;
+    private void setSnapshotInfoFromBuilder(SnapshotInfoBuilder snapshotInfoBuilder) {
+        this.snapshotInfo = snapshotInfoBuilder.build();
     }
 
     /**
@@ -101,38 +111,8 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
         return builder;
     }
 
-    public static CreateSnapshotResponse fromXContent(XContentParser parser) throws IOException {
-        CreateSnapshotResponse createSnapshotResponse = new CreateSnapshotResponse();
-
-        parser.nextToken(); // move to '{'
-
-        if (parser.currentToken() != Token.START_OBJECT) {
-            throw new IllegalArgumentException("unexpected token [" + parser.currentToken() + "], expected ['{']");
-        }
-
-        parser.nextToken(); // move to 'snapshot' || 'accepted'
-
-        if ("snapshot".equals(parser.currentName())) {
-            createSnapshotResponse.snapshotInfo = SnapshotInfo.fromXContent(parser);
-        } else if ("accepted".equals(parser.currentName())) {
-            parser.nextToken(); // move to 'accepted' field value
-
-            if (parser.booleanValue()) {
-                // ensure accepted is a boolean value
-            }
-
-            parser.nextToken(); // move past 'true'/'false'
-        } else {
-            throw new IllegalArgumentException("unexpected token [" + parser.currentToken() + "] expected ['snapshot', 'accepted']");
-        }
-
-        if (parser.currentToken() != Token.END_OBJECT) {
-            throw new IllegalArgumentException("unexpected token [" + parser.currentToken() + "], expected ['}']");
-        }
-
-        parser.nextToken(); // move past '}'
-
-        return createSnapshotResponse;
+    public static CreateSnapshotResponse fromXContent(XContentParser parser) {
+        return PARSER.apply(parser, null);
     }
 
     @Override
diff --git a/server/src/main/java/org/elasticsearch/action/support/IndicesOptions.java b/server/src/main/java/org/elasticsearch/action/support/IndicesOptions.java
index 19572a6c212a2..f2cf0b5444d52 100644
--- a/server/src/main/java/org/elasticsearch/action/support/IndicesOptions.java
+++ b/server/src/main/java/org/elasticsearch/action/support/IndicesOptions.java
@@ -22,6 +22,7 @@
 import org.elasticsearch.Version;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.ToXContentFragment;
 import org.elasticsearch.common.xcontent.XContentBuilder;
 import org.elasticsearch.rest.RestRequest;
@@ -316,21 +317,6 @@ public static IndicesOptions fromMap(Map<String, Object> map, IndicesOptions def
                 defaultSettings);
     }
 
-    @Override
-    public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
-        builder.startArray("expand_wildcards");
-        for (WildcardStates expandWildcard : expandWildcards) {
-            builder.value(expandWildcard.toString().toLowerCase(Locale.ROOT));
-        }
-        builder.endArray();
-        builder.field("ignore_unavailable", ignoreUnavailable());
-        builder.field("allow_no_indices", allowNoIndices());
-        builder.field("forbid_aliases_to_multiple_indices", allowAliasesToMultipleIndices() == false);
-        builder.field("forbid_closed_indices", forbidClosedIndices());
-        builder.field("ignore_aliases", ignoreAliases());
-        return builder;
-    }
-
     /**
      * Returns true if the name represents a valid name for one of the indices option
      * false otherwise
@@ -360,6 +346,18 @@ public static IndicesOptions fromParameters(Object wildcardsString, Object ignor
         );
     }
 
+    @Override
+    public XContentBuilder toXContent(XContentBuilder builder, ToXContent.Params params) throws IOException {
+        builder.startArray("expand_wildcards");
+        for (WildcardStates expandWildcard : expandWildcards) {
+            builder.value(expandWildcard.toString().toLowerCase(Locale.ROOT));
+        }
+        builder.endArray();
+        builder.field("ignore_unavailable", ignoreUnavailable());
+        builder.field("allow_no_indices", allowNoIndices());
+        return builder;
+    }
+
     /**
      * @return indices options that requires every specified index to exist, expands wildcards only to open indices and
      *         allows that no indices are resolved from wildcard expressions (not returning an error).
diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotInfo.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotInfo.java
index cf2f66a750cd7..67ddabc37fa30 100644
--- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotInfo.java
+++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotInfo.java
@@ -140,22 +140,6 @@ private void setShardFailures(List<SnapshotShardFailure> shardFailures) {
             this.shardFailures = shardFailures;
         }
 
-        private void ignoreVersion(String version) {
-            // ignore extra field
-        }
-
-        private void ignoreStartTime(String startTime) {
-            // ignore extra field
-        }
-
-        private void ignoreEndTime(String endTime) {
-            // ignore extra field
-        }
-
-        private void ignoreDurationInMillis(long durationInMillis) {
-            // ignore extra field
-        }
-
         public SnapshotInfo build() {
             SnapshotId snapshotId = new SnapshotId(snapshotName, snapshotUUID);
 
@@ -197,10 +181,6 @@ private void setSuccessfulShards(int successfulShards) {
         int getSuccessfulShards() {
             return successfulShards;
         }
-
-        private void ignoreFailedShards(int failedShards) {
-            // ignore extra field
-        }
     }
 
     public static final ObjectParser<SnapshotInfoBuilder, Void> SNAPSHOT_INFO_PARSER =
@@ -222,14 +202,9 @@ private void ignoreFailedShards(int failedShards) {
         SNAPSHOT_INFO_PARSER.declareInt(SnapshotInfoBuilder::setVersion, new ParseField(VERSION_ID));
         SNAPSHOT_INFO_PARSER.declareObjectArray(SnapshotInfoBuilder::setShardFailures, SnapshotShardFailure.SNAPSHOT_SHARD_FAILURE_PARSER,
             new ParseField(FAILURES));
-        SNAPSHOT_INFO_PARSER.declareString(SnapshotInfoBuilder::ignoreVersion, new ParseField(VERSION));
-        SNAPSHOT_INFO_PARSER.declareString(SnapshotInfoBuilder::ignoreStartTime, new ParseField(START_TIME));
-        SNAPSHOT_INFO_PARSER.declareString(SnapshotInfoBuilder::ignoreEndTime, new ParseField(END_TIME));
-        SNAPSHOT_INFO_PARSER.declareLong(SnapshotInfoBuilder::ignoreDurationInMillis, new ParseField(DURATION_IN_MILLIS));
 
         SHARD_STATS_PARSER.declareInt(ShardStatsBuilder::setTotalShards, new ParseField(TOTAL));
         SHARD_STATS_PARSER.declareInt(ShardStatsBuilder::setSuccessfulShards, new ParseField(SUCCESSFUL));
-        SHARD_STATS_PARSER.declareInt(ShardStatsBuilder::ignoreFailedShards, new ParseField(FAILED));
     }
 
     private final SnapshotId snapshotId;
diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequestTests.java
index 1bde8ab572b72..0b598be6849cb 100644
--- a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequestTests.java
+++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotRequestTests.java
@@ -102,8 +102,8 @@ public void testToXContent() throws IOException {
                 NamedXContentRegistry.EMPTY, null, BytesReference.bytes(builder).streamInput());
         Map<String, Object> map = parser.mapOrdered();
         CreateSnapshotRequest processed = new CreateSnapshotRequest((String)map.get("repository"), (String)map.get("snapshot"));
-        processed.waitForCompletion((boolean)map.getOrDefault("wait_for_completion", false));
-        processed.masterNodeTimeout((String)map.getOrDefault("master_node_timeout", "30s"));
+        processed.waitForCompletion(original.waitForCompletion());
+        processed.masterNodeTimeout(original.masterNodeTimeout());
         processed.source(map);
 
         assertEquals(original, processed);
diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponseTests.java
index bbfc9755bf215..bbb11fc6feef0 100644
--- a/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponseTests.java
+++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/snapshots/create/CreateSnapshotResponseTests.java
@@ -40,7 +40,7 @@ protected CreateSnapshotResponse doParseInstance(XContentParser parser) throws I
 
     @Override
     protected boolean supportsUnknownFields() {
-        return false;
+        return true;
     }
 
     @Override
@@ -63,9 +63,7 @@ protected CreateSnapshotResponse createTestInstance() {
 
         boolean globalState = randomBoolean();
 
-        CreateSnapshotResponse response = new CreateSnapshotResponse();
-        response.setSnapshotInfo(
+        return new CreateSnapshotResponse(
             new SnapshotInfo(snapshotId, indices, startTime, reason, endTime, totalShards, shardFailures, globalState));
-        return response;
     }
 }
diff --git a/server/src/test/java/org/elasticsearch/action/support/IndicesOptionsTests.java b/server/src/test/java/org/elasticsearch/action/support/IndicesOptionsTests.java
index 8e94764cc7acc..f564066b4e3df 100644
--- a/server/src/test/java/org/elasticsearch/action/support/IndicesOptionsTests.java
+++ b/server/src/test/java/org/elasticsearch/action/support/IndicesOptionsTests.java
@@ -320,8 +320,5 @@ public void testToXContent() throws IOException {
         }
         assertEquals(map.get("ignore_unavailable"), options.contains(Option.IGNORE_UNAVAILABLE));
         assertEquals(map.get("allow_no_indices"), options.contains(Option.ALLOW_NO_INDICES));
-        assertEquals(map.get("forbid_aliases_to_multiple_indices"), options.contains(Option.FORBID_ALIASES_TO_MULTIPLE_INDICES));
-        assertEquals(map.get("forbid_closed_indices"), options.contains(Option.FORBID_CLOSED_INDICES));
-        assertEquals(map.get("ignore_aliases"), options.contains(Option.IGNORE_ALIASES));
     }
 }

From 2945c3a3ed4e4bfb5cd23e4c959ede19997d2164 Mon Sep 17 00:00:00 2001
From: Zachary Tong 
Date: Fri, 13 Jul 2018 16:10:39 -0400
Subject: [PATCH 031/107] [Rollup] Histo group config should support
 scaled_floats (#32048)

Metric config already whitelists scaled_float, but it wasn't added to
the histo group config. This centralizes the mapping-type list so
that both the metric and histo configs (and any future configs) use
the same list.

Fixes #32035
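
A rough consumer-side sketch of the shared list (the class and hard-coded
type names here are hypothetical stand-ins; the real list is built from
NumberFieldMapper.NumberType plus scaled_float, as the diff below shows):

    import java.util.Arrays;
    import java.util.List;

    class NumericTypesSketch {
        // Stand-in for RollupField.NUMERIC_FIELD_MAPPER_TYPES.
        static final List<String> NUMERIC_FIELD_MAPPER_TYPES = Arrays.asList(
            "long", "integer", "short", "byte", "double", "float", "half_float",
            "scaled_float"); // scaled_float lives in a module, so it is added manually

        // Both the metric and histo configs can now share one check.
        static boolean isNumericMapping(String mappingType) {
            return NUMERIC_FIELD_MAPPER_TYPES.contains(mappingType);
        }

        public static void main(String[] args) {
            System.out.println(isNumericMapping("scaled_float")); // true after this change
        }
    }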
---
 .../xpack/core/rollup/RollupField.java              | 13 +++++++++++++
 .../xpack/core/rollup/job/HistoGroupConfig.java     |  8 +-------
 .../xpack/core/rollup/job/MetricConfig.java         | 13 +------------
 .../job/HistoGroupConfigSerializingTests.java       |  4 +++-
 4 files changed, 18 insertions(+), 20 deletions(-)

diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/RollupField.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/RollupField.java
index 1e2e011276dc3..134ce6c87b3f7 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/RollupField.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/RollupField.java
@@ -6,6 +6,7 @@
 package org.elasticsearch.xpack.core.rollup;
 
 import org.elasticsearch.common.ParseField;
+import org.elasticsearch.index.mapper.NumberFieldMapper;
 import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregationBuilder;
 import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder;
 import org.elasticsearch.search.aggregations.metrics.min.MinAggregationBuilder;
@@ -15,6 +16,8 @@
 
 import java.util.Arrays;
 import java.util.List;
+import java.util.stream.Collectors;
+import java.util.stream.Stream;
 
 public class RollupField {
     // Fields that are used both in core Rollup actions and Rollup plugin
@@ -34,6 +37,16 @@ public class RollupField {
     public static final List<String> SUPPORTED_METRICS = Arrays.asList(MaxAggregationBuilder.NAME, MinAggregationBuilder.NAME,
             SumAggregationBuilder.NAME, AvgAggregationBuilder.NAME, ValueCountAggregationBuilder.NAME);
 
+    // these mapper types are used by the configs (metric, histo, etc) to validate field mappings
+    public static final List<String> NUMERIC_FIELD_MAPPER_TYPES;
+    static {
+        List<String> types = Stream.of(NumberFieldMapper.NumberType.values())
+            .map(NumberFieldMapper.NumberType::typeName)
+            .collect(Collectors.toList());
+        types.add("scaled_float"); // have to add manually since scaled_float is in a module
+        NUMERIC_FIELD_MAPPER_TYPES = types;
+    }
+
     /**
      * Format to the appropriate Rollup field name convention
      *
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/HistoGroupConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/HistoGroupConfig.java
index 2b1511077d955..87de9e165345e 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/HistoGroupConfig.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/HistoGroupConfig.java
@@ -15,7 +15,6 @@
 import org.elasticsearch.common.xcontent.ObjectParser;
 import org.elasticsearch.common.xcontent.ToXContentFragment;
 import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.index.mapper.NumberFieldMapper;
 import org.elasticsearch.search.aggregations.bucket.composite.CompositeValuesSourceBuilder;
 import org.elasticsearch.search.aggregations.bucket.composite.HistogramValuesSourceBuilder;
 import org.elasticsearch.search.aggregations.bucket.histogram.HistogramAggregationBuilder;
@@ -30,7 +29,6 @@
 import java.util.Objects;
 import java.util.Set;
 import java.util.stream.Collectors;
-import java.util.stream.Stream;
 
 /**
  * The configuration object for the histograms in the rollup config
@@ -51,10 +49,6 @@ public class HistoGroupConfig implements Writeable, ToXContentFragment {
 
     private static final ParseField INTERVAL = new ParseField("interval");
     private static final ParseField FIELDS = new ParseField("fields");
-    private static final List<String> MAPPER_TYPES = Stream.of(NumberFieldMapper.NumberType.values())
-            .map(NumberFieldMapper.NumberType::typeName)
-            .collect(Collectors.toList());
-
 
     private final long interval;
     private final String[] fields;
@@ -126,7 +120,7 @@ public void validateMappings(Map<String, Map<String, FieldCapabilities>> fieldCa
             Map<String, FieldCapabilities> fieldCaps = fieldCapsResponse.get(field);
             if (fieldCaps != null && fieldCaps.isEmpty() == false) {
                 fieldCaps.forEach((key, value) -> {
-                    if (MAPPER_TYPES.contains(key)) {
+                    if (RollupField.NUMERIC_FIELD_MAPPER_TYPES.contains(key)) {
                         if (value.isAggregatable() == false) {
                             validationException.addValidationError("The field [" + field + "] must be aggregatable across all indices, " +
                                     "but is not.");
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/MetricConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/MetricConfig.java
index 67b83646c4237..006d8c35c324d 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/MetricConfig.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/MetricConfig.java
@@ -15,7 +15,6 @@
 import org.elasticsearch.common.xcontent.ObjectParser;
 import org.elasticsearch.common.xcontent.ToXContentFragment;
 import org.elasticsearch.common.xcontent.XContentBuilder;
-import org.elasticsearch.index.mapper.NumberFieldMapper;
 import org.elasticsearch.search.aggregations.metrics.avg.AvgAggregationBuilder;
 import org.elasticsearch.search.aggregations.metrics.max.MaxAggregationBuilder;
 import org.elasticsearch.search.aggregations.metrics.min.MinAggregationBuilder;
@@ -32,7 +31,6 @@
 import java.util.Map;
 import java.util.Objects;
 import java.util.stream.Collectors;
-import java.util.stream.Stream;
 
 /**
  * The configuration object for the metrics portion of a rollup job config
@@ -66,15 +64,6 @@ public class MetricConfig implements Writeable, ToXContentFragment {
     private static final ParseField AVG = new ParseField("avg");
     private static final ParseField VALUE_COUNT = new ParseField("value_count");
 
-    private static final List<String> MAPPER_TYPES;
-    static {
-        List<String> types = Stream.of(NumberFieldMapper.NumberType.values())
-                .map(NumberFieldMapper.NumberType::typeName)
-                .collect(Collectors.toList());
-        types.add("scaled_float"); // have to add manually since scaled_float is in a module
-        MAPPER_TYPES = types;
-    }
-
     public static final ObjectParser PARSER = new ObjectParser<>(NAME, MetricConfig.Builder::new);
 
     static {
@@ -153,7 +142,7 @@ public void validateMappings(Map<String, Map<String, FieldCapabilities>> fieldCa
         Map<String, FieldCapabilities> fieldCaps = fieldCapsResponse.get(field);
         if (fieldCaps != null && fieldCaps.isEmpty() == false) {
             fieldCaps.forEach((key, value) -> {
-                if (MAPPER_TYPES.contains(key)) {
+                if (RollupField.NUMERIC_FIELD_MAPPER_TYPES.contains(key)) {
                     if (value.isAggregatable() == false) {
                         validationException.addValidationError("The field [" + field + "] must be aggregatable across all indices, " +
                                 "but is not.");
diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/HistoGroupConfigSerializingTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/HistoGroupConfigSerializingTests.java
index 18a64bc2adfd6..92e7d8b9643e6 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/HistoGroupConfigSerializingTests.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/job/HistoGroupConfigSerializingTests.java
@@ -11,6 +11,7 @@
 import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.test.AbstractSerializingTestCase;
 import org.elasticsearch.xpack.core.rollup.ConfigTestHelpers;
+import org.elasticsearch.xpack.core.rollup.RollupField;
 
 import java.io.IOException;
 import java.util.Collections;
@@ -111,7 +112,8 @@ public void testValidateMatchingField() throws IOException {
         // Have to mock fieldcaps because the ctor's aren't public...
         FieldCapabilities fieldCaps = mock(FieldCapabilities.class);
         when(fieldCaps.isAggregatable()).thenReturn(true);
-        responseMap.put("my_field", Collections.singletonMap("long", fieldCaps));
+        String mappingType = randomFrom(RollupField.NUMERIC_FIELD_MAPPER_TYPES);
+        responseMap.put("my_field", Collections.singletonMap(mappingType, fieldCaps));
 
         HistoGroupConfig config = new HistoGroupConfig.Builder()
                 .setFields(Collections.singletonList("my_field"))

From e48de6a32ef0cd9412568b545570088f1d971678 Mon Sep 17 00:00:00 2001
From: Christoph Büscher 
Date: Fri, 13 Jul 2018 23:08:18 +0200
Subject: [PATCH 032/107] Mute failing tests

Relates to #32055
---
 .../test/search.inner_hits/10_basic.yml         |  9 +++++----
 .../test/search/110_field_collapsing.yml        | 17 ++++++++++++++---
 2 files changed, 19 insertions(+), 7 deletions(-)

diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.inner_hits/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.inner_hits/10_basic.yml
index 884a50507c7b7..8f162ae2eb238 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.inner_hits/10_basic.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.inner_hits/10_basic.yml
@@ -13,8 +13,9 @@ setup:
 ---
 "Nested inner hits":
     - skip:
-        version: " - 6.1.99"
-        reason: "<= 6.1 nodes don't always include index or id in nested inner hits"
+        version: "all"
+        reason: "https://github.com/elastic/elasticsearch/issues/32055"
+
     - do:
         index:
           index: test
@@ -45,8 +46,8 @@ setup:
 "Nested doc version and seqIDs":
 
     - skip:
-        version: " - 6.3.99"
-        reason:  "object notation for docvalue_fields was introduced in 6.4"
+        version: "all"
+        reason: "https://github.com/elastic/elasticsearch/issues/32055"
 
     - do:
         index:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/110_field_collapsing.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/110_field_collapsing.yml
index 2dfd868d66b06..39597b1fbbeea 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/110_field_collapsing.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/110_field_collapsing.yml
@@ -107,6 +107,9 @@ setup:
 
 ---
 "field collapsing and inner_hits":
+  - skip:
+      version: "all"
+      reason: "https://github.com/elastic/elasticsearch/issues/32055"
 
   - do:
       search:
@@ -146,6 +149,9 @@ setup:
 
 ---
 "field collapsing, inner_hits and maxConcurrentGroupRequests":
+  - skip:
+      version: "all"
+      reason: "https://github.com/elastic/elasticsearch/issues/32055"
 
   - do:
       search:
@@ -226,6 +232,9 @@ setup:
 
 ---
 "no hits and inner_hits":
+  - skip:
+      version: "all"
+      reason: "https://github.com/elastic/elasticsearch/issues/32055"
 
   - do:
       search:
@@ -240,6 +249,9 @@ setup:
 
 ---
 "field collapsing and multiple inner_hits":
+  - skip:
+      version: "all"
+      reason: "https://github.com/elastic/elasticsearch/issues/32055"
 
   - do:
       search:
@@ -292,10 +304,9 @@ setup:
 
 ---
 "field collapsing, inner_hits and version":
-
   - skip:
-      version: " - 6.1.0"
-      reason:  "bug fixed in 6.1.1"
+      version: "all"
+      reason: "https://github.com/elastic/elasticsearch/issues/32055"
 
   - do:
       search:

From ec470f29c1573be3265218db23da0dfeb4531a7b Mon Sep 17 00:00:00 2001
From: Armin Braun 
Date: Fri, 13 Jul 2018 23:26:10 +0200
Subject: [PATCH 033/107] Replace Ingest ScriptContext with Custom Interface
 (#32003)

* Replace Ingest ScriptContext with Custom Interface (a usage sketch follows the list)
* Make org.elasticsearch.ingest.common.ScriptProcessorTests#testScripting more precise
* Don't mock script factory in ScriptProcessorTests
* Adjust mock script plugin in IT for new API
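
A minimal usage sketch of the new context, mirroring the ScriptProcessor change
below (the wrapper class and its run method are illustrative only):

    import org.elasticsearch.ingest.IngestDocument;
    import org.elasticsearch.script.IngestScript;
    import org.elasticsearch.script.Script;
    import org.elasticsearch.script.ScriptService;

    class IngestScriptUsageSketch {
        static void run(ScriptService scriptService, Script script, IngestDocument document) {
            // Compile against the dedicated ingest context instead of ExecutableScript.
            IngestScript.Factory factory = scriptService.compile(script, IngestScript.CONTEXT);
            IngestScript ingestScript = factory.newInstance(script.getParams());
            // The script mutates the document's ctx map in place.
            ingestScript.execute(document.getSourceAndMetadata());
        }
    }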
---
 .../ingest/common/ScriptProcessor.java        | 10 ++--
 .../ingest/common/IngestRestartIT.java        |  4 +-
 .../ingest/common/ScriptProcessorTests.java   | 38 ++++++++------
 .../script/ExecutableScript.java              |  1 -
 .../elasticsearch/script/IngestScript.java    | 52 +++++++++++++++++++
 .../elasticsearch/script/ScriptModule.java    |  2 +-
 .../script/ScriptServiceTests.java            |  8 +--
 .../script/MockScriptEngine.java              |  8 +++
 8 files changed, 91 insertions(+), 32 deletions(-)
 create mode 100644 server/src/main/java/org/elasticsearch/script/IngestScript.java

diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ScriptProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ScriptProcessor.java
index ddb284b9c890d..74c68fd5c2638 100644
--- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ScriptProcessor.java
+++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ScriptProcessor.java
@@ -31,7 +31,7 @@
 import org.elasticsearch.ingest.AbstractProcessor;
 import org.elasticsearch.ingest.IngestDocument;
 import org.elasticsearch.ingest.Processor;
-import org.elasticsearch.script.ExecutableScript;
+import org.elasticsearch.script.IngestScript;
 import org.elasticsearch.script.Script;
 import org.elasticsearch.script.ScriptException;
 import org.elasticsearch.script.ScriptService;
@@ -73,10 +73,8 @@ public final class ScriptProcessor extends AbstractProcessor {
      */
     @Override
     public void execute(IngestDocument document) {
-        ExecutableScript.Factory factory = scriptService.compile(script, ExecutableScript.INGEST_CONTEXT);
-        ExecutableScript executableScript = factory.newInstance(script.getParams());
-        executableScript.setNextVar("ctx",  document.getSourceAndMetadata());
-        executableScript.run();
+        IngestScript.Factory factory = scriptService.compile(script, IngestScript.CONTEXT);
+        factory.newInstance(script.getParams()).execute(document.getSourceAndMetadata());
     }
 
     @Override
@@ -108,7 +106,7 @@ public ScriptProcessor create(Map registry, String pr
 
                 // verify script is able to be compiled before successfully creating processor.
                 try {
-                    scriptService.compile(script, ExecutableScript.INGEST_CONTEXT);
+                    scriptService.compile(script, IngestScript.CONTEXT);
                 } catch (ScriptException e) {
                     throw newConfigurationException(TYPE, processorTag, null, e);
                 }
diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/IngestRestartIT.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/IngestRestartIT.java
index 69236144007bc..8c3976d2b175c 100644
--- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/IngestRestartIT.java
+++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/IngestRestartIT.java
@@ -58,9 +58,7 @@ protected boolean ignoreExternalCluster() {
     public static class CustomScriptPlugin extends MockScriptPlugin {
         @Override
         protected Map<String, Function<Map<String, Object>, Object>> pluginScripts() {
-            return Collections.singletonMap("my_script", script -> {
-                @SuppressWarnings("unchecked")
-                Map<String, Object> ctx = (Map<String, Object>) script.get("ctx");
+            return Collections.singletonMap("my_script", ctx -> {
                 ctx.put("z", 0);
                 return null;
             });
diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorTests.java
index 1004a41bcc592..72bc337e9c9f7 100644
--- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorTests.java
+++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/ScriptProcessorTests.java
@@ -19,22 +19,22 @@
 
 package org.elasticsearch.ingest.common;
 
+import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
 
+import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.ingest.IngestDocument;
 import org.elasticsearch.ingest.RandomDocumentPicks;
-import org.elasticsearch.script.ExecutableScript;
+import org.elasticsearch.script.MockScriptEngine;
 import org.elasticsearch.script.Script;
+import org.elasticsearch.script.ScriptModule;
 import org.elasticsearch.script.ScriptService;
+import org.elasticsearch.script.ScriptType;
 import org.elasticsearch.test.ESTestCase;
 
 import static org.hamcrest.Matchers.hasKey;
 import static org.hamcrest.core.Is.is;
-import static org.mockito.Mockito.any;
-import static org.mockito.Mockito.doAnswer;
-import static org.mockito.Mockito.mock;
-import static org.mockito.Mockito.when;
 
 public class ScriptProcessorTests extends ESTestCase {
 
@@ -42,24 +42,28 @@ public void testScripting() throws Exception {
         int randomBytesIn = randomInt();
         int randomBytesOut = randomInt();
         int randomBytesTotal = randomBytesIn + randomBytesOut;
-
-        ScriptService scriptService = mock(ScriptService.class);
-        Script script = mockScript("_script");
-        ExecutableScript.Factory factory = mock(ExecutableScript.Factory.class);
-        ExecutableScript executableScript = mock(ExecutableScript.class);
-        when(scriptService.compile(script, ExecutableScript.INGEST_CONTEXT)).thenReturn(factory);
-        when(factory.newInstance(any())).thenReturn(executableScript);
+        String scriptName = "script";
+        ScriptService scriptService = new ScriptService(Settings.builder().build(),
+            Collections.singletonMap(
+                Script.DEFAULT_SCRIPT_LANG, new MockScriptEngine(
+                    Script.DEFAULT_SCRIPT_LANG,
+                    Collections.singletonMap(
+                        scriptName, ctx -> {
+                            ctx.put("bytes_total", randomBytesTotal);
+                            return null;
+                        }
+                    )
+                )
+            ),
+            new HashMap<>(ScriptModule.CORE_CONTEXTS)
+        );
+        Script script = new Script(ScriptType.INLINE, Script.DEFAULT_SCRIPT_LANG, scriptName, Collections.emptyMap());
 
         Map<String, Object> document = new HashMap<>();
         document.put("bytes_in", randomInt());
         document.put("bytes_out", randomInt());
         IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random(), document);
 
-        doAnswer(invocationOnMock ->  {
-            ingestDocument.setFieldValue("bytes_total", randomBytesTotal);
-            return null;
-        }).when(executableScript).run();
-
         ScriptProcessor processor = new ScriptProcessor(randomAlphaOfLength(10), script, scriptService);
 
         processor.execute(ingestDocument);
diff --git a/server/src/main/java/org/elasticsearch/script/ExecutableScript.java b/server/src/main/java/org/elasticsearch/script/ExecutableScript.java
index e87b7cdf3890a..2f7a01c37980d 100644
--- a/server/src/main/java/org/elasticsearch/script/ExecutableScript.java
+++ b/server/src/main/java/org/elasticsearch/script/ExecutableScript.java
@@ -50,5 +50,4 @@ interface Factory {
     // TODO: remove these once each has its own script interface
     ScriptContext<Factory> AGGS_CONTEXT = new ScriptContext<>("aggs_executable", Factory.class);
     ScriptContext<Factory> UPDATE_CONTEXT = new ScriptContext<>("update", Factory.class);
-    ScriptContext<Factory> INGEST_CONTEXT = new ScriptContext<>("ingest", Factory.class);
 }
diff --git a/server/src/main/java/org/elasticsearch/script/IngestScript.java b/server/src/main/java/org/elasticsearch/script/IngestScript.java
new file mode 100644
index 0000000000000..f357394ed31f0
--- /dev/null
+++ b/server/src/main/java/org/elasticsearch/script/IngestScript.java
@@ -0,0 +1,52 @@
+
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.script;
+
+import java.util.Map;
+
+/**
+ * A script used by the Ingest Script Processor.
+ */
+public abstract class IngestScript {
+
+    public static final String[] PARAMETERS = { "ctx" };
+
+    /** The context used to compile {@link IngestScript} factories. */
+    public static final ScriptContext<Factory> CONTEXT = new ScriptContext<>("ingest", Factory.class);
+
+    /** The generic runtime parameters for the script. */
+    private final Map<String, Object> params;
+
+    public IngestScript(Map<String, Object> params) {
+        this.params = params;
+    }
+
+    /** Return the parameters for this script. */
+    public Map<String, Object> getParams() {
+        return params;
+    }
+
+    public abstract void execute(Map<String, Object> ctx);
+
+    public interface Factory {
+        IngestScript newInstance(Map<String, Object> params);
+    }
+}
diff --git a/server/src/main/java/org/elasticsearch/script/ScriptModule.java b/server/src/main/java/org/elasticsearch/script/ScriptModule.java
index 042953117c5a5..bf4bd9c57cef0 100644
--- a/server/src/main/java/org/elasticsearch/script/ScriptModule.java
+++ b/server/src/main/java/org/elasticsearch/script/ScriptModule.java
@@ -51,7 +51,7 @@ public class ScriptModule {
             ExecutableScript.CONTEXT,
             ExecutableScript.AGGS_CONTEXT,
             ExecutableScript.UPDATE_CONTEXT,
-            ExecutableScript.INGEST_CONTEXT,
+            IngestScript.CONTEXT,
             FilterScript.CONTEXT,
             SimilarityScript.CONTEXT,
             SimilarityWeightScript.CONTEXT,
diff --git a/server/src/test/java/org/elasticsearch/script/ScriptServiceTests.java b/server/src/test/java/org/elasticsearch/script/ScriptServiceTests.java
index b35fcbcc03c17..585f860165160 100644
--- a/server/src/test/java/org/elasticsearch/script/ScriptServiceTests.java
+++ b/server/src/test/java/org/elasticsearch/script/ScriptServiceTests.java
@@ -168,7 +168,7 @@ public void testAllowAllScriptContextSettings() throws IOException {
         assertCompileAccepted("painless", "script", ScriptType.INLINE, SearchScript.CONTEXT);
         assertCompileAccepted("painless", "script", ScriptType.INLINE, SearchScript.AGGS_CONTEXT);
         assertCompileAccepted("painless", "script", ScriptType.INLINE, ExecutableScript.UPDATE_CONTEXT);
-        assertCompileAccepted("painless", "script", ScriptType.INLINE, ExecutableScript.INGEST_CONTEXT);
+        assertCompileAccepted("painless", "script", ScriptType.INLINE, IngestScript.CONTEXT);
     }
 
     public void testAllowSomeScriptTypeSettings() throws IOException {
@@ -209,13 +209,13 @@ public void testAllowNoScriptContextSettings() throws IOException {
     }
 
     public void testCompileNonRegisteredContext() throws IOException {
-        contexts.remove(ExecutableScript.INGEST_CONTEXT.name);
+        contexts.remove(IngestScript.CONTEXT.name);
         buildScriptService(Settings.EMPTY);
 
         String type = scriptEngine.getType();
         IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () ->
-            scriptService.compile(new Script(ScriptType.INLINE, type, "test", Collections.emptyMap()), ExecutableScript.INGEST_CONTEXT));
-        assertThat(e.getMessage(), containsString("script context [" + ExecutableScript.INGEST_CONTEXT.name + "] not supported"));
+            scriptService.compile(new Script(ScriptType.INLINE, type, "test", Collections.emptyMap()), IngestScript.CONTEXT));
+        assertThat(e.getMessage(), containsString("script context [" + IngestScript.CONTEXT.name + "] not supported"));
     }
 
     public void testCompileCountedInCompilationStats() throws IOException {
diff --git a/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java b/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java
index e608bd13d2559..8e40e4bcf1468 100644
--- a/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java
+++ b/test/framework/src/main/java/org/elasticsearch/script/MockScriptEngine.java
@@ -88,6 +88,14 @@ public <T> T compile(String name, String source, ScriptContext<T> context, Map<String, String> params) {
+        } else if (context.instanceClazz.equals(IngestScript.class)) {
+            IngestScript.Factory factory = parameters -> new IngestScript(parameters) {
+                @Override
+                public void execute(Map<String, Object> ctx) {
+                    script.apply(ctx);
+                }
+            };
+            return context.factoryClazz.cast(factory);
         } else if (context.instanceClazz.equals(TemplateScript.class)) {
             TemplateScript.Factory factory = vars -> {
                 // TODO: need a better way to implement all these new contexts

From f446f912c26d17e9fd4636d8da0b40b94b74ce80 Mon Sep 17 00:00:00 2001
From: Tim Brooks 
Date: Fri, 13 Jul 2018 16:41:02 -0600
Subject: [PATCH 034/107] Add nio http transport to security plugin (#32018)

This is related to #27260. It adds the SecurityNioHttpServerTransport
to the security plugin, and security integration tests now randomly
use the nio http transport.
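
The enabling change is that NioHttpServerTransport now exposes a protected
channelFactory() hook (see the hunk below), which the security transport can
override to supply SSL-aware channels. A toy sketch of that template-method
pattern; every name here is a hypothetical stand-in, not a real class:

    interface ChannelFactory {
        String createChannel();
    }

    class BaseHttpTransport {
        // Subclasses override this hook to supply their own channel factory.
        protected ChannelFactory channelFactory() {
            return () -> "plain-http-channel";
        }

        final void start() {
            ChannelFactory factory = channelFactory(); // resolved once during startup
            System.out.println("opened " + factory.createChannel());
        }
    }

    public class SecurityHttpTransport extends BaseHttpTransport {
        @Override
        protected ChannelFactory channelFactory() {
            // The real subclass would wrap each channel with an SSL engine here.
            return () -> "tls-wrapped-http-channel";
        }

        public static void main(String[] args) {
            new SecurityHttpTransport().start(); // prints "opened tls-wrapped-http-channel"
        }
    }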
---
 .../transport/netty4/Netty4TcpChannel.java    |   2 +-
 .../http/nio/HttpReadWriteHandler.java        |   4 +-
 .../http/nio/NioHttpChannel.java              |   2 +-
 .../http/nio/NioHttpServerChannel.java        |   3 +-
 .../http/nio/NioHttpServerTransport.java      |  27 ++-
 .../xpack/security/Security.java              |  21 +-
 .../security/rest/SecurityRestFilter.java     |  10 +-
 .../security/transport/SSLEngineUtils.java    |  93 ++++++++
 .../SecurityHttpExceptionHandler.java         |  64 ++++++
 .../transport/SecurityHttpSettings.java       |  22 ++
 .../transport/ServerTransportFilter.java      |  46 +---
 .../SecurityNetty4HttpServerTransport.java    |  47 +---
 .../security/transport/nio/NioIPFilter.java   |  32 +++
 .../transport/nio/SSLChannelContext.java      |   5 +
 .../security/transport/nio/SSLDriver.java     |   4 +
 .../nio/SecurityNioHttpServerTransport.java   | 132 +++++++++++
 .../transport/nio/SecurityNioTransport.java   |  19 +-
 .../test/SecurityIntegTestCase.java           |   1 +
 .../test/SecuritySettingsSource.java          |   1 +
 .../transport/SecurityHttpSettingsTests.java  |  44 ++++
 ...ecurityNetty4HttpServerTransportTests.java |  29 ---
 .../transport/nio/NioIPFilterTests.java       |  91 ++++++++
 .../SecurityNioHttpServerTransportTests.java  | 207 ++++++++++++++++++
 23 files changed, 750 insertions(+), 156 deletions(-)
 create mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/SSLEngineUtils.java
 create mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/SecurityHttpExceptionHandler.java
 create mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/SecurityHttpSettings.java
 create mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/NioIPFilter.java
 create mode 100644 x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SecurityNioHttpServerTransport.java
 create mode 100644 x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/SecurityHttpSettingsTests.java
 create mode 100644 x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/NioIPFilterTests.java
 create mode 100644 x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SecurityNioHttpServerTransportTests.java

diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4TcpChannel.java b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4TcpChannel.java
index 78a1425500072..51821c73329ca 100644
--- a/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4TcpChannel.java
+++ b/modules/transport-netty4/src/main/java/org/elasticsearch/transport/netty4/Netty4TcpChannel.java
@@ -112,7 +112,7 @@ public void sendMessage(BytesReference reference, ActionListener<Void> listener)
         }
     }
 
-    public Channel getLowLevelChannel() {
+    public Channel getNettyChannel() {
         return channel;
     }
 
diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/HttpReadWriteHandler.java b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/HttpReadWriteHandler.java
index ad81719ebcbb9..3dcd59cf8e28c 100644
--- a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/HttpReadWriteHandler.java
+++ b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/HttpReadWriteHandler.java
@@ -51,8 +51,8 @@ public class HttpReadWriteHandler implements ReadWriteHandler {
     private final NioHttpChannel nioHttpChannel;
     private final NioHttpServerTransport transport;
 
-    HttpReadWriteHandler(NioHttpChannel nioHttpChannel, NioHttpServerTransport transport, HttpHandlingSettings settings,
-                         NioCorsConfig corsConfig) {
+    public HttpReadWriteHandler(NioHttpChannel nioHttpChannel, NioHttpServerTransport transport, HttpHandlingSettings settings,
+                                NioCorsConfig corsConfig) {
         this.nioHttpChannel = nioHttpChannel;
         this.transport = transport;
 
diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpChannel.java b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpChannel.java
index 0a797a5687ec7..1a4c5f14c91da 100644
--- a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpChannel.java
+++ b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpChannel.java
@@ -28,7 +28,7 @@
 
 public class NioHttpChannel extends NioSocketChannel implements HttpChannel {
 
-    NioHttpChannel(SocketChannel socketChannel) {
+    public NioHttpChannel(SocketChannel socketChannel) {
         super(socketChannel);
     }
 
diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpServerChannel.java b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpServerChannel.java
index 2674d38dc490e..d72376da5c03b 100644
--- a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpServerChannel.java
+++ b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpServerChannel.java
@@ -23,12 +23,11 @@
 import org.elasticsearch.http.HttpServerChannel;
 import org.elasticsearch.nio.NioServerSocketChannel;
 
-import java.io.IOException;
 import java.nio.channels.ServerSocketChannel;
 
 public class NioHttpServerChannel extends NioServerSocketChannel implements HttpServerChannel {
 
-    NioHttpServerChannel(ServerSocketChannel serverSocketChannel) throws IOException {
+    public NioHttpServerChannel(ServerSocketChannel serverSocketChannel) {
         super(serverSocketChannel);
     }
 
diff --git a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpServerTransport.java b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpServerTransport.java
index b80778e964293..9c672c1caf15a 100644
--- a/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpServerTransport.java
+++ b/plugins/transport-nio/src/main/java/org/elasticsearch/http/nio/NioHttpServerTransport.java
@@ -35,7 +35,6 @@
 import org.elasticsearch.http.AbstractHttpServerTransport;
 import org.elasticsearch.http.HttpChannel;
 import org.elasticsearch.http.HttpServerChannel;
-import org.elasticsearch.http.HttpServerTransport;
 import org.elasticsearch.http.nio.cors.NioCorsConfig;
 import org.elasticsearch.http.nio.cors.NioCorsConfigBuilder;
 import org.elasticsearch.nio.BytesChannelContext;
@@ -87,21 +86,21 @@ public class NioHttpServerTransport extends AbstractHttpServerTransport {
             (s) -> Integer.toString(EsExecutors.numberOfProcessors(s) * 2),
             (s) -> Setting.parseInt(s, 1, "http.nio.worker_count"), Setting.Property.NodeScope);
 
-    private final PageCacheRecycler pageCacheRecycler;
+    protected final PageCacheRecycler pageCacheRecycler;
+    protected final NioCorsConfig corsConfig;
 
-    private final boolean tcpNoDelay;
-    private final boolean tcpKeepAlive;
-    private final boolean reuseAddress;
-    private final int tcpSendBufferSize;
-    private final int tcpReceiveBufferSize;
+    protected final boolean tcpNoDelay;
+    protected final boolean tcpKeepAlive;
+    protected final boolean reuseAddress;
+    protected final int tcpSendBufferSize;
+    protected final int tcpReceiveBufferSize;
 
     private NioGroup nioGroup;
-    private HttpChannelFactory channelFactory;
-    private final NioCorsConfig corsConfig;
+    private ChannelFactory<NioHttpServerChannel, NioHttpChannel> channelFactory;
 
     public NioHttpServerTransport(Settings settings, NetworkService networkService, BigArrays bigArrays,
                                   PageCacheRecycler pageCacheRecycler, ThreadPool threadPool, NamedXContentRegistry xContentRegistry,
-                                  HttpServerTransport.Dispatcher dispatcher) {
+                                  Dispatcher dispatcher) {
         super(settings, networkService, bigArrays, threadPool, xContentRegistry, dispatcher);
         this.pageCacheRecycler = pageCacheRecycler;
 
@@ -136,7 +135,7 @@ protected void doStart() {
             nioGroup = new NioGroup(daemonThreadFactory(this.settings, HTTP_SERVER_ACCEPTOR_THREAD_NAME_PREFIX), acceptorCount,
                 daemonThreadFactory(this.settings, HTTP_SERVER_WORKER_THREAD_NAME_PREFIX), workerCount,
                 (s) -> new EventHandler(this::onNonChannelException, s));
-            channelFactory = new HttpChannelFactory();
+            channelFactory = channelFactory();
             bindServer();
             success = true;
         } catch (IOException e) {
@@ -162,6 +161,10 @@ protected HttpServerChannel bind(InetSocketAddress socketAddress) throws IOExcep
         return nioGroup.bindServerChannel(socketAddress, channelFactory);
     }
 
+    protected ChannelFactory<NioHttpServerChannel, NioHttpChannel> channelFactory() {
+        return new HttpChannelFactory();
+    }
+
     static NioCorsConfig buildCorsConfig(Settings settings) {
         if (SETTING_CORS_ENABLED.get(settings) == false) {
             return NioCorsConfigBuilder.forOrigins().disable().build();
@@ -194,7 +197,7 @@ static NioCorsConfig buildCorsConfig(Settings settings) {
             .build();
     }
 
-    private void acceptChannel(NioSocketChannel socketChannel) {
+    protected void acceptChannel(NioSocketChannel socketChannel) {
         super.serverAcceptedChannel((HttpChannel) socketChannel);
     }
 
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java
index 26ec50c0eb3c4..3115c08a9469d 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/Security.java
@@ -200,11 +200,13 @@
 import org.elasticsearch.xpack.security.rest.action.user.RestPutUserAction;
 import org.elasticsearch.xpack.security.rest.action.user.RestSetEnabledAction;
 import org.elasticsearch.xpack.security.support.SecurityIndexManager;
+import org.elasticsearch.xpack.security.transport.SecurityHttpSettings;
 import org.elasticsearch.xpack.security.transport.SecurityServerTransportInterceptor;
 import org.elasticsearch.xpack.security.transport.filter.IPFilter;
 import org.elasticsearch.xpack.security.transport.netty4.SecurityNetty4HttpServerTransport;
 import org.elasticsearch.xpack.security.transport.netty4.SecurityNetty4ServerTransport;
 import org.elasticsearch.xpack.core.template.TemplateUtils;
+import org.elasticsearch.xpack.security.transport.nio.SecurityNioHttpServerTransport;
 import org.elasticsearch.xpack.security.transport.nio.SecurityNioTransport;
 import org.joda.time.DateTime;
 import org.joda.time.DateTimeZone;
@@ -511,21 +513,22 @@ static Settings additionalSettings(final Settings settings, final boolean enable
 
             if (NetworkModule.HTTP_TYPE_SETTING.exists(settings)) {
                 final String httpType = NetworkModule.HTTP_TYPE_SETTING.get(settings);
-                if (httpType.equals(SecurityField.NAME4)) {
-                    SecurityNetty4HttpServerTransport.overrideSettings(builder, settings);
+                if (httpType.equals(SecurityField.NAME4) || httpType.equals(SecurityField.NIO)) {
+                    SecurityHttpSettings.overrideSettings(builder, settings);
                 } else {
                     final String message = String.format(
                             Locale.ROOT,
-                            "http type setting [%s] must be [%s] but is [%s]",
+                            "http type setting [%s] must be [%s] or [%s] but is [%s]",
                             NetworkModule.HTTP_TYPE_KEY,
                             SecurityField.NAME4,
+                            SecurityField.NIO,
                             httpType);
                     throw new IllegalArgumentException(message);
                 }
             } else {
                 // default to security4
                 builder.put(NetworkModule.HTTP_TYPE_KEY, SecurityField.NAME4);
-                SecurityNetty4HttpServerTransport.overrideSettings(builder, settings);
+                SecurityHttpSettings.overrideSettings(builder, settings);
             }
             builder.put(SecuritySettings.addUserSettings(settings));
             return builder.build();
@@ -869,8 +872,14 @@ public Map<String, Supplier<HttpServerTransport>> getHttpTransports(Settings set
         if (enabled == false) { // don't register anything if we are not enabled
             return Collections.emptyMap();
         }
-        return Collections.singletonMap(SecurityField.NAME4, () -> new SecurityNetty4HttpServerTransport(settings,
-                networkService, bigArrays, ipFilter.get(), getSslService(), threadPool, xContentRegistry, dispatcher));
+
+        Map<String, Supplier<HttpServerTransport>> httpTransports = new HashMap<>();
+        httpTransports.put(SecurityField.NAME4, () -> new SecurityNetty4HttpServerTransport(settings, networkService, bigArrays,
+            ipFilter.get(), getSslService(), threadPool, xContentRegistry, dispatcher));
+        httpTransports.put(SecurityField.NIO, () -> new SecurityNioHttpServerTransport(settings, networkService, bigArrays,
+            pageCacheRecycler, threadPool, xContentRegistry, dispatcher, ipFilter.get(), getSslService()));
+
+        return httpTransports;
     }
 
     @Override
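With both implementations registered under their own keys, a node picks between them through the regular http.type setting, which additionalSettings() above now validates against either value. A sketch of opting into the nio flavor explicitly (the sketch class is illustrative; the constants are the ones referenced in the diff):

    import org.elasticsearch.common.network.NetworkModule;
    import org.elasticsearch.common.settings.Settings;
    import org.elasticsearch.xpack.core.security.SecurityField;

    final class HttpTypeSelectionSketch {
        // Equivalent to setting http.type in elasticsearch.yml; when nothing is
        // configured, additionalSettings() keeps SecurityField.NAME4 as the default.
        static Settings nioHttpSettings() {
            return Settings.builder()
                .put(NetworkModule.HTTP_TYPE_KEY, SecurityField.NIO)
                .build();
        }
    }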
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/SecurityRestFilter.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/SecurityRestFilter.java
index 9109bb37e8c41..8d304302e03ee 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/SecurityRestFilter.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/rest/SecurityRestFilter.java
@@ -5,8 +5,6 @@
  */
 package org.elasticsearch.xpack.security.rest;
 
-import io.netty.channel.Channel;
-import io.netty.handler.ssl.SslHandler;
 import org.apache.logging.log4j.Logger;
 import org.apache.logging.log4j.message.ParameterizedMessage;
 import org.apache.logging.log4j.util.Supplier;
@@ -15,7 +13,6 @@
 import org.elasticsearch.common.logging.ESLoggerFactory;
 import org.elasticsearch.common.util.concurrent.ThreadContext;
 import org.elasticsearch.http.HttpChannel;
-import org.elasticsearch.http.netty4.Netty4HttpChannel;
 import org.elasticsearch.license.XPackLicenseState;
 import org.elasticsearch.rest.BytesRestResponse;
 import org.elasticsearch.rest.RestChannel;
@@ -24,7 +21,7 @@
 import org.elasticsearch.rest.RestRequest.Method;
 import org.elasticsearch.xpack.core.security.rest.RestRequestFilter;
 import org.elasticsearch.xpack.security.authc.AuthenticationService;
-import org.elasticsearch.xpack.security.transport.ServerTransportFilter;
+import org.elasticsearch.xpack.security.transport.SSLEngineUtils;
 
 import java.io.IOException;
 
@@ -53,10 +50,7 @@ public void handleRequest(RestRequest request, RestChannel channel, NodeClient c
             // CORS - allow for preflight unauthenticated OPTIONS request
             if (extractClientCertificate) {
                 HttpChannel httpChannel = request.getHttpChannel();
-                Channel nettyChannel = ((Netty4HttpChannel) httpChannel).getNettyChannel();
-                SslHandler handler = nettyChannel.pipeline().get(SslHandler.class);
-                assert handler != null;
-                ServerTransportFilter.extractClientCertificates(logger, threadContext, handler.engine(), nettyChannel);
+                SSLEngineUtils.extractClientCertificates(logger, threadContext, httpChannel);
             }
             service.authenticate(maybeWrapRestRequest(request), ActionListener.wrap(
                 authentication -> {
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/SSLEngineUtils.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/SSLEngineUtils.java
new file mode 100644
index 0000000000000..5bbcbaa050917
--- /dev/null
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/SSLEngineUtils.java
@@ -0,0 +1,93 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.security.transport;
+
+import io.netty.channel.Channel;
+import io.netty.handler.ssl.SslHandler;
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.message.ParameterizedMessage;
+import org.apache.logging.log4j.util.Supplier;
+import org.elasticsearch.common.util.concurrent.ThreadContext;
+import org.elasticsearch.http.HttpChannel;
+import org.elasticsearch.http.netty4.Netty4HttpChannel;
+import org.elasticsearch.http.nio.NioHttpChannel;
+import org.elasticsearch.nio.SocketChannelContext;
+import org.elasticsearch.transport.TcpChannel;
+import org.elasticsearch.transport.netty4.Netty4TcpChannel;
+import org.elasticsearch.transport.nio.NioTcpChannel;
+import org.elasticsearch.xpack.security.authc.pki.PkiRealm;
+import org.elasticsearch.xpack.security.transport.nio.SSLChannelContext;
+
+import javax.net.ssl.SSLEngine;
+import javax.net.ssl.SSLPeerUnverifiedException;
+import java.security.cert.Certificate;
+import java.security.cert.X509Certificate;
+
+public class SSLEngineUtils {
+
+    private SSLEngineUtils() {}
+
+    public static void extractClientCertificates(Logger logger, ThreadContext threadContext, HttpChannel httpChannel) {
+        SSLEngine sslEngine = getSSLEngine(httpChannel);
+        extract(logger, threadContext, sslEngine, httpChannel);
+    }
+
+    public static void extractClientCertificates(Logger logger, ThreadContext threadContext, TcpChannel tcpChannel) {
+        SSLEngine sslEngine = getSSLEngine(tcpChannel);
+        extract(logger, threadContext, sslEngine, tcpChannel);
+    }
+
+    public static SSLEngine getSSLEngine(HttpChannel httpChannel) {
+        if (httpChannel instanceof Netty4HttpChannel) {
+            Channel nettyChannel = ((Netty4HttpChannel) httpChannel).getNettyChannel();
+            SslHandler handler = nettyChannel.pipeline().get(SslHandler.class);
+            assert handler != null : "Must have SslHandler";
+            return handler.engine();
+        } else if (httpChannel instanceof NioHttpChannel) {
+            SocketChannelContext context = ((NioHttpChannel) httpChannel).getContext();
+            assert context instanceof SSLChannelContext : "Must be SSLChannelContext.class, found: " + context.getClass();
+            return ((SSLChannelContext) context).getSSLEngine();
+        } else {
+            throw new AssertionError("Unknown channel class type: " + httpChannel.getClass());
+        }
+    }
+
+    public static SSLEngine getSSLEngine(TcpChannel tcpChannel) {
+        if (tcpChannel instanceof Netty4TcpChannel) {
+            Channel nettyChannel = ((Netty4TcpChannel) tcpChannel).getNettyChannel();
+            SslHandler handler = nettyChannel.pipeline().get(SslHandler.class);
+            assert handler != null : "Must have SslHandler";
+            return handler.engine();
+        } else if (tcpChannel instanceof NioTcpChannel) {
+            SocketChannelContext context = ((NioTcpChannel) tcpChannel).getContext();
+            assert context instanceof SSLChannelContext : "Must be SSLChannelContext.class, found: " + context.getClass();
+            return ((SSLChannelContext) context).getSSLEngine();
+        } else {
+            throw new AssertionError("Unknown channel class type: " + tcpChannel.getClass());
+        }
+    }
+
+    private static void extract(Logger logger, ThreadContext threadContext, SSLEngine sslEngine, Object channel) {
+        try {
+            Certificate[] certs = sslEngine.getSession().getPeerCertificates();
+            if (certs instanceof X509Certificate[]) {
+                threadContext.putTransient(PkiRealm.PKI_CERT_HEADER_NAME, certs);
+            }
+        } catch (SSLPeerUnverifiedException e) {
+            // this happens when client authentication is optional and the client does not provide credentials. If client
+            // authentication was required then this connection should be closed before ever getting into this class
+            assert sslEngine.getNeedClientAuth() == false;
+            assert sslEngine.getWantClientAuth();
+            if (logger.isTraceEnabled()) {
+                logger.trace(
+                    (Supplier<?>) () -> new ParameterizedMessage(
+                        "SSL Peer did not present a certificate on channel [{}]", channel), e);
+            } else if (logger.isDebugEnabled()) {
+                logger.debug("SSL Peer did not present a certificate on channel [{}]", channel);
+            }
+        }
+    }
+}
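SSLEngineUtils centralizes the engine lookup that SecurityRestFilter and ServerTransportFilter previously performed inline against netty4 types only: one entry point now resolves the SSLEngine behind either a Netty4 or an nio channel and stashes the peer chain for the PKI realm. A usage sketch mirroring the new SecurityRestFilter call site (the wrapper class is illustrative):

    import org.apache.logging.log4j.Logger;
    import org.elasticsearch.common.util.concurrent.ThreadContext;
    import org.elasticsearch.http.HttpChannel;
    import org.elasticsearch.xpack.security.transport.SSLEngineUtils;

    final class ClientCertExtractionSketch {
        // Before authenticating a TLS request, surface the client certificate
        // chain; the PKI realm later reads it from the thread context under
        // PkiRealm.PKI_CERT_HEADER_NAME.
        static void onTlsRequest(Logger logger, ThreadContext threadContext, HttpChannel httpChannel) {
            SSLEngineUtils.extractClientCertificates(logger, threadContext, httpChannel);
        }
    }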
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/SecurityHttpExceptionHandler.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/SecurityHttpExceptionHandler.java
new file mode 100644
index 0000000000000..c1999c5ddfba2
--- /dev/null
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/SecurityHttpExceptionHandler.java
@@ -0,0 +1,64 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.security.transport;
+
+import org.apache.logging.log4j.Logger;
+import org.apache.logging.log4j.message.ParameterizedMessage;
+import org.elasticsearch.common.component.Lifecycle;
+import org.elasticsearch.common.network.CloseableChannel;
+import org.elasticsearch.http.HttpChannel;
+
+import java.util.function.BiConsumer;
+
+import static org.elasticsearch.xpack.core.security.transport.SSLExceptionHelper.isCloseDuringHandshakeException;
+import static org.elasticsearch.xpack.core.security.transport.SSLExceptionHelper.isNotSslRecordException;
+import static org.elasticsearch.xpack.core.security.transport.SSLExceptionHelper.isReceivedCertificateUnknownException;
+
+public final class SecurityHttpExceptionHandler implements BiConsumer<HttpChannel, Exception> {
+
+    private final Lifecycle lifecycle;
+    private final Logger logger;
+    private final BiConsumer<HttpChannel, Exception> fallback;
+
+    public SecurityHttpExceptionHandler(Logger logger, Lifecycle lifecycle, BiConsumer<HttpChannel, Exception> fallback) {
+        this.lifecycle = lifecycle;
+        this.logger = logger;
+        this.fallback = fallback;
+    }
+
+    public void accept(HttpChannel channel, Exception e) {
+        if (!lifecycle.started()) {
+            return;
+        }
+
+        if (isNotSslRecordException(e)) {
+            if (logger.isTraceEnabled()) {
+                logger.trace(new ParameterizedMessage("received plaintext http traffic on a https channel, closing connection {}",
+                    channel), e);
+            } else {
+                logger.warn("received plaintext http traffic on a https channel, closing connection {}", channel);
+            }
+            CloseableChannel.closeChannel(channel);
+        } else if (isCloseDuringHandshakeException(e)) {
+            if (logger.isTraceEnabled()) {
+                logger.trace(new ParameterizedMessage("connection {} closed during ssl handshake", channel), e);
+            } else {
+                logger.warn("connection {} closed during ssl handshake", channel);
+            }
+            CloseableChannel.closeChannel(channel);
+        } else if (isReceivedCertificateUnknownException(e)) {
+            if (logger.isTraceEnabled()) {
+                logger.trace(new ParameterizedMessage("http client did not trust this server's certificate, closing connection {}",
+                    channel), e);
+            } else {
+                logger.warn("http client did not trust this server's certificate, closing connection {}", channel);
+            }
+            CloseableChannel.closeChannel(channel);
+        } else {
+            fallback.accept(channel, e);
+        }
+    }
+}
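The handler folds the three SSL-specific close paths (plaintext on an https channel, close during handshake, untrusted server certificate) into one BiConsumer that both HTTP transports can share; anything unrecognized goes to the injected fallback. A standalone sketch (in the transports themselves the fallback delegates to the superclass onException):

    import org.apache.logging.log4j.Logger;
    import org.elasticsearch.common.component.Lifecycle;
    import org.elasticsearch.common.network.CloseableChannel;
    import org.elasticsearch.http.HttpChannel;
    import org.elasticsearch.xpack.security.transport.SecurityHttpExceptionHandler;

    import java.util.function.BiConsumer;

    final class HttpExceptionHandlingSketch {
        static void handle(Logger logger, Lifecycle lifecycle, HttpChannel channel, Exception e) {
            // Fallback for non-SSL failures; illustrative only, the real transports
            // route these to their superclass onException instead.
            BiConsumer<HttpChannel, Exception> fallback = (ch, ex) -> CloseableChannel.closeChannel(ch);
            new SecurityHttpExceptionHandler(logger, lifecycle, fallback).accept(channel, e);
        }
    }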
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/SecurityHttpSettings.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/SecurityHttpSettings.java
new file mode 100644
index 0000000000000..f8079535acf99
--- /dev/null
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/SecurityHttpSettings.java
@@ -0,0 +1,22 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.security.transport;
+
+import org.elasticsearch.common.settings.Settings;
+
+import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_COMPRESSION;
+import static org.elasticsearch.xpack.core.XPackSettings.HTTP_SSL_ENABLED;
+
+public final class SecurityHttpSettings {
+
+    private SecurityHttpSettings() {}
+
+    public static void overrideSettings(Settings.Builder settingsBuilder, Settings settings) {
+        if (HTTP_SSL_ENABLED.get(settings) && SETTING_HTTP_COMPRESSION.exists(settings) == false) {
+            settingsBuilder.put(SETTING_HTTP_COMPRESSION.getKey(), false);
+        }
+    }
+}
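The override logic moves here unchanged from the netty4 transport: when TLS protects the HTTP layer and the operator has not set http.compression explicitly, compression defaults to off (a common precaution against compression-over-TLS attacks such as BREACH), while an explicit value always wins. A worked sketch of the cases, mirroring the tests added below:

    import org.elasticsearch.common.settings.Settings;
    import org.elasticsearch.xpack.core.XPackSettings;
    import org.elasticsearch.xpack.security.transport.SecurityHttpSettings;

    final class CompressionOverrideSketch {
        static Settings overridesFor(boolean sslEnabled) {
            Settings node = Settings.builder()
                .put(XPackSettings.HTTP_SSL_ENABLED.getKey(), sslEnabled)
                .build();
            Settings.Builder overrides = Settings.builder();
            SecurityHttpSettings.overrideSettings(overrides, node);
            // sslEnabled == true  -> overrides now carries http.compression=false
            // sslEnabled == false -> overrides stays empty, as it also does when
            //                        http.compression was set explicitly on the node
            return overrides.build();
        }
    }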
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/ServerTransportFilter.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/ServerTransportFilter.java
index 9427812ba1349..2f0c40c1fdd16 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/ServerTransportFilter.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/ServerTransportFilter.java
@@ -5,11 +5,7 @@
  */
 package org.elasticsearch.xpack.security.transport;
 
-import io.netty.channel.Channel;
-import io.netty.handler.ssl.SslHandler;
 import org.apache.logging.log4j.Logger;
-import org.apache.logging.log4j.message.ParameterizedMessage;
-import org.apache.logging.log4j.util.Supplier;
 import org.elasticsearch.Version;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.action.IndicesRequest;
@@ -20,11 +16,13 @@
 import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.common.util.concurrent.ThreadContext;
 import org.elasticsearch.transport.TaskTransportChannel;
+import org.elasticsearch.transport.TcpChannel;
 import org.elasticsearch.transport.TcpTransportChannel;
 import org.elasticsearch.transport.TransportChannel;
 import org.elasticsearch.transport.TransportRequest;
 import org.elasticsearch.transport.TransportService;
 import org.elasticsearch.transport.netty4.Netty4TcpChannel;
+import org.elasticsearch.transport.nio.NioTcpChannel;
 import org.elasticsearch.xpack.core.security.SecurityContext;
 import org.elasticsearch.xpack.core.security.authc.Authentication;
 import org.elasticsearch.xpack.core.security.user.KibanaUser;
@@ -32,16 +30,10 @@
 import org.elasticsearch.xpack.core.security.user.User;
 import org.elasticsearch.xpack.security.action.SecurityActionMapper;
 import org.elasticsearch.xpack.security.authc.AuthenticationService;
-import org.elasticsearch.xpack.security.authc.pki.PkiRealm;
 import org.elasticsearch.xpack.security.authz.AuthorizationService;
 import org.elasticsearch.xpack.security.authz.AuthorizationUtils;
 
-import javax.net.ssl.SSLEngine;
-import javax.net.ssl.SSLPeerUnverifiedException;
-
 import java.io.IOException;
-import java.security.cert.Certificate;
-import java.security.cert.X509Certificate;
 
 import static org.elasticsearch.xpack.core.security.support.Exceptions.authenticationError;
 
@@ -115,13 +107,12 @@ requests from all the nodes are attached with a user (either a serialize
                 unwrappedChannel = ((TaskTransportChannel) unwrappedChannel).getChannel();
             }
 
-            if (extractClientCert && (unwrappedChannel instanceof TcpTransportChannel) &&
-                ((TcpTransportChannel) unwrappedChannel).getChannel() instanceof Netty4TcpChannel) {
-                Channel channel = ((Netty4TcpChannel) ((TcpTransportChannel) unwrappedChannel).getChannel()).getLowLevelChannel();
-                SslHandler sslHandler = channel.pipeline().get(SslHandler.class);
-                if (channel.isOpen()) {
-                    assert sslHandler != null : "channel [" + channel + "] did not have a ssl handler. pipeline " + channel.pipeline();
-                    extractClientCertificates(logger, threadContext, sslHandler.engine(), channel);
+            if (extractClientCert && (unwrappedChannel instanceof TcpTransportChannel)) {
+                TcpChannel tcpChannel = ((TcpTransportChannel) unwrappedChannel).getChannel();
+                if (tcpChannel instanceof Netty4TcpChannel || tcpChannel instanceof NioTcpChannel) {
+                    if (tcpChannel.isOpen()) {
+                        SSLEngineUtils.extractClientCertificates(logger, threadContext, tcpChannel);
+                    }
                 }
             }
 
@@ -172,27 +163,6 @@ private void executeAsCurrentVersionKibanaUser(String securityAction, TransportR
         }
     }
 
-    static void extractClientCertificates(Logger logger, ThreadContext threadContext, SSLEngine sslEngine, Channel channel) {
-        try {
-            Certificate[] certs = sslEngine.getSession().getPeerCertificates();
-            if (certs instanceof X509Certificate[]) {
-                threadContext.putTransient(PkiRealm.PKI_CERT_HEADER_NAME, certs);
-            }
-        } catch (SSLPeerUnverifiedException e) {
-            // this happens when client authentication is optional and the client does not provide credentials. If client
-            // authentication was required then this connection should be closed before ever getting into this class
-            assert sslEngine.getNeedClientAuth() == false;
-            assert sslEngine.getWantClientAuth();
-            if (logger.isTraceEnabled()) {
-                logger.trace(
-                        (Supplier<?>) () -> new ParameterizedMessage(
-                                "SSL Peer did not present a certificate on channel [{}]", channel), e);
-            } else if (logger.isDebugEnabled()) {
-                logger.debug("SSL Peer did not present a certificate on channel [{}]", channel);
-            }
-        }
-    }
-
     /**
      * A server transport filter rejects internal calls, which should be used on connections
      * where only clients connect to. This ensures that no client can send any internal actions
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransport.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransport.java
index d7a609f6f14ba..a728467f8bde7 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransport.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransport.java
@@ -8,8 +8,6 @@
 import io.netty.channel.Channel;
 import io.netty.channel.ChannelHandler;
 import io.netty.handler.ssl.SslHandler;
-import org.apache.logging.log4j.message.ParameterizedMessage;
-import org.elasticsearch.common.network.CloseableChannel;
 import org.elasticsearch.common.network.NetworkService;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.util.BigArrays;
@@ -19,18 +17,16 @@
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.xpack.core.ssl.SSLConfiguration;
 import org.elasticsearch.xpack.core.ssl.SSLService;
+import org.elasticsearch.xpack.security.transport.SecurityHttpExceptionHandler;
 import org.elasticsearch.xpack.security.transport.filter.IPFilter;
 
 import javax.net.ssl.SSLEngine;
 
-import static org.elasticsearch.http.HttpTransportSettings.SETTING_HTTP_COMPRESSION;
 import static org.elasticsearch.xpack.core.XPackSettings.HTTP_SSL_ENABLED;
-import static org.elasticsearch.xpack.core.security.transport.SSLExceptionHelper.isCloseDuringHandshakeException;
-import static org.elasticsearch.xpack.core.security.transport.SSLExceptionHelper.isNotSslRecordException;
-import static org.elasticsearch.xpack.core.security.transport.SSLExceptionHelper.isReceivedCertificateUnknownException;
 
 public class SecurityNetty4HttpServerTransport extends Netty4HttpServerTransport {
 
+    private final SecurityHttpExceptionHandler securityExceptionHandler;
     private final IPFilter ipFilter;
     private final SSLService sslService;
     private final SSLConfiguration sslConfiguration;
@@ -39,6 +35,7 @@ public SecurityNetty4HttpServerTransport(Settings settings, NetworkService netwo
                                              SSLService sslService, ThreadPool threadPool, NamedXContentRegistry xContentRegistry,
                                              Dispatcher dispatcher) {
         super(settings, networkService, bigArrays, threadPool, xContentRegistry, dispatcher);
+        this.securityExceptionHandler = new SecurityHttpExceptionHandler(logger, lifecycle, (c, e) -> super.onException(c, e));
         this.ipFilter = ipFilter;
         final boolean ssl = HTTP_SSL_ENABLED.get(settings);
         this.sslService = sslService;
@@ -51,41 +48,11 @@ public SecurityNetty4HttpServerTransport(Settings settings, NetworkService netwo
         } else {
             this.sslConfiguration = null;
         }
-
     }
 
     @Override
     protected void onException(HttpChannel channel, Exception e) {
-        if (!lifecycle.started()) {
-            return;
-        }
-
-        if (isNotSslRecordException(e)) {
-            if (logger.isTraceEnabled()) {
-                logger.trace(new ParameterizedMessage("received plaintext http traffic on a https channel, closing connection {}",
-                    channel), e);
-            } else {
-                logger.warn("received plaintext http traffic on a https channel, closing connection {}", channel);
-            }
-            CloseableChannel.closeChannel(channel);
-        } else if (isCloseDuringHandshakeException(e)) {
-            if (logger.isTraceEnabled()) {
-                logger.trace(new ParameterizedMessage("connection {} closed during ssl handshake", channel), e);
-            } else {
-                logger.warn("connection {} closed during ssl handshake", channel);
-            }
-            CloseableChannel.closeChannel(channel);
-        } else if (isReceivedCertificateUnknownException(e)) {
-            if (logger.isTraceEnabled()) {
-                logger.trace(new ParameterizedMessage("http client did not trust server's certificate, closing connection {}",
-                    channel), e);
-            } else {
-                logger.warn("http client did not trust this server's certificate, closing connection {}", channel);
-            }
-            CloseableChannel.closeChannel(channel);
-        } else {
-            super.onException(channel, e);
-        }
+        securityExceptionHandler.accept(channel, e);
     }
 
     @Override
@@ -115,10 +82,4 @@ protected void initChannel(Channel ch) throws Exception {
             ch.pipeline().addFirst("ip_filter", new IpFilterRemoteAddressFilter(ipFilter, IPFilter.HTTP_PROFILE_NAME));
         }
     }
-
-    public static void overrideSettings(Settings.Builder settingsBuilder, Settings settings) {
-        if (HTTP_SSL_ENABLED.get(settings) && SETTING_HTTP_COMPRESSION.exists(settings) == false) {
-            settingsBuilder.put(SETTING_HTTP_COMPRESSION.getKey(), false);
-        }
-    }
 }
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/NioIPFilter.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/NioIPFilter.java
new file mode 100644
index 0000000000000..afb13ceff2edd
--- /dev/null
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/NioIPFilter.java
@@ -0,0 +1,32 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.security.transport.nio;
+
+import org.elasticsearch.common.Nullable;
+import org.elasticsearch.nio.NioSocketChannel;
+import org.elasticsearch.xpack.security.transport.filter.IPFilter;
+
+import java.util.function.Predicate;
+
+public final class NioIPFilter implements Predicate<NioSocketChannel> {
+
+    private final IPFilter filter;
+    private final String profile;
+
+    NioIPFilter(@Nullable IPFilter filter, String profile) {
+        this.filter = filter;
+        this.profile = profile;
+    }
+
+    @Override
+    public boolean test(NioSocketChannel nioChannel) {
+        if (filter != null) {
+            return filter.accept(profile, nioChannel.getRemoteAddress());
+        } else {
+            return true;
+        }
+    }
+}
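NioIPFilter adapts IPFilter to the Predicate shape that nio channel contexts evaluate on accept, replacing the private validateChannel method SecurityNioTransport had defined for the transport side only; a null IPFilter (filtering disabled) accepts every connection. A usage sketch (placed in the same package as the filter, since the constructor is package-private):

    package org.elasticsearch.xpack.security.transport.nio;

    import org.elasticsearch.nio.NioSocketChannel;
    import org.elasticsearch.xpack.security.transport.filter.IPFilter;

    final class NioIpFilterSketch {
        // HTTP channels are checked against IPFilter.HTTP_PROFILE_NAME; transport
        // channels pass their TCP profile name instead.
        static boolean accepted(IPFilter ipFilter, NioSocketChannel channel) {
            return new NioIPFilter(ipFilter, IPFilter.HTTP_PROFILE_NAME).test(channel);
        }
    }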
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SSLChannelContext.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SSLChannelContext.java
index da348ea1f78e1..c83bd16ca95e1 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SSLChannelContext.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SSLChannelContext.java
@@ -14,6 +14,7 @@
 import org.elasticsearch.nio.NioSelector;
 import org.elasticsearch.nio.WriteOperation;
 
+import javax.net.ssl.SSLEngine;
 import java.io.IOException;
 import java.util.function.BiConsumer;
 import java.util.function.Consumer;
@@ -164,6 +165,10 @@ public void closeFromSelector() throws IOException {
         }
     }
 
+    public SSLEngine getSSLEngine() {
+        return sslDriver.getSSLEngine();
+    }
+
     private static class CloseNotifyOperation implements WriteOperation {
 
         private static final BiConsumer<Void, Throwable> LISTENER = (v, t) -> {};
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SSLDriver.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SSLDriver.java
index 4080574713cce..382230684c77f 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SSLDriver.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SSLDriver.java
@@ -96,6 +96,10 @@ public void renegotiate() throws SSLException {
         }
     }
 
+    public SSLEngine getSSLEngine() {
+        return engine;
+    }
+
     public boolean hasFlushPending() {
         return networkWriteBuffer.hasRemaining();
     }
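Together with SSLChannelContext.getSSLEngine() above, this getter completes the nio-side chain that SSLEngineUtils walks: channel, to channel context, to driver, to engine. A sketch of that chain in isolation (assuming the channel carries an SSLChannelContext):

    import org.elasticsearch.http.nio.NioHttpChannel;
    import org.elasticsearch.nio.SocketChannelContext;
    import org.elasticsearch.xpack.security.transport.nio.SSLChannelContext;

    import javax.net.ssl.SSLEngine;

    final class NioSslLookupSketch {
        static SSLEngine sslEngineOf(NioHttpChannel channel) {
            SocketChannelContext context = channel.getContext();
            // valid only for channels built with TLS enabled, as SSLEngineUtils asserts
            return ((SSLChannelContext) context).getSSLEngine();
        }
    }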
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SecurityNioHttpServerTransport.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SecurityNioHttpServerTransport.java
new file mode 100644
index 0000000000000..006c78b4ae0de
--- /dev/null
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SecurityNioHttpServerTransport.java
@@ -0,0 +1,132 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.security.transport.nio;
+
+import org.elasticsearch.common.network.NetworkService;
+import org.elasticsearch.common.recycler.Recycler;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.util.BigArrays;
+import org.elasticsearch.common.util.PageCacheRecycler;
+import org.elasticsearch.common.xcontent.NamedXContentRegistry;
+import org.elasticsearch.http.nio.HttpReadWriteHandler;
+import org.elasticsearch.http.nio.NioHttpChannel;
+import org.elasticsearch.http.nio.NioHttpServerChannel;
+import org.elasticsearch.http.nio.NioHttpServerTransport;
+import org.elasticsearch.nio.BytesChannelContext;
+import org.elasticsearch.nio.ChannelFactory;
+import org.elasticsearch.nio.InboundChannelBuffer;
+import org.elasticsearch.nio.NioSelector;
+import org.elasticsearch.nio.NioSocketChannel;
+import org.elasticsearch.nio.ServerChannelContext;
+import org.elasticsearch.nio.SocketChannelContext;
+import org.elasticsearch.threadpool.ThreadPool;
+import org.elasticsearch.xpack.core.ssl.SSLConfiguration;
+import org.elasticsearch.xpack.core.ssl.SSLService;
+import org.elasticsearch.xpack.security.transport.SecurityHttpExceptionHandler;
+import org.elasticsearch.xpack.security.transport.filter.IPFilter;
+
+import javax.net.ssl.SSLEngine;
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.nio.ByteBuffer;
+import java.nio.channels.ServerSocketChannel;
+import java.nio.channels.SocketChannel;
+import java.util.function.Consumer;
+import java.util.function.Supplier;
+
+import static org.elasticsearch.xpack.core.XPackSettings.HTTP_SSL_ENABLED;
+
+public class SecurityNioHttpServerTransport extends NioHttpServerTransport {
+
+    private final SecurityHttpExceptionHandler securityExceptionHandler;
+    private final IPFilter ipFilter;
+    private final NioIPFilter nioIpFilter;
+    private final SSLService sslService;
+    private final SSLConfiguration sslConfiguration;
+    private final boolean sslEnabled;
+
+    public SecurityNioHttpServerTransport(Settings settings, NetworkService networkService, BigArrays bigArrays,
+                                          PageCacheRecycler pageCacheRecycler, ThreadPool threadPool,
+                                          NamedXContentRegistry xContentRegistry, Dispatcher dispatcher, IPFilter ipFilter,
+                                          SSLService sslService) {
+        super(settings, networkService, bigArrays, pageCacheRecycler, threadPool, xContentRegistry, dispatcher);
+        this.securityExceptionHandler = new SecurityHttpExceptionHandler(logger, lifecycle, (c, e) -> super.onException(c, e));
+        this.ipFilter = ipFilter;
+        this.nioIpFilter = new NioIPFilter(ipFilter, IPFilter.HTTP_PROFILE_NAME);
+        this.sslEnabled = HTTP_SSL_ENABLED.get(settings);
+        this.sslService = sslService;
+        if (sslEnabled) {
+            this.sslConfiguration = sslService.sslConfiguration(SSLService.getHttpTransportSSLSettings(settings), Settings.EMPTY);
+            if (sslService.isConfigurationValidForServerUsage(sslConfiguration) == false) {
+                throw new IllegalArgumentException("a key must be provided to run as a server. the key should be configured using the " +
+                    "[xpack.security.http.ssl.key] or [xpack.security.http.ssl.keystore.path] setting");
+            }
+        } else {
+            this.sslConfiguration = null;
+        }
+    }
+
+    @Override
+    protected void doStart() {
+        super.doStart();
+        ipFilter.setBoundHttpTransportAddress(this.boundAddress());
+    }
+
+    protected SecurityHttpChannelFactory channelFactory() {
+        return new SecurityHttpChannelFactory();
+    }
+
+    class SecurityHttpChannelFactory extends ChannelFactory<NioHttpServerChannel, NioHttpChannel> {
+
+        private SecurityHttpChannelFactory() {
+            super(new RawChannelFactory(tcpNoDelay, tcpKeepAlive, reuseAddress, tcpSendBufferSize, tcpReceiveBufferSize));
+        }
+
+        @Override
+        public NioHttpChannel createChannel(NioSelector selector, SocketChannel channel) throws IOException {
+            NioHttpChannel httpChannel = new NioHttpChannel(channel);
+            Supplier<InboundChannelBuffer.Page> pageSupplier = () -> {
+                Recycler.V<byte[]> bytes = pageCacheRecycler.bytePage(false);
+                return new InboundChannelBuffer.Page(ByteBuffer.wrap(bytes.v()), bytes::close);
+            };
+            HttpReadWriteHandler httpHandler = new HttpReadWriteHandler(httpChannel, SecurityNioHttpServerTransport.this,
+                handlingSettings, corsConfig);
+            InboundChannelBuffer buffer = new InboundChannelBuffer(pageSupplier);
+            Consumer<Exception> exceptionHandler = (e) -> securityExceptionHandler.accept(httpChannel, e);
+
+            SocketChannelContext context;
+            if (sslEnabled) {
+                SSLEngine sslEngine;
+                boolean hostnameVerificationEnabled = sslConfiguration.verificationMode().isHostnameVerificationEnabled();
+                if (hostnameVerificationEnabled) {
+                    InetSocketAddress address = (InetSocketAddress) channel.getRemoteAddress();
+                    // we create the socket based on the name given. don't reverse DNS
+                    sslEngine = sslService.createSSLEngine(sslConfiguration, address.getHostString(), address.getPort());
+                } else {
+                    sslEngine = sslService.createSSLEngine(sslConfiguration, null, -1);
+                }
+                SSLDriver sslDriver = new SSLDriver(sslEngine, false);
+                context = new SSLChannelContext(httpChannel, selector, exceptionHandler, sslDriver, httpHandler, buffer, nioIpFilter);
+            } else {
+                context = new BytesChannelContext(httpChannel, selector, exceptionHandler, httpHandler, buffer, nioIpFilter);
+            }
+            httpChannel.setContext(context);
+
+            return httpChannel;
+        }
+
+        @Override
+        public NioHttpServerChannel createServerChannel(NioSelector selector, ServerSocketChannel channel) {
+            NioHttpServerChannel httpServerChannel = new NioHttpServerChannel(channel);
+            Consumer<Exception> exceptionHandler = (e) -> onServerException(httpServerChannel, e);
+            Consumer<NioSocketChannel> acceptor = SecurityNioHttpServerTransport.this::acceptChannel;
+            ServerChannelContext context = new ServerChannelContext(httpServerChannel, this, selector, acceptor, exceptionHandler);
+            httpServerChannel.setContext(context);
+
+            return httpServerChannel;
+        }
+    }
+}
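The factory's per-channel decision is the heart of this class: the same HTTP pipeline is stacked on either an SSLChannelContext (driving an SSLEngine in server mode) or a plain BytesChannelContext, with the NioIPFilter predicate applied in both cases. The engine-creation branch condenses to the sketch below; seeding the engine with the advertised host string avoids a reverse-DNS lookup when hostname verification is on:

    import org.elasticsearch.xpack.core.ssl.SSLConfiguration;
    import org.elasticsearch.xpack.core.ssl.SSLService;

    import javax.net.ssl.SSLEngine;
    import java.io.IOException;
    import java.net.InetSocketAddress;
    import java.nio.channels.SocketChannel;

    final class EngineCreationSketch {
        static SSLEngine newServerEngine(SSLService sslService, SSLConfiguration config,
                                         SocketChannel channel) throws IOException {
            if (config.verificationMode().isHostnameVerificationEnabled()) {
                // use the advertised host string as given; no reverse DNS
                InetSocketAddress remote = (InetSocketAddress) channel.getRemoteAddress();
                return sslService.createSSLEngine(config, remote.getHostString(), remote.getPort());
            }
            return sslService.createSSLEngine(config, null, -1);
        }
    }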
diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SecurityNioTransport.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SecurityNioTransport.java
index fb94b669e833b..71e14696a11ff 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SecurityNioTransport.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SecurityNioTransport.java
@@ -44,7 +44,6 @@
 import java.util.Collections;
 import java.util.Map;
 import java.util.function.Consumer;
-import java.util.function.Predicate;
 import java.util.function.Supplier;
 
 import static org.elasticsearch.xpack.core.security.SecurityField.setting;
@@ -129,19 +128,11 @@ protected TcpChannelFactory channelFactory(ProfileSettings profileSettings, bool
         return new SecurityTcpChannelFactory(profileSettings, isClient);
     }
 
-    private boolean validateChannel(NioSocketChannel channel) {
-        if (authenticator != null) {
-            NioTcpChannel nioTcpChannel = (NioTcpChannel) channel;
-            return authenticator.accept(nioTcpChannel.getProfile(), nioTcpChannel.getRemoteAddress());
-        } else {
-            return true;
-        }
-    }
-
     private class SecurityTcpChannelFactory extends TcpChannelFactory {
 
         private final String profileName;
         private final boolean isClient;
+        private final NioIPFilter ipFilter;
 
         private SecurityTcpChannelFactory(ProfileSettings profileSettings, boolean isClient) {
             super(new RawChannelFactory(profileSettings.tcpNoDelay,
@@ -151,12 +142,12 @@ private SecurityTcpChannelFactory(ProfileSettings profileSettings, boolean isCli
                 Math.toIntExact(profileSettings.receiveBufferSize.getBytes())));
             this.profileName = profileSettings.profileName;
             this.isClient = isClient;
+            this.ipFilter = new NioIPFilter(authenticator, profileName);
         }
 
         @Override
         public NioTcpChannel createChannel(NioSelector selector, SocketChannel channel) throws IOException {
             NioTcpChannel nioChannel = new NioTcpChannel(profileName, channel);
-            SocketChannelContext context;
             Supplier<InboundChannelBuffer.Page> pageSupplier = () -> {
                 Recycler.V<byte[]> bytes = pageCacheRecycler.bytePage(false);
                 return new InboundChannelBuffer.Page(ByteBuffer.wrap(bytes.v()), bytes::close);
@@ -164,8 +155,8 @@ public NioTcpChannel createChannel(NioSelector selector, SocketChannel channel)
             TcpReadWriteHandler readWriteHandler = new TcpReadWriteHandler(nioChannel, SecurityNioTransport.this);
             InboundChannelBuffer buffer = new InboundChannelBuffer(pageSupplier);
             Consumer<Exception> exceptionHandler = (e) -> onException(nioChannel, e);
-            Predicate<NioSocketChannel> filter = SecurityNioTransport.this::validateChannel;
 
+            SocketChannelContext context;
             if (sslEnabled) {
                 SSLEngine sslEngine;
                 SSLConfiguration defaultConfig = profileConfiguration.get(TcpTransport.DEFAULT_PROFILE);
@@ -179,9 +170,9 @@ public NioTcpChannel createChannel(NioSelector selector, SocketChannel channel)
                     sslEngine = sslService.createSSLEngine(sslConfig, null, -1);
                 }
                 SSLDriver sslDriver = new SSLDriver(sslEngine, isClient);
-                context = new SSLChannelContext(nioChannel, selector, exceptionHandler, sslDriver, readWriteHandler, buffer, filter);
+                context = new SSLChannelContext(nioChannel, selector, exceptionHandler, sslDriver, readWriteHandler, buffer, ipFilter);
             } else {
-                context = new BytesChannelContext(nioChannel, selector, exceptionHandler, readWriteHandler, buffer, filter);
+                context = new BytesChannelContext(nioChannel, selector, exceptionHandler, readWriteHandler, buffer, ipFilter);
             }
             nioChannel.setContext(context);
 
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecurityIntegTestCase.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecurityIntegTestCase.java
index e6db3407496eb..9bb0e44eb664c 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecurityIntegTestCase.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecurityIntegTestCase.java
@@ -244,6 +244,7 @@ protected Settings nodeSettings(int nodeOrdinal) {
         builder.put(customSettings, false); // handle secure settings separately
         builder.put(LicenseService.SELF_GENERATED_LICENSE_TYPE.getKey(), "trial");
         builder.put(NetworkModule.TRANSPORT_TYPE_KEY, randomBoolean() ? SecurityField.NAME4 : SecurityField.NIO);
+        builder.put(NetworkModule.HTTP_TYPE_KEY, randomBoolean() ? SecurityField.NAME4 : SecurityField.NIO);
         Settings.Builder customBuilder = Settings.builder().put(customSettings);
         if (customBuilder.getSecureSettings() != null) {
             SecuritySettingsSource.addSecureSettings(builder, secureSettings ->
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java
index 2e0662264a248..df1456c3790b2 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java
@@ -126,6 +126,7 @@ public Settings nodeSettings(int nodeOrdinal) {
         Settings.Builder builder = Settings.builder().put(super.nodeSettings(nodeOrdinal))
                 .put(XPackSettings.SECURITY_ENABLED.getKey(), true)
                 .put(NetworkModule.TRANSPORT_TYPE_KEY, randomBoolean() ? SecurityField.NAME4 : SecurityField.NIO)
+                .put(NetworkModule.HTTP_TYPE_KEY, randomBoolean() ? SecurityField.NAME4 : SecurityField.NIO)
                 //TODO: for now isolate security tests from watcher & monitoring (randomize this later)
                 .put(XPackSettings.WATCHER_ENABLED.getKey(), false)
                 .put(XPackSettings.MONITORING_ENABLED.getKey(), false)
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/SecurityHttpSettingsTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/SecurityHttpSettingsTests.java
new file mode 100644
index 0000000000000..56c79a4c12791
--- /dev/null
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/SecurityHttpSettingsTests.java
@@ -0,0 +1,44 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.security.transport;
+
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.http.HttpTransportSettings;
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.xpack.core.XPackSettings;
+
+import static org.hamcrest.Matchers.is;
+
+public class SecurityHttpSettingsTests extends ESTestCase {
+
+    public void testDisablesCompressionByDefaultForSsl() {
+        Settings settings = Settings.builder()
+            .put(XPackSettings.HTTP_SSL_ENABLED.getKey(), true).build();
+
+        Settings.Builder pluginSettingsBuilder = Settings.builder();
+        SecurityHttpSettings.overrideSettings(pluginSettingsBuilder, settings);
+        assertThat(HttpTransportSettings.SETTING_HTTP_COMPRESSION.get(pluginSettingsBuilder.build()), is(false));
+    }
+
+    public void testLeavesCompressionOnIfNotSsl() {
+        Settings settings = Settings.builder()
+            .put(XPackSettings.HTTP_SSL_ENABLED.getKey(), false).build();
+        Settings.Builder pluginSettingsBuilder = Settings.builder();
+        SecurityHttpSettings.overrideSettings(pluginSettingsBuilder, settings);
+        assertThat(pluginSettingsBuilder.build().isEmpty(), is(true));
+    }
+
+    public void testDoesNotChangeExplicitlySetCompression() {
+        Settings settings = Settings.builder()
+            .put(XPackSettings.HTTP_SSL_ENABLED.getKey(), true)
+            .put(HttpTransportSettings.SETTING_HTTP_COMPRESSION.getKey(), true)
+            .build();
+
+        Settings.Builder pluginSettingsBuilder = Settings.builder();
+        SecurityHttpSettings.overrideSettings(pluginSettingsBuilder, settings);
+        assertThat(pluginSettingsBuilder.build().isEmpty(), is(true));
+    }
+}
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransportTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransportTests.java
index ec925f43abe79..ad64dea79a587 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransportTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransportTests.java
@@ -14,7 +14,6 @@
 import org.elasticsearch.common.util.BigArrays;
 import org.elasticsearch.env.Environment;
 import org.elasticsearch.env.TestEnvironment;
-import org.elasticsearch.http.HttpTransportSettings;
 import org.elasticsearch.http.NullDispatcher;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.threadpool.ThreadPool;
@@ -144,34 +143,6 @@ public void testCustomSSLConfiguration() throws Exception {
         assertThat(customEngine.getEnabledProtocols(), not(equalTo(defaultEngine.getEnabledProtocols())));
     }
 
-    public void testDisablesCompressionByDefaultForSsl() throws Exception {
-        Settings settings = Settings.builder()
-                .put(XPackSettings.HTTP_SSL_ENABLED.getKey(), true).build();
-
-        Settings.Builder pluginSettingsBuilder = Settings.builder();
-        SecurityNetty4HttpServerTransport.overrideSettings(pluginSettingsBuilder, settings);
-        assertThat(HttpTransportSettings.SETTING_HTTP_COMPRESSION.get(pluginSettingsBuilder.build()), is(false));
-    }
-
-    public void testLeavesCompressionOnIfNotSsl() throws Exception {
-        Settings settings = Settings.builder()
-                .put(XPackSettings.HTTP_SSL_ENABLED.getKey(), false).build();
-        Settings.Builder pluginSettingsBuilder = Settings.builder();
-        SecurityNetty4HttpServerTransport.overrideSettings(pluginSettingsBuilder, settings);
-        assertThat(pluginSettingsBuilder.build().isEmpty(), is(true));
-    }
-
-    public void testDoesNotChangeExplicitlySetCompression() throws Exception {
-        Settings settings = Settings.builder()
-                .put(XPackSettings.HTTP_SSL_ENABLED.getKey(), true)
-                .put(HttpTransportSettings.SETTING_HTTP_COMPRESSION.getKey(), true)
-                .build();
-
-        Settings.Builder pluginSettingsBuilder = Settings.builder();
-        SecurityNetty4HttpServerTransport.overrideSettings(pluginSettingsBuilder, settings);
-        assertThat(pluginSettingsBuilder.build().isEmpty(), is(true));
-    }
-
     public void testThatExceptionIsThrownWhenConfiguredWithoutSslKey() throws Exception {
         MockSecureSettings secureSettings = new MockSecureSettings();
         secureSettings.setString("xpack.ssl.truststore.secure_password", "testnode");
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/NioIPFilterTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/NioIPFilterTests.java
new file mode 100644
index 0000000000000..1832669fce144
--- /dev/null
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/NioIPFilterTests.java
@@ -0,0 +1,91 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.security.transport.nio;
+
+import org.elasticsearch.common.component.Lifecycle;
+import org.elasticsearch.common.network.InetAddresses;
+import org.elasticsearch.common.settings.ClusterSettings;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.transport.BoundTransportAddress;
+import org.elasticsearch.common.transport.TransportAddress;
+import org.elasticsearch.http.HttpServerTransport;
+import org.elasticsearch.license.XPackLicenseState;
+import org.elasticsearch.nio.NioSocketChannel;
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.transport.Transport;
+import org.elasticsearch.xpack.security.audit.AuditTrailService;
+import org.elasticsearch.xpack.security.transport.filter.IPFilter;
+import org.junit.Before;
+
+import java.net.InetAddress;
+import java.net.InetSocketAddress;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashSet;
+
+import static org.hamcrest.Matchers.is;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+public class NioIPFilterTests extends ESTestCase {
+
+    private NioIPFilter nioIPFilter;
+
+    @Before
+    public void init() throws Exception {
+        Settings settings = Settings.builder()
+            .put("xpack.security.transport.filter.allow", "127.0.0.1")
+            .put("xpack.security.transport.filter.deny", "10.0.0.0/8")
+            .build();
+
+        boolean isHttpEnabled = randomBoolean();
+
+        Transport transport = mock(Transport.class);
+        TransportAddress address = new TransportAddress(InetAddress.getLoopbackAddress(), 9300);
+        when(transport.boundAddress()).thenReturn(new BoundTransportAddress(new TransportAddress[] { address }, address));
+        when(transport.lifecycleState()).thenReturn(Lifecycle.State.STARTED);
+        ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, new HashSet<>(Arrays.asList(
+            IPFilter.HTTP_FILTER_ALLOW_SETTING,
+            IPFilter.HTTP_FILTER_DENY_SETTING,
+            IPFilter.IP_FILTER_ENABLED_HTTP_SETTING,
+            IPFilter.IP_FILTER_ENABLED_SETTING,
+            IPFilter.TRANSPORT_FILTER_ALLOW_SETTING,
+            IPFilter.TRANSPORT_FILTER_DENY_SETTING,
+            IPFilter.PROFILE_FILTER_ALLOW_SETTING,
+            IPFilter.PROFILE_FILTER_DENY_SETTING)));
+        XPackLicenseState licenseState = mock(XPackLicenseState.class);
+        when(licenseState.isIpFilteringAllowed()).thenReturn(true);
+        when(licenseState.isSecurityEnabled()).thenReturn(true);
+        AuditTrailService auditTrailService = new AuditTrailService(settings, Collections.emptyList(), licenseState);
+        IPFilter ipFilter = new IPFilter(settings, auditTrailService, clusterSettings, licenseState);
+        ipFilter.setBoundTransportAddress(transport.boundAddress(), transport.profileBoundAddresses());
+        if (isHttpEnabled) {
+            HttpServerTransport httpTransport = mock(HttpServerTransport.class);
+            TransportAddress httpAddress = new TransportAddress(InetAddress.getLoopbackAddress(), 9200);
+            when(httpTransport.boundAddress()).thenReturn(new BoundTransportAddress(new TransportAddress[] { httpAddress }, httpAddress));
+            when(httpTransport.lifecycleState()).thenReturn(Lifecycle.State.STARTED);
+            ipFilter.setBoundHttpTransportAddress(httpTransport.boundAddress());
+            nioIPFilter = new NioIPFilter(ipFilter, IPFilter.HTTP_PROFILE_NAME);
+        } else {
+            nioIPFilter = new NioIPFilter(ipFilter, "default");
+        }
+    }
+
+    public void testThatFilteringWorksByIp() throws Exception {
+        InetSocketAddress localhostAddr = new InetSocketAddress(InetAddresses.forString("127.0.0.1"), 12345);
+        NioSocketChannel channel1 = mock(NioSocketChannel.class);
+        when(channel1.getRemoteAddress()).thenReturn(localhostAddr);
+        assertThat(nioIPFilter.test(channel1), is(true));
+
+        InetSocketAddress remoteAddr = new InetSocketAddress(InetAddresses.forString("10.0.0.8"), 12345);
+        NioSocketChannel channel2 = mock(NioSocketChannel.class);
+        when(channel2.getRemoteAddress()).thenReturn(remoteAddr);
+        assertThat(nioIPFilter.test(channel2), is(false));
+    }
+}
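NioIPFilter is exercised above purely as a channel predicate; a minimal
sketch of the contract the test relies on (only test() appears in the code
above, the surrounding wiring is hypothetical):

    NioIPFilter filter = new NioIPFilter(ipFilter, "default");
    if (filter.test(channel) == false) {
        // remote address falls in the denied 10.0.0.0/8 block: reject the connection
    }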
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SecurityNioHttpServerTransportTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SecurityNioHttpServerTransportTests.java
new file mode 100644
index 0000000000000..b5d84d459160f
--- /dev/null
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SecurityNioHttpServerTransportTests.java
@@ -0,0 +1,207 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.security.transport.nio;
+
+import org.elasticsearch.common.network.NetworkService;
+import org.elasticsearch.common.settings.MockSecureSettings;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.util.BigArrays;
+import org.elasticsearch.common.util.PageCacheRecycler;
+import org.elasticsearch.env.Environment;
+import org.elasticsearch.env.TestEnvironment;
+import org.elasticsearch.http.NullDispatcher;
+import org.elasticsearch.http.nio.NioHttpChannel;
+import org.elasticsearch.nio.NioSelector;
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.threadpool.ThreadPool;
+import org.elasticsearch.xpack.core.XPackSettings;
+import org.elasticsearch.xpack.core.ssl.SSLClientAuth;
+import org.elasticsearch.xpack.core.ssl.SSLService;
+import org.elasticsearch.xpack.security.transport.SSLEngineUtils;
+import org.elasticsearch.xpack.security.transport.filter.IPFilter;
+import org.junit.Before;
+
+import javax.net.ssl.SSLEngine;
+import java.io.IOException;
+import java.net.InetAddress;
+import java.net.InetSocketAddress;
+import java.nio.channels.SocketChannel;
+import java.nio.file.Path;
+import java.util.Collections;
+import java.util.Locale;
+
+import static org.hamcrest.Matchers.arrayContaining;
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.not;
+import static org.mockito.Mockito.mock;
+import static org.mockito.Mockito.when;
+
+public class SecurityNioHttpServerTransportTests extends ESTestCase {
+
+    private SSLService sslService;
+    private Environment env;
+    private InetSocketAddress address = new InetSocketAddress(InetAddress.getLoopbackAddress(), 0);
+
+    @Before
+    public void createSSLService() {
+        Path testNodeStore = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks");
+        MockSecureSettings secureSettings = new MockSecureSettings();
+        secureSettings.setString("xpack.ssl.keystore.secure_password", "testnode");
+        Settings settings = Settings.builder()
+            .put("xpack.ssl.keystore.path", testNodeStore)
+            .put("path.home", createTempDir())
+            .setSecureSettings(secureSettings)
+            .build();
+        env = TestEnvironment.newEnvironment(settings);
+        sslService = new SSLService(settings, env);
+    }
+
+    public void testDefaultClientAuth() throws IOException {
+        Settings settings = Settings.builder()
+            .put(env.settings())
+            .put(XPackSettings.HTTP_SSL_ENABLED.getKey(), true).build();
+        sslService = new SSLService(settings, env);
+        SecurityNioHttpServerTransport transport = new SecurityNioHttpServerTransport(settings,
+            new NetworkService(Collections.emptyList()), mock(BigArrays.class), mock(PageCacheRecycler.class), mock(ThreadPool.class),
+            xContentRegistry(), new NullDispatcher(), mock(IPFilter.class), sslService);
+        SecurityNioHttpServerTransport.SecurityHttpChannelFactory factory = transport.channelFactory();
+        SocketChannel socketChannel = mock(SocketChannel.class);
+        when(socketChannel.getRemoteAddress()).thenReturn(address);
+        NioHttpChannel channel = factory.createChannel(mock(NioSelector.class), socketChannel);
+        SSLEngine engine = SSLEngineUtils.getSSLEngine(channel);
+
+        assertThat(engine.getNeedClientAuth(), is(false));
+        assertThat(engine.getWantClientAuth(), is(false));
+    }
+
+    public void testOptionalClientAuth() throws IOException {
+        String value = randomFrom(SSLClientAuth.OPTIONAL.name(), SSLClientAuth.OPTIONAL.name().toLowerCase(Locale.ROOT));
+        Settings settings = Settings.builder()
+            .put(env.settings())
+            .put(XPackSettings.HTTP_SSL_ENABLED.getKey(), true)
+            .put("xpack.security.http.ssl.client_authentication", value).build();
+        sslService = new SSLService(settings, env);
+        SecurityNioHttpServerTransport transport = new SecurityNioHttpServerTransport(settings,
+            new NetworkService(Collections.emptyList()), mock(BigArrays.class), mock(PageCacheRecycler.class), mock(ThreadPool.class),
+            xContentRegistry(), new NullDispatcher(), mock(IPFilter.class), sslService);
+
+        SecurityNioHttpServerTransport.SecurityHttpChannelFactory factory = transport.channelFactory();
+        SocketChannel socketChannel = mock(SocketChannel.class);
+        when(socketChannel.getRemoteAddress()).thenReturn(address);
+        NioHttpChannel channel = factory.createChannel(mock(NioSelector.class), socketChannel);
+        SSLEngine engine = SSLEngineUtils.getSSLEngine(channel);
+        assertThat(engine.getNeedClientAuth(), is(false));
+        assertThat(engine.getWantClientAuth(), is(true));
+    }
+
+    public void testRequiredClientAuth() throws IOException {
+        String value = randomFrom(SSLClientAuth.REQUIRED.name(), SSLClientAuth.REQUIRED.name().toLowerCase(Locale.ROOT));
+        Settings settings = Settings.builder()
+            .put(env.settings())
+            .put(XPackSettings.HTTP_SSL_ENABLED.getKey(), true)
+            .put("xpack.security.http.ssl.client_authentication", value).build();
+        sslService = new SSLService(settings, env);
+        SecurityNioHttpServerTransport transport = new SecurityNioHttpServerTransport(settings,
+            new NetworkService(Collections.emptyList()), mock(BigArrays.class), mock(PageCacheRecycler.class), mock(ThreadPool.class),
+            xContentRegistry(), new NullDispatcher(), mock(IPFilter.class), sslService);
+
+        SecurityNioHttpServerTransport.SecurityHttpChannelFactory factory = transport.channelFactory();
+        SocketChannel socketChannel = mock(SocketChannel.class);
+        when(socketChannel.getRemoteAddress()).thenReturn(address);
+        NioHttpChannel channel = factory.createChannel(mock(NioSelector.class), socketChannel);
+        SSLEngine engine = SSLEngineUtils.getSSLEngine(channel);
+        assertThat(engine.getNeedClientAuth(), is(true));
+        assertThat(engine.getWantClientAuth(), is(false));
+    }
+
+    public void testNoClientAuth() throws IOException {
+        String value = randomFrom(SSLClientAuth.NONE.name(), SSLClientAuth.NONE.name().toLowerCase(Locale.ROOT));
+        Settings settings = Settings.builder()
+            .put(env.settings())
+            .put(XPackSettings.HTTP_SSL_ENABLED.getKey(), true)
+            .put("xpack.security.http.ssl.client_authentication", value).build();
+        sslService = new SSLService(settings, env);
+        SecurityNioHttpServerTransport transport = new SecurityNioHttpServerTransport(settings,
+            new NetworkService(Collections.emptyList()), mock(BigArrays.class), mock(PageCacheRecycler.class), mock(ThreadPool.class),
+            xContentRegistry(), new NullDispatcher(), mock(IPFilter.class), sslService);
+
+        SecurityNioHttpServerTransport.SecurityHttpChannelFactory factory = transport.channelFactory();
+        SocketChannel socketChannel = mock(SocketChannel.class);
+        when(socketChannel.getRemoteAddress()).thenReturn(address);
+        NioHttpChannel channel = factory.createChannel(mock(NioSelector.class), socketChannel);
+        SSLEngine engine = SSLEngineUtils.getSSLEngine(channel);
+        assertThat(engine.getNeedClientAuth(), is(false));
+        assertThat(engine.getWantClientAuth(), is(false));
+    }
+
+    public void testCustomSSLConfiguration() throws IOException {
+        Settings settings = Settings.builder()
+            .put(env.settings())
+            .put(XPackSettings.HTTP_SSL_ENABLED.getKey(), true).build();
+        sslService = new SSLService(settings, env);
+        SecurityNioHttpServerTransport transport = new SecurityNioHttpServerTransport(settings,
+            new NetworkService(Collections.emptyList()), mock(BigArrays.class), mock(PageCacheRecycler.class), mock(ThreadPool.class),
+            xContentRegistry(), new NullDispatcher(), mock(IPFilter.class), sslService);
+        SecurityNioHttpServerTransport.SecurityHttpChannelFactory factory = transport.channelFactory();
+        SocketChannel socketChannel = mock(SocketChannel.class);
+        when(socketChannel.getRemoteAddress()).thenReturn(address);
+        NioHttpChannel channel = factory.createChannel(mock(NioSelector.class), socketChannel);
+        SSLEngine defaultEngine = SSLEngineUtils.getSSLEngine(channel);
+
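+        // rebuild the transport with a restricted protocol list and compare the resulting engines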
+        settings = Settings.builder()
+            .put(env.settings())
+            .put(XPackSettings.HTTP_SSL_ENABLED.getKey(), true)
+            .put("xpack.security.http.ssl.supported_protocols", "TLSv1.2")
+            .build();
+        sslService = new SSLService(settings, TestEnvironment.newEnvironment(settings));
+        transport = new SecurityNioHttpServerTransport(settings,
+            new NetworkService(Collections.emptyList()), mock(BigArrays.class), mock(PageCacheRecycler.class), mock(ThreadPool.class),
+            xContentRegistry(), new NullDispatcher(), mock(IPFilter.class), sslService);
+        factory = transport.channelFactory();
+        channel = factory.createChannel(mock(NioSelector.class), socketChannel);
+        SSLEngine customEngine = SSLEngineUtils.getSSLEngine(channel);
+        assertThat(customEngine.getEnabledProtocols(), arrayContaining("TLSv1.2"));
+        assertThat(customEngine.getEnabledProtocols(), not(equalTo(defaultEngine.getEnabledProtocols())));
+    }
+
+    public void testThatExceptionIsThrownWhenConfiguredWithoutSslKey() {
+        MockSecureSettings secureSettings = new MockSecureSettings();
+        secureSettings.setString("xpack.ssl.truststore.secure_password", "testnode");
+        Settings settings = Settings.builder()
+            .put("xpack.ssl.truststore.path",
+                getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks"))
+            .setSecureSettings(secureSettings)
+            .put(XPackSettings.HTTP_SSL_ENABLED.getKey(), true)
+            .put("path.home", createTempDir())
+            .build();
+        env = TestEnvironment.newEnvironment(settings);
+        sslService = new SSLService(settings, env);
+
+        IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
+            () -> new SecurityNioHttpServerTransport(settings,
+                new NetworkService(Collections.emptyList()), mock(BigArrays.class), mock(PageCacheRecycler.class), mock(ThreadPool.class),
+                xContentRegistry(), new NullDispatcher(), mock(IPFilter.class), sslService));
+        assertThat(e.getMessage(), containsString("key must be provided"));
+    }
+
+    public void testNoExceptionWhenConfiguredWithoutSslKeySSLDisabled() {
+        MockSecureSettings secureSettings = new MockSecureSettings();
+        secureSettings.setString("xpack.ssl.truststore.secure_password", "testnode");
+        Settings settings = Settings.builder()
+            .put("xpack.ssl.truststore.path",
+                getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks"))
+            .setSecureSettings(secureSettings)
+            .put("path.home", createTempDir())
+            .build();
+        env = TestEnvironment.newEnvironment(settings);
+        sslService = new SSLService(settings, env);
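+        // HTTP SSL is not enabled here, so no key material is required and construction must not throw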
+        SecurityNioHttpServerTransport transport = new SecurityNioHttpServerTransport(settings,
+            new NetworkService(Collections.emptyList()), mock(BigArrays.class), mock(PageCacheRecycler.class), mock(ThreadPool.class),
+            xContentRegistry(), new NullDispatcher(), mock(IPFilter.class), sslService);
+    }
+}

From bd20e99807ff3ca06048e1bd9898848fb06b8834 Mon Sep 17 00:00:00 2001
From: Tim Brooks 
Date: Fri, 13 Jul 2018 23:37:15 -0600
Subject: [PATCH 035/107] Fix compile issues introduced by merge (#32058)

The build was broken by the merge of #32018: a method that was public
went private before the PR was merged. That did not cause a merge
conflict (so the PR merged cleanly), but it did cause the build to
fail.
---
 .../security/transport/nio/SecurityNioHttpServerTransport.java  | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SecurityNioHttpServerTransport.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SecurityNioHttpServerTransport.java
index 006c78b4ae0de..50a78d93c71b8 100644
--- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SecurityNioHttpServerTransport.java
+++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/nio/SecurityNioHttpServerTransport.java
@@ -59,7 +59,7 @@ public SecurityNioHttpServerTransport(Settings settings, NetworkService networkS
         this.sslEnabled = HTTP_SSL_ENABLED.get(settings);
         this.sslService = sslService;
         if (sslEnabled) {
-            this.sslConfiguration = sslService.sslConfiguration(SSLService.getHttpTransportSSLSettings(settings), Settings.EMPTY);
+            this.sslConfiguration = sslService.getHttpTransportSSLConfiguration();
             if (sslService.isConfigurationValidForServerUsage(sslConfiguration) == false) {
                 throw new IllegalArgumentException("a key must be provided to run as a server. the key should be configured using the " +
                     "[xpack.security.http.ssl.key] or [xpack.security.http.ssl.keystore.path] setting");

From 71cd43b703cb3c0f852d7c32a1acd176e77520c3 Mon Sep 17 00:00:00 2001
From: Armin Braun 
Date: Sat, 14 Jul 2018 09:03:35 +0200
Subject: [PATCH 036/107] SCRIPTING: Remove unused
 MultiSearchTemplateRequestBuilder (#32049)

* This class has been unused ever since 46e8d97813addd8c57fa54d2c700d26a171f2dbb
---
 .../MultiSearchTemplateRequestBuilder.java    | 65 -------------------
 1 file changed, 65 deletions(-)
 delete mode 100644 modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateRequestBuilder.java

diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateRequestBuilder.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateRequestBuilder.java
deleted file mode 100644
index c4dac0dd88eb8..0000000000000
--- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateRequestBuilder.java
+++ /dev/null
@@ -1,65 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.script.mustache;
-
-import org.elasticsearch.action.ActionRequestBuilder;
-import org.elasticsearch.action.support.IndicesOptions;
-import org.elasticsearch.client.ElasticsearchClient;
-
-public class MultiSearchTemplateRequestBuilder
-        extends ActionRequestBuilder {
-
-    protected MultiSearchTemplateRequestBuilder(ElasticsearchClient client, MultiSearchTemplateAction action) {
-        super(client, action, new MultiSearchTemplateRequest());
-    }
-
-    public MultiSearchTemplateRequestBuilder add(SearchTemplateRequest request) {
-        if (request.getRequest().indicesOptions() == IndicesOptions.strictExpandOpenAndForbidClosed()
-                && request().indicesOptions() != IndicesOptions.strictExpandOpenAndForbidClosed()) {
-            request.getRequest().indicesOptions(request().indicesOptions());
-        }
-
-        super.request.add(request);
-        return this;
-    }
-
-    public MultiSearchTemplateRequestBuilder add(SearchTemplateRequestBuilder request) {
-        if (request.request().getRequest().indicesOptions() == IndicesOptions.strictExpandOpenAndForbidClosed()
-                && request().indicesOptions() != IndicesOptions.strictExpandOpenAndForbidClosed()) {
-            request.request().getRequest().indicesOptions(request().indicesOptions());
-        }
-
-        super.request.add(request);
-        return this;
-    }
-
-    public MultiSearchTemplateRequestBuilder setIndicesOptions(IndicesOptions indicesOptions) {
-        request().indicesOptions(indicesOptions);
-        return this;
-    }
-
-    /**
-     * Sets how many search requests specified in this multi search requests are allowed to be ran concurrently.
-     */
-    public MultiSearchTemplateRequestBuilder setMaxConcurrentSearchRequests(int maxConcurrentSearchRequests) {
-        request().maxConcurrentSearchRequests(maxConcurrentSearchRequests);
-        return this;
-    }
-}

From e045ad68e6ad3c13cf389c9ad643fde1c984b1c2 Mon Sep 17 00:00:00 2001
From: Armin Braun 
Date: Sat, 14 Jul 2018 13:37:59 +0200
Subject: [PATCH 037/107] Cleanup Duplication in `PainlessScriptEngine`
 (#31991)

* Cleanup Duplication in `PainlessScriptEngine`
* Extract duplicate building of compiler settings to method
* Remove dead method params + dead constant in `ScriptProcessor`
---
 .../ingest/common/ScriptProcessor.java        |  3 -
 .../painless/PainlessScriptEngine.java        | 85 ++++++-------------
 2 files changed, 26 insertions(+), 62 deletions(-)

diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ScriptProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ScriptProcessor.java
index 74c68fd5c2638..169b2ab646a7d 100644
--- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ScriptProcessor.java
+++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/ScriptProcessor.java
@@ -19,8 +19,6 @@
 
 package org.elasticsearch.ingest.common;
 
-import com.fasterxml.jackson.core.JsonFactory;
-
 import org.elasticsearch.common.bytes.BytesReference;
 import org.elasticsearch.common.xcontent.LoggingDeprecationHandler;
 import org.elasticsearch.common.xcontent.NamedXContentRegistry;
@@ -48,7 +46,6 @@
 public final class ScriptProcessor extends AbstractProcessor {
 
     public static final String TYPE = "script";
-    private static final JsonFactory JSON_FACTORY = new JsonFactory();
 
     private final Script script;
     private final ScriptService scriptService;
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScriptEngine.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScriptEngine.java
index ae1944c9bd3a9..4560fd85a6589 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScriptEngine.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScriptEngine.java
@@ -366,44 +366,7 @@ private void writeNeedsMethods(Class clazz, ClassWriter writer, MainMethodRes
     }
 
     Object compile(Compiler compiler, String scriptName, String source, Map params, Object... args) {
-        final CompilerSettings compilerSettings;
-
-        if (params.isEmpty()) {
-            // Use the default settings.
-            compilerSettings = defaultCompilerSettings;
-        } else {
-            // Use custom settings specified by params.
-            compilerSettings = new CompilerSettings();
-
-            // Except regexes enabled - this is a node level setting and can't be changed in the request.
-            compilerSettings.setRegexesEnabled(defaultCompilerSettings.areRegexesEnabled());
-
-            Map copy = new HashMap<>(params);
-
-            String value = copy.remove(CompilerSettings.MAX_LOOP_COUNTER);
-            if (value != null) {
-                compilerSettings.setMaxLoopCounter(Integer.parseInt(value));
-            }
-
-            value = copy.remove(CompilerSettings.PICKY);
-            if (value != null) {
-                compilerSettings.setPicky(Boolean.parseBoolean(value));
-            }
-
-            value = copy.remove(CompilerSettings.INITIAL_CALL_SITE_DEPTH);
-            if (value != null) {
-                compilerSettings.setInitialCallSiteDepth(Integer.parseInt(value));
-            }
-
-            value = copy.remove(CompilerSettings.REGEX_ENABLED.getKey());
-            if (value != null) {
-                throw new IllegalArgumentException("[painless.regex.enabled] can only be set on node startup.");
-            }
-
-            if (!copy.isEmpty()) {
-                throw new IllegalArgumentException("Unrecognized compile-time parameter(s): " + copy);
-            }
-        }
+        final CompilerSettings compilerSettings = buildCompilerSettings(params);
 
         // Check we ourselves are not being called by unprivileged code.
         SpecialPermission.check();
@@ -434,14 +397,33 @@ public Object run() {
             }, COMPILATION_CONTEXT);
         // Note that it is safe to catch any of the following errors since Painless is stateless.
         } catch (OutOfMemoryError | StackOverflowError | VerifyError | Exception e) {
-            throw convertToScriptException(scriptName == null ? source : scriptName, source, e);
+            throw convertToScriptException(source, e);
         }
     }
 
     void compile(Compiler compiler, Loader loader, MainMethodReserved reserved,
                  String scriptName, String source, Map params) {
-        final CompilerSettings compilerSettings;
+        final CompilerSettings compilerSettings = buildCompilerSettings(params);
+
+        try {
+            // Drop all permissions to actually compile the code itself.
+            AccessController.doPrivileged(new PrivilegedAction() {
+                @Override
+                public Void run() {
+                    String name = scriptName == null ? source : scriptName;
+                    compiler.compile(loader, reserved, name, source, compilerSettings);
+
+                    return null;
+                }
+            }, COMPILATION_CONTEXT);
+            // Note that it is safe to catch any of the following errors since Painless is stateless.
+        } catch (OutOfMemoryError | StackOverflowError | VerifyError | Exception e) {
+            throw convertToScriptException(source, e);
+        }
+    }
 
+    private CompilerSettings buildCompilerSettings(Map params) {
+        CompilerSettings compilerSettings;
         if (params.isEmpty()) {
             // Use the default settings.
             compilerSettings = defaultCompilerSettings;
@@ -478,25 +460,10 @@ void compile(Compiler compiler, Loader loader, MainMethodReserved reserved,
                 throw new IllegalArgumentException("Unrecognized compile-time parameter(s): " + copy);
             }
         }
-
-        try {
-            // Drop all permissions to actually compile the code itself.
-            AccessController.doPrivileged(new PrivilegedAction() {
-                @Override
-                public Void run() {
-                    String name = scriptName == null ? source : scriptName;
-                    compiler.compile(loader, reserved, name, source, compilerSettings);
-
-                    return null;
-                }
-            }, COMPILATION_CONTEXT);
-            // Note that it is safe to catch any of the following errors since Painless is stateless.
-        } catch (OutOfMemoryError | StackOverflowError | VerifyError | Exception e) {
-            throw convertToScriptException(scriptName == null ? source : scriptName, source, e);
-        }
+        return compilerSettings;
     }
 
-    private ScriptException convertToScriptException(String scriptName, String scriptSource, Throwable t) {
+    private ScriptException convertToScriptException(String scriptSource, Throwable t) {
         // create a script stack: this is just the script portion
         List scriptStack = new ArrayList<>();
         for (StackTraceElement element : t.getStackTrace()) {
@@ -507,7 +474,7 @@ private ScriptException convertToScriptException(String scriptName, String scrip
                     scriptStack.add("<<< unknown portion of script >>>");
                 } else {
                     offset--; // offset is 1 based, line numbers must be!
-                    int startOffset = getPreviousStatement(scriptSource, offset);
+                    int startOffset = getPreviousStatement(offset);
                     int endOffset = getNextStatement(scriptSource, offset);
                     StringBuilder snippet = new StringBuilder();
                     if (startOffset > 0) {
@@ -535,7 +502,7 @@ private ScriptException convertToScriptException(String scriptName, String scrip
     }
 
     // very simple heuristic: +/- 25 chars. can be improved later.
-    private int getPreviousStatement(String scriptSource, int offset) {
+    private int getPreviousStatement(int offset) {
         return Math.max(0, offset - 25);
     }
 

From dabbba13a9b4918ae6ee13df220153b8a87a9b88 Mon Sep 17 00:00:00 2001
From: Tim Vernum 
Date: Mon, 16 Jul 2018 15:43:41 +1000
Subject: [PATCH 038/107] Fix broken OpenLDAP Vagrant QA test

This was broken by c662565, but the problem went undetected because CI
builds typically don't run the Vagrant tests.
---
 .../xpack/security/authc/ldap/LdapTestUtils.java   |  2 +-
 .../java/org/elasticsearch/test/OpenLdapTests.java | 14 ++++++++++----
 2 files changed, 11 insertions(+), 5 deletions(-)

diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapTestUtils.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapTestUtils.java
index 9a9368c25e127..8bdfd02d2fcc5 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapTestUtils.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapTestUtils.java
@@ -62,7 +62,7 @@ public static LDAPConnection openConnection(String url, String bindDN, String bi
 
         final SSLConfiguration sslConfiguration;
         if (useGlobalSSL) {
-            sslConfiguration = sslService.getSSLConfiguration("_global");
+            sslConfiguration = sslService.getSSLConfiguration("xpack.ssl");
         } else {
             sslConfiguration = sslService.getSSLConfiguration("xpack.security.authc.realms.foo.ssl");
         }
diff --git a/x-pack/qa/openldap-tests/src/test/java/org/elasticsearch/test/OpenLdapTests.java b/x-pack/qa/openldap-tests/src/test/java/org/elasticsearch/test/OpenLdapTests.java
index c6d541b8064fd..f96823df019a5 100644
--- a/x-pack/qa/openldap-tests/src/test/java/org/elasticsearch/test/OpenLdapTests.java
+++ b/x-pack/qa/openldap-tests/src/test/java/org/elasticsearch/test/OpenLdapTests.java
@@ -104,7 +104,13 @@ public void initializeSslSocketFactory() throws Exception {
             builder.put("xpack.security.authc.realms." + REALM_NAME + ".ssl.truststore.path", truststore);
             mockSecureSettings.setString("xpack.security.authc.realms." + REALM_NAME + ".ssl.truststore.secure_password", "changeit");
             builder.put("xpack.security.authc.realms." + REALM_NAME + ".ssl.verification_mode", VerificationMode.CERTIFICATE);
+
+            // If not using global ssl, need to set the truststore for the "full verification" realm
+            builder.put("xpack.security.authc.realms.vmode_full.ssl.truststore.path", truststore);
+            mockSecureSettings.setString("xpack.security.authc.realms.vmode_full.ssl.truststore.secure_password", "changeit");
         }
+        builder.put("xpack.security.authc.realms.vmode_full.ssl.verification_mode", VerificationMode.FULL);
+
         globalSettings = builder.setSecureSettings(mockSecureSettings).build();
         Environment environment = TestEnvironment.newEnvironment(globalSettings);
         sslService = new SSLService(globalSettings, environment);
@@ -188,10 +194,10 @@ public void testStandardLdapConnectionHostnameVerificationFailure() throws Excep
         Settings settings = Settings.builder()
             // The certificate used in the vagrant box is valid for "localhost", but not for "127.0.0.1"
             .put(buildLdapSettings(OPEN_LDAP_IP_URL, userTemplate, groupSearchBase, LdapSearchScope.ONE_LEVEL))
-            .put("ssl.verification_mode", VerificationMode.FULL)
             .build();
 
-        RealmConfig config = new RealmConfig("oldap-test", settings, globalSettings, TestEnvironment.newEnvironment(globalSettings),
+        // Pick up the "full" verification mode config
+        RealmConfig config = new RealmConfig("vmode_full", settings, globalSettings, TestEnvironment.newEnvironment(globalSettings),
             new ThreadContext(Settings.EMPTY));
         LdapSessionFactory sessionFactory = new LdapSessionFactory(config, sslService, threadPool);
 
@@ -211,10 +217,10 @@ public void testStandardLdapConnectionHostnameVerificationSuccess() throws Excep
         Settings settings = Settings.builder()
             // The certificate used in the vagrant box is valid for "localhost" (but not for "127.0.0.1")
             .put(buildLdapSettings(OPEN_LDAP_DNS_URL, userTemplate, groupSearchBase, LdapSearchScope.ONE_LEVEL))
-            .put("ssl.verification_mode", VerificationMode.FULL)
             .build();
 
-        RealmConfig config = new RealmConfig("oldap-test", settings, globalSettings, TestEnvironment.newEnvironment(globalSettings),
+        // Pick up the "full" verification mode config
+        RealmConfig config = new RealmConfig("vmode_full", settings, globalSettings, TestEnvironment.newEnvironment(globalSettings),
             new ThreadContext(Settings.EMPTY));
         LdapSessionFactory sessionFactory = new LdapSessionFactory(config, sslService, threadPool);
 

From 82e8fceceec1a5a6be712edbd2311ac8afbac232 Mon Sep 17 00:00:00 2001
From: Daniel Mitterdorfer 
Date: Mon, 16 Jul 2018 10:40:36 +0200
Subject: [PATCH 039/107] Turn off real-mem breaker in single node tests

With this commit we disable the real-memory circuit breaker in tests
that inherit from `ESSingleNodeTestCase`. As this breaker is based on
real memory usage, over which we have no (full) control in tests, and
the purpose of these tests is not to exercise the circuit breaker, we
use the deterministic circuit breaker implementation that only accounts
for explicitly reserved memory.

Closes #32047
Relates #32071
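Tests that do need the real-memory breaker can still opt back in, since
nodeSettings() is applied after the defaults; a minimal sketch (the test
class name is hypothetical):

    public class RealMemoryBreakerIT extends ESSingleNodeTestCase {
        @Override
        protected Settings nodeSettings() {
            // opt back in to the real-memory breaker for this test only
            return Settings.builder()
                .put(HierarchyCircuitBreakerService.USE_REAL_MEMORY_USAGE_SETTING.getKey(), true)
                .build();
        }
    }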
---
 .../java/org/elasticsearch/test/ESSingleNodeTestCase.java     | 4 ++++
 1 file changed, 4 insertions(+)

diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java
index a1b8f44a923a3..9633f56dea941 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/ESSingleNodeTestCase.java
@@ -42,6 +42,7 @@
 import org.elasticsearch.index.Index;
 import org.elasticsearch.index.IndexService;
 import org.elasticsearch.indices.IndicesService;
+import org.elasticsearch.indices.breaker.HierarchyCircuitBreakerService;
 import org.elasticsearch.node.MockNode;
 import org.elasticsearch.node.Node;
 import org.elasticsearch.node.NodeValidationException;
@@ -184,6 +185,9 @@ private Node newNode() {
             .put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_LOW_DISK_WATERMARK_SETTING.getKey(), "1b")
             .put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_HIGH_DISK_WATERMARK_SETTING.getKey(), "1b")
             .put(DiskThresholdSettings.CLUSTER_ROUTING_ALLOCATION_DISK_FLOOD_STAGE_WATERMARK_SETTING.getKey(), "1b")
+            // turning on the real memory circuit breaker leads to spurious test failures. As we have no full control over heap usage, we
+            // turn it off for these tests.
+            .put(HierarchyCircuitBreakerService.USE_REAL_MEMORY_USAGE_SETTING.getKey(), false)
             .put(nodeSettings()) // allow test cases to provide their own settings or override these
             .build();
         Collection> plugins = getPlugins();

From 9d48815d973af37a2305ee7970247c39162419ba Mon Sep 17 00:00:00 2001
From: Daniel Mitterdorfer 
Date: Mon, 16 Jul 2018 10:44:04 +0200
Subject: [PATCH 040/107] Turn off real-mem breaker in REST tests

With this commit we disable the real-memory circuit breaker in REST
tests, as this breaker is based on real memory usage, over which we have
no (full) control in tests, and the REST client is not yet ready to
retry on circuit breaker exceptions.

This is only meant as a temporary measure to avoid spurious test
failures while we ensure that the REST client can handle those
situations appropriately.

Closes #32050
Relates #31767
Relates #31986
Relates #32074
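Concretely, for nodes on 7.x the cluster formation task now renders the
following line into the generated elasticsearch.yml (illustrative):

    indices.breaker.total.use_real_memory: false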
---
 .../elasticsearch/gradle/test/ClusterFormationTasks.groovy  | 6 ++++++
 1 file changed, 6 insertions(+)

diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy
index be0fb3a07c699..0349130076cfc 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy
@@ -331,6 +331,12 @@ class ClusterFormationTasks {
         }
         // increase script compilation limit since tests can rapid-fire script compilations
         esConfig['script.max_compilations_rate'] = '2048/1m'
+        // Temporarily disable the real memory usage circuit breaker. It depends on real memory usage which we have no full control
+        // over and the REST client will not retry on circuit breaking exceptions yet (see #31986 for details). Once the REST client
+        // can retry on circuit breaking exceptions, we can revert again to the default configuration.
+        if (node.nodeVersion.major >= 7) {
+            esConfig['indices.breaker.total.use_real_memory'] = false
+        }
         esConfig.putAll(node.config.settings)
 
         Task writeConfig = project.tasks.create(name: name, type: DefaultTask, dependsOn: setup)

From 040bc9d4a5f3e5643da4d7b81d1e151a715150bf Mon Sep 17 00:00:00 2001
From: Jim Ferenczi 
Date: Mon, 16 Jul 2018 10:53:51 +0200
Subject: [PATCH 041/107] [Test] Mute MlJobIT#testDeleteJobAfterMissingAliases

Relates #32034
---
 .../java/org/elasticsearch/xpack/ml/integration/MlJobIT.java     | 1 +
 1 file changed, 1 insertion(+)

diff --git a/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java b/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java
index 6713e66692ded..7820cbc06f5a0 100644
--- a/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java
+++ b/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java
@@ -438,6 +438,7 @@ public void testDeleteJobAfterMissingIndex() throws Exception {
                 client().performRequest("get", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats"));
     }
 
+    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32034")
     public void testDeleteJobAfterMissingAliases() throws Exception {
         String jobId = "delete-job-after-missing-alias-job";
         String readAliasName = AnomalyDetectorsIndex.jobResultsAliasedName(jobId);

From 142d24a4366c413c01ca52c2f51e4aa08304681e Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Christoph=20B=C3=BCscher?= 
Date: Mon, 16 Jul 2018 10:54:23 +0200
Subject: [PATCH 042/107] Remove unused params from SSource and Walker (#31935)

The "source" field in SSource seems unused. If removed, it can also be removed
from the ctor, which in turn makes is possible to delete the sourceText in the
Walker class.
---
 .../org/elasticsearch/painless/antlr/Walker.java    |  6 ++----
 .../org/elasticsearch/painless/node/SSource.java    | 13 +++++--------
 2 files changed, 7 insertions(+), 12 deletions(-)

diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/Walker.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/Walker.java
index e2742ffb9936d..6c8d3a62e065b 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/Walker.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/Walker.java
@@ -29,7 +29,6 @@
 import org.antlr.v4.runtime.atn.PredictionMode;
 import org.antlr.v4.runtime.tree.TerminalNode;
 import org.elasticsearch.painless.CompilerSettings;
-import org.elasticsearch.painless.lookup.PainlessLookup;
 import org.elasticsearch.painless.Globals;
 import org.elasticsearch.painless.Location;
 import org.elasticsearch.painless.Operation;
@@ -107,6 +106,7 @@
 import org.elasticsearch.painless.antlr.PainlessParser.TryContext;
 import org.elasticsearch.painless.antlr.PainlessParser.VariableContext;
 import org.elasticsearch.painless.antlr.PainlessParser.WhileContext;
+import org.elasticsearch.painless.lookup.PainlessLookup;
 import org.elasticsearch.painless.node.AExpression;
 import org.elasticsearch.painless.node.ANode;
 import org.elasticsearch.painless.node.AStatement;
@@ -184,7 +184,6 @@ public static SSource buildPainlessTree(ScriptClassInfo mainMethod, MainMethodRe
     private final CompilerSettings settings;
     private final Printer debugStream;
     private final String sourceName;
-    private final String sourceText;
     private final PainlessLookup painlessLookup;
 
     private final Deque reserved = new ArrayDeque<>();
@@ -198,7 +197,6 @@ private Walker(ScriptClassInfo scriptClassInfo, MainMethodReserved reserved, Str
         this.debugStream = debugStream;
         this.settings = settings;
         this.sourceName = Location.computeSourceName(sourceName);
-        this.sourceText = sourceText;
         this.globals = new Globals(new BitSet(sourceText.length()));
         this.painlessLookup = painlessLookup;
         this.source = (SSource)visit(buildAntlrTree(sourceText));
@@ -267,7 +265,7 @@ public ANode visitSource(SourceContext ctx) {
             statements.add((AStatement)visit(ctx.dstatement()));
         }
 
-        return new SSource(scriptClassInfo, settings, sourceName, sourceText, debugStream, (MainMethodReserved)reserved.pop(),
+        return new SSource(scriptClassInfo, settings, sourceName, debugStream, (MainMethodReserved)reserved.pop(),
                            location(ctx), functions, globals, statements);
     }
 
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java
index 4781457a57dfa..cd473e2c84ec7 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java
@@ -21,9 +21,6 @@
 
 import org.elasticsearch.painless.CompilerSettings;
 import org.elasticsearch.painless.Constant;
-import org.elasticsearch.painless.lookup.PainlessLookup;
-import org.elasticsearch.painless.lookup.PainlessMethod;
-import org.elasticsearch.painless.lookup.PainlessMethodKey;
 import org.elasticsearch.painless.Globals;
 import org.elasticsearch.painless.Locals;
 import org.elasticsearch.painless.Locals.Variable;
@@ -32,6 +29,9 @@
 import org.elasticsearch.painless.ScriptClassInfo;
 import org.elasticsearch.painless.SimpleChecksAdapter;
 import org.elasticsearch.painless.WriterConstants;
+import org.elasticsearch.painless.lookup.PainlessLookup;
+import org.elasticsearch.painless.lookup.PainlessMethod;
+import org.elasticsearch.painless.lookup.PainlessMethodKey;
 import org.elasticsearch.painless.node.SFunction.FunctionReserved;
 import org.objectweb.asm.ClassVisitor;
 import org.objectweb.asm.ClassWriter;
@@ -130,7 +130,6 @@ public int getMaxLoopCounter() {
     private final ScriptClassInfo scriptClassInfo;
     private final CompilerSettings settings;
     private final String name;
-    private final String source;
     private final Printer debugStream;
     private final MainMethodReserved reserved;
     private final List functions;
@@ -141,14 +140,12 @@ public int getMaxLoopCounter() {
     private final List getMethods;
     private byte[] bytes;
 
-    public SSource(ScriptClassInfo scriptClassInfo, CompilerSettings settings, String name, String source, Printer debugStream,
-                   MainMethodReserved reserved, Location location,
-                   List functions, Globals globals, List statements) {
+    public SSource(ScriptClassInfo scriptClassInfo, CompilerSettings settings, String name, Printer debugStream,
+            MainMethodReserved reserved, Location location, List functions, Globals globals, List statements) {
         super(location);
         this.scriptClassInfo = Objects.requireNonNull(scriptClassInfo);
         this.settings = Objects.requireNonNull(settings);
         this.name = Objects.requireNonNull(name);
-        this.source = Objects.requireNonNull(source);
         this.debugStream = debugStream;
         this.reserved = Objects.requireNonNull(reserved);
         // process any synthetic functions generated by walker (because right now, thats still easy)

From ce8b3e3a78aa7b28ffa9ef2460dfc9191833746b Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Christoph=20B=C3=BCscher?= 
Date: Mon, 16 Jul 2018 11:22:42 +0200
Subject: [PATCH 043/107] [Tests] Fix failure due to changed exception message
 (#32036)

Java 11 produces a more verbose message for the ClassCastException we check
for in SearchDocumentationIT. This changes the test from asserting the exact
exception message to only checking that the two classes involved are part of
the message.

Closes #32029
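For context, Java 11 appends class-loader and module details, so the
message reads roughly like (illustrative, exact wording varies by JVM):

    class org.elasticsearch.search.aggregations.bucket.terms.ParsedStringTerms cannot be cast to class
    org.elasticsearch.search.aggregations.bucket.range.Range (both are in unnamed module of loader 'app')

which is why the assertion now only checks that both class names appear
in the message.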
---
 .../client/documentation/SearchDocumentationIT.java         | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java
index 6d00e5d8d0365..26bb4682fd9db 100644
--- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java
+++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java
@@ -295,7 +295,6 @@ public void testBuildingSearchQueries() {
     }
 
     @SuppressWarnings({ "unused" })
-    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32029")
     public void testSearchRequestAggregations() throws IOException {
         RestHighLevelClient client = highLevelClient();
         {
@@ -338,8 +337,9 @@ public void testSearchRequestAggregations() throws IOException {
                     Range range = aggregations.get("by_company"); // <1>
                     // end::search-request-aggregations-get-wrongCast
                 } catch (ClassCastException ex) {
-                    assertEquals("org.elasticsearch.search.aggregations.bucket.terms.ParsedStringTerms"
-                            + " cannot be cast to org.elasticsearch.search.aggregations.bucket.range.Range", ex.getMessage());
+                    String message = ex.getMessage();
+                    assertThat(message, containsString("org.elasticsearch.search.aggregations.bucket.terms.ParsedStringTerms"));
+                    assertThat(message, containsString("org.elasticsearch.search.aggregations.bucket.range.Range"));
                 }
                 assertEquals(3, elasticBucket.getDocCount());
                 assertEquals(30, avg, 0.0);

From a1ad7a169eaeb3cc114a87600a0fe06aabe17a04 Mon Sep 17 00:00:00 2001
From: Jim Ferenczi 
Date: Mon, 16 Jul 2018 11:59:59 +0200
Subject: [PATCH 044/107] Fix BWC check after backport

Now that #31808 has been backported to 6.4, the wire version guards for
the inner collapse builder must use V_6_4_0 instead of V_7_0_0_alpha1.

Relates #31808
---
 .../java/org/elasticsearch/index/query/InnerHitBuilder.java   | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/server/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java b/server/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java
index 6bdc55d31cdc9..8b2db374c8da9 100644
--- a/server/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java
+++ b/server/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java
@@ -199,7 +199,7 @@ public InnerHitBuilder(StreamInput in) throws IOException {
             boolean hasChildren = in.readBoolean();
             assert hasChildren == false;
         }
-        if (in.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) {
+        if (in.getVersion().onOrAfter(Version.V_6_4_0)) {
             this.innerCollapseBuilder = in.readOptionalWriteable(CollapseBuilder::new);
         }
     }
@@ -247,7 +247,7 @@ public void writeTo(StreamOutput out) throws IOException {
             }
         }
         out.writeOptionalWriteable(highlightBuilder);
-        if (out.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) {
+        if (out.getVersion().onOrAfter(Version.V_6_4_0)) {
             out.writeOptionalWriteable(innerCollapseBuilder);
         }
     }

From bbe1b7cb2013077b0615536b3f5ffc48a7469581 Mon Sep 17 00:00:00 2001
From: Jim Ferenczi 
Date: Mon, 16 Jul 2018 12:03:28 +0200
Subject: [PATCH 045/107] Unmute field collapsing rest tests

BWC tests can run now that master and the 6.x branch are aligned.
Closes #32055
---
 .../test/search.inner_hits/10_basic.yml         |  9 ++++-----
 .../test/search/110_field_collapsing.yml        | 17 +++--------------
 2 files changed, 7 insertions(+), 19 deletions(-)

diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search.inner_hits/10_basic.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search.inner_hits/10_basic.yml
index 8f162ae2eb238..884a50507c7b7 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/search.inner_hits/10_basic.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search.inner_hits/10_basic.yml
@@ -13,9 +13,8 @@ setup:
 ---
 "Nested inner hits":
     - skip:
-        version: "all"
-        reason: "https://github.com/elastic/elasticsearch/issues/32055"
-
+        version: " - 6.1.99"
+        reason: "<= 6.1 nodes don't always include index or id in nested inner hits"
     - do:
         index:
           index: test
@@ -46,8 +45,8 @@ setup:
 "Nested doc version and seqIDs":
 
     - skip:
-        version: "all"
-        reason: "https://github.com/elastic/elasticsearch/issues/32055"
+        version: " - 6.3.99"
+        reason:  "object notation for docvalue_fields was introduced in 6.4"
 
     - do:
         index:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/search/110_field_collapsing.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/search/110_field_collapsing.yml
index 39597b1fbbeea..2dfd868d66b06 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/search/110_field_collapsing.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/search/110_field_collapsing.yml
@@ -107,9 +107,6 @@ setup:
 
 ---
 "field collapsing and inner_hits":
-  - skip:
-      version: "all"
-      reason: "https://github.com/elastic/elasticsearch/issues/32055"
 
   - do:
       search:
@@ -149,9 +146,6 @@ setup:
 
 ---
 "field collapsing, inner_hits and maxConcurrentGroupRequests":
-  - skip:
-      version: "all"
-      reason: "https://github.com/elastic/elasticsearch/issues/32055"
 
   - do:
       search:
@@ -232,9 +226,6 @@ setup:
 
 ---
 "no hits and inner_hits":
-  - skip:
-      version: "all"
-      reason: "https://github.com/elastic/elasticsearch/issues/32055"
 
   - do:
       search:
@@ -249,9 +240,6 @@ setup:
 
 ---
 "field collapsing and multiple inner_hits":
-  - skip:
-      version: "all"
-      reason: "https://github.com/elastic/elasticsearch/issues/32055"
 
   - do:
       search:
@@ -304,9 +292,10 @@ setup:
 
 ---
 "field collapsing, inner_hits and version":
+
   - skip:
-      version: "all"
-      reason: "https://github.com/elastic/elasticsearch/issues/32055"
+      version: " - 6.1.0"
+      reason:  "bug fixed in 6.1.1"
 
   - do:
       search:

From 391641c3c09ea9f2d4b6784f6cd609efc0d75286 Mon Sep 17 00:00:00 2001
From: Daniel Mitterdorfer 
Date: Mon, 16 Jul 2018 13:50:17 +0200
Subject: [PATCH 046/107] Ensure only parent breaker trips in unit test

With this commit we raise the limit of the child circuit breaker used in
the unit test for the circuit breaker service so that it is high enough
to trip only the parent circuit breaker. The previous limit was 300
bytes, but theoretically (considering overhead) usage could reach 346
bytes, so any reservation pushing usage past 300 bytes could trip the
child circuit breaker and lead to spurious failures.

Relates #31767
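A quick check of the arithmetic, assuming the child breaker compares
used * overhead against its limit: with the overhead constant of 2 set
in this test, the theoretical maximum of 173 reserved bytes accounts for
2 * 173 = 346 bytes, so a 350b child limit can never trip, while the
200b parent limit (driven by real memory usage) still can.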
---
 .../indices/breaker/HierarchyCircuitBreakerServiceTests.java    | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/server/src/test/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerServiceTests.java b/server/src/test/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerServiceTests.java
index 00bd15d244fde..a73cf8630fe52 100644
--- a/server/src/test/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerServiceTests.java
+++ b/server/src/test/java/org/elasticsearch/indices/breaker/HierarchyCircuitBreakerServiceTests.java
@@ -206,7 +206,7 @@ public void testParentBreaksOnRealMemoryUsage() throws Exception {
         Settings clusterSettings = Settings.builder()
             .put(HierarchyCircuitBreakerService.USE_REAL_MEMORY_USAGE_SETTING.getKey(), Boolean.TRUE)
             .put(HierarchyCircuitBreakerService.TOTAL_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), "200b")
-            .put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), "300b")
+            .put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_LIMIT_SETTING.getKey(), "350b")
             .put(HierarchyCircuitBreakerService.REQUEST_CIRCUIT_BREAKER_OVERHEAD_SETTING.getKey(), 2)
             .build();
 

From 6ec52fe1965c774898557faf0ea7c61d4a88e1c3 Mon Sep 17 00:00:00 2001
From: Jim Ferenczi 
Date: Mon, 16 Jul 2018 15:25:45 +0200
Subject: [PATCH 047/107] [Rollup] Fix duplicate field names in test (#32075)

This commit ensures that random field names do not clash with the explicit field names
set by the tests.

Closes #32067
---
 .../elasticsearch/xpack/core/rollup/ConfigTestHelpers.java  | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/ConfigTestHelpers.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/ConfigTestHelpers.java
index 3d82ac118f503..3e4e4a84d2f8e 100644
--- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/ConfigTestHelpers.java
+++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/rollup/ConfigTestHelpers.java
@@ -94,7 +94,7 @@ public static DateHistoGroupConfig.Builder getDateHisto() {
         if (ESTestCase.randomBoolean()) {
             dateHistoBuilder.setDelay(new DateHistogramInterval(randomPositiveTimeValue()));
         }
-        dateHistoBuilder.setField(ESTestCase.randomAlphaOfLengthBetween(1, 10 ));
+        dateHistoBuilder.setField(ESTestCase.randomAlphaOfLengthBetween(5, 10));
         return dateHistoBuilder;
     }
 
@@ -112,8 +112,8 @@ public static TermsGroupConfig.Builder getTerms() {
     }
 
     public static  List getFields() {
-        return IntStream.range(0, ESTestCase.randomIntBetween(1,10))
-                .mapToObj(n -> ESTestCase.randomAlphaOfLengthBetween(1,10))
+        return IntStream.range(0, ESTestCase.randomIntBetween(1, 10))
+                .mapToObj(n -> ESTestCase.randomAlphaOfLengthBetween(5, 10))
                 .collect(Collectors.toList());
     }
 

From ced669b406066e7774b0932f3a789870bce45f94 Mon Sep 17 00:00:00 2001
From: Ioannis Kakavas 
Date: Mon, 16 Jul 2018 16:41:56 +0300
Subject: [PATCH 048/107] [TEST] Consistent algorithm usage (#32077)

Ensure that the same algorithm is used for settings and
change password requests for consistency, even if we
do not expect to reach the code where the algorithm is
checked for now.
Completes a7eaa409e804f218aa06fd02d9166b9a5998b48a
---
 .../TransportChangePasswordActionTests.java     | 17 +++++++----------
 1 file changed, 7 insertions(+), 10 deletions(-)

diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordActionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordActionTests.java
index 516b33cbaccfa..aabaa40381f69 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordActionTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/user/TransportChangePasswordActionTests.java
@@ -49,18 +49,16 @@ public class TransportChangePasswordActionTests extends ESTestCase {
 
     public void testAnonymousUser() {
         final String hashingAlgorithm = randomFrom("pbkdf2", "pbkdf2_1000", "bcrypt", "bcrypt9");
-        Settings settings = Settings.builder().put(AnonymousUser.ROLES_SETTING.getKey(), "superuser").build();
+        Settings settings = Settings.builder().put(AnonymousUser.ROLES_SETTING.getKey(), "superuser")
+            .put(XPackSettings.PASSWORD_HASHING_ALGORITHM.getKey(), hashingAlgorithm).build();
         AnonymousUser anonymousUser = new AnonymousUser(settings);
         NativeUsersStore usersStore = mock(NativeUsersStore.class);
-        Settings passwordHashingSettings = Settings.builder().
-            put(XPackSettings.PASSWORD_HASHING_ALGORITHM.getKey(), hashingAlgorithm).build();
-        TransportService transportService = new TransportService(passwordHashingSettings, mock(Transport.class), null,
+        TransportService transportService = new TransportService(Settings.EMPTY, mock(Transport.class), null,
             TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet());
         TransportChangePasswordAction action = new TransportChangePasswordAction(settings, transportService,
             mock(ActionFilters.class), usersStore);
-
-        ChangePasswordRequest request = new ChangePasswordRequest();
         // Request will fail before the request hashing algorithm is checked, but we use the same algorithm as in settings for consistency
+        ChangePasswordRequest request = new ChangePasswordRequest();
         request.username(anonymousUser.principal());
         request.passwordHash(Hasher.resolve(hashingAlgorithm).hash(SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING));
 
@@ -89,14 +87,13 @@ public void testInternalUsers() {
         NativeUsersStore usersStore = mock(NativeUsersStore.class);
         Settings passwordHashingSettings = Settings.builder().
             put(XPackSettings.PASSWORD_HASHING_ALGORITHM.getKey(), hashingAlgorithm).build();
-        TransportService transportService = new TransportService(passwordHashingSettings, mock(Transport.class), null,
+        TransportService transportService = new TransportService(Settings.EMPTY, mock(Transport.class), null,
             TransportService.NOOP_TRANSPORT_INTERCEPTOR, x -> null, null, Collections.emptySet());
-        TransportChangePasswordAction action = new TransportChangePasswordAction(Settings.EMPTY, transportService,
+        TransportChangePasswordAction action = new TransportChangePasswordAction(passwordHashingSettings, transportService,
             mock(ActionFilters.class), usersStore);
-
+        // Request will fail before the request hashing algorithm is checked, but we use the same algorithm as in settings for consistency
         ChangePasswordRequest request = new ChangePasswordRequest();
         request.username(randomFrom(SystemUser.INSTANCE.principal(), XPackUser.INSTANCE.principal()));
-        // Request will fail before the request hashing algorithm is checked, but we use the same algorithm as in settings for consistency
         request.passwordHash(Hasher.resolve(hashingAlgorithm).hash(SecuritySettingsSourceField.TEST_PASSWORD_SECURE_STRING));
 
         final AtomicReference throwableRef = new AtomicReference<>();

From e38e69cdd11fdb51e9a2e673388cb7df35f4fd7e Mon Sep 17 00:00:00 2001
From: Zachary Tong 
Date: Mon, 16 Jul 2018 10:47:46 -0400
Subject: [PATCH 049/107] [Rollup] Replace RollupIT with an ESRestTestCase
 version (#31977)

The old RollupIT was a node IT, and flaky for a number of reasons.
This new version is an ESRestTestCase and should be a little more robust.

This was added to the multi-node QA tests as that seemed like the most
appropriate location.  It didn't seem necessary to create a whole new
QA module.

Note: The only test that was ported was the "Big" test for validating
a larger dataset.  The rest of the tests are represented in existing
yaml tests.

Closes #31258
Closes #30232
Related to #30290
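
For reference, the basic shape of an ESRestTestCase (a minimal,
hypothetical example; the real RollupIT below drives the rollup endpoints
the same way through the low-level Request/Response client):

    public class ExampleRestIT extends ESRestTestCase {
        public void testClusterHealthIsOk() throws Exception {
            // Talk to the externally started cluster over HTTP only.
            Request request = new Request("GET", "/_cluster/health");
            Response response = client().performRequest(request);
            assertEquals(200, response.getStatusLine().getStatusCode());
        }
    }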
---
 x-pack/plugin/rollup/build.gradle             |  28 -
 .../elasticsearch/xpack/rollup/RollupIT.java  | 498 ------------------
 .../elasticsearch/multi_node/RollupIT.java    | 326 ++++++++++++
 3 files changed, 326 insertions(+), 526 deletions(-)
 delete mode 100644 x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupIT.java
 create mode 100644 x-pack/qa/multi-node/src/test/java/org/elasticsearch/multi_node/RollupIT.java

diff --git a/x-pack/plugin/rollup/build.gradle b/x-pack/plugin/rollup/build.gradle
index 18ef7abee5c64..ff9c30ed9a934 100644
--- a/x-pack/plugin/rollup/build.gradle
+++ b/x-pack/plugin/rollup/build.gradle
@@ -1,6 +1,3 @@
-import com.carrotsearch.gradle.junit4.RandomizedTestingTask
-import org.elasticsearch.gradle.BuildPlugin
-
 evaluationDependsOn(xpackModule('core'))
 
 apply plugin: 'elasticsearch.esplugin'
@@ -23,33 +20,8 @@ dependencies {
     testCompile project(path: xpackModule('core'), configuration: 'testArtifacts')
 }
 
-dependencyLicenses {
-    ignoreSha 'x-pack-core'
-}
-
 run {
     plugin xpackModule('core')
 }
 
 integTest.enabled = false
-
-
-// Instead we create a separate task to run the
-// tests based on ESIntegTestCase
-task internalClusterTest(type: RandomizedTestingTask,
-        group: JavaBasePlugin.VERIFICATION_GROUP,
-        description: 'Multi-node tests',
-        dependsOn: test.dependsOn) {
-    configure(BuildPlugin.commonTestConfig(project))
-    classpath = project.test.classpath
-    testClassesDirs = project.test.testClassesDirs
-    include '**/*IT.class'
-    systemProperty 'es.set.netty.runtime.available.processors', 'false'
-}
-check.dependsOn internalClusterTest
-internalClusterTest.mustRunAfter test
-
-// also add an "alias" task to make typing on the command line easier task icTest {
-task icTest {
-    dependsOn internalClusterTest
-}
diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupIT.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupIT.java
deleted file mode 100644
index 157cd6a5b9d1a..0000000000000
--- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/RollupIT.java
+++ /dev/null
@@ -1,498 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
-package org.elasticsearch.xpack.rollup;
-
-import com.carrotsearch.randomizedtesting.annotations.ThreadLeakScope;
-import org.elasticsearch.ElasticsearchException;
-import org.elasticsearch.action.admin.indices.get.GetIndexResponse;
-import org.elasticsearch.action.bulk.BulkRequestBuilder;
-import org.elasticsearch.action.index.IndexRequest;
-import org.elasticsearch.action.search.SearchAction;
-import org.elasticsearch.action.search.SearchRequest;
-import org.elasticsearch.action.search.SearchResponse;
-import org.elasticsearch.action.support.WriteRequest;
-import org.elasticsearch.analysis.common.CommonAnalysisPlugin;
-import org.elasticsearch.cluster.metadata.IndexMetaData;
-import org.elasticsearch.common.settings.Settings;
-import org.elasticsearch.common.xcontent.XContentType;
-import org.elasticsearch.index.query.MatchAllQueryBuilder;
-import org.elasticsearch.license.LicenseService;
-import org.elasticsearch.plugins.Plugin;
-import org.elasticsearch.search.aggregations.Aggregation;
-import org.elasticsearch.search.aggregations.bucket.histogram.DateHistogramInterval;
-import org.elasticsearch.search.aggregations.bucket.histogram.InternalDateHistogram;
-import org.elasticsearch.search.builder.SearchSourceBuilder;
-import org.elasticsearch.test.ESIntegTestCase;
-import org.elasticsearch.test.ESTestCase;
-import org.elasticsearch.transport.Netty4Plugin;
-import org.elasticsearch.xpack.core.XPackSettings;
-import org.elasticsearch.xpack.core.rollup.action.DeleteRollupJobAction;
-import org.elasticsearch.xpack.core.rollup.action.GetRollupJobsAction;
-import org.elasticsearch.xpack.core.rollup.action.PutRollupJobAction;
-import org.elasticsearch.xpack.core.rollup.action.RollupSearchAction;
-import org.elasticsearch.xpack.core.rollup.action.StartRollupJobAction;
-import org.elasticsearch.xpack.core.rollup.action.StopRollupJobAction;
-import org.elasticsearch.xpack.core.rollup.job.DateHistoGroupConfig;
-import org.elasticsearch.xpack.core.rollup.job.GroupConfig;
-import org.elasticsearch.xpack.core.rollup.job.IndexerState;
-import org.elasticsearch.xpack.core.rollup.job.MetricConfig;
-import org.elasticsearch.xpack.core.rollup.job.RollupJobConfig;
-import org.elasticsearch.xpack.core.rollup.job.RollupJobStatus;
-import org.hamcrest.Matchers;
-import org.joda.time.DateTime;
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Before;
-
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.Map;
-import java.util.concurrent.ExecutionException;
-import java.util.concurrent.TimeUnit;
-
-import static org.elasticsearch.search.aggregations.AggregationBuilders.dateHistogram;
-import static org.hamcrest.core.IsEqual.equalTo;
-
-@ThreadLeakScope(ThreadLeakScope.Scope.NONE)
-public class RollupIT extends ESIntegTestCase {
-
-    private String taskId = "test-bigID";
-
-    @Override
-    protected boolean ignoreExternalCluster() {
-        return true;
-    }
-
-    @Override
-    protected Collection<Class<? extends Plugin>> nodePlugins() {
-        return Arrays.asList(LocalStateRollup.class, CommonAnalysisPlugin.class, Netty4Plugin.class);
-    }
-
-    @Override
-    protected Collection<Class<? extends Plugin>> transportClientPlugins() {
-        return nodePlugins();
-    }
-
-    @Override
-    protected Settings nodeSettings(int nodeOrdinal) {
-        Settings.Builder builder = Settings.builder();
-        builder.put(XPackSettings.ROLLUP_ENABLED.getKey(), true);
-        builder.put(XPackSettings.SECURITY_ENABLED.getKey(), false);
-        builder.put(LicenseService.SELF_GENERATED_LICENSE_TYPE.getKey(), "trial");
-        return builder.build();
-    }
-
-    @Override
-    protected Settings externalClusterClientSettings() {
-        return nodeSettings(0);
-    }
-
-    @Override
-    protected Settings transportClientSettings() {
-        return Settings.builder().put(super.transportClientSettings())
-                .put(XPackSettings.ROLLUP_ENABLED.getKey(), true)
-                .put(XPackSettings.SECURITY_ENABLED.getKey(), false)
-                .build();
-    }
-
-    @Before
-    public void createIndex() {
-        client().admin().indices().prepareCreate("test-1").addMapping("doc", "{\"doc\": {\"properties\": {" +
-                "\"date_histo\": {\"type\": \"date\"}, " +
-                "\"histo\": {\"type\": \"integer\"}, " +
-                "\"terms\": {\"type\": \"keyword\"}}}}", XContentType.JSON).get();
-        client().admin().cluster().prepareHealth("test-1").setWaitForYellowStatus().get();
-
-        BulkRequestBuilder bulk = client().prepareBulk();
-        Map<String, Object> source = new HashMap<>(3);
-        for (int i = 0; i < 20; i++) {
-            for (int j = 0; j < 20; j++) {
-                for (int k = 0; k < 20; k++) {
-                    source.put("date_histo", new DateTime().minusDays(i).toString());
-                    source.put("histo", Integer.toString(j * 100));
-                    source.put("terms", Integer.toString(k * 100));
-                    source.put("foo", k);
-                    bulk.add(new IndexRequest("test-1", "doc").source(source));
-                    source.clear();
-                }
-            }
-        }
-        bulk.get();
-        client().admin().indices().prepareRefresh("test-1").get();
-    }
-
-    public void testGetJob() throws ExecutionException, InterruptedException {
-        MetricConfig metricConfig = new MetricConfig.Builder()
-                .setField("foo")
-                .setMetrics(Arrays.asList("sum", "min", "max", "avg"))
-                .build();
-
-        DateHistoGroupConfig.Builder datehistoGroupConfig = new DateHistoGroupConfig.Builder();
-        datehistoGroupConfig.setField("date_histo");
-        datehistoGroupConfig.setInterval(new DateHistogramInterval("1d"));
-
-        GroupConfig.Builder groupConfig = new GroupConfig.Builder();
-        groupConfig.setDateHisto(datehistoGroupConfig.build());
-
-
-        RollupJobConfig.Builder config = new RollupJobConfig.Builder();
-        config.setIndexPattern("test-1");
-        config.setRollupIndex("rolled");
-        config.setId("testGet");
-        config.setGroupConfig(groupConfig.build());
-        config.setMetricsConfig(Collections.singletonList(metricConfig));
-        config.setCron("* * * * * ? *");
-        config.setPageSize(10);
-
-        PutRollupJobAction.Request request = new PutRollupJobAction.Request();
-        request.setConfig(config.build());
-        client().execute(PutRollupJobAction.INSTANCE, request).get();
-
-        GetRollupJobsAction.Request getRequest = new GetRollupJobsAction.Request("testGet");
-        GetRollupJobsAction.Response response = client().execute(GetRollupJobsAction.INSTANCE, getRequest).get();
-        assertThat(response.getJobs().size(), equalTo(1));
-        assertThat(response.getJobs().get(0).getJob().getId(), equalTo("testGet"));
-    }
-
-    public void testIndexPattern() throws Exception {
-        MetricConfig metricConfig = new MetricConfig.Builder()
-                .setField("foo")
-                .setMetrics(Arrays.asList("sum", "min", "max", "avg"))
-                .build();
-
-        DateHistoGroupConfig.Builder datehistoGroupConfig = new DateHistoGroupConfig.Builder();
-        datehistoGroupConfig.setField("date_histo");
-        datehistoGroupConfig.setInterval(new DateHistogramInterval("1d"));
-
-        GroupConfig.Builder groupConfig = new GroupConfig.Builder();
-        groupConfig.setDateHisto(datehistoGroupConfig.build());
-
-
-        RollupJobConfig.Builder config = new RollupJobConfig.Builder();
-        config.setIndexPattern("test-*");
-        config.setId("testIndexPattern");
-        config.setRollupIndex("rolled");
-        config.setGroupConfig(groupConfig.build());
-        config.setMetricsConfig(Collections.singletonList(metricConfig));
-        config.setCron("* * * * * ? *");
-        config.setPageSize(10);
-
-        PutRollupJobAction.Request request = new PutRollupJobAction.Request();
-        request.setConfig(config.build());
-        client().execute(PutRollupJobAction.INSTANCE, request).get();
-
-        StartRollupJobAction.Request startRequest = new StartRollupJobAction.Request("testIndexPattern");
-        StartRollupJobAction.Response startResponse = client().execute(StartRollupJobAction.INSTANCE, startRequest).get();
-        Assert.assertThat(startResponse.isStarted(), equalTo(true));
-
-        // Make sure it started
-        ESTestCase.assertBusy(() -> {
-            RollupJobStatus rollupJobStatus = getRollupJobStatus("testIndexPattern");
-            if (rollupJobStatus == null) {
-                fail("null");
-            }
-
-            IndexerState state = rollupJobStatus.getIndexerState();
-            assertTrue(state.equals(IndexerState.STARTED) || state.equals(IndexerState.INDEXING));
-        }, 60, TimeUnit.SECONDS);
-
-        // And wait for it to finish
-        ESTestCase.assertBusy(() -> {
-            RollupJobStatus rollupJobStatus = getRollupJobStatus("testIndexPattern");
-            if (rollupJobStatus == null) {
-                fail("null");
-            }
-
-            IndexerState state = rollupJobStatus.getIndexerState();
-            assertTrue(state.equals(IndexerState.STARTED) && rollupJobStatus.getPosition() != null);
-        }, 60, TimeUnit.SECONDS);
-
-        GetRollupJobsAction.Request getRequest = new GetRollupJobsAction.Request("testIndexPattern");
-        GetRollupJobsAction.Response response = client().execute(GetRollupJobsAction.INSTANCE, getRequest).get();
-        Assert.assertThat(response.getJobs().size(), equalTo(1));
-        Assert.assertThat(response.getJobs().get(0).getJob().getId(), equalTo("testIndexPattern"));
-
-        GetIndexResponse getIndexResponse = client().admin().indices().prepareGetIndex().addIndices("rolled").get();
-        Assert.assertThat(getIndexResponse.indices().length, Matchers.greaterThan(0));
-    }
-
-    @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/30290")
-    public void testTwoJobsStartStopDeleteOne() throws Exception {
-        MetricConfig metricConfig = new MetricConfig.Builder()
-                .setField("foo")
-                .setMetrics(Arrays.asList("sum", "min", "max", "avg"))
-                .build();
-
-        DateHistoGroupConfig.Builder datehistoGroupConfig = new DateHistoGroupConfig.Builder();
-        datehistoGroupConfig.setField("date_histo");
-        datehistoGroupConfig.setInterval(new DateHistogramInterval("1d"));
-
-        GroupConfig.Builder groupConfig = new GroupConfig.Builder();
-        groupConfig.setDateHisto(datehistoGroupConfig.build());
-
-
-        RollupJobConfig.Builder config = new RollupJobConfig.Builder();
-        config.setIndexPattern("test-1");
-        config.setRollupIndex("rolled");
-        config.setId("job1");
-        config.setGroupConfig(groupConfig.build());
-        config.setMetricsConfig(Collections.singletonList(metricConfig));
-        config.setCron("* * * * * ? *");
-        config.setPageSize(10);
-
-        PutRollupJobAction.Request request = new PutRollupJobAction.Request();
-        request.setConfig(config.build());
-        client().execute(PutRollupJobAction.INSTANCE, request).get();
-
-        RollupJobConfig.Builder config2 = new RollupJobConfig.Builder();
-        config2.setIndexPattern("test-1");
-        config2.setRollupIndex("rolled");
-        config2.setId("job2");
-        config2.setGroupConfig(groupConfig.build());
-        config2.setMetricsConfig(Collections.singletonList(metricConfig));
-        config2.setCron("* * * * * ? *");
-        config2.setPageSize(10);
-
-        PutRollupJobAction.Request request2 = new PutRollupJobAction.Request();
-        request2.setConfig(config2.build());
-        client().execute(PutRollupJobAction.INSTANCE, request2).get();
-
-        StartRollupJobAction.Request startRequest = new StartRollupJobAction.Request("job1");
-        StartRollupJobAction.Response response = client().execute(StartRollupJobAction.INSTANCE, startRequest).get();
-        Assert.assertThat(response.isStarted(), equalTo(true));
-
-        // Make sure it started
-        ESTestCase.assertBusy(() -> {
-            RollupJobStatus rollupJobStatus = getRollupJobStatus("job1");
-            if (rollupJobStatus == null) {
-                fail("null");
-            }
-
-            IndexerState state = rollupJobStatus.getIndexerState();
-            assertTrue(state.equals(IndexerState.STARTED) || state.equals(IndexerState.INDEXING));
-        }, 60, TimeUnit.SECONDS);
-
-        //but not the other task
-        ESTestCase.assertBusy(() -> {
-            RollupJobStatus rollupJobStatus = getRollupJobStatus("job2");
-
-            IndexerState state = rollupJobStatus.getIndexerState();
-            assertTrue(state.equals(IndexerState.STOPPED));
-        }, 60, TimeUnit.SECONDS);
-
-        // Delete the task
-        DeleteRollupJobAction.Request deleteRequest = new DeleteRollupJobAction.Request("job1");
-        DeleteRollupJobAction.Response deleteResponse = client().execute(DeleteRollupJobAction.INSTANCE, deleteRequest).get();
-        Assert.assertTrue(deleteResponse.isAcknowledged());
-
-        // Make sure the first job's task is gone
-        ESTestCase.assertBusy(() -> {
-            RollupJobStatus rollupJobStatus = getRollupJobStatus("job1");
-            assertTrue(rollupJobStatus == null);
-        }, 60, TimeUnit.SECONDS);
-
-        // And that we don't see it in the GetJobs API
-        GetRollupJobsAction.Request getRequest = new GetRollupJobsAction.Request("job1");
-        GetRollupJobsAction.Response getResponse = client().execute(GetRollupJobsAction.INSTANCE, getRequest).get();
-        Assert.assertThat(getResponse.getJobs().size(), equalTo(0));
-
-        // But make sure the other job is still there
-        getRequest = new GetRollupJobsAction.Request("job2");
-        getResponse = client().execute(GetRollupJobsAction.INSTANCE, getRequest).get();
-        Assert.assertThat(getResponse.getJobs().size(), equalTo(1));
-        Assert.assertThat(getResponse.getJobs().get(0).getJob().getId(), equalTo("job2"));
-
-        // and still STOPPED
-        ESTestCase.assertBusy(() -> {
-            RollupJobStatus rollupJobStatus = getRollupJobStatus("job2");
-
-            IndexerState state = rollupJobStatus.getIndexerState();
-            assertTrue(state.equals(IndexerState.STOPPED));
-        }, 60, TimeUnit.SECONDS);
-    }
-
-    public void testBig() throws Exception {
-
-        client().admin().indices().prepareCreate("test-big")
-                .addMapping("test-big", "{\"test-big\": {\"properties\": {\"timestamp\": {\"type\": \"date\"}, " +
-                    "\"thefield\": {\"type\": \"integer\"}}}}", XContentType.JSON)
-                .setSettings(Settings.builder()
-                        .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
-                        .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)).get();
-        client().admin().cluster().prepareHealth("test-big").setWaitForYellowStatus().get();
-
-        client().admin().indices().prepareCreate("test-verify")
-                .addMapping("test-big", "{\"test-big\": {\"properties\": {\"timestamp\": {\"type\": \"date\"}, " +
-                        "\"thefield\": {\"type\": \"integer\"}}}}", XContentType.JSON)
-                .setSettings(Settings.builder()
-                        .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1)
-                        .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0)).get();
-        client().admin().cluster().prepareHealth("test-verify").setWaitForYellowStatus().get();
-
-        BulkRequestBuilder bulk = client().prepareBulk();
-        Map<String, Object> source = new HashMap<>(3);
-
-        int numDays = 90;
-        int numDocsPerDay = 100;
-
-        for (int i = 0; i < numDays; i++) {
-            DateTime ts = new DateTime().minusDays(i);
-            for (int j = 0; j < numDocsPerDay; j++) {
-
-                int value = ESTestCase.randomIntBetween(0,100);
-                source.put("timestamp", ts.toString());
-                source.put("thefield", value);
-                bulk.add(new IndexRequest("test-big", "test-big").source(source));
-                bulk.add(new IndexRequest("test-verify", "test-big").source(source));
-                source.clear();
-            }
-
-            bulk.setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE);
-            bulk.get();
-            bulk = client().prepareBulk();
-            logger.info("Day: [" + i + "]: " + ts.toString() + " [" + ts.getMillis() + "]" );
-        }
-
-
-        client().admin().indices().prepareRefresh("test-big").get();
-        client().admin().indices().prepareRefresh("test-verify").get();
-
-        MetricConfig metricConfig = new MetricConfig.Builder()
-                .setField("thefield")
-                .setMetrics(Arrays.asList("sum", "min", "max", "avg"))
-                .build();
-
-        DateHistoGroupConfig.Builder datehistoGroupConfig = new DateHistoGroupConfig.Builder();
-        datehistoGroupConfig.setField("timestamp");
-        datehistoGroupConfig.setInterval(new DateHistogramInterval("1d"));
-
-        GroupConfig.Builder groupConfig = new GroupConfig.Builder();
-        groupConfig.setDateHisto(datehistoGroupConfig.build());
-
-        RollupJobConfig.Builder config = new RollupJobConfig.Builder();
-        config.setIndexPattern("test-big");
-        config.setRollupIndex("rolled");
-        config.setId(taskId);
-        config.setGroupConfig(groupConfig.build());
-        config.setMetricsConfig(Collections.singletonList(metricConfig));
-        config.setCron("* * * * * ? *");
-        config.setPageSize(1000);
-
-        PutRollupJobAction.Request request = new PutRollupJobAction.Request();
-        request.setConfig(config.build());
-        client().execute(PutRollupJobAction.INSTANCE, request).get();
-
-        StartRollupJobAction.Request startRequest = new StartRollupJobAction.Request(taskId);
-        StartRollupJobAction.Response response = client().execute(StartRollupJobAction.INSTANCE, startRequest).get();
-        Assert.assertThat(response.isStarted(), equalTo(true));
-
-        ESTestCase.assertBusy(() -> {
-            RollupJobStatus rollupJobStatus = getRollupJobStatus(taskId);
-            if (rollupJobStatus == null) {
-                fail("null");
-            }
-
-            IndexerState state = rollupJobStatus.getIndexerState();
-            logger.error("state: [" + state + "]");
-            assertTrue(state.equals(IndexerState.STARTED) && rollupJobStatus.getPosition() != null);
-        }, 60, TimeUnit.SECONDS);
-
-        RollupJobStatus rollupJobStatus = getRollupJobStatus(taskId);
-        if (rollupJobStatus == null) {
-            Assert.fail("rollup job status should not be null");
-        }
-
-        client().admin().indices().prepareRefresh("rolled").get();
-
-        SearchResponse count = client().prepareSearch("rolled").setSize(10).get();
-        // total document is numDays minus 1 because we don't build rollup for
-        // buckets that are not full (bucket for the current day).
-        Assert.assertThat(count.getHits().totalHits, equalTo(Long.valueOf(numDays-1)));
-
-        if (ESTestCase.randomBoolean()) {
-            client().admin().indices().prepareDelete("test-big").get();
-            client().admin().indices().prepareRefresh().get();
-        }
-
-        // Execute the rollup search
-        SearchRequest rollupRequest = new SearchRequest("rolled")
-                .source(new SearchSourceBuilder()
-                        .aggregation(dateHistogram("timestamp")
-                                .interval(1000*86400)
-                                .field("timestamp"))
-                        .size(0));
-        SearchResponse searchResponse = client().execute(RollupSearchAction.INSTANCE, rollupRequest).get();
-        Assert.assertNotNull(searchResponse);
-
-        // And a regular search against the verification index
-        SearchRequest verifyRequest = new SearchRequest("test-verify")
-                .source(new SearchSourceBuilder()
-                        .aggregation(dateHistogram("timestamp")
-                                .interval(1000*86400)
-                                .field("timestamp"))
-                        .size(0));
-        SearchResponse verifyResponse = client().execute(SearchAction.INSTANCE, verifyRequest).get();
-
-        Map<String, Aggregation> rollupAggs = searchResponse.getAggregations().asMap();
-
-        for (Aggregation agg : verifyResponse.getAggregations().asList()) {
-            Aggregation rollupAgg = rollupAggs.get(agg.getName());
-
-            Assert.assertNotNull(rollupAgg);
-            Assert.assertThat(rollupAgg.getType(), equalTo(agg.getType()));
-            verifyAgg((InternalDateHistogram)agg, (InternalDateHistogram)rollupAgg);
-        }
-
-        // And a quick sanity check for doc type
-        SearchRequest rollupRawRequest = new SearchRequest("rolled")
-                .source(new SearchSourceBuilder().query(new MatchAllQueryBuilder())
-                        .size(1));
-        SearchResponse searchRawResponse = client().execute(SearchAction.INSTANCE, rollupRawRequest).get();
-        Assert.assertNotNull(searchRawResponse);
-        assertThat(searchRawResponse.getHits().getAt(0).getType(), equalTo("_doc"));
-    }
-
-    private void verifyAgg(InternalDateHistogram verify, InternalDateHistogram rollup) {
-        for (int i = 0; i < rollup.getBuckets().size(); i++) {
-            InternalDateHistogram.Bucket verifyBucket = verify.getBuckets().get(i);
-            InternalDateHistogram.Bucket rollupBucket = rollup.getBuckets().get(i);
-            Assert.assertThat(rollupBucket.getDocCount(), equalTo(verifyBucket.getDocCount()));
-            Assert.assertThat(((DateTime)rollupBucket.getKey()).getMillis(), equalTo(((DateTime)verifyBucket.getKey()).getMillis()));
-            Assert.assertTrue(rollupBucket.getAggregations().equals(verifyBucket.getAggregations()));
-        }
-    }
-
-    private RollupJobStatus getRollupJobStatus(final String taskId) {
-        final GetRollupJobsAction.Request request = new GetRollupJobsAction.Request(taskId);
-        final GetRollupJobsAction.Response response = client().execute(GetRollupJobsAction.INSTANCE, request).actionGet();
-
-        if (response.getJobs() != null && response.getJobs().isEmpty() == false) {
-            assertThat("Expect 1 rollup job with id " + taskId, response.getJobs().size(), equalTo(1));
-            return response.getJobs().iterator().next().getStatus();
-        }
-        return null;
-    }
-
-    @After
-    public void cleanup() throws ExecutionException, InterruptedException {
-        GetRollupJobsAction.Request getRequest = new GetRollupJobsAction.Request("_all");
-        GetRollupJobsAction.Response response = client().execute(GetRollupJobsAction.INSTANCE, getRequest).get();
-
-        for (GetRollupJobsAction.JobWrapper job : response.getJobs()) {
-            StopRollupJobAction.Request stopRequest = new StopRollupJobAction.Request(job.getJob().getId());
-            try {
-                client().execute(StopRollupJobAction.INSTANCE, stopRequest).get();
-            } catch (ElasticsearchException e) {
-                //
-            }
-
-            DeleteRollupJobAction.Request deleteRequest = new DeleteRollupJobAction.Request(job.getJob().getId());
-            client().execute(DeleteRollupJobAction.INSTANCE, deleteRequest).get();
-        }
-    }
-}
diff --git a/x-pack/qa/multi-node/src/test/java/org/elasticsearch/multi_node/RollupIT.java b/x-pack/qa/multi-node/src/test/java/org/elasticsearch/multi_node/RollupIT.java
new file mode 100644
index 0000000000000..b0142ae141853
--- /dev/null
+++ b/x-pack/qa/multi-node/src/test/java/org/elasticsearch/multi_node/RollupIT.java
@@ -0,0 +1,326 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.multi_node;
+
+import org.apache.http.HttpStatus;
+import org.apache.http.util.EntityUtils;
+import org.elasticsearch.client.Request;
+import org.elasticsearch.client.Response;
+import org.elasticsearch.common.settings.SecureString;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.util.concurrent.ThreadContext;
+import org.elasticsearch.common.xcontent.XContentHelper;
+import org.elasticsearch.common.xcontent.json.JsonXContent;
+import org.elasticsearch.common.xcontent.support.XContentMapValues;
+import org.elasticsearch.rest.RestStatus;
+import org.elasticsearch.test.ESTestCase;
+import org.elasticsearch.test.rest.ESRestTestCase;
+import org.elasticsearch.xpack.core.rollup.job.RollupJob;
+import org.elasticsearch.xpack.core.watcher.support.xcontent.ObjectPath;
+import org.junit.After;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.nio.charset.StandardCharsets;
+import java.time.Instant;
+import java.time.ZoneId;
+import java.time.ZonedDateTime;
+import java.time.format.DateTimeFormatter;
+import java.util.List;
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
+import java.util.stream.Collectors;
+
+import static org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue;
+import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.isOneOf;
+
+public class RollupIT extends ESRestTestCase {
+
+    @Override
+    protected Settings restClientSettings() {
+        return getClientSettings("super-user", "x-pack-super-password");
+    }
+
+    @Override
+    protected Settings restAdminSettings() {
+        return getClientSettings("super-user", "x-pack-super-password");
+    }
+
+    private Settings getClientSettings(final String username, final String password) {
+        final String token = basicAuthHeaderValue(username, new SecureString(password.toCharArray()));
+        return Settings.builder().put(ThreadContext.PREFIX + ".Authorization", token).build();
+    }
+
+    static Map<String, Object> toMap(Response response) throws IOException {
+        return toMap(EntityUtils.toString(response.getEntity()));
+    }
+
+    static Map<String, Object> toMap(String response) throws IOException {
+        return XContentHelper.convertToMap(JsonXContent.jsonXContent, response, false);
+    }
+
+    @After
+    public void clearRollupMetadata() throws Exception {
+        deleteAllJobs();
+        waitForPendingTasks();
+        // indices will be deleted by the ESRestTestCase class
+    }
+
+    public void testBigRollup() throws Exception {
+        final int numDocs = 200;
+
+        // index documents for the rollup job
+        final StringBuilder bulk = new StringBuilder();
+        for (int i = 0; i < numDocs; i++) {
+            bulk.append("{\"index\":{\"_index\":\"rollup-docs\",\"_type\":\"_doc\"}}\n");
+            ZonedDateTime zdt = ZonedDateTime.ofInstant(Instant.ofEpochSecond(1531221196 + (60*i)), ZoneId.of("UTC"));
+            String date = zdt.format(DateTimeFormatter.ISO_OFFSET_DATE_TIME);
+            bulk.append("{\"timestamp\":\"").append(date).append("\",\"value\":").append(i).append("}\n");
+        }
+        bulk.append("\r\n");
+
+        final Request bulkRequest = new Request("POST", "/_bulk");
+        bulkRequest.addParameter("refresh", "true");
+        bulkRequest.setJsonEntity(bulk.toString());
+        client().performRequest(bulkRequest);
+        // create the rollup job
+        final Request createRollupJobRequest = new Request("PUT", "/_xpack/rollup/job/rollup-job-test");
+        createRollupJobRequest.setJsonEntity("{"
+            + "\"index_pattern\":\"rollup-*\","
+            + "\"rollup_index\":\"results-rollup\","
+            + "\"cron\":\"*/1 * * * * ?\","             // fast cron and big page size so test runs quickly
+            + "\"page_size\":20,"
+            + "\"groups\":{"
+            + "    \"date_histogram\":{"
+            + "        \"field\":\"timestamp\","
+            + "        \"interval\":\"5m\""
+            + "      }"
+            + "},"
+            + "\"metrics\":["
+            + "    {\"field\":\"value\",\"metrics\":[\"min\",\"max\",\"sum\"]}"
+            + "]"
+            + "}");
+
+        Map<String, Object> createRollupJobResponse = toMap(client().performRequest(createRollupJobRequest));
+        assertThat(createRollupJobResponse.get("acknowledged"), equalTo(Boolean.TRUE));
+
+        // start the rollup job
+        final Request startRollupJobRequest = new Request("POST", "_xpack/rollup/job/rollup-job-test/_start");
+        Map<String, Object> startRollupJobResponse = toMap(client().performRequest(startRollupJobRequest));
+        assertThat(startRollupJobResponse.get("started"), equalTo(Boolean.TRUE));
+
+        assertRollUpJob("rollup-job-test");
+
+        // Wait for the job to finish, by watching how many rollup docs we've indexed
+        assertBusy(() -> {
+            final Request getRollupJobRequest = new Request("GET", "_xpack/rollup/job/rollup-job-test");
+            Response getRollupJobResponse = client().performRequest(getRollupJobRequest);
+            assertThat(getRollupJobResponse.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus()));
+
+            Map<String, Object> job = getJob(getRollupJobResponse, "rollup-job-test");
+            if (job != null) {
+                assertThat(ObjectPath.eval("status.job_state", job), equalTo("started"));
+                assertThat(ObjectPath.eval("stats.rollups_indexed", job), equalTo(41));
+            }
+        }, 30L, TimeUnit.SECONDS);
+
+        // Refresh the rollup index to make sure all newly indexed docs are searchable
+        final Request refreshRollupIndex = new Request("POST", "results-rollup/_refresh");
+        toMap(client().performRequest(refreshRollupIndex));
+
+        String jsonRequestBody = "{\n" +
+            "  \"size\": 0,\n" +
+            "  \"query\": {\n" +
+            "    \"match_all\": {}\n" +
+            "  },\n" +
+            "  \"aggs\": {\n" +
+            "    \"date_histo\": {\n" +
+            "      \"date_histogram\": {\n" +
+            "        \"field\": \"timestamp\",\n" +
+            "        \"interval\": \"1h\"\n" +
+            "      },\n" +
+            "      \"aggs\": {\n" +
+            "        \"the_max\": {\n" +
+            "          \"max\": {\n" +
+            "            \"field\": \"value\"\n" +
+            "          }\n" +
+            "        }\n" +
+            "      }\n" +
+            "    }\n" +
+            "  }\n" +
+            "}";
+
+        Request request = new Request("GET", "rollup-docs/_search");
+        request.setJsonEntity(jsonRequestBody);
+        Response liveResponse = client().performRequest(request);
+        Map<String, Object> liveBody = toMap(liveResponse);
+
+        request = new Request("GET", "results-rollup/_rollup_search");
+        request.setJsonEntity(jsonRequestBody);
+        Response rollupResponse = client().performRequest(request);
+        Map<String, Object> rollupBody = toMap(rollupResponse);
+
+        // Do the live agg results match the rollup agg results?
+        assertThat(ObjectPath.eval("aggregations.date_histo.buckets", liveBody),
+            equalTo(ObjectPath.eval("aggregations.date_histo.buckets", rollupBody)));
+
+        request = new Request("GET", "rollup-docs/_rollup_search");
+        request.setJsonEntity(jsonRequestBody);
+        Response liveRollupResponse = client().performRequest(request);
+        Map<String, Object> liveRollupBody = toMap(liveRollupResponse);
+
+        // Does searching the live index via rollup_search match the live search?
+        assertThat(ObjectPath.eval("aggregations.date_histo.buckets", liveBody),
+            equalTo(ObjectPath.eval("aggregations.date_histo.buckets", liveRollupBody)));
+
+    }
+
+    @SuppressWarnings("unchecked")
+    private void assertRollUpJob(final String rollupJob) throws Exception {
+        String[] states = new String[]{"indexing", "started"};
+        waitForRollUpJob(rollupJob, states);
+
+        // check that the rollup job is started using the RollUp API
+        final Request getRollupJobRequest = new Request("GET", "_xpack/rollup/job/" + rollupJob);
+        Map<String, Object> getRollupJobResponse = toMap(client().performRequest(getRollupJobRequest));
+        Map<String, Object> job = getJob(getRollupJobResponse, rollupJob);
+        if (job != null) {
+            assertThat(ObjectPath.eval("status.job_state", job), isOneOf(states));
+        }
+
+        // check that the rollup job is started using the Tasks API
+        final Request taskRequest = new Request("GET", "_tasks");
+        taskRequest.addParameter("detailed", "true");
+        taskRequest.addParameter("actions", "xpack/rollup/*");
+        Map<String, Object> taskResponse = toMap(client().performRequest(taskRequest));
+        Map<String, Object> taskResponseNodes = (Map<String, Object>) taskResponse.get("nodes");
+        Map<String, Object> taskResponseNode = (Map<String, Object>) taskResponseNodes.values().iterator().next();
+        Map<String, Object> taskResponseTasks = (Map<String, Object>) taskResponseNode.get("tasks");
+        Map<String, Object> taskResponseStatus = (Map<String, Object>) taskResponseTasks.values().iterator().next();
+        assertThat(ObjectPath.eval("status.job_state", taskResponseStatus), isOneOf(states));
+
+        // check that the rollup job is started using the Cluster State API
+        final Request clusterStateRequest = new Request("GET", "_cluster/state/metadata");
+        Map clusterStateResponse = toMap(client().performRequest(clusterStateRequest));
+        List<Map<String, Object>> rollupJobTasks = ObjectPath.eval("metadata.persistent_tasks.tasks", clusterStateResponse);
+
+        boolean hasRollupTask = false;
+        for (Map<String, Object> task : rollupJobTasks) {
+            if (ObjectPath.eval("id", task).equals(rollupJob)) {
+                hasRollupTask = true;
+
+                final String jobStateField = "task.xpack/rollup/job.state.job_state";
+                assertThat("Expected field [" + jobStateField + "] to be started or indexing in " + task.get("id"),
+                    ObjectPath.eval(jobStateField, task), isOneOf(states));
+                break;
+            }
+        }
+        if (hasRollupTask == false) {
+            fail("Expected persistent task for [" + rollupJob + "] but none found.");
+        }
+
+    }
+
+    private void waitForRollUpJob(final String rollupJob, String[] expectedStates) throws Exception {
+        assertBusy(() -> {
+            final Request getRollupJobRequest = new Request("GET", "_xpack/rollup/job/" + rollupJob);
+            Response getRollupJobResponse = client().performRequest(getRollupJobRequest);
+            assertThat(getRollupJobResponse.getStatusLine().getStatusCode(), equalTo(RestStatus.OK.getStatus()));
+
+            Map<String, Object> job = getJob(getRollupJobResponse, rollupJob);
+            if (job != null) {
+                assertThat(ObjectPath.eval("status.job_state", job), isOneOf(expectedStates));
+            }
+        }, 30L, TimeUnit.SECONDS);
+    }
+
+    private Map<String, Object> getJob(Response response, String targetJobId) throws IOException {
+        return getJob(ESRestTestCase.entityAsMap(response), targetJobId);
+    }
+
+    @SuppressWarnings("unchecked")
+    private Map<String, Object> getJob(Map<String, Object> jobsMap, String targetJobId) throws IOException {
+
+        List<Map<String, Object>> jobs =
+            (List<Map<String, Object>>) XContentMapValues.extractValue("jobs", jobsMap);
+
+        if (jobs == null) {
+            return null;
+        }
+
+        for (Map<String, Object> job : jobs) {
+            String jobId = (String) ((Map<String, Object>) job.get("config")).get("id");
+            if (jobId.equals(targetJobId)) {
+                return job;
+            }
+        }
+        return null;
+    }
+
+    private void waitForPendingTasks() throws Exception {
+        ESTestCase.assertBusy(() -> {
+            try {
+                Request request = new Request("GET", "/_cat/tasks");
+                request.addParameter("detailed", "true");
+                Response response = adminClient().performRequest(request);
+                if (response.getStatusLine().getStatusCode() == HttpStatus.SC_OK) {
+                    try (BufferedReader responseReader = new BufferedReader(
+                        new InputStreamReader(response.getEntity().getContent(), StandardCharsets.UTF_8))) {
+                        int activeTasks = 0;
+                        String line;
+                        StringBuilder tasksListString = new StringBuilder();
+                        while ((line = responseReader.readLine()) != null) {
+
+                            // We only care about Rollup jobs, otherwise this fails too easily due to unrelated tasks
+                            if (line.startsWith(RollupJob.NAME) == true) {
+                                activeTasks++;
+                                tasksListString.append(line);
+                                tasksListString.append('\n');
+                            }
+                        }
+                        assertEquals(activeTasks + " active tasks found:\n" + tasksListString, 0, activeTasks);
+                    }
+                }
+            } catch (IOException e) {
+                throw new AssertionError("Error getting active tasks list", e);
+            }
+        });
+    }
+
+    @SuppressWarnings("unchecked")
+    private void deleteAllJobs() throws Exception {
+        Request request = new Request("GET", "/_xpack/rollup/job/_all");
+        Response response = adminClient().performRequest(request);
+        Map<String, Object> jobs = ESRestTestCase.entityAsMap(response);
+        @SuppressWarnings("unchecked")
+        List<Map<String, Object>> jobConfigs =
+            (List<Map<String, Object>>) XContentMapValues.extractValue("jobs", jobs);
+
+        if (jobConfigs == null) {
+            return;
+        }
+
+        for (Map<String, Object> jobConfig : jobConfigs) {
+            logger.debug(jobConfig);
+            String jobId = (String) ((Map<String, Object>) jobConfig.get("config")).get("id");
+            logger.debug("Deleting job " + jobId);
+            try {
+                request = new Request("DELETE", "/_xpack/rollup/job/" + jobId);
+                adminClient().performRequest(request);
+            } catch (Exception e) {
+                // ok
+            }
+        }
+    }
+
+    private static String responseEntityToString(Response response) throws Exception {
+        try (BufferedReader reader = new BufferedReader(new InputStreamReader(response.getEntity().getContent(), StandardCharsets.UTF_8))) {
+            return reader.lines().collect(Collectors.joining("\n"));
+        }
+    }
+}

From 4a9fbe77153c031d299cdb2244f8db6ab74001ea Mon Sep 17 00:00:00 2001
From: Armin Braun 
Date: Mon, 16 Jul 2018 18:43:00 +0200
Subject: [PATCH 050/107] Scripting: Remove dead code from painless module
 (#32064)

---
 .../elasticsearch/painless/MethodWriter.java  |  8 ---
 .../painless/antlr/StashingTokenFactory.java  | 62 -------------------
 2 files changed, 70 deletions(-)
 delete mode 100644 modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/StashingTokenFactory.java

diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/MethodWriter.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/MethodWriter.java
index e0a780d418843..5db7c6b3f712c 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/MethodWriter.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/MethodWriter.java
@@ -227,14 +227,6 @@ public static Type getType(Class<?> clazz) {
         return Type.getType(clazz);
     }
 
-    public void writeBranch(final Label tru, final Label fals) {
-        if (tru != null) {
-            visitJumpInsn(Opcodes.IFNE, tru);
-        } else if (fals != null) {
-            visitJumpInsn(Opcodes.IFEQ, fals);
-        }
-    }
-
     /** Starts a new string concat.
      * @return the size of arguments pushed to stack (the object that does string concats, e.g. a StringBuilder)
      */
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/StashingTokenFactory.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/StashingTokenFactory.java
deleted file mode 100644
index 3ac45705d5549..0000000000000
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/StashingTokenFactory.java
+++ /dev/null
@@ -1,62 +0,0 @@
-/*
- * Licensed to Elasticsearch under one or more contributor
- * license agreements. See the NOTICE file distributed with
- * this work for additional information regarding copyright
- * ownership. Elasticsearch licenses this file to you under
- * the Apache License, Version 2.0 (the "License"); you may
- * not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing,
- * software distributed under the License is distributed on an
- * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
- * KIND, either express or implied.  See the License for the
- * specific language governing permissions and limitations
- * under the License.
- */
-
-package org.elasticsearch.painless.antlr;
-
-import org.antlr.v4.runtime.CharStream;
-import org.antlr.v4.runtime.Lexer;
-import org.antlr.v4.runtime.Token;
-import org.antlr.v4.runtime.TokenFactory;
-import org.antlr.v4.runtime.TokenSource;
-import org.antlr.v4.runtime.misc.Pair;
-
-/**
- * Token factory that preserves the last non-whitespace token so you can do token level lookbehind in the lexer.
- */
-public class StashingTokenFactory<T extends Token> implements TokenFactory<T> {
-    private final TokenFactory<T> delegate;
-
-    private T lastToken;
-
-    public StashingTokenFactory(TokenFactory<T> delegate) {
-        this.delegate = delegate;
-    }
-
-    public T getLastToken() {
-        return lastToken;
-    }
-
-    @Override
-    public T create(Pair<TokenSource, CharStream> source, int type, String text, int channel, int start, int stop, int line,
-            int charPositionInLine) {
-        return maybeStash(delegate.create(source, type, text, channel, start, stop, line, charPositionInLine));
-    }
-
-    @Override
-    public T create(int type, String text) {
-        return maybeStash(delegate.create(type, text));
-    }
-
-    private T maybeStash(T token) {
-        if (token.getChannel() == Lexer.DEFAULT_TOKEN_CHANNEL) {
-            lastToken = token;
-        }
-        return token;
-    }
-}

From 5f130a2af616905cf1fa8b8bb3240af822353c86 Mon Sep 17 00:00:00 2001
From: Jack Conradson 
Date: Mon, 16 Jul 2018 11:15:29 -0700
Subject: [PATCH 051/107] Painless: Separate PainlessLookup into PainlessLookup
 and PainlessLookupBuilder (#32054)
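
With this split, the construction logic lives in a builder and the lookup
itself stays immutable; callers now build it in two steps, as in the
PainlessScriptEngine change below (sketch):

    PainlessLookup lookup = new PainlessLookupBuilder(whitelists).build();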

---
 .../painless/AnalyzerCaster.java              |   2 +-
 .../elasticsearch/painless/MethodWriter.java  |   2 +-
 .../painless/PainlessScriptEngine.java        |   8 +-
 .../painless/lookup/PainlessLookup.java       | 692 +---------------
 .../lookup/PainlessLookupBuilder.java         | 774 ++++++++++++++++++
 .../elasticsearch/painless/lookup/def.java    |  28 +
 .../painless/node/EAssignment.java            |   2 +-
 .../elasticsearch/painless/node/EBinary.java  |   2 +-
 .../painless/node/ECapturingFunctionRef.java  |   2 +-
 .../elasticsearch/painless/node/EComp.java    |   2 +-
 .../elasticsearch/painless/node/ELambda.java  |   2 +-
 .../painless/node/EListInit.java              |   2 +-
 .../elasticsearch/painless/node/EMapInit.java |   2 +-
 .../elasticsearch/painless/node/EUnary.java   |   2 +-
 .../elasticsearch/painless/node/PBrace.java   |   2 +-
 .../painless/node/PCallInvoke.java            |   2 +-
 .../elasticsearch/painless/node/PField.java   |   2 +-
 .../painless/node/PSubDefArray.java           |   2 +-
 .../painless/node/PSubDefCall.java            |   2 +-
 .../painless/node/PSubDefField.java           |   2 +-
 .../elasticsearch/painless/node/SEach.java    |   2 +-
 .../painless/node/SSubEachIterable.java       |   2 +-
 .../painless/BaseClassTests.java              |   3 +-
 .../elasticsearch/painless/DebugTests.java    |   3 +-
 .../org/elasticsearch/painless/Debugger.java  |   4 +-
 .../painless/DefBootstrapTests.java           |   3 +-
 .../painless/PainlessDocGenerator.java        |  12 +-
 .../painless/ScriptTestCase.java              |   3 +-
 .../painless/node/NodeToStringTests.java      |   3 +-
 29 files changed, 847 insertions(+), 722 deletions(-)
 create mode 100644 modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java
 create mode 100644 modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/def.java

diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java
index 69ef57faad63e..457ec82a5e429 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java
@@ -21,7 +21,7 @@
 
 import org.elasticsearch.painless.lookup.PainlessLookup;
 import org.elasticsearch.painless.lookup.PainlessCast;
-import org.elasticsearch.painless.lookup.PainlessLookup.def;
+import org.elasticsearch.painless.lookup.def;
 
 import java.util.Objects;
 
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/MethodWriter.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/MethodWriter.java
index 5db7c6b3f712c..c339e7bfb2613 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/MethodWriter.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/MethodWriter.java
@@ -20,7 +20,7 @@
 package org.elasticsearch.painless;
 
 import org.elasticsearch.painless.lookup.PainlessCast;
-import org.elasticsearch.painless.lookup.PainlessLookup.def;
+import org.elasticsearch.painless.lookup.def;
 import org.objectweb.asm.ClassVisitor;
 import org.objectweb.asm.Label;
 import org.objectweb.asm.Opcodes;
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScriptEngine.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScriptEngine.java
index 4560fd85a6589..1687cb24cb639 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScriptEngine.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessScriptEngine.java
@@ -24,7 +24,7 @@
 import org.elasticsearch.common.component.AbstractComponent;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.painless.Compiler.Loader;
-import org.elasticsearch.painless.lookup.PainlessLookup;
+import org.elasticsearch.painless.lookup.PainlessLookupBuilder;
 import org.elasticsearch.painless.spi.Whitelist;
 import org.elasticsearch.script.ExecutableScript;
 import org.elasticsearch.script.ScriptContext;
@@ -102,9 +102,11 @@ public PainlessScriptEngine(Settings settings, Map, List, List> entry : contexts.entrySet()) {
             ScriptContext context = entry.getKey();
             if (context.instanceClazz.equals(SearchScript.class) || context.instanceClazz.equals(ExecutableScript.class)) {
-                contextsToCompilers.put(context, new Compiler(GenericElasticsearchScript.class, new PainlessLookup(entry.getValue())));
+                contextsToCompilers.put(context, new Compiler(GenericElasticsearchScript.class,
+                    new PainlessLookupBuilder(entry.getValue()).build()));
             } else {
-                contextsToCompilers.put(context, new Compiler(context.instanceClazz, new PainlessLookup(entry.getValue())));
+                contextsToCompilers.put(context, new Compiler(context.instanceClazz,
+                    new PainlessLookupBuilder(entry.getValue()).build()));
             }
         }
 
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java
index 5833767fbd3d2..feeaf4d34bcdd 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java
@@ -19,25 +19,10 @@
 
 package org.elasticsearch.painless.lookup;
 
-import org.elasticsearch.painless.spi.Whitelist;
-import org.elasticsearch.painless.spi.WhitelistClass;
-import org.elasticsearch.painless.spi.WhitelistConstructor;
-import org.elasticsearch.painless.spi.WhitelistField;
-import org.elasticsearch.painless.spi.WhitelistMethod;
-import org.objectweb.asm.Type;
-
-import java.lang.invoke.MethodHandle;
-import java.lang.invoke.MethodHandles;
-import java.lang.reflect.Modifier;
-import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
 import java.util.Map;
-import java.util.Stack;
-import java.util.regex.Pattern;
 
 /**
  * The entire API for Painless.  Also used as a whitelist for checking for legal
@@ -45,18 +30,6 @@
  */
 public final class PainlessLookup {
 
-    private static final Map methodCache = new HashMap<>();
-    private static final Map fieldCache = new HashMap<>();
-
-    private static final Pattern TYPE_NAME_PATTERN = Pattern.compile("^[_a-zA-Z][._a-zA-Z0-9]*$");
-
-    /** Marker class for def type to be used during type analysis. */
-    public static final class def {
-        private def() {
-
-        }
-    }
-
     public static Class<?> getBoxedType(Class<?> clazz) {
         if (clazz == boolean.class) {
             return Boolean.class;
@@ -205,22 +178,6 @@ public static String ClassToName(Class<?> clazz) {
         return clazz.getCanonicalName().replace('$', '.');
     }
 
-    private static String buildMethodCacheKey(String structName, String methodName, List<Class<?>> arguments) {
-        StringBuilder key = new StringBuilder();
-        key.append(structName);
-        key.append(methodName);
-
-        for (Class<?> argument : arguments) {
-            key.append(argument.getName());
-        }
-
-        return key.toString();
-    }
-
-    private static String buildFieldCacheKey(String structName, String fieldName, String typeName) {
-        return structName + fieldName + typeName;
-    }
-
     public Collection getStructs() {
         return javaClassesToPainlessStructs.values();
     }
@@ -228,652 +185,9 @@ public Collection getStructs() {
     private final Map<String, Class<?>> painlessTypesToJavaClasses;
     private final Map<Class<?>, PainlessClass> javaClassesToPainlessStructs;
 
-    public PainlessLookup(List<Whitelist> whitelists) {
-        painlessTypesToJavaClasses = new HashMap<>();
-        javaClassesToPainlessStructs = new HashMap<>();
-
-        String origin = null;
-
-        painlessTypesToJavaClasses.put("def", def.class);
-        javaClassesToPainlessStructs.put(def.class, new PainlessClass("def", Object.class, Type.getType(Object.class)));
-
-        try {
-            // first iteration collects all the Painless type names that
-            // are used for validation during the second iteration
-            for (Whitelist whitelist : whitelists) {
-                for (WhitelistClass whitelistStruct : whitelist.whitelistStructs) {
-                    String painlessTypeName = whitelistStruct.javaClassName.replace('$', '.');
-                    PainlessClass painlessStruct = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(painlessTypeName));
-
-                    if (painlessStruct != null && painlessStruct.clazz.getName().equals(whitelistStruct.javaClassName) == false) {
-                        throw new IllegalArgumentException("struct [" + painlessStruct.name + "] cannot represent multiple classes " +
-                            "[" + painlessStruct.clazz.getName() + "] and [" + whitelistStruct.javaClassName + "]");
-                    }
-
-                    origin = whitelistStruct.origin;
-                    addStruct(whitelist.javaClassLoader, whitelistStruct);
-
-                    painlessStruct = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(painlessTypeName));
-                    javaClassesToPainlessStructs.put(painlessStruct.clazz, painlessStruct);
-                }
-            }
-
-            // second iteration adds all the constructors, methods, and fields that will
-            // be available in Painless along with validating they exist and all their types have
-            // been white-listed during the first iteration
-            for (Whitelist whitelist : whitelists) {
-                for (WhitelistClass whitelistStruct : whitelist.whitelistStructs) {
-                    String painlessTypeName = whitelistStruct.javaClassName.replace('$', '.');
-
-                    for (WhitelistConstructor whitelistConstructor : whitelistStruct.whitelistConstructors) {
-                        origin = whitelistConstructor.origin;
-                        addConstructor(painlessTypeName, whitelistConstructor);
-                    }
-
-                    for (WhitelistMethod whitelistMethod : whitelistStruct.whitelistMethods) {
-                        origin = whitelistMethod.origin;
-                        addMethod(whitelist.javaClassLoader, painlessTypeName, whitelistMethod);
-                    }
-
-                    for (WhitelistField whitelistField : whitelistStruct.whitelistFields) {
-                        origin = whitelistField.origin;
-                        addField(painlessTypeName, whitelistField);
-                    }
-                }
-            }
-        } catch (Exception exception) {
-            throw new IllegalArgumentException("error loading whitelist(s) " + origin, exception);
-        }
-
-        // goes through each Painless struct and determines the inheritance list,
-        // and then adds all inherited types to the Painless struct's whitelist
-        for (Class<?> javaClass : javaClassesToPainlessStructs.keySet()) {
-            PainlessClass painlessStruct = javaClassesToPainlessStructs.get(javaClass);
-
-            List<String> painlessSuperStructs = new ArrayList<>();
-            Class<?> javaSuperClass = painlessStruct.clazz.getSuperclass();
-
-            Stack<Class<?>> javaInteraceLookups = new Stack<>();
-            javaInteraceLookups.push(painlessStruct.clazz);
-
-            // adds super classes to the inheritance list
-            if (javaSuperClass != null && javaSuperClass.isInterface() == false) {
-                while (javaSuperClass != null) {
-                    PainlessClass painlessSuperStruct = javaClassesToPainlessStructs.get(javaSuperClass);
-
-                    if (painlessSuperStruct != null) {
-                        painlessSuperStructs.add(painlessSuperStruct.name);
-                    }
-
-                    javaInteraceLookups.push(javaSuperClass);
-                    javaSuperClass = javaSuperClass.getSuperclass();
-                }
-            }
-
-            // adds all super interfaces to the inheritance list
-            while (javaInteraceLookups.isEmpty() == false) {
-                Class<?> javaInterfaceLookup = javaInteraceLookups.pop();
-
-                for (Class<?> javaSuperInterface : javaInterfaceLookup.getInterfaces()) {
-                    PainlessClass painlessInterfaceStruct = javaClassesToPainlessStructs.get(javaSuperInterface);
-
-                    if (painlessInterfaceStruct != null) {
-                        String painlessInterfaceStructName = painlessInterfaceStruct.name;
-
-                        if (painlessSuperStructs.contains(painlessInterfaceStructName) == false) {
-                            painlessSuperStructs.add(painlessInterfaceStructName);
-                        }
-
-                        for (Class<?> javaPushInterface : javaInterfaceLookup.getInterfaces()) {
-                            javaInteraceLookups.push(javaPushInterface);
-                        }
-                    }
-                }
-            }
-
-            // copies methods and fields from super structs to the parent struct
-            copyStruct(painlessStruct.name, painlessSuperStructs);
-
-            // copies methods and fields from Object into interface types
-            if (painlessStruct.clazz.isInterface() || (def.class.getSimpleName()).equals(painlessStruct.name)) {
-                PainlessClass painlessObjectStruct = javaClassesToPainlessStructs.get(Object.class);
-
-                if (painlessObjectStruct != null) {
-                    copyStruct(painlessStruct.name, Collections.singletonList(painlessObjectStruct.name));
-                }
-            }
-        }
-
-        // precompute runtime classes
-        for (PainlessClass painlessStruct : javaClassesToPainlessStructs.values()) {
-            addRuntimeClass(painlessStruct);
-        }
-
-        // copy all structs to make them unmodifiable for outside users:
-        for (Map.Entry<Class<?>,PainlessClass> entry : javaClassesToPainlessStructs.entrySet()) {
-            entry.setValue(entry.getValue().freeze(computeFunctionalInterfaceMethod(entry.getValue())));
-        }
-    }
-
-    private void addStruct(ClassLoader whitelistClassLoader, WhitelistClass whitelistStruct) {
-        String painlessTypeName = whitelistStruct.javaClassName.replace('$', '.');
-        String importedPainlessTypeName = painlessTypeName;
-
-        if (TYPE_NAME_PATTERN.matcher(painlessTypeName).matches() == false) {
-            throw new IllegalArgumentException("invalid struct type name [" + painlessTypeName + "]");
-        }
-
-        int index = whitelistStruct.javaClassName.lastIndexOf('.');
-
-        if (index != -1) {
-            importedPainlessTypeName = whitelistStruct.javaClassName.substring(index + 1).replace('$', '.');
-        }
-
-        Class<?> javaClass;
-
-        if      ("void".equals(whitelistStruct.javaClassName))    javaClass = void.class;
-        else if ("boolean".equals(whitelistStruct.javaClassName)) javaClass = boolean.class;
-        else if ("byte".equals(whitelistStruct.javaClassName))    javaClass = byte.class;
-        else if ("short".equals(whitelistStruct.javaClassName))   javaClass = short.class;
-        else if ("char".equals(whitelistStruct.javaClassName))    javaClass = char.class;
-        else if ("int".equals(whitelistStruct.javaClassName))     javaClass = int.class;
-        else if ("long".equals(whitelistStruct.javaClassName))    javaClass = long.class;
-        else if ("float".equals(whitelistStruct.javaClassName))   javaClass = float.class;
-        else if ("double".equals(whitelistStruct.javaClassName))  javaClass = double.class;
-        else {
-            try {
-                javaClass = Class.forName(whitelistStruct.javaClassName, true, whitelistClassLoader);
-            } catch (ClassNotFoundException cnfe) {
-                throw new IllegalArgumentException("invalid java class name [" + whitelistStruct.javaClassName + "]" +
-                        " for struct [" + painlessTypeName + "]");
-            }
-        }
-
-        PainlessClass existingStruct = javaClassesToPainlessStructs.get(javaClass);
-
-        if (existingStruct == null) {
-            PainlessClass struct = new PainlessClass(painlessTypeName, javaClass, org.objectweb.asm.Type.getType(javaClass));
-            painlessTypesToJavaClasses.put(painlessTypeName, javaClass);
-            javaClassesToPainlessStructs.put(javaClass, struct);
-        } else if (existingStruct.clazz.equals(javaClass) == false) {
-            throw new IllegalArgumentException("struct [" + painlessTypeName + "] is used to " +
-                    "illegally represent multiple java classes [" + whitelistStruct.javaClassName + "] and " +
-                    "[" + existingStruct.clazz.getName() + "]");
-        }
-
-        if (painlessTypeName.equals(importedPainlessTypeName)) {
-            if (whitelistStruct.onlyFQNJavaClassName == false) {
-                throw new IllegalArgumentException("must use only_fqn parameter on type [" + painlessTypeName + "] with no package");
-            }
-        } else {
-            Class<?> importedJavaClass = painlessTypesToJavaClasses.get(importedPainlessTypeName);
-
-            if (importedJavaClass == null) {
-                if (whitelistStruct.onlyFQNJavaClassName == false) {
-                    if (existingStruct != null) {
-                        throw new IllegalArgumentException("inconsistent only_fqn parameters found for type [" + painlessTypeName + "]");
-                    }
-
-                    painlessTypesToJavaClasses.put(importedPainlessTypeName, javaClass);
-                }
-            } else if (importedJavaClass.equals(javaClass) == false) {
-                throw new IllegalArgumentException("imported name [" + painlessTypeName + "] is used to " +
-                    "illegally represent multiple java classes [" + whitelistStruct.javaClassName + "] " +
-                    "and [" + importedJavaClass.getName() + "]");
-            } else if (whitelistStruct.onlyFQNJavaClassName) {
-                throw new IllegalArgumentException("inconsistent only_fqn parameters found for type [" + painlessTypeName + "]");
-            }
-        }
-    }
-
-    private void addConstructor(String ownerStructName, WhitelistConstructor whitelistConstructor) {
-        PainlessClass ownerStruct = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(ownerStructName));
-
-        if (ownerStruct == null) {
-            throw new IllegalArgumentException("owner struct [" + ownerStructName + "] not defined for constructor with " +
-                    "parameters " + whitelistConstructor.painlessParameterTypeNames);
-        }
-
-        List<Class<?>> painlessParametersTypes = new ArrayList<>(whitelistConstructor.painlessParameterTypeNames.size());
-        Class<?>[] javaClassParameters = new Class<?>[whitelistConstructor.painlessParameterTypeNames.size()];
-
-        for (int parameterCount = 0; parameterCount < whitelistConstructor.painlessParameterTypeNames.size(); ++parameterCount) {
-            String painlessParameterTypeName = whitelistConstructor.painlessParameterTypeNames.get(parameterCount);
-
-            try {
-                Class<?> painlessParameterClass = getJavaClassFromPainlessType(painlessParameterTypeName);
-
-                painlessParametersTypes.add(painlessParameterClass);
-                javaClassParameters[parameterCount] = defClassToObjectClass(painlessParameterClass);
-            } catch (IllegalArgumentException iae) {
-                throw new IllegalArgumentException("struct not defined for constructor parameter [" + painlessParameterTypeName + "] " +
-                        "with owner struct [" + ownerStructName + "] and constructor parameters " +
-                        whitelistConstructor.painlessParameterTypeNames, iae);
-            }
-        }
-
-        java.lang.reflect.Constructor<?> javaConstructor;
-
-        try {
-            javaConstructor = ownerStruct.clazz.getConstructor(javaClassParameters);
-        } catch (NoSuchMethodException exception) {
-            throw new IllegalArgumentException("constructor not defined for owner struct [" + ownerStructName + "] " +
-                    " with constructor parameters " + whitelistConstructor.painlessParameterTypeNames, exception);
-        }
-
-        PainlessMethodKey painlessMethodKey = new PainlessMethodKey("", whitelistConstructor.painlessParameterTypeNames.size());
-        PainlessMethod painlessConstructor = ownerStruct.constructors.get(painlessMethodKey);
-
-        if (painlessConstructor == null) {
-            org.objectweb.asm.commons.Method asmConstructor = org.objectweb.asm.commons.Method.getMethod(javaConstructor);
-            MethodHandle javaHandle;
-
-            try {
-                javaHandle = MethodHandles.publicLookup().in(ownerStruct.clazz).unreflectConstructor(javaConstructor);
-            } catch (IllegalAccessException exception) {
-                throw new IllegalArgumentException("constructor not defined for owner struct [" + ownerStructName + "] " +
-                        " with constructor parameters " + whitelistConstructor.painlessParameterTypeNames);
-            }
-
-            painlessConstructor = methodCache.computeIfAbsent(buildMethodCacheKey(ownerStruct.name, "<init>", painlessParametersTypes),
-                    key -> new PainlessMethod("<init>", ownerStruct, null, void.class, painlessParametersTypes,
-                            asmConstructor, javaConstructor.getModifiers(), javaHandle));
-            ownerStruct.constructors.put(painlessMethodKey, painlessConstructor);
-        } else if (painlessConstructor.arguments.equals(painlessParametersTypes) == false){
-            throw new IllegalArgumentException(
-                    "illegal duplicate constructors [" + painlessMethodKey + "] found within the struct [" + ownerStruct.name + "] " +
-                    "with parameters " + painlessParametersTypes + " and " + painlessConstructor.arguments);
-        }
-    }
-
-    private void addMethod(ClassLoader whitelistClassLoader, String ownerStructName, WhitelistMethod whitelistMethod) {
-        PainlessClass ownerStruct = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(ownerStructName));
-
-        if (ownerStruct == null) {
-            throw new IllegalArgumentException("owner struct [" + ownerStructName + "] not defined for method with " +
-                    "name [" + whitelistMethod.javaMethodName + "] and parameters " + whitelistMethod.painlessParameterTypeNames);
-        }
-
-        if (TYPE_NAME_PATTERN.matcher(whitelistMethod.javaMethodName).matches() == false) {
-            throw new IllegalArgumentException("invalid method name" +
-                    " [" + whitelistMethod.javaMethodName + "] for owner struct [" + ownerStructName + "].");
-        }
-
-        Class<?> javaAugmentedClass;
-
-        if (whitelistMethod.javaAugmentedClassName != null) {
-            try {
-                javaAugmentedClass = Class.forName(whitelistMethod.javaAugmentedClassName, true, whitelistClassLoader);
-            } catch (ClassNotFoundException cnfe) {
-                throw new IllegalArgumentException("augmented class [" + whitelistMethod.javaAugmentedClassName + "] " +
-                        "not found for method with name [" + whitelistMethod.javaMethodName + "] " +
-                        "and parameters " + whitelistMethod.painlessParameterTypeNames, cnfe);
-            }
-        } else {
-            javaAugmentedClass = null;
-        }
-
-        int augmentedOffset = javaAugmentedClass == null ? 0 : 1;
-
-        List<Class<?>> painlessParametersTypes = new ArrayList<>(whitelistMethod.painlessParameterTypeNames.size());
-        Class<?>[] javaClassParameters = new Class<?>[whitelistMethod.painlessParameterTypeNames.size() + augmentedOffset];
-
-        if (javaAugmentedClass != null) {
-            javaClassParameters[0] = ownerStruct.clazz;
-        }
-
-        for (int parameterCount = 0; parameterCount < whitelistMethod.painlessParameterTypeNames.size(); ++parameterCount) {
-            String painlessParameterTypeName = whitelistMethod.painlessParameterTypeNames.get(parameterCount);
-
-            try {
-                Class<?> painlessParameterClass = getJavaClassFromPainlessType(painlessParameterTypeName);
-
-                painlessParametersTypes.add(painlessParameterClass);
-                javaClassParameters[parameterCount + augmentedOffset] = defClassToObjectClass(painlessParameterClass);
-            } catch (IllegalArgumentException iae) {
-                throw new IllegalArgumentException("struct not defined for method parameter [" + painlessParameterTypeName + "] " +
-                        "with owner struct [" + ownerStructName + "] and method with name [" + whitelistMethod.javaMethodName + "] " +
-                        "and parameters " + whitelistMethod.painlessParameterTypeNames, iae);
-            }
-        }
-
-        Class<?> javaImplClass = javaAugmentedClass == null ? ownerStruct.clazz : javaAugmentedClass;
-        java.lang.reflect.Method javaMethod;
-
-        try {
-            javaMethod = javaImplClass.getMethod(whitelistMethod.javaMethodName, javaClassParameters);
-        } catch (NoSuchMethodException nsme) {
-            throw new IllegalArgumentException("method with name [" + whitelistMethod.javaMethodName + "] " +
-                    "and parameters " + whitelistMethod.painlessParameterTypeNames + " not found for class [" +
-                    javaImplClass.getName() + "]", nsme);
-        }
-
-        Class<?> painlessReturnClass;
-
-        try {
-            painlessReturnClass = getJavaClassFromPainlessType(whitelistMethod.painlessReturnTypeName);
-        } catch (IllegalArgumentException iae) {
-            throw new IllegalArgumentException("struct not defined for return type [" + whitelistMethod.painlessReturnTypeName + "] " +
-                    "with owner struct [" + ownerStructName + "] and method with name [" + whitelistMethod.javaMethodName + "] " +
-                    "and parameters " + whitelistMethod.painlessParameterTypeNames, iae);
-        }
-
-        if (javaMethod.getReturnType() != defClassToObjectClass(painlessReturnClass)) {
-            throw new IllegalArgumentException("specified return type class [" + painlessReturnClass + "] " +
-                    "does not match the return type class [" + javaMethod.getReturnType() + "] for the " +
-                    "method with name [" + whitelistMethod.javaMethodName + "] " +
-                    "and parameters " + whitelistMethod.painlessParameterTypeNames);
-        }
-
-        PainlessMethodKey painlessMethodKey =
-                new PainlessMethodKey(whitelistMethod.javaMethodName, whitelistMethod.painlessParameterTypeNames.size());
-
-        if (javaAugmentedClass == null && Modifier.isStatic(javaMethod.getModifiers())) {
-            PainlessMethod painlessMethod = ownerStruct.staticMethods.get(painlessMethodKey);
-
-            if (painlessMethod == null) {
-                org.objectweb.asm.commons.Method asmMethod = org.objectweb.asm.commons.Method.getMethod(javaMethod);
-                MethodHandle javaMethodHandle;
-
-                try {
-                    javaMethodHandle = MethodHandles.publicLookup().in(javaImplClass).unreflect(javaMethod);
-                } catch (IllegalAccessException exception) {
-                    throw new IllegalArgumentException("method handle not found for method with name " +
-                        "[" + whitelistMethod.javaMethodName + "] and parameters " + whitelistMethod.painlessParameterTypeNames);
-                }
-
-                painlessMethod = methodCache.computeIfAbsent(
-                        buildMethodCacheKey(ownerStruct.name, whitelistMethod.javaMethodName, painlessParametersTypes),
-                        key -> new PainlessMethod(whitelistMethod.javaMethodName, ownerStruct, null, painlessReturnClass,
-                                painlessParametersTypes, asmMethod, javaMethod.getModifiers(), javaMethodHandle));
-                ownerStruct.staticMethods.put(painlessMethodKey, painlessMethod);
-            } else if ((painlessMethod.name.equals(whitelistMethod.javaMethodName) && painlessMethod.rtn == painlessReturnClass &&
-                    painlessMethod.arguments.equals(painlessParametersTypes)) == false) {
-                throw new IllegalArgumentException("illegal duplicate static methods [" + painlessMethodKey + "] " +
-                        "found within the struct [" + ownerStruct.name + "] with name [" + whitelistMethod.javaMethodName + "], " +
-                        "return types [" + painlessReturnClass + "] and [" + painlessMethod.rtn + "], " +
-                        "and parameters " + painlessParametersTypes + " and " + painlessMethod.arguments);
-            }
-        } else {
-            PainlessMethod painlessMethod = ownerStruct.methods.get(painlessMethodKey);
-
-            if (painlessMethod == null) {
-                org.objectweb.asm.commons.Method asmMethod = org.objectweb.asm.commons.Method.getMethod(javaMethod);
-                MethodHandle javaMethodHandle;
-
-                try {
-                    javaMethodHandle = MethodHandles.publicLookup().in(javaImplClass).unreflect(javaMethod);
-                } catch (IllegalAccessException exception) {
-                    throw new IllegalArgumentException("method handle not found for method with name " +
-                        "[" + whitelistMethod.javaMethodName + "] and parameters " + whitelistMethod.painlessParameterTypeNames);
-                }
-
-                painlessMethod = methodCache.computeIfAbsent(
-                        buildMethodCacheKey(ownerStruct.name, whitelistMethod.javaMethodName, painlessParametersTypes),
-                        key -> new PainlessMethod(whitelistMethod.javaMethodName, ownerStruct, javaAugmentedClass, painlessReturnClass,
-                                painlessParametersTypes, asmMethod, javaMethod.getModifiers(), javaMethodHandle));
-                ownerStruct.methods.put(painlessMethodKey, painlessMethod);
-            } else if ((painlessMethod.name.equals(whitelistMethod.javaMethodName) && painlessMethod.rtn.equals(painlessReturnClass) &&
-                painlessMethod.arguments.equals(painlessParametersTypes)) == false) {
-                throw new IllegalArgumentException("illegal duplicate member methods [" + painlessMethodKey + "] " +
-                    "found within the struct [" + ownerStruct.name + "] with name [" + whitelistMethod.javaMethodName + "], " +
-                    "return types [" + painlessReturnClass + "] and [" + painlessMethod.rtn + "], " +
-                    "and parameters " + painlessParametersTypes + " and " + painlessMethod.arguments);
-            }
-        }
-    }
-
-    private void addField(String ownerStructName, WhitelistField whitelistField) {
-        PainlessClass ownerStruct = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(ownerStructName));
-
-        if (ownerStruct == null) {
-            throw new IllegalArgumentException("owner struct [" + ownerStructName + "] not defined for method with " +
-                    "name [" + whitelistField.javaFieldName + "] and type " + whitelistField.painlessFieldTypeName);
-        }
-
-        if (TYPE_NAME_PATTERN.matcher(whitelistField.javaFieldName).matches() == false) {
-            throw new IllegalArgumentException("invalid field name " +
-                    "[" + whitelistField.painlessFieldTypeName + "] for owner struct [" + ownerStructName + "].");
-        }
-
-        java.lang.reflect.Field javaField;
-
-        try {
-            javaField = ownerStruct.clazz.getField(whitelistField.javaFieldName);
-        } catch (NoSuchFieldException exception) {
-            throw new IllegalArgumentException("field [" + whitelistField.javaFieldName + "] " +
-                    "not found for class [" + ownerStruct.clazz.getName() + "].");
-        }
-
-        Class<?> painlessFieldClass;
-
-        try {
-            painlessFieldClass = getJavaClassFromPainlessType(whitelistField.painlessFieldTypeName);
-        } catch (IllegalArgumentException iae) {
-            throw new IllegalArgumentException("struct not defined for return type [" + whitelistField.painlessFieldTypeName + "] " +
-                "with owner struct [" + ownerStructName + "] and field with name [" + whitelistField.javaFieldName + "]", iae);
-        }
-
-        if (Modifier.isStatic(javaField.getModifiers())) {
-            if (Modifier.isFinal(javaField.getModifiers()) == false) {
-                throw new IllegalArgumentException("static [" + whitelistField.javaFieldName + "] " +
-                        "with owner struct [" + ownerStruct.name + "] is not final");
-            }
-
-            PainlessField painlessField = ownerStruct.staticMembers.get(whitelistField.javaFieldName);
-
-            if (painlessField == null) {
-                painlessField = fieldCache.computeIfAbsent(
-                        buildFieldCacheKey(ownerStruct.name, whitelistField.javaFieldName, painlessFieldClass.getName()),
-                        key -> new PainlessField(whitelistField.javaFieldName, javaField.getName(),
-                                ownerStruct, painlessFieldClass, javaField.getModifiers(), null, null));
-                ownerStruct.staticMembers.put(whitelistField.javaFieldName, painlessField);
-            } else if (painlessField.clazz != painlessFieldClass) {
-                throw new IllegalArgumentException("illegal duplicate static fields [" + whitelistField.javaFieldName + "] " +
-                    "found within the struct [" + ownerStruct.name + "] with type [" + whitelistField.painlessFieldTypeName + "]");
-            }
-        } else {
-            MethodHandle javaMethodHandleGetter;
-            MethodHandle javaMethodHandleSetter;
-
-            try {
-                if (Modifier.isStatic(javaField.getModifiers()) == false) {
-                    javaMethodHandleGetter = MethodHandles.publicLookup().unreflectGetter(javaField);
-                    javaMethodHandleSetter = MethodHandles.publicLookup().unreflectSetter(javaField);
-                } else {
-                    javaMethodHandleGetter = null;
-                    javaMethodHandleSetter = null;
-                }
-            } catch (IllegalAccessException exception) {
-                throw new IllegalArgumentException("getter/setter [" + whitelistField.javaFieldName + "]" +
-                    " not found for class [" + ownerStruct.clazz.getName() + "].");
-            }
-
-            PainlessField painlessField = ownerStruct.members.get(whitelistField.javaFieldName);
-
-            if (painlessField == null) {
-                painlessField = fieldCache.computeIfAbsent(
-                        buildFieldCacheKey(ownerStruct.name, whitelistField.javaFieldName, painlessFieldClass.getName()),
-                        key -> new PainlessField(whitelistField.javaFieldName, javaField.getName(),
-                                ownerStruct, painlessFieldClass, javaField.getModifiers(), javaMethodHandleGetter, javaMethodHandleSetter));
-                ownerStruct.members.put(whitelistField.javaFieldName, painlessField);
-            } else if (painlessField.clazz != painlessFieldClass) {
-                throw new IllegalArgumentException("illegal duplicate member fields [" + whitelistField.javaFieldName + "] " +
-                    "found within the struct [" + ownerStruct.name + "] with type [" + whitelistField.painlessFieldTypeName + "]");
-            }
-        }
-    }
-
-    private void copyStruct(String struct, List<String> children) {
-        final PainlessClass owner = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(struct));
-
-        if (owner == null) {
-            throw new IllegalArgumentException("Owner struct [" + struct + "] not defined for copy.");
-        }
-
-        for (int count = 0; count < children.size(); ++count) {
-            final PainlessClass child = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(children.get(count)));
-
-            if (child == null) {
-                throw new IllegalArgumentException("Child struct [" + children.get(count) + "]" +
-                    " not defined for copy to owner struct [" + owner.name + "].");
-            }
-
-            if (!child.clazz.isAssignableFrom(owner.clazz)) {
-                throw new ClassCastException("Child struct [" + child.name + "]" +
-                    " is not a super type of owner struct [" + owner.name + "] in copy.");
-            }
-
-            for (Map.Entry<PainlessMethodKey, PainlessMethod> kvPair : child.methods.entrySet()) {
-                PainlessMethodKey methodKey = kvPair.getKey();
-                PainlessMethod method = kvPair.getValue();
-                if (owner.methods.get(methodKey) == null) {
-                    // TODO: some of these are no longer valid or outright don't work
-                    // TODO: since classes may not come from the Painless classloader
-                    // TODO: and it was dependent on the order of the extends which
-                    // TODO: which no longer exists since this is generated automatically
-                    // sanity check, look for missing covariant/generic override
-                    /*if (owner.clazz.isInterface() && child.clazz == Object.class) {
-                        // ok
-                    } else if (child.clazz == Spliterator.OfPrimitive.class || child.clazz == PrimitiveIterator.class) {
-                        // ok, we rely on generics erasure for these (its guaranteed in the javadocs though!!!!)
-                    } else if (Constants.JRE_IS_MINIMUM_JAVA9 && owner.clazz == LocalDate.class) {
-                        // ok, java 9 added covariant override for LocalDate.getEra() to return IsoEra:
-                        // https://bugs.openjdk.java.net/browse/JDK-8072746
-                    } else {
-                        try {
-                            // TODO: we *have* to remove all these public members and use getter methods to encapsulate!
-                            final Class<?> impl;
-                            final Class<?> arguments[];
-                            if (method.augmentation != null) {
-                                impl = method.augmentation;
-                                arguments = new Class[method.arguments.size() + 1];
-                                arguments[0] = method.owner.clazz;
-                                for (int i = 0; i < method.arguments.size(); i++) {
-                                    arguments[i + 1] = method.arguments.get(i).clazz;
-                                }
-                            } else {
-                                impl = owner.clazz;
-                                arguments = new Class[method.arguments.size()];
-                                for (int i = 0; i < method.arguments.size(); i++) {
-                                    arguments[i] = method.arguments.get(i).clazz;
-                                }
-                            }
-                            java.lang.reflect.Method m = impl.getMethod(method.method.getName(), arguments);
-                            if (m.getReturnType() != method.rtn.clazz) {
-                                throw new IllegalStateException("missing covariant override for: " + m + " in " + owner.name);
-                            }
-                            if (m.isBridge() && !Modifier.isVolatile(method.modifiers)) {
-                                // its a bridge in the destination, but not in the source, but it might still be ok, check generics:
-                                java.lang.reflect.Method source = child.clazz.getMethod(method.method.getName(), arguments);
-                                if (!Arrays.equals(source.getGenericParameterTypes(), source.getParameterTypes())) {
-                                    throw new IllegalStateException("missing generic override for: " + m + " in " + owner.name);
-                                }
-                            }
-                        } catch (ReflectiveOperationException e) {
-                            throw new AssertionError(e);
-                        }
-                    }*/
-                    owner.methods.put(methodKey, method);
-                }
-            }
-
-            for (PainlessField field : child.members.values()) {
-                if (owner.members.get(field.name) == null) {
-                    owner.members.put(field.name,
-                        new PainlessField(field.name, field.javaName, owner, field.clazz, field.modifiers, field.getter, field.setter));
-                }
-            }
-        }
-    }
-
-    /**
-     * Precomputes a more efficient structure for dynamic method/field access.
-     */
-    private void addRuntimeClass(final PainlessClass struct) {
-        // add all getters/setters
-        for (Map.Entry<PainlessMethodKey, PainlessMethod> method : struct.methods.entrySet()) {
-            String name = method.getKey().name;
-            PainlessMethod m = method.getValue();
-
-            if (m.arguments.size() == 0 &&
-                name.startsWith("get") &&
-                name.length() > 3 &&
-                Character.isUpperCase(name.charAt(3))) {
-                StringBuilder newName = new StringBuilder();
-                newName.append(Character.toLowerCase(name.charAt(3)));
-                newName.append(name.substring(4));
-                struct.getters.putIfAbsent(newName.toString(), m.handle);
-            } else if (m.arguments.size() == 0 &&
-                name.startsWith("is") &&
-                name.length() > 2 &&
-                Character.isUpperCase(name.charAt(2))) {
-                StringBuilder newName = new StringBuilder();
-                newName.append(Character.toLowerCase(name.charAt(2)));
-                newName.append(name.substring(3));
-                struct.getters.putIfAbsent(newName.toString(), m.handle);
-            }
-
-            if (m.arguments.size() == 1 &&
-                name.startsWith("set") &&
-                name.length() > 3 &&
-                Character.isUpperCase(name.charAt(3))) {
-                StringBuilder newName = new StringBuilder();
-                newName.append(Character.toLowerCase(name.charAt(3)));
-                newName.append(name.substring(4));
-                struct.setters.putIfAbsent(newName.toString(), m.handle);
-            }
-        }
-
-        // add all members
-        for (Map.Entry<String, PainlessField> member : struct.members.entrySet()) {
-            struct.getters.put(member.getKey(), member.getValue().getter);
-            struct.setters.put(member.getKey(), member.getValue().setter);
-        }
-    }
-
-    /** computes the functional interface method for a class, or returns null */
-    private PainlessMethod computeFunctionalInterfaceMethod(PainlessClass clazz) {
-        if (!clazz.clazz.isInterface()) {
-            return null;
-        }
-        // if its marked with this annotation, we fail if the conditions don't hold (means whitelist bug)
-        // otherwise, this annotation is pretty useless.
-        boolean hasAnnotation = clazz.clazz.isAnnotationPresent(FunctionalInterface.class);
-        List<java.lang.reflect.Method> methods = new ArrayList<>();
-        for (java.lang.reflect.Method m : clazz.clazz.getMethods()) {
-            // default interface methods don't count
-            if (m.isDefault()) {
-                continue;
-            }
-            // static methods don't count
-            if (Modifier.isStatic(m.getModifiers())) {
-                continue;
-            }
-            // if its from Object, it doesn't count
-            try {
-                Object.class.getMethod(m.getName(), m.getParameterTypes());
-                continue;
-            } catch (ReflectiveOperationException e) {
-                // it counts
-            }
-            methods.add(m);
-        }
-        if (methods.size() != 1) {
-            if (hasAnnotation) {
-                throw new IllegalArgumentException("Class: " + clazz.name +
-                    " is marked with FunctionalInterface but doesn't fit the bill: " + methods);
-            }
-            return null;
-        }
-        // inspect the one method found from the reflection API, it should match the whitelist!
-        java.lang.reflect.Method oneMethod = methods.get(0);
-        PainlessMethod painless = clazz.methods.get(new PainlessMethodKey(oneMethod.getName(), oneMethod.getParameterCount()));
-        if (painless == null || painless.method.equals(org.objectweb.asm.commons.Method.getMethod(oneMethod)) == false) {
-            throw new IllegalArgumentException("Class: " + clazz.name + " is functional but the functional " +
-                "method is not whitelisted!");
-        }
-        return painless;
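+    // wraps the builder-populated maps unmodifiably so the lookup stays immutable for outside users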
+    PainlessLookup(Map<String, Class<?>> painlessTypesToJavaClasses, Map<Class<?>, PainlessClass> javaClassesToPainlessStructs) {
+        this.painlessTypesToJavaClasses = Collections.unmodifiableMap(painlessTypesToJavaClasses);
+        this.javaClassesToPainlessStructs = Collections.unmodifiableMap(javaClassesToPainlessStructs);
     }
 
     public boolean isSimplePainlessType(String painlessType) {
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java
new file mode 100644
index 0000000000000..1dadce318d672
--- /dev/null
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java
@@ -0,0 +1,774 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.painless.lookup;
+
+import org.elasticsearch.painless.spi.Whitelist;
+import org.elasticsearch.painless.spi.WhitelistClass;
+import org.elasticsearch.painless.spi.WhitelistConstructor;
+import org.elasticsearch.painless.spi.WhitelistField;
+import org.elasticsearch.painless.spi.WhitelistMethod;
+import org.objectweb.asm.Type;
+
+import java.lang.invoke.MethodHandle;
+import java.lang.invoke.MethodHandles;
+import java.lang.reflect.Modifier;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.Stack;
+import java.util.regex.Pattern;
+
+public class PainlessLookupBuilder {
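+    // valid Painless type names: a leading letter or underscore, then letters, digits, dots, or underscores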
+    private static final Pattern TYPE_NAME_PATTERN = Pattern.compile("^[_a-zA-Z][._a-zA-Z0-9]*$");
+
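+    // static caches shared across builds so equivalent methods and fields resolve to the same instances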
+    private static final Map<String, PainlessMethod> methodCache = new HashMap<>();
+    private static final Map<String, PainlessField> fieldCache = new HashMap<>();
+
+    private static String buildMethodCacheKey(String structName, String methodName, List<Class<?>> arguments) {
+        StringBuilder key = new StringBuilder();
+        key.append(structName);
+        key.append(methodName);
+
+        for (Class<?> argument : arguments) {
+            key.append(argument.getName());
+        }
+
+        return key.toString();
+    }
+
+    private static String buildFieldCacheKey(String structName, String fieldName, String typeName) {
+        return structName + fieldName + typeName;
+    }
+
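+    // the same lookup tables that back PainlessLookup, populated here from the whitelists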
+    private final Map<String, Class<?>> painlessTypesToJavaClasses;
+    private final Map<Class<?>, PainlessClass> javaClassesToPainlessStructs;
+
+    public PainlessLookupBuilder(List<Whitelist> whitelists) {
+        painlessTypesToJavaClasses = new HashMap<>();
+        javaClassesToPainlessStructs = new HashMap<>();
+
+        String origin = null;
+
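+        // the dynamic "def" type is always registered and is backed by Object at runtime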
+        painlessTypesToJavaClasses.put("def", def.class);
+        javaClassesToPainlessStructs.put(def.class, new PainlessClass("def", Object.class, Type.getType(Object.class)));
+
+        try {
+            // first iteration collects all the Painless type names that
+            // are used for validation during the second iteration
+            for (Whitelist whitelist : whitelists) {
+                for (WhitelistClass whitelistStruct : whitelist.whitelistStructs) {
+                    String painlessTypeName = whitelistStruct.javaClassName.replace('$', '.');
+                    PainlessClass painlessStruct = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(painlessTypeName));
+
+                    if (painlessStruct != null && painlessStruct.clazz.getName().equals(whitelistStruct.javaClassName) == false) {
+                        throw new IllegalArgumentException("struct [" + painlessStruct.name + "] cannot represent multiple classes " +
+                            "[" + painlessStruct.clazz.getName() + "] and [" + whitelistStruct.javaClassName + "]");
+                    }
+
+                    origin = whitelistStruct.origin;
+                    addStruct(whitelist.javaClassLoader, whitelistStruct);
+
+                    painlessStruct = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(painlessTypeName));
+                    javaClassesToPainlessStructs.put(painlessStruct.clazz, painlessStruct);
+                }
+            }
+
+            // second iteration adds all the constructors, methods, and fields that will
+            // be available in Painless along with validating they exist and all their types have
+            // been white-listed during the first iteration
+            for (Whitelist whitelist : whitelists) {
+                for (WhitelistClass whitelistStruct : whitelist.whitelistStructs) {
+                    String painlessTypeName = whitelistStruct.javaClassName.replace('$', '.');
+
+                    for (WhitelistConstructor whitelistConstructor : whitelistStruct.whitelistConstructors) {
+                        origin = whitelistConstructor.origin;
+                        addConstructor(painlessTypeName, whitelistConstructor);
+                    }
+
+                    for (WhitelistMethod whitelistMethod : whitelistStruct.whitelistMethods) {
+                        origin = whitelistMethod.origin;
+                        addMethod(whitelist.javaClassLoader, painlessTypeName, whitelistMethod);
+                    }
+
+                    for (WhitelistField whitelistField : whitelistStruct.whitelistFields) {
+                        origin = whitelistField.origin;
+                        addField(painlessTypeName, whitelistField);
+                    }
+                }
+            }
+        } catch (Exception exception) {
+            throw new IllegalArgumentException("error loading whitelist(s) " + origin, exception);
+        }
+
+        // goes through each Painless struct and determines the inheritance list,
+        // and then adds all inherited types to the Painless struct's whitelist
+        for (Class<?> javaClass : javaClassesToPainlessStructs.keySet()) {
+            PainlessClass painlessStruct = javaClassesToPainlessStructs.get(javaClass);
+
+            List<String> painlessSuperStructs = new ArrayList<>();
+            Class<?> javaSuperClass = painlessStruct.clazz.getSuperclass();
+
+            Stack<Class<?>> javaInteraceLookups = new Stack<>();
+            javaInteraceLookups.push(painlessStruct.clazz);
+
+            // adds super classes to the inheritance list
+            if (javaSuperClass != null && javaSuperClass.isInterface() == false) {
+                while (javaSuperClass != null) {
+                    PainlessClass painlessSuperStruct = javaClassesToPainlessStructs.get(javaSuperClass);
+
+                    if (painlessSuperStruct != null) {
+                        painlessSuperStructs.add(painlessSuperStruct.name);
+                    }
+
+                    javaInteraceLookups.push(javaSuperClass);
+                    javaSuperClass = javaSuperClass.getSuperclass();
+                }
+            }
+
+            // adds all super interfaces to the inheritance list
+            while (javaInteraceLookups.isEmpty() == false) {
+                Class<?> javaInterfaceLookup = javaInteraceLookups.pop();
+
+                for (Class<?> javaSuperInterface : javaInterfaceLookup.getInterfaces()) {
+                    PainlessClass painlessInterfaceStruct = javaClassesToPainlessStructs.get(javaSuperInterface);
+
+                    if (painlessInterfaceStruct != null) {
+                        String painlessInterfaceStructName = painlessInterfaceStruct.name;
+
+                        if (painlessSuperStructs.contains(painlessInterfaceStructName) == false) {
+                            painlessSuperStructs.add(painlessInterfaceStructName);
+                        }
+
+                        for (Class<?> javaPushInterface : javaInterfaceLookup.getInterfaces()) {
+                            javaInteraceLookups.push(javaPushInterface);
+                        }
+                    }
+                }
+            }
+
+            // copies methods and fields from super structs to the parent struct
+            copyStruct(painlessStruct.name, painlessSuperStructs);
+
+            // copies methods and fields from Object into interface types
+            if (painlessStruct.clazz.isInterface() || (def.class.getSimpleName()).equals(painlessStruct.name)) {
+                PainlessClass painlessObjectStruct = javaClassesToPainlessStructs.get(Object.class);
+
+                if (painlessObjectStruct != null) {
+                    copyStruct(painlessStruct.name, Collections.singletonList(painlessObjectStruct.name));
+                }
+            }
+        }
+
+        // precompute runtime classes
+        for (PainlessClass painlessStruct : javaClassesToPainlessStructs.values()) {
+            addRuntimeClass(painlessStruct);
+        }
+
+        // copy all structs to make them unmodifiable for outside users:
+        for (Map.Entry<Class<?>,PainlessClass> entry : javaClassesToPainlessStructs.entrySet()) {
+            entry.setValue(entry.getValue().freeze(computeFunctionalInterfaceMethod(entry.getValue())));
+        }
+    }
+
+    private void addStruct(ClassLoader whitelistClassLoader, WhitelistClass whitelistStruct) {
+        String painlessTypeName = whitelistStruct.javaClassName.replace('$', '.');
+        String importedPainlessTypeName = painlessTypeName;
+
+        if (TYPE_NAME_PATTERN.matcher(painlessTypeName).matches() == false) {
+            throw new IllegalArgumentException("invalid struct type name [" + painlessTypeName + "]");
+        }
+
+        int index = whitelistStruct.javaClassName.lastIndexOf('.');
+
+        if (index != -1) {
+            importedPainlessTypeName = whitelistStruct.javaClassName.substring(index + 1).replace('$', '.');
+        }
+
+        Class<?> javaClass;
+
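+        // primitive type names cannot be resolved through Class.forName, so map them explicitly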
+        if      ("void".equals(whitelistStruct.javaClassName))    javaClass = void.class;
+        else if ("boolean".equals(whitelistStruct.javaClassName)) javaClass = boolean.class;
+        else if ("byte".equals(whitelistStruct.javaClassName))    javaClass = byte.class;
+        else if ("short".equals(whitelistStruct.javaClassName))   javaClass = short.class;
+        else if ("char".equals(whitelistStruct.javaClassName))    javaClass = char.class;
+        else if ("int".equals(whitelistStruct.javaClassName))     javaClass = int.class;
+        else if ("long".equals(whitelistStruct.javaClassName))    javaClass = long.class;
+        else if ("float".equals(whitelistStruct.javaClassName))   javaClass = float.class;
+        else if ("double".equals(whitelistStruct.javaClassName))  javaClass = double.class;
+        else {
+            try {
+                javaClass = Class.forName(whitelistStruct.javaClassName, true, whitelistClassLoader);
+            } catch (ClassNotFoundException cnfe) {
+                throw new IllegalArgumentException("invalid java class name [" + whitelistStruct.javaClassName + "]" +
+                    " for struct [" + painlessTypeName + "]");
+            }
+        }
+
+        PainlessClass existingStruct = javaClassesToPainlessStructs.get(javaClass);
+
+        if (existingStruct == null) {
+            PainlessClass struct = new PainlessClass(painlessTypeName, javaClass, org.objectweb.asm.Type.getType(javaClass));
+            painlessTypesToJavaClasses.put(painlessTypeName, javaClass);
+            javaClassesToPainlessStructs.put(javaClass, struct);
+        } else if (existingStruct.clazz.equals(javaClass) == false) {
+            throw new IllegalArgumentException("struct [" + painlessTypeName + "] is used to " +
+                "illegally represent multiple java classes [" + whitelistStruct.javaClassName + "] and " +
+                "[" + existingStruct.clazz.getName() + "]");
+        }
+
+        if (painlessTypeName.equals(importedPainlessTypeName)) {
+            if (whitelistStruct.onlyFQNJavaClassName == false) {
+                throw new IllegalArgumentException("must use only_fqn parameter on type [" + painlessTypeName + "] with no package");
+            }
+        } else {
+            Class<?> importedJavaClass = painlessTypesToJavaClasses.get(importedPainlessTypeName);
+
+            if (importedJavaClass == null) {
+                if (whitelistStruct.onlyFQNJavaClassName == false) {
+                    if (existingStruct != null) {
+                        throw new IllegalArgumentException("inconsistent only_fqn parameters found for type [" + painlessTypeName + "]");
+                    }
+
+                    painlessTypesToJavaClasses.put(importedPainlessTypeName, javaClass);
+                }
+            } else if (importedJavaClass.equals(javaClass) == false) {
+                throw new IllegalArgumentException("imported name [" + painlessTypeName + "] is used to " +
+                    "illegally represent multiple java classes [" + whitelistStruct.javaClassName + "] " +
+                    "and [" + importedJavaClass.getName() + "]");
+            } else if (whitelistStruct.onlyFQNJavaClassName) {
+                throw new IllegalArgumentException("inconsistent only_fqn parameters found for type [" + painlessTypeName + "]");
+            }
+        }
+    }
+
+    private void addConstructor(String ownerStructName, WhitelistConstructor whitelistConstructor) {
+        PainlessClass ownerStruct = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(ownerStructName));
+
+        if (ownerStruct == null) {
+            throw new IllegalArgumentException("owner struct [" + ownerStructName + "] not defined for constructor with " +
+                "parameters " + whitelistConstructor.painlessParameterTypeNames);
+        }
+
+        List<Class<?>> painlessParametersTypes = new ArrayList<>(whitelistConstructor.painlessParameterTypeNames.size());
+        Class<?>[] javaClassParameters = new Class<?>[whitelistConstructor.painlessParameterTypeNames.size()];
+
+        for (int parameterCount = 0; parameterCount < whitelistConstructor.painlessParameterTypeNames.size(); ++parameterCount) {
+            String painlessParameterTypeName = whitelistConstructor.painlessParameterTypeNames.get(parameterCount);
+
+            try {
+                Class<?> painlessParameterClass = getJavaClassFromPainlessType(painlessParameterTypeName);
+
+                painlessParametersTypes.add(painlessParameterClass);
+                javaClassParameters[parameterCount] = PainlessLookup.defClassToObjectClass(painlessParameterClass);
+            } catch (IllegalArgumentException iae) {
+                throw new IllegalArgumentException("struct not defined for constructor parameter [" + painlessParameterTypeName + "] " +
+                    "with owner struct [" + ownerStructName + "] and constructor parameters " +
+                    whitelistConstructor.painlessParameterTypeNames, iae);
+            }
+        }
+
+        java.lang.reflect.Constructor<?> javaConstructor;
+
+        try {
+            javaConstructor = ownerStruct.clazz.getConstructor(javaClassParameters);
+        } catch (NoSuchMethodException exception) {
+            throw new IllegalArgumentException("constructor not defined for owner struct [" + ownerStructName + "] " +
+                " with constructor parameters " + whitelistConstructor.painlessParameterTypeNames, exception);
+        }
+
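+        // constructors are keyed under the reserved JVM name "<init>" plus their arity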
+        PainlessMethodKey painlessMethodKey = new PainlessMethodKey("", whitelistConstructor.painlessParameterTypeNames.size());
+        PainlessMethod painlessConstructor = ownerStruct.constructors.get(painlessMethodKey);
+
+        if (painlessConstructor == null) {
+            org.objectweb.asm.commons.Method asmConstructor = org.objectweb.asm.commons.Method.getMethod(javaConstructor);
+            MethodHandle javaHandle;
+
+            try {
+                javaHandle = MethodHandles.publicLookup().in(ownerStruct.clazz).unreflectConstructor(javaConstructor);
+            } catch (IllegalAccessException exception) {
+                throw new IllegalArgumentException("constructor not defined for owner struct [" + ownerStructName + "] " +
+                    " with constructor parameters " + whitelistConstructor.painlessParameterTypeNames);
+            }
+
+            painlessConstructor = methodCache.computeIfAbsent(buildMethodCacheKey(ownerStruct.name, "<init>", painlessParametersTypes),
+                key -> new PainlessMethod("<init>", ownerStruct, null, void.class, painlessParametersTypes,
+                    asmConstructor, javaConstructor.getModifiers(), javaHandle));
+            ownerStruct.constructors.put(painlessMethodKey, painlessConstructor);
+        } else if (painlessConstructor.arguments.equals(painlessParametersTypes) == false){
+            throw new IllegalArgumentException(
+                "illegal duplicate constructors [" + painlessMethodKey + "] found within the struct [" + ownerStruct.name + "] " +
+                    "with parameters " + painlessParametersTypes + " and " + painlessConstructor.arguments);
+        }
+    }
+
+    private void addMethod(ClassLoader whitelistClassLoader, String ownerStructName, WhitelistMethod whitelistMethod) {
+        PainlessClass ownerStruct = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(ownerStructName));
+
+        if (ownerStruct == null) {
+            throw new IllegalArgumentException("owner struct [" + ownerStructName + "] not defined for method with " +
+                "name [" + whitelistMethod.javaMethodName + "] and parameters " + whitelistMethod.painlessParameterTypeNames);
+        }
+
+        if (TYPE_NAME_PATTERN.matcher(whitelistMethod.javaMethodName).matches() == false) {
+            throw new IllegalArgumentException("invalid method name" +
+                " [" + whitelistMethod.javaMethodName + "] for owner struct [" + ownerStructName + "].");
+        }
+
+        Class<?> javaAugmentedClass;
+
+        if (whitelistMethod.javaAugmentedClassName != null) {
+            try {
+                javaAugmentedClass = Class.forName(whitelistMethod.javaAugmentedClassName, true, whitelistClassLoader);
+            } catch (ClassNotFoundException cnfe) {
+                throw new IllegalArgumentException("augmented class [" + whitelistMethod.javaAugmentedClassName + "] " +
+                    "not found for method with name [" + whitelistMethod.javaMethodName + "] " +
+                    "and parameters " + whitelistMethod.painlessParameterTypeNames, cnfe);
+            }
+        } else {
+            javaAugmentedClass = null;
+        }
+
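+        // an augmented method is a static helper on another class; its first parameter receives the owner instance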
+        int augmentedOffset = javaAugmentedClass == null ? 0 : 1;
+
+        List<Class<?>> painlessParametersTypes = new ArrayList<>(whitelistMethod.painlessParameterTypeNames.size());
+        Class<?>[] javaClassParameters = new Class<?>[whitelistMethod.painlessParameterTypeNames.size() + augmentedOffset];
+
+        if (javaAugmentedClass != null) {
+            javaClassParameters[0] = ownerStruct.clazz;
+        }
+
+        for (int parameterCount = 0; parameterCount < whitelistMethod.painlessParameterTypeNames.size(); ++parameterCount) {
+            String painlessParameterTypeName = whitelistMethod.painlessParameterTypeNames.get(parameterCount);
+
+            try {
+                Class<?> painlessParameterClass = getJavaClassFromPainlessType(painlessParameterTypeName);
+
+                painlessParametersTypes.add(painlessParameterClass);
+                javaClassParameters[parameterCount + augmentedOffset] = PainlessLookup.defClassToObjectClass(painlessParameterClass);
+            } catch (IllegalArgumentException iae) {
+                throw new IllegalArgumentException("struct not defined for method parameter [" + painlessParameterTypeName + "] " +
+                    "with owner struct [" + ownerStructName + "] and method with name [" + whitelistMethod.javaMethodName + "] " +
+                    "and parameters " + whitelistMethod.painlessParameterTypeNames, iae);
+            }
+        }
+
+        Class<?> javaImplClass = javaAugmentedClass == null ? ownerStruct.clazz : javaAugmentedClass;
+        java.lang.reflect.Method javaMethod;
+
+        try {
+            javaMethod = javaImplClass.getMethod(whitelistMethod.javaMethodName, javaClassParameters);
+        } catch (NoSuchMethodException nsme) {
+            throw new IllegalArgumentException("method with name [" + whitelistMethod.javaMethodName + "] " +
+                "and parameters " + whitelistMethod.painlessParameterTypeNames + " not found for class [" +
+                javaImplClass.getName() + "]", nsme);
+        }
+
+        Class<?> painlessReturnClass;
+
+        try {
+            painlessReturnClass = getJavaClassFromPainlessType(whitelistMethod.painlessReturnTypeName);
+        } catch (IllegalArgumentException iae) {
+            throw new IllegalArgumentException("struct not defined for return type [" + whitelistMethod.painlessReturnTypeName + "] " +
+                "with owner struct [" + ownerStructName + "] and method with name [" + whitelistMethod.javaMethodName + "] " +
+                "and parameters " + whitelistMethod.painlessParameterTypeNames, iae);
+        }
+
+        if (javaMethod.getReturnType() != PainlessLookup.defClassToObjectClass(painlessReturnClass)) {
+            throw new IllegalArgumentException("specified return type class [" + painlessReturnClass + "] " +
+                "does not match the return type class [" + javaMethod.getReturnType() + "] for the " +
+                "method with name [" + whitelistMethod.javaMethodName + "] " +
+                "and parameters " + whitelistMethod.painlessParameterTypeNames);
+        }
+
+        PainlessMethodKey painlessMethodKey =
+            new PainlessMethodKey(whitelistMethod.javaMethodName, whitelistMethod.painlessParameterTypeNames.size());
+
+        if (javaAugmentedClass == null && Modifier.isStatic(javaMethod.getModifiers())) {
+            PainlessMethod painlessMethod = ownerStruct.staticMethods.get(painlessMethodKey);
+
+            if (painlessMethod == null) {
+                org.objectweb.asm.commons.Method asmMethod = org.objectweb.asm.commons.Method.getMethod(javaMethod);
+                MethodHandle javaMethodHandle;
+
+                try {
+                    javaMethodHandle = MethodHandles.publicLookup().in(javaImplClass).unreflect(javaMethod);
+                } catch (IllegalAccessException exception) {
+                    throw new IllegalArgumentException("method handle not found for method with name " +
+                        "[" + whitelistMethod.javaMethodName + "] and parameters " + whitelistMethod.painlessParameterTypeNames);
+                }
+
+                painlessMethod = methodCache.computeIfAbsent(
+                    buildMethodCacheKey(ownerStruct.name, whitelistMethod.javaMethodName, painlessParametersTypes),
+                    key -> new PainlessMethod(whitelistMethod.javaMethodName, ownerStruct, null, painlessReturnClass,
+                        painlessParametersTypes, asmMethod, javaMethod.getModifiers(), javaMethodHandle));
+                ownerStruct.staticMethods.put(painlessMethodKey, painlessMethod);
+            } else if ((painlessMethod.name.equals(whitelistMethod.javaMethodName) && painlessMethod.rtn == painlessReturnClass &&
+                painlessMethod.arguments.equals(painlessParametersTypes)) == false) {
+                throw new IllegalArgumentException("illegal duplicate static methods [" + painlessMethodKey + "] " +
+                    "found within the struct [" + ownerStruct.name + "] with name [" + whitelistMethod.javaMethodName + "], " +
+                    "return types [" + painlessReturnClass + "] and [" + painlessMethod.rtn + "], " +
+                    "and parameters " + painlessParametersTypes + " and " + painlessMethod.arguments);
+            }
+        } else {
+            PainlessMethod painlessMethod = ownerStruct.methods.get(painlessMethodKey);
+
+            if (painlessMethod == null) {
+                org.objectweb.asm.commons.Method asmMethod = org.objectweb.asm.commons.Method.getMethod(javaMethod);
+                MethodHandle javaMethodHandle;
+
+                try {
+                    javaMethodHandle = MethodHandles.publicLookup().in(javaImplClass).unreflect(javaMethod);
+                } catch (IllegalAccessException exception) {
+                    throw new IllegalArgumentException("method handle not found for method with name " +
+                        "[" + whitelistMethod.javaMethodName + "] and parameters " + whitelistMethod.painlessParameterTypeNames);
+                }
+
+                painlessMethod = methodCache.computeIfAbsent(
+                    buildMethodCacheKey(ownerStruct.name, whitelistMethod.javaMethodName, painlessParametersTypes),
+                    key -> new PainlessMethod(whitelistMethod.javaMethodName, ownerStruct, javaAugmentedClass, painlessReturnClass,
+                        painlessParametersTypes, asmMethod, javaMethod.getModifiers(), javaMethodHandle));
+                ownerStruct.methods.put(painlessMethodKey, painlessMethod);
+            } else if ((painlessMethod.name.equals(whitelistMethod.javaMethodName) && painlessMethod.rtn.equals(painlessReturnClass) &&
+                painlessMethod.arguments.equals(painlessParametersTypes)) == false) {
+                throw new IllegalArgumentException("illegal duplicate member methods [" + painlessMethodKey + "] " +
+                    "found within the struct [" + ownerStruct.name + "] with name [" + whitelistMethod.javaMethodName + "], " +
+                    "return types [" + painlessReturnClass + "] and [" + painlessMethod.rtn + "], " +
+                    "and parameters " + painlessParametersTypes + " and " + painlessMethod.arguments);
+            }
+        }
+    }
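The heart of addMethod is a two-step resolution: look the method up reflectively on the owner (or augmentation) class, then unreflect it into an invocable MethodHandle through the public lookup. A minimal, self-contained sketch of that flow using only JDK APIs; the class and method chosen here are illustrative, not part of any whitelist:

    import java.lang.invoke.MethodHandle;
    import java.lang.invoke.MethodHandles;

    public class UnreflectSketch {
        public static void main(String[] args) throws Throwable {
            // Resolve the method reflectively, as addMethod does with the
            // owner class and the converted painless parameter classes.
            java.lang.reflect.Method m = String.class.getMethod("substring", int.class);
            // Unreflect through the public lookup, mirroring the code above.
            MethodHandle handle = MethodHandles.publicLookup().in(String.class).unreflect(m);
            // A member method handle takes the receiver as its leading argument.
            System.out.println(handle.invoke("painless", 4)); // prints "less"
        }
    }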
+
+    private void addField(String ownerStructName, WhitelistField whitelistField) {
+        PainlessClass ownerStruct = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(ownerStructName));
+
+        if (ownerStruct == null) {
+            throw new IllegalArgumentException("owner struct [" + ownerStructName + "] not defined for method with " +
+                "name [" + whitelistField.javaFieldName + "] and type " + whitelistField.painlessFieldTypeName);
+        }
+
+        if (TYPE_NAME_PATTERN.matcher(whitelistField.javaFieldName).matches() == false) {
+            throw new IllegalArgumentException("invalid field name " +
+                "[" + whitelistField.painlessFieldTypeName + "] for owner struct [" + ownerStructName + "].");
+        }
+
+        java.lang.reflect.Field javaField;
+
+        try {
+            javaField = ownerStruct.clazz.getField(whitelistField.javaFieldName);
+        } catch (NoSuchFieldException exception) {
+            throw new IllegalArgumentException("field [" + whitelistField.javaFieldName + "] " +
+                "not found for class [" + ownerStruct.clazz.getName() + "].");
+        }
+
+        Class<?> painlessFieldClass;
+
+        try {
+            painlessFieldClass = getJavaClassFromPainlessType(whitelistField.painlessFieldTypeName);
+        } catch (IllegalArgumentException iae) {
+            throw new IllegalArgumentException("struct not defined for return type [" + whitelistField.painlessFieldTypeName + "] " +
+                "with owner struct [" + ownerStructName + "] and field with name [" + whitelistField.javaFieldName + "]", iae);
+        }
+
+        if (Modifier.isStatic(javaField.getModifiers())) {
+            if (Modifier.isFinal(javaField.getModifiers()) == false) {
+                throw new IllegalArgumentException("static [" + whitelistField.javaFieldName + "] " +
+                    "with owner struct [" + ownerStruct.name + "] is not final");
+            }
+
+            PainlessField painlessField = ownerStruct.staticMembers.get(whitelistField.javaFieldName);
+
+            if (painlessField == null) {
+                painlessField = fieldCache.computeIfAbsent(
+                    buildFieldCacheKey(ownerStruct.name, whitelistField.javaFieldName, painlessFieldClass.getName()),
+                    key -> new PainlessField(whitelistField.javaFieldName, javaField.getName(),
+                        ownerStruct, painlessFieldClass, javaField.getModifiers(), null, null));
+                ownerStruct.staticMembers.put(whitelistField.javaFieldName, painlessField);
+            } else if (painlessField.clazz != painlessFieldClass) {
+                throw new IllegalArgumentException("illegal duplicate static fields [" + whitelistField.javaFieldName + "] " +
+                    "found within the struct [" + ownerStruct.name + "] with type [" + whitelistField.painlessFieldTypeName + "]");
+            }
+        } else {
+            MethodHandle javaMethodHandleGetter;
+            MethodHandle javaMethodHandleSetter;
+
+            try {
+                javaMethodHandleGetter = MethodHandles.publicLookup().unreflectGetter(javaField);
+                javaMethodHandleSetter = MethodHandles.publicLookup().unreflectSetter(javaField);
+            } catch (IllegalAccessException exception) {
+                throw new IllegalArgumentException("getter/setter [" + whitelistField.javaFieldName + "]" +
+                    " not found for class [" + ownerStruct.clazz.getName() + "].", exception);
+            }
+
+            PainlessField painlessField = ownerStruct.members.get(whitelistField.javaFieldName);
+
+            if (painlessField == null) {
+                painlessField = fieldCache.computeIfAbsent(
+                    buildFieldCacheKey(ownerStruct.name, whitelistField.javaFieldName, painlessFieldClass.getName()),
+                    key -> new PainlessField(whitelistField.javaFieldName, javaField.getName(),
+                        ownerStruct, painlessFieldClass, javaField.getModifiers(), javaMethodHandleGetter, javaMethodHandleSetter));
+                ownerStruct.members.put(whitelistField.javaFieldName, painlessField);
+            } else if (painlessField.clazz != painlessFieldClass) {
+                throw new IllegalArgumentException("illegal duplicate member fields [" + whitelistField.javaFieldName + "] " +
+                    "found within the struct [" + ownerStruct.name + "] with type [" + whitelistField.painlessFieldTypeName + "]");
+            }
+        }
+    }
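For the non-static branch, the getter and setter handles come straight from Lookup.unreflectGetter and Lookup.unreflectSetter. A standalone sketch of that mechanism; the class and field are invented for illustration:

    import java.lang.invoke.MethodHandle;
    import java.lang.invoke.MethodHandles;

    public class FieldHandleSketch {
        public int counter = 0; // public instance field, like a whitelisted member

        public static void main(String[] args) throws Throwable {
            java.lang.reflect.Field f = FieldHandleSketch.class.getField("counter");
            MethodHandle getter = MethodHandles.publicLookup().unreflectGetter(f);
            MethodHandle setter = MethodHandles.publicLookup().unreflectSetter(f);
            FieldHandleSketch target = new FieldHandleSketch();
            setter.invoke(target, 42);                  // write through the handle
            System.out.println(getter.invoke(target)); // prints 42
        }
    }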
+
+    private void copyStruct(String struct, List<String> children) {
+        final PainlessClass owner = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(struct));
+
+        if (owner == null) {
+            throw new IllegalArgumentException("Owner struct [" + struct + "] not defined for copy.");
+        }
+
+        for (int count = 0; count < children.size(); ++count) {
+            final PainlessClass child = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(children.get(count)));
+
+            if (child == null) {
+                throw new IllegalArgumentException("Child struct [" + children.get(count) + "]" +
+                    " not defined for copy to owner struct [" + owner.name + "].");
+            }
+
+            if (!child.clazz.isAssignableFrom(owner.clazz)) {
+                throw new ClassCastException("Child struct [" + child.name + "]" +
+                    " is not a super type of owner struct [" + owner.name + "] in copy.");
+            }
+
+            for (Map.Entry<PainlessMethodKey, PainlessMethod> kvPair : child.methods.entrySet()) {
+                PainlessMethodKey methodKey = kvPair.getKey();
+                PainlessMethod method = kvPair.getValue();
+                if (owner.methods.get(methodKey) == null) {
+                    // TODO: some of these are no longer valid or outright don't work
+                    // TODO: since classes may not come from the Painless classloader
+                    // TODO: and it was dependent on the order of the extends,
+                    // TODO: which no longer exists since this is generated automatically
+                    // sanity check, look for missing covariant/generic override
+                    /*if (owner.clazz.isInterface() && child.clazz == Object.class) {
+                        // ok
+                    } else if (child.clazz == Spliterator.OfPrimitive.class || child.clazz == PrimitiveIterator.class) {
+                        // ok, we rely on generics erasure for these (it's guaranteed in the javadocs though!!!!)
+                    } else if (Constants.JRE_IS_MINIMUM_JAVA9 && owner.clazz == LocalDate.class) {
+                        // ok, java 9 added covariant override for LocalDate.getEra() to return IsoEra:
+                        // https://bugs.openjdk.java.net/browse/JDK-8072746
+                    } else {
+                        try {
+                            // TODO: we *have* to remove all these public members and use getter methods to encapsulate!
+                            final Class<?> impl;
+                            final Class<?>[] arguments;
+                            if (method.augmentation != null) {
+                                impl = method.augmentation;
+                                arguments = new Class<?>[method.arguments.size() + 1];
+                                arguments[0] = method.owner.clazz;
+                                for (int i = 0; i < method.arguments.size(); i++) {
+                                    arguments[i + 1] = method.arguments.get(i).clazz;
+                                }
+                            } else {
+                                impl = owner.clazz;
+                                arguments = new Class<?>[method.arguments.size()];
+                                for (int i = 0; i < method.arguments.size(); i++) {
+                                    arguments[i] = method.arguments.get(i).clazz;
+                                }
+                            }
+                            java.lang.reflect.Method m = impl.getMethod(method.method.getName(), arguments);
+                            if (m.getReturnType() != method.rtn.clazz) {
+                                throw new IllegalStateException("missing covariant override for: " + m + " in " + owner.name);
+                            }
+                            if (m.isBridge() && !Modifier.isVolatile(method.modifiers)) {
+                                // it's a bridge in the destination, but not in the source, but it might still be ok, check generics:
+                                java.lang.reflect.Method source = child.clazz.getMethod(method.method.getName(), arguments);
+                                if (!Arrays.equals(source.getGenericParameterTypes(), source.getParameterTypes())) {
+                                    throw new IllegalStateException("missing generic override for: " + m + " in " + owner.name);
+                                }
+                            }
+                        } catch (ReflectiveOperationException e) {
+                            throw new AssertionError(e);
+                        }
+                    }*/
+                    owner.methods.put(methodKey, method);
+                }
+            }
+
+            for (PainlessField field : child.members.values()) {
+                if (owner.members.get(field.name) == null) {
+                    owner.members.put(field.name,
+                        new PainlessField(field.name, field.javaName, owner, field.clazz, field.modifiers, field.getter, field.setter));
+                }
+            }
+        }
+    }
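The assignability guard in copyStruct reads backwards at first glance: each "child" in the copy list is a resolved supertype whose members are folded down into the owner, so child.clazz must be assignable from owner.clazz. A two-line sketch of that direction check, with types chosen purely for illustration:

    public class AssignabilitySketch {
        public static void main(String[] args) {
            // child.clazz.isAssignableFrom(owner.clazz): the "child" here is
            // the supertype whose members flow into the owner.
            System.out.println(CharSequence.class.isAssignableFrom(String.class)); // true
            System.out.println(String.class.isAssignableFrom(CharSequence.class)); // false
        }
    }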
+
+    /**
+     * Precomputes a more efficient structure for dynamic method/field access.
+     */
+    private void addRuntimeClass(final PainlessClass struct) {
+        // add all getters/setters
+        for (Map.Entry<PainlessMethodKey, PainlessMethod> method : struct.methods.entrySet()) {
+            String name = method.getKey().name;
+            PainlessMethod m = method.getValue();
+
+            if (m.arguments.size() == 0 &&
+                name.startsWith("get") &&
+                name.length() > 3 &&
+                Character.isUpperCase(name.charAt(3))) {
+                StringBuilder newName = new StringBuilder();
+                newName.append(Character.toLowerCase(name.charAt(3)));
+                newName.append(name.substring(4));
+                struct.getters.putIfAbsent(newName.toString(), m.handle);
+            } else if (m.arguments.size() == 0 &&
+                name.startsWith("is") &&
+                name.length() > 2 &&
+                Character.isUpperCase(name.charAt(2))) {
+                StringBuilder newName = new StringBuilder();
+                newName.append(Character.toLowerCase(name.charAt(2)));
+                newName.append(name.substring(3));
+                struct.getters.putIfAbsent(newName.toString(), m.handle);
+            }
+
+            if (m.arguments.size() == 1 &&
+                name.startsWith("set") &&
+                name.length() > 3 &&
+                Character.isUpperCase(name.charAt(3))) {
+                StringBuilder newName = new StringBuilder();
+                newName.append(Character.toLowerCase(name.charAt(3)));
+                newName.append(name.substring(4));
+                struct.setters.putIfAbsent(newName.toString(), m.handle);
+            }
+        }
+
+        // add all members
+        for (Map.Entry<String, PainlessField> member : struct.members.entrySet()) {
+            struct.getters.put(member.getKey(), member.getValue().getter);
+            struct.setters.put(member.getKey(), member.getValue().setter);
+        }
+    }
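The shortcut names registered above follow the JavaBean convention: strip the get/is/set prefix and lower-case the next character. A tiny sketch of that derivation; the helper name is invented:

    public class BeanNameSketch {
        static String propertyName(String methodName, int prefixLen) {
            // "getFoo" -> "foo", "isEmpty" -> "empty", "setBar" -> "bar"
            return Character.toLowerCase(methodName.charAt(prefixLen))
                + methodName.substring(prefixLen + 1);
        }

        public static void main(String[] args) {
            System.out.println(propertyName("getPositionIncrement", 3)); // positionIncrement
            System.out.println(propertyName("isEmpty", 2));              // empty
            System.out.println(propertyName("setOffset", 3));            // offset
        }
    }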
+
+    /** computes the functional interface method for a class, or returns null */
+    private PainlessMethod computeFunctionalInterfaceMethod(PainlessClass clazz) {
+        if (!clazz.clazz.isInterface()) {
+            return null;
+        }
+        // if it's marked with this annotation, we fail if the conditions don't hold (means whitelist bug)
+        // otherwise, this annotation is pretty useless.
+        boolean hasAnnotation = clazz.clazz.isAnnotationPresent(FunctionalInterface.class);
+        List<java.lang.reflect.Method> methods = new ArrayList<>();
+        for (java.lang.reflect.Method m : clazz.clazz.getMethods()) {
+            // default interface methods don't count
+            if (m.isDefault()) {
+                continue;
+            }
+            // static methods don't count
+            if (Modifier.isStatic(m.getModifiers())) {
+                continue;
+            }
+            // if it's from Object, it doesn't count
+            try {
+                Object.class.getMethod(m.getName(), m.getParameterTypes());
+                continue;
+            } catch (ReflectiveOperationException e) {
+                // it counts
+            }
+            methods.add(m);
+        }
+        if (methods.size() != 1) {
+            if (hasAnnotation) {
+                throw new IllegalArgumentException("Class: " + clazz.name +
+                    " is marked with FunctionalInterface but doesn't fit the bill: " + methods);
+            }
+            return null;
+        }
+        // inspect the one method found from the reflection API, it should match the whitelist!
+        java.lang.reflect.Method oneMethod = methods.get(0);
+        PainlessMethod painless = clazz.methods.get(new PainlessMethodKey(oneMethod.getName(), oneMethod.getParameterCount()));
+        if (painless == null || painless.method.equals(org.objectweb.asm.commons.Method.getMethod(oneMethod)) == false) {
+            throw new IllegalArgumentException("Class: " + clazz.name + " is functional but the functional " +
+                "method is not whitelisted!");
+        }
+        return painless;
+    }
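The single-abstract-method scan can be exercised directly against a JDK interface. A self-contained sketch that applies the same three filters (default, static, and Object-declared methods are excluded) to java.util.function.Function and is left with exactly one candidate:

    import java.lang.reflect.Method;
    import java.lang.reflect.Modifier;
    import java.util.ArrayList;
    import java.util.List;

    public class SamSketch {
        public static void main(String[] args) {
            List<Method> abstracts = new ArrayList<>();
            for (Method m : java.util.function.Function.class.getMethods()) {
                if (m.isDefault() || Modifier.isStatic(m.getModifiers())) {
                    continue; // default and static methods don't count
                }
                try {
                    Object.class.getMethod(m.getName(), m.getParameterTypes());
                    continue; // redeclared Object method, doesn't count either
                } catch (NoSuchMethodException expected) {
                    // not an Object method: it counts
                }
                abstracts.add(m);
            }
            System.out.println(abstracts); // exactly one method: apply
        }
    }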
+
+    public Class<?> getJavaClassFromPainlessType(String painlessType) {
+        Class<?> javaClass = painlessTypesToJavaClasses.get(painlessType);
+
+        if (javaClass != null) {
+            return javaClass;
+        }
+        int arrayDimensions = 0;
+        int arrayIndex = painlessType.indexOf('[');
+
+        if (arrayIndex != -1) {
+            int length = painlessType.length();
+
+            while (arrayIndex < length) {
+                if (painlessType.charAt(arrayIndex) == '[' && ++arrayIndex < length && painlessType.charAt(arrayIndex++) == ']') {
+                    ++arrayDimensions;
+                } else {
+                    throw new IllegalArgumentException("invalid painless type [" + painlessType + "].");
+                }
+            }
+
+            painlessType = painlessType.substring(0, painlessType.indexOf('['));
+            javaClass = painlessTypesToJavaClasses.get(painlessType);
+
+            if (javaClass == null) {
+                throw new IllegalArgumentException("invalid painless type [" + painlessType + "].");
+            }
+
+            char[] braces = new char[arrayDimensions];
+            Arrays.fill(braces, '[');
+            String descriptor = new String(braces);
+
+            if (javaClass == boolean.class) {
+                descriptor += "Z";
+            } else if (javaClass == byte.class) {
+                descriptor += "B";
+            } else if (javaClass == short.class) {
+                descriptor += "S";
+            } else if (javaClass == char.class) {
+                descriptor += "C";
+            } else if (javaClass == int.class) {
+                descriptor += "I";
+            } else if (javaClass == long.class) {
+                descriptor += "J";
+            } else if (javaClass == float.class) {
+                descriptor += "F";
+            } else if (javaClass == double.class) {
+                descriptor += "D";
+            } else {
+                descriptor += "L" + javaClass.getName() + ";";
+            }
+
+            try {
+                return Class.forName(descriptor);
+            } catch (ClassNotFoundException cnfe) {
+                throw new IllegalStateException("invalid painless type [" + painlessType + "]", cnfe);
+            }
+        }
+
+        throw new IllegalArgumentException("invalid painless type [" + painlessType + "]");
+    }
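The descriptor assembled above is a JVM binary name: one '[' per array dimension followed by a primitive letter or an L...; reference descriptor, which Class.forName resolves directly. A quick sketch:

    public class ArrayDescriptorSketch {
        public static void main(String[] args) throws ClassNotFoundException {
            System.out.println(Class.forName("[[I"));                  // int[][]
            System.out.println(Class.forName("[Ljava.lang.String;"));  // String[]
            System.out.println(int[][].class == Class.forName("[[I")); // true
        }
    }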
+
+    public PainlessLookup build() {
+        return new PainlessLookup(painlessTypesToJavaClasses, javaClassesToPainlessStructs);
+    }
+}
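For orientation, a hypothetical miniature of the shape this file introduces: mutable registration state lives in the builder, and build() hands back the finished lookup. The map-backed stand-in below is not the real PainlessLookup:

    import java.util.Collections;
    import java.util.HashMap;
    import java.util.Map;

    public class LookupBuilderSketch {
        private final Map<String, Class<?>> types = new HashMap<>();

        LookupBuilderSketch define(String name, Class<?> clazz) {
            types.put(name, clazz); // accumulate whitelist state
            return this;
        }

        Map<String, Class<?>> build() {
            // hand back an immutable snapshot, like PainlessLookupBuilder.build()
            return Collections.unmodifiableMap(new HashMap<>(types));
        }

        public static void main(String[] args) {
            System.out.println(new LookupBuilderSketch().define("String", String.class).build());
        }
    }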
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/def.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/def.java
new file mode 100644
index 0000000000000..4336236be3f12
--- /dev/null
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/def.java
@@ -0,0 +1,28 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.painless.lookup;
+
+/** Marker class for def type to be used during type analysis. */
+public final class def {
+
+    private def() {
+
+    }
+}
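The marker never exists at runtime; lookups erase it to Object before touching reflection, as the PainlessLookup.defClassToObjectClass calls earlier in this patch do. A hypothetical analogue of that mapping (erase is an invented name):

    public class MarkerSketch {
        static final class def { private def() {} } // marker type, never instantiated

        static Class<?> erase(Class<?> clazz) {
            return clazz == def.class ? Object.class : clazz;
        }

        public static void main(String[] args) {
            System.out.println(erase(def.class));    // class java.lang.Object
            System.out.println(erase(String.class)); // class java.lang.String
        }
    }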
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EAssignment.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EAssignment.java
index dda246b5f6cda..a0a29ed59ddde 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EAssignment.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EAssignment.java
@@ -23,7 +23,7 @@
 import org.elasticsearch.painless.AnalyzerCaster;
 import org.elasticsearch.painless.DefBootstrap;
 import org.elasticsearch.painless.lookup.PainlessCast;
-import org.elasticsearch.painless.lookup.PainlessLookup.def;
+import org.elasticsearch.painless.lookup.def;
 import org.elasticsearch.painless.Globals;
 import org.elasticsearch.painless.Locals;
 import org.elasticsearch.painless.Location;
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBinary.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBinary.java
index 46fbeefd6f557..422300072dc2f 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBinary.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBinary.java
@@ -22,7 +22,7 @@
 import org.elasticsearch.painless.AnalyzerCaster;
 import org.elasticsearch.painless.DefBootstrap;
 import org.elasticsearch.painless.lookup.PainlessLookup;
-import org.elasticsearch.painless.lookup.PainlessLookup.def;
+import org.elasticsearch.painless.lookup.def;
 import org.elasticsearch.painless.Globals;
 import org.elasticsearch.painless.Locals;
 import org.elasticsearch.painless.Location;
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java
index a3e1b4bde6a86..c0345b6308c3e 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java
@@ -22,7 +22,7 @@
 import org.elasticsearch.painless.AnalyzerCaster;
 import org.elasticsearch.painless.DefBootstrap;
 import org.elasticsearch.painless.lookup.PainlessLookup;
-import org.elasticsearch.painless.lookup.PainlessLookup.def;
+import org.elasticsearch.painless.lookup.def;
 import org.elasticsearch.painless.FunctionRef;
 import org.elasticsearch.painless.Globals;
 import org.elasticsearch.painless.Locals;
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EComp.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EComp.java
index c0fccab8e8a8e..806204d051ae0 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EComp.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EComp.java
@@ -22,7 +22,7 @@
 import org.elasticsearch.painless.AnalyzerCaster;
 import org.elasticsearch.painless.DefBootstrap;
 import org.elasticsearch.painless.lookup.PainlessLookup;
-import org.elasticsearch.painless.lookup.PainlessLookup.def;
+import org.elasticsearch.painless.lookup.def;
 import org.elasticsearch.painless.Globals;
 import org.elasticsearch.painless.Locals;
 import org.elasticsearch.painless.Location;
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java
index a7b7a41fe051a..8977f4f0ef329 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java
@@ -22,7 +22,7 @@
 import org.elasticsearch.painless.AnalyzerCaster;
 import org.elasticsearch.painless.lookup.PainlessLookup;
 import org.elasticsearch.painless.lookup.PainlessMethod;
-import org.elasticsearch.painless.lookup.PainlessLookup.def;
+import org.elasticsearch.painless.lookup.def;
 import org.elasticsearch.painless.FunctionRef;
 import org.elasticsearch.painless.Globals;
 import org.elasticsearch.painless.Locals;
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java
index 518f1953525a6..820cce685edcd 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java
@@ -21,7 +21,7 @@
 
 import org.elasticsearch.painless.lookup.PainlessMethod;
 import org.elasticsearch.painless.lookup.PainlessMethodKey;
-import org.elasticsearch.painless.lookup.PainlessLookup.def;
+import org.elasticsearch.painless.lookup.def;
 import org.elasticsearch.painless.Globals;
 import org.elasticsearch.painless.Locals;
 import org.elasticsearch.painless.Location;
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java
index 45158aedcf787..b6c7fb80af95f 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java
@@ -21,7 +21,7 @@
 
 import org.elasticsearch.painless.lookup.PainlessMethod;
 import org.elasticsearch.painless.lookup.PainlessMethodKey;
-import org.elasticsearch.painless.lookup.PainlessLookup.def;
+import org.elasticsearch.painless.lookup.def;
 import org.elasticsearch.painless.Globals;
 import org.elasticsearch.painless.Locals;
 import org.elasticsearch.painless.Location;
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EUnary.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EUnary.java
index 8e293556eac01..3a5102ebdc99c 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EUnary.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EUnary.java
@@ -22,7 +22,7 @@
 import org.elasticsearch.painless.AnalyzerCaster;
 import org.elasticsearch.painless.DefBootstrap;
 import org.elasticsearch.painless.lookup.PainlessLookup;
-import org.elasticsearch.painless.lookup.PainlessLookup.def;
+import org.elasticsearch.painless.lookup.def;
 import org.elasticsearch.painless.Globals;
 import org.elasticsearch.painless.Locals;
 import org.elasticsearch.painless.Location;
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PBrace.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PBrace.java
index ec7d0f6d7bb7a..5b282abdce9fa 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PBrace.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PBrace.java
@@ -20,7 +20,7 @@
 package org.elasticsearch.painless.node;
 
 import org.elasticsearch.painless.lookup.PainlessLookup;
-import org.elasticsearch.painless.lookup.PainlessLookup.def;
+import org.elasticsearch.painless.lookup.def;
 import org.elasticsearch.painless.Globals;
 import org.elasticsearch.painless.Locals;
 import org.elasticsearch.painless.Location;
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java
index 12ff483248367..f23ae9f188704 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java
@@ -23,7 +23,7 @@
 import org.elasticsearch.painless.lookup.PainlessMethod;
 import org.elasticsearch.painless.lookup.PainlessMethodKey;
 import org.elasticsearch.painless.lookup.PainlessClass;
-import org.elasticsearch.painless.lookup.PainlessLookup.def;
+import org.elasticsearch.painless.lookup.def;
 import org.elasticsearch.painless.Globals;
 import org.elasticsearch.painless.Locals;
 import org.elasticsearch.painless.Location;
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java
index 8d27162fc367b..78a18b91ab2c6 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java
@@ -23,7 +23,7 @@
 import org.elasticsearch.painless.lookup.PainlessField;
 import org.elasticsearch.painless.lookup.PainlessMethod;
 import org.elasticsearch.painless.lookup.PainlessClass;
-import org.elasticsearch.painless.lookup.PainlessLookup.def;
+import org.elasticsearch.painless.lookup.def;
 import org.elasticsearch.painless.Globals;
 import org.elasticsearch.painless.Locals;
 import org.elasticsearch.painless.Location;
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefArray.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefArray.java
index 8e30d43432953..ccbc25db4f25e 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefArray.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefArray.java
@@ -20,7 +20,7 @@
 package org.elasticsearch.painless.node;
 
 import org.elasticsearch.painless.DefBootstrap;
-import org.elasticsearch.painless.lookup.PainlessLookup.def;
+import org.elasticsearch.painless.lookup.def;
 import org.elasticsearch.painless.Globals;
 import org.elasticsearch.painless.Locals;
 import org.elasticsearch.painless.Location;
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefCall.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefCall.java
index 0882f19177006..a9021000e2dad 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefCall.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefCall.java
@@ -20,7 +20,7 @@
 package org.elasticsearch.painless.node;
 
 import org.elasticsearch.painless.DefBootstrap;
-import org.elasticsearch.painless.lookup.PainlessLookup.def;
+import org.elasticsearch.painless.lookup.def;
 import org.elasticsearch.painless.Globals;
 import org.elasticsearch.painless.Locals;
 import org.elasticsearch.painless.Location;
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefField.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefField.java
index 41fcf563d241c..1c081c9422ecb 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefField.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefField.java
@@ -20,7 +20,7 @@
 package org.elasticsearch.painless.node;
 
 import org.elasticsearch.painless.DefBootstrap;
-import org.elasticsearch.painless.lookup.PainlessLookup.def;
+import org.elasticsearch.painless.lookup.def;
 import org.elasticsearch.painless.Globals;
 import org.elasticsearch.painless.Locals;
 import org.elasticsearch.painless.Location;
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SEach.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SEach.java
index e7d18ece0590d..c402d8982d89e 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SEach.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SEach.java
@@ -20,7 +20,7 @@
 package org.elasticsearch.painless.node;
 
 import org.elasticsearch.painless.lookup.PainlessLookup;
-import org.elasticsearch.painless.lookup.PainlessLookup.def;
+import org.elasticsearch.painless.lookup.def;
 import org.elasticsearch.painless.Globals;
 import org.elasticsearch.painless.Locals;
 import org.elasticsearch.painless.Locals.Variable;
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachIterable.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachIterable.java
index faee2ed74a6d0..cfc87536b6b7b 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachIterable.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachIterable.java
@@ -25,7 +25,7 @@
 import org.elasticsearch.painless.lookup.PainlessCast;
 import org.elasticsearch.painless.lookup.PainlessMethod;
 import org.elasticsearch.painless.lookup.PainlessMethodKey;
-import org.elasticsearch.painless.lookup.PainlessLookup.def;
+import org.elasticsearch.painless.lookup.def;
 import org.elasticsearch.painless.Globals;
 import org.elasticsearch.painless.Locals;
 import org.elasticsearch.painless.Locals.Variable;
diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/BaseClassTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BaseClassTests.java
index 78e5814e963f7..c0e0bd7ed9d05 100644
--- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/BaseClassTests.java
+++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/BaseClassTests.java
@@ -24,6 +24,7 @@
 import java.util.Map;
 
 import org.elasticsearch.painless.lookup.PainlessLookup;
+import org.elasticsearch.painless.lookup.PainlessLookupBuilder;
 import org.elasticsearch.painless.spi.Whitelist;
 
 import static java.util.Collections.emptyMap;
@@ -37,7 +38,7 @@
  */
 public class BaseClassTests extends ScriptTestCase {
 
-    private final PainlessLookup painlessLookup = new PainlessLookup(Whitelist.BASE_WHITELISTS);
+    private final PainlessLookup painlessLookup = new PainlessLookupBuilder(Whitelist.BASE_WHITELISTS).build();
 
     public abstract static class Gets {
 
diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/DebugTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/DebugTests.java
index 987eef31eeeaf..7edc90bb0a001 100644
--- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/DebugTests.java
+++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/DebugTests.java
@@ -23,6 +23,7 @@
 import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.painless.lookup.PainlessLookup;
+import org.elasticsearch.painless.lookup.PainlessLookupBuilder;
 import org.elasticsearch.painless.spi.Whitelist;
 import org.elasticsearch.script.ScriptException;
 
@@ -36,7 +37,7 @@
 import static org.hamcrest.Matchers.not;
 
 public class DebugTests extends ScriptTestCase {
-    private final PainlessLookup painlessLookup = new PainlessLookup(Whitelist.BASE_WHITELISTS);
+    private final PainlessLookup painlessLookup = new PainlessLookupBuilder(Whitelist.BASE_WHITELISTS).build();
 
     public void testExplain() {
         // Debug.explain can explain an object
diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/Debugger.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/Debugger.java
index 0d5e2748b7b32..73adf92779d48 100644
--- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/Debugger.java
+++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/Debugger.java
@@ -19,7 +19,7 @@
 
 package org.elasticsearch.painless;
 
-import org.elasticsearch.painless.lookup.PainlessLookup;
+import org.elasticsearch.painless.lookup.PainlessLookupBuilder;
 import org.elasticsearch.painless.spi.Whitelist;
 import org.objectweb.asm.util.Textifier;
 
@@ -40,7 +40,7 @@ static String toString(Class<?> iface, String source, CompilerSettings settings)
         PrintWriter outputWriter = new PrintWriter(output);
         Textifier textifier = new Textifier();
         try {
-            new Compiler(iface, new PainlessLookup(Whitelist.BASE_WHITELISTS))
+            new Compiler(iface, new PainlessLookupBuilder(Whitelist.BASE_WHITELISTS).build())
                     .compile("", source, settings, textifier);
         } catch (RuntimeException e) {
             textifier.print(outputWriter);
diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/DefBootstrapTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/DefBootstrapTests.java
index ab4844dd58bd9..07f45ff67c07d 100644
--- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/DefBootstrapTests.java
+++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/DefBootstrapTests.java
@@ -28,11 +28,12 @@
 import java.util.HashMap;
 
 import org.elasticsearch.painless.lookup.PainlessLookup;
+import org.elasticsearch.painless.lookup.PainlessLookupBuilder;
 import org.elasticsearch.painless.spi.Whitelist;
 import org.elasticsearch.test.ESTestCase;
 
 public class DefBootstrapTests extends ESTestCase {
-    private final PainlessLookup painlessLookup = new PainlessLookup(Whitelist.BASE_WHITELISTS);
+    private final PainlessLookup painlessLookup = new PainlessLookupBuilder(Whitelist.BASE_WHITELISTS).build();
 
     /** calls toString() on integers, twice */
     public void testOneType() throws Throwable {
diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java
index 5177d64cbdb06..5e8e6ad47d813 100644
--- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java
+++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java
@@ -20,13 +20,16 @@
 package org.elasticsearch.painless;
 
 import org.apache.logging.log4j.Logger;
-import org.elasticsearch.core.internal.io.IOUtils;
 import org.elasticsearch.common.io.PathUtils;
 import org.elasticsearch.common.logging.ESLoggerFactory;
-import org.elasticsearch.painless.lookup.PainlessLookup;
+import org.elasticsearch.core.internal.io.IOUtils;
+import org.elasticsearch.painless.lookup.PainlessClass;
 import org.elasticsearch.painless.lookup.PainlessField;
+import org.elasticsearch.painless.lookup.PainlessLookup;
+import org.elasticsearch.painless.lookup.PainlessLookupBuilder;
 import org.elasticsearch.painless.lookup.PainlessMethod;
-import org.elasticsearch.painless.lookup.PainlessClass;
+import org.elasticsearch.painless.spi.Whitelist;
+
 import java.io.IOException;
 import java.io.PrintStream;
 import java.lang.reflect.Modifier;
@@ -42,14 +45,13 @@
 
 import static java.util.Comparator.comparing;
 import static java.util.stream.Collectors.toList;
-import static org.elasticsearch.painless.spi.Whitelist.BASE_WHITELISTS;
 
 /**
  * Generates an API reference from the method and type whitelists in {@link PainlessLookup}.
  */
 public class PainlessDocGenerator {
 
-    private static final PainlessLookup PAINLESS_LOOKUP = new PainlessLookup(BASE_WHITELISTS);
+    private static final PainlessLookup PAINLESS_LOOKUP = new PainlessLookupBuilder(Whitelist.BASE_WHITELISTS).build();
     private static final Logger logger = ESLoggerFactory.getLogger(PainlessDocGenerator.class);
     private static final Comparator<PainlessField> FIELD_NAME = comparing(f -> f.name);
     private static final Comparator<PainlessMethod> METHOD_NAME = comparing(m -> m.name);
diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptTestCase.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptTestCase.java
index 1a4770e560a7e..eebf1d701ee32 100644
--- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptTestCase.java
+++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/ScriptTestCase.java
@@ -25,6 +25,7 @@
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.painless.antlr.Walker;
 import org.elasticsearch.painless.lookup.PainlessLookup;
+import org.elasticsearch.painless.lookup.PainlessLookupBuilder;
 import org.elasticsearch.painless.spi.Whitelist;
 import org.elasticsearch.script.ExecutableScript;
 import org.elasticsearch.script.ScriptContext;
@@ -91,7 +92,7 @@ public Object exec(String script, Map<String, Object> vars, boolean picky) {
     public Object exec(String script, Map<String, Object> vars, Map<String, String> compileParams, Scorer scorer, boolean picky) {
         // test for ambiguity errors before running the actual script if picky is true
         if (picky) {
-            PainlessLookup painlessLookup = new PainlessLookup(Whitelist.BASE_WHITELISTS);
+            PainlessLookup painlessLookup = new PainlessLookupBuilder(Whitelist.BASE_WHITELISTS).build();
             ScriptClassInfo scriptClassInfo = new ScriptClassInfo(painlessLookup, GenericElasticsearchScript.class);
             CompilerSettings pickySettings = new CompilerSettings();
             pickySettings.setPicky(true);
diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/node/NodeToStringTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/node/NodeToStringTests.java
index 3e9f724743faa..86d365e0fcc7a 100644
--- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/node/NodeToStringTests.java
+++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/node/NodeToStringTests.java
@@ -23,6 +23,7 @@
 import org.elasticsearch.painless.lookup.PainlessLookup;
 import org.elasticsearch.painless.lookup.PainlessCast;
 import org.elasticsearch.painless.lookup.PainlessField;
+import org.elasticsearch.painless.lookup.PainlessLookupBuilder;
 import org.elasticsearch.painless.lookup.PainlessMethod;
 import org.elasticsearch.painless.lookup.PainlessMethodKey;
 import org.elasticsearch.painless.lookup.PainlessClass;
@@ -48,7 +49,7 @@
  * Tests {@link Object#toString} implementations on all extensions of {@link ANode}.
  */
 public class NodeToStringTests extends ESTestCase {
-    private final PainlessLookup painlessLookup = new PainlessLookup(Whitelist.BASE_WHITELISTS);
+    private final PainlessLookup painlessLookup = new PainlessLookupBuilder(Whitelist.BASE_WHITELISTS).build();
 
     public void testEAssignment() {
         assertToString(

From 1dd027901567a30b59a5a51617af2a52c1d61256 Mon Sep 17 00:00:00 2001
From: DeDe Morton 
Date: Mon, 16 Jul 2018 12:11:24 -0700
Subject: [PATCH 052/107] Use correct formatting for links (#29460)

---
 docs/reference/how-to/recipes.asciidoc | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/docs/reference/how-to/recipes.asciidoc b/docs/reference/how-to/recipes.asciidoc
index e798f8819d000..451e192ad6ad2 100644
--- a/docs/reference/how-to/recipes.asciidoc
+++ b/docs/reference/how-to/recipes.asciidoc
@@ -3,8 +3,8 @@
 
 This section includes a few recipes to help with common problems:
 
-* mixing-exact-search-with-stemming
-* consistent-scoring
+* <<mixing-exact-search-with-stemming>>
+* <<consistent-scoring>>
 
 include::recipes/stemming.asciidoc[]
 include::recipes/scoring.asciidoc[]

From 53f029bd65d09d31b0b29c0ca02eb3a79e123cf0 Mon Sep 17 00:00:00 2001
From: Michael Basnight 
Date: Mon, 16 Jul 2018 14:20:16 -0500
Subject: [PATCH 053/107] Watcher: Store username on watch execution (#31873)

There is currently no way to see what user executed a watch. This commit
adds the decrypted username to each execution in the watch history, in a
new field "user".

Closes #31772
---
 .../rest-api/watcher/execute-watch.asciidoc   |  4 +-
 .../core/security/authc/Authentication.java   | 10 +++-
 .../execution/WatchExecutionContext.java      | 25 +++++++++
 .../core/watcher/history/WatchRecord.java     | 17 ++++--
 .../WatcherIndexTemplateRegistryField.java    |  3 +-
 .../src/main/resources/watch-history.json     |  3 +
 .../execution/ExecutionServiceTests.java      | 31 ++++++++++
 .../roles.yml                                 |  1 +
 .../20_test_run_as_execute_watch.yml          | 56 +++++++++++++++++++
 9 files changed, 140 insertions(+), 10 deletions(-)

diff --git a/x-pack/docs/en/rest-api/watcher/execute-watch.asciidoc b/x-pack/docs/en/rest-api/watcher/execute-watch.asciidoc
index 91cd89bca6d41..ec2c60c543bab 100644
--- a/x-pack/docs/en/rest-api/watcher/execute-watch.asciidoc
+++ b/x-pack/docs/en/rest-api/watcher/execute-watch.asciidoc
@@ -263,7 +263,8 @@ This is an example of the output:
           "type": "index"
         }
       ]
-    }
+    },
+    "user": "test_admin" <4>
   }
 }
 --------------------------------------------------
@@ -281,6 +282,7 @@ This is an example of the output:
 <1> The id of the watch record as it would be stored in the `.watcher-history` index.
 <2> The watch record document as it would be stored in the `.watcher-history` index.
 <3> The watch execution results.
+<4> The user used to execute the watch.
 
 You can set a different execution mode for every action by associating the mode
 name with the action id:
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/Authentication.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/Authentication.java
index 2a2fdd95d61a9..161d9d449990f 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/Authentication.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authc/Authentication.java
@@ -88,13 +88,17 @@ static Authentication deserializeHeaderAndPutInContext(String header, ThreadCont
             throws IOException, IllegalArgumentException {
         assert ctx.getTransient(AuthenticationField.AUTHENTICATION_KEY) == null;
 
+        Authentication authentication = decode(header);
+        ctx.putTransient(AuthenticationField.AUTHENTICATION_KEY, authentication);
+        return authentication;
+    }
+
+    public static Authentication decode(String header) throws IOException {
         byte[] bytes = Base64.getDecoder().decode(header);
         StreamInput input = StreamInput.wrap(bytes);
         Version version = Version.readVersion(input);
         input.setVersion(version);
-        Authentication authentication = new Authentication(input);
-        ctx.putTransient(AuthenticationField.AUTHENTICATION_KEY, authentication);
-        return authentication;
+        return new Authentication(input);
     }
 
     /**
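The extracted decode(String) keeps the wire logic unchanged: the header is a Base64 payload whose leading bytes carry the serialization version. A JDK-only sketch of just the Base64 round-trip step; the payload string is made up:

    import java.nio.charset.StandardCharsets;
    import java.util.Base64;

    public class HeaderCodecSketch {
        public static void main(String[] args) {
            String header = Base64.getEncoder()
                .encodeToString("versioned-auth-bytes".getBytes(StandardCharsets.UTF_8));
            // decode() reverses this step before reading the versioned stream
            byte[] bytes = Base64.getDecoder().decode(header);
            System.out.println(new String(bytes, StandardCharsets.UTF_8));
        }
    }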
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/execution/WatchExecutionContext.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/execution/WatchExecutionContext.java
index 62216ff681e82..dbbff33dcefe9 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/execution/WatchExecutionContext.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/execution/WatchExecutionContext.java
@@ -8,6 +8,8 @@
 import org.elasticsearch.common.CheckedSupplier;
 import org.elasticsearch.common.unit.TimeValue;
 import org.elasticsearch.common.util.concurrent.ConcurrentCollections;
+import org.elasticsearch.xpack.core.security.authc.Authentication;
+import org.elasticsearch.xpack.core.security.authc.AuthenticationField;
 import org.elasticsearch.xpack.core.watcher.actions.ActionWrapperResult;
 import org.elasticsearch.xpack.core.watcher.condition.Condition;
 import org.elasticsearch.xpack.core.watcher.history.WatchRecord;
@@ -18,6 +20,7 @@
 import org.elasticsearch.xpack.core.watcher.watch.Watch;
 import org.joda.time.DateTime;
 
+import java.io.IOException;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.Map;
@@ -43,6 +46,7 @@ public abstract class WatchExecutionContext {
     private Transform.Result transformResult;
     private ConcurrentMap<String, ActionWrapperResult> actionsResults = ConcurrentCollections.newConcurrentMap();
     private String nodeId;
+    private String user;
 
     public WatchExecutionContext(String watchId, DateTime executionTime, TriggerEvent triggerEvent, TimeValue defaultThrottlePeriod) {
         this.id = new Wid(watchId, executionTime);
@@ -85,6 +89,7 @@ public Watch watch() {
     public final void ensureWatchExists(CheckedSupplier<Watch, Exception> supplier) throws Exception {
         if (watch == null) {
             watch = supplier.get();
+            user = WatchExecutionContext.getUsernameFromWatch(watch);
         }
     }
 
@@ -137,6 +142,11 @@ public String getNodeId() {
         return nodeId;
     }
 
+    /**
+     * @return The user that executes the watch, which will be stored in the watch history
+     */
+    public String getUser() { return user; }
+
     public void start() {
         assert phase == ExecutionPhase.AWAITS_EXECUTION;
         relativeStartTime = System.nanoTime();
@@ -243,4 +253,19 @@ public WatchRecord finish() {
     public WatchExecutionSnapshot createSnapshot(Thread executionThread) {
         return new WatchExecutionSnapshot(this, executionThread.getStackTrace());
     }
+
+    /**
+     * Given a watch, this extracts and decodes the relevant auth header and returns the principal of the user that is
+     * executing the watch.
+     */
+    public static String getUsernameFromWatch(Watch watch) throws IOException {
+        if (watch != null && watch.status() != null && watch.status().getHeaders() != null) {
+            String header = watch.status().getHeaders().get(AuthenticationField.AUTHENTICATION_KEY);
+            if (header != null) {
+                Authentication auth = Authentication.decode(header);
+                return auth.getUser().principal();
+            }
+        }
+        return null;
+    }
 }
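getUsernameFromWatch is deliberately null-tolerant: a missing watch, status, headers map, or header all collapse to null. The same shape written with Optional against invented stand-in names, just to make the fall-through explicit:

    import java.util.Map;
    import java.util.Optional;

    public class PrincipalSketch {
        public static void main(String[] args) {
            Map<String, String> headers = Map.of("auth-header-key", "alice"); // stand-in key
            String user = Optional.ofNullable(headers)
                .map(h -> h.get("auth-header-key"))
                .orElse(null); // any absent step yields null, like the if-ladder above
            System.out.println(user); // alice
        }
    }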
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/history/WatchRecord.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/history/WatchRecord.java
index 74e7b2115faa9..2b28c2f15c9c7 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/history/WatchRecord.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/history/WatchRecord.java
@@ -43,12 +43,14 @@ public abstract class WatchRecord implements ToXContentObject {
     private static final ParseField METADATA = new ParseField("metadata");
     private static final ParseField EXECUTION_RESULT = new ParseField("result");
     private static final ParseField EXCEPTION = new ParseField("exception");
+    private static final ParseField USER = new ParseField("user");
 
     protected final Wid id;
     protected final Watch watch;
     private final String nodeId;
     protected final TriggerEvent triggerEvent;
     protected final ExecutionState state;
+    private final String user;
 
     // only emitted to xcontent in "debug" mode
     protected final Map<String, Object> vars;
@@ -60,7 +62,7 @@ public abstract class WatchRecord implements ToXContentObject {
 
     private WatchRecord(Wid id, TriggerEvent triggerEvent, ExecutionState state, Map<String, Object> vars, ExecutableInput input,
                         ExecutableCondition condition, Map<String, Object> metadata, Watch watch, WatchExecutionResult executionResult,
-                        String nodeId) {
+                        String nodeId, String user) {
         this.id = id;
         this.triggerEvent = triggerEvent;
         this.state = state;
@@ -71,15 +73,16 @@ private WatchRecord(Wid id, TriggerEvent triggerEvent, ExecutionState state, Map
         this.executionResult = executionResult;
         this.watch = watch;
         this.nodeId = nodeId;
+        this.user = user;
     }
 
     private WatchRecord(Wid id, TriggerEvent triggerEvent, ExecutionState state, String nodeId) {
-        this(id, triggerEvent, state, Collections.emptyMap(), null, null, null, null, null, nodeId);
+        this(id, triggerEvent, state, Collections.emptyMap(), null, null, null, null, null, nodeId, null);
     }
 
     private WatchRecord(WatchRecord record, ExecutionState state) {
         this(record.id, record.triggerEvent, state, record.vars, record.input, record.condition, record.metadata, record.watch,
-                record.executionResult, record.nodeId);
+                record.executionResult, record.nodeId, record.user);
     }
 
     private WatchRecord(WatchExecutionContext context, ExecutionState state) {
@@ -88,12 +91,13 @@ private WatchRecord(WatchExecutionContext context, ExecutionState state) {
                 context.watch() != null ? context.watch().condition() : null,
                 context.watch() != null ? context.watch().metadata() : null,
                 context.watch(),
-                null, context.getNodeId());
+                null, context.getNodeId(), context.getUser());
     }
 
     private WatchRecord(WatchExecutionContext context, WatchExecutionResult executionResult) {
         this(context.id(), context.triggerEvent(), getState(executionResult), context.vars(), context.watch().input(),
-                context.watch().condition(), context.watch().metadata(), context.watch(), executionResult, context.getNodeId());
+                context.watch().condition(), context.watch().metadata(), context.watch(), executionResult, context.getNodeId(),
+                context.getUser());
     }
 
     public static ExecutionState getState(WatchExecutionResult executionResult) {
@@ -152,6 +156,9 @@ public final XContentBuilder toXContent(XContentBuilder builder, Params params)
         builder.field(NODE.getPreferredName(), nodeId);
         builder.field(STATE.getPreferredName(), state.id());
 
+        if (user != null) {
+            builder.field(USER.getPreferredName(), user);
+        }
         if (watch != null && watch.status() != null) {
             builder.field(STATUS.getPreferredName(), watch.status(), params);
         }
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/support/WatcherIndexTemplateRegistryField.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/support/WatcherIndexTemplateRegistryField.java
index 25e2c928d9a57..b42506b81b3d4 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/support/WatcherIndexTemplateRegistryField.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/watcher/support/WatcherIndexTemplateRegistryField.java
@@ -13,8 +13,9 @@ public final class WatcherIndexTemplateRegistryField {
     // version 6: upgrade to ES 6, removal of _status field
     // version 7: add full exception stack traces for better debugging
     // version 8: fix slack attachment property not to be dynamic, causing field type issues
+    // version 9: add a user field defining which user executed the watch
     // Note: if you change this, also inform the kibana team around the watcher-ui
-    public static final String INDEX_TEMPLATE_VERSION = "8";
+    public static final String INDEX_TEMPLATE_VERSION = "9";
     public static final String HISTORY_TEMPLATE_NAME = ".watch-history-" + INDEX_TEMPLATE_VERSION;
     public static final String TRIGGERED_TEMPLATE_NAME = ".triggered_watches";
     public static final String WATCHES_TEMPLATE_NAME = ".watches";
diff --git a/x-pack/plugin/core/src/main/resources/watch-history.json b/x-pack/plugin/core/src/main/resources/watch-history.json
index 86a967fc14fe5..9a4a96409b043 100644
--- a/x-pack/plugin/core/src/main/resources/watch-history.json
+++ b/x-pack/plugin/core/src/main/resources/watch-history.json
@@ -120,6 +120,9 @@
         "messages": {
           "type": "text"
         },
+        "user": {
+          "type": "text"
+        },
         "exception" : {
           "type" : "object",
           "enabled" : false
diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/ExecutionServiceTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/ExecutionServiceTests.java
index 73f0e82072055..d3f46d3d452f7 100644
--- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/ExecutionServiceTests.java
+++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/execution/ExecutionServiceTests.java
@@ -31,6 +31,9 @@
 import org.elasticsearch.index.get.GetResult;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.threadpool.ThreadPool;
+import org.elasticsearch.xpack.core.security.authc.Authentication;
+import org.elasticsearch.xpack.core.security.authc.AuthenticationField;
+import org.elasticsearch.xpack.core.security.user.User;
 import org.elasticsearch.xpack.core.watcher.actions.Action;
 import org.elasticsearch.xpack.core.watcher.actions.ActionStatus;
 import org.elasticsearch.xpack.core.watcher.actions.ActionWrapper;
@@ -85,6 +88,7 @@
 import static java.util.Arrays.asList;
 import static java.util.Collections.singletonMap;
 import static org.elasticsearch.common.unit.TimeValue.timeValueSeconds;
+import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.greaterThan;
 import static org.hamcrest.Matchers.hasSize;
 import static org.hamcrest.Matchers.instanceOf;
@@ -1072,6 +1076,33 @@ public void testManualWatchExecutionContextGetsAlwaysExecuted() throws Exception
         assertThat(watchRecord.state(), is(ExecutionState.EXECUTED));
     }
 
+    public void testLoadingWatchExecutionUser() throws Exception {
+        DateTime now = now(UTC);
+        Watch watch = mock(Watch.class);
+        WatchStatus status = mock(WatchStatus.class);
+        ScheduleTriggerEvent event = new ScheduleTriggerEvent("_id", now, now);
+
+        // Should be null, as the watch has no status (and therefore no headers) yet
+        TriggeredExecutionContext context = new TriggeredExecutionContext(watch.id(), now, event, timeValueSeconds(5));
+        context.ensureWatchExists(() -> watch);
+        assertNull(context.getUser());
+
+        // Should still be null, header is not yet set
+        when(watch.status()).thenReturn(status);
+        context = new TriggeredExecutionContext(watch.id(), now, event, timeValueSeconds(5));
+        context.ensureWatchExists(() -> watch);
+        assertNull(context.getUser());
+
+        Authentication authentication = new Authentication(new User("joe", "admin"),
+            new Authentication.RealmRef("native_realm", "native", "node1"), null);
+
+        // Should no longer be null now that the proper header is set
+        when(status.getHeaders()).thenReturn(Collections.singletonMap(AuthenticationField.AUTHENTICATION_KEY, authentication.encode()));
+        context = new TriggeredExecutionContext(watch.id(), now, event, timeValueSeconds(5));
+        context.ensureWatchExists(() -> watch);
+        assertThat(context.getUser(), equalTo("joe"));
+    }
+
     private WatchExecutionContext createMockWatchExecutionContext(String watchId, DateTime executionTime) {
         WatchExecutionContext ctx = mock(WatchExecutionContext.class);
         when(ctx.id()).thenReturn(new Wid(watchId, executionTime));
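
As a companion to testLoadingWatchExecutionUser above, a hedged sketch of how the recorded username comes out of the stored watch status header, assuming Authentication.decode(String) is the counterpart of the encode() call used in the test:

    import org.elasticsearch.xpack.core.security.authc.Authentication;

    // Sketch only: decode the header the test stores under AUTHENTICATION_KEY
    // and pull out the principal ("joe" for the header built above).
    public final class DecodeUserSketch {
        public static String userFrom(String encodedHeader) throws java.io.IOException {
            Authentication authentication = Authentication.decode(encodedHeader);
            return authentication.getUser().principal();
        }
    }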
diff --git a/x-pack/qa/smoke-test-watcher-with-security/roles.yml b/x-pack/qa/smoke-test-watcher-with-security/roles.yml
index bebfa883fcb15..b52fe6c5c5914 100644
--- a/x-pack/qa/smoke-test-watcher-with-security/roles.yml
+++ b/x-pack/qa/smoke-test-watcher-with-security/roles.yml
@@ -21,6 +21,7 @@ watcher_manager:
   run_as:
     - powerless_user
     - watcher_manager
+    - x_pack_rest_user
 
 watcher_monitor:
   cluster:
diff --git a/x-pack/qa/smoke-test-watcher-with-security/src/test/resources/rest-api-spec/test/watcher/watcher_and_security/20_test_run_as_execute_watch.yml b/x-pack/qa/smoke-test-watcher-with-security/src/test/resources/rest-api-spec/test/watcher/watcher_and_security/20_test_run_as_execute_watch.yml
index 9bc7724b2c0f4..7a0634f5187b1 100644
--- a/x-pack/qa/smoke-test-watcher-with-security/src/test/resources/rest-api-spec/test/watcher/watcher_and_security/20_test_run_as_execute_watch.yml
+++ b/x-pack/qa/smoke-test-watcher-with-security/src/test/resources/rest-api-spec/test/watcher/watcher_and_security/20_test_run_as_execute_watch.yml
@@ -74,10 +74,63 @@ teardown:
         id: "my_watch"
   - match: { watch_record.watch_id: "my_watch" }
   - match: { watch_record.state: "executed" }
+  - match: { watch_record.user: "watcher_manager" }
 
 
 
 
+---
+"Test watch is runas user properly recorded":
+  - do:
+      xpack.watcher.put_watch:
+        id: "my_watch"
+        body:  >
+          {
+            "trigger": {
+              "schedule" : { "cron" : "0 0 0 1 * ? 2099" }
+            },
+            "input": {
+              "search" : {
+                "request" : {
+                  "indices" : [ "my_test_index" ],
+                  "body" :{
+                    "query" : { "match_all": {} }
+                  }
+                }
+              }
+            },
+            "condition" : {
+              "compare" : {
+                "ctx.payload.hits.total" : {
+                  "gte" : 1
+                }
+              }
+            },
+            "actions": {
+              "logging": {
+                "logging": {
+                  "text": "Successfully ran my_watch to test for search input"
+                }
+              }
+            }
+          }
+  - match: { _id: "my_watch" }
+
+  - do:
+      xpack.watcher.get_watch:
+        id: "my_watch"
+  - match: { _id: "my_watch" }
+  - is_false: watch.status.headers
+
+  - do:
+      headers: { es-security-runas-user: x_pack_rest_user }
+      xpack.watcher.execute_watch:
+        id: "my_watch"
+  - match: { watch_record.watch_id: "my_watch" }
+  - match: { watch_record.state: "executed" }
+  - match: { watch_record.user: "x_pack_rest_user" }
+
+
 ---
 "Test watch search input does not work against index user is not allowed to read":
 
@@ -130,6 +183,7 @@ teardown:
   - match: { watch_record.watch_id: "my_watch" }
   # because we are not allowed to read the index, there won't be any data
   - match: { watch_record.state: "execution_not_needed" }
+  - match: { watch_record.user: "watcher_manager" }
 
 
 ---
@@ -272,6 +326,7 @@ teardown:
         id: "my_watch"
   - match: { watch_record.watch_id: "my_watch" }
   - match: { watch_record.state: "executed" }
+  - match: { watch_record.user: "watcher_manager" }
 
   - do:
       get:
@@ -320,6 +375,7 @@ teardown:
         id: "my_watch"
   - match: { watch_record.watch_id: "my_watch" }
   - match: { watch_record.state: "executed" }
+  - match: { watch_record.user: "watcher_manager" }
 
   - do:
       get:

From e32552626495701abed23cdaaf07aa676594cc51 Mon Sep 17 00:00:00 2001
From: debadair 
Date: Mon, 16 Jul 2018 12:47:57 -0700
Subject: [PATCH 054/107] Tweaked Elasticsearch Service links for SEO

---
 docs/reference/setup/install.asciidoc | 10 ++++++----
 1 file changed, 6 insertions(+), 4 deletions(-)

diff --git a/docs/reference/setup/install.asciidoc b/docs/reference/setup/install.asciidoc
index 4433ffb8c3876..c0ebfb60fa7b2 100644
--- a/docs/reference/setup/install.asciidoc
+++ b/docs/reference/setup/install.asciidoc
@@ -3,10 +3,12 @@
 
 [float]
 === Hosted Elasticsearch
-Elasticsearch can be run on your own hardware or using our hosted
-Elasticsearch Service on https://www.elastic.co/cloud[Elastic Cloud], which is
-available on AWS and GCP. You can
-https://www.elastic.co/cloud/elasticsearch-service/signup[try out the hosted service] for free.
+
+You can run Elasticsearch on your own hardware, or use our
+https://www.elastic.co/cloud/elasticsearch-service[hosted Elasticsearch Service]
+on Elastic Cloud. The Elasticsearch Service is available on both AWS and GCP.
+https://www.elastic.co/cloud/elasticsearch-service/signup[Try out the
+Elasticsearch Service for free].
 
 [float]
 === Installing Elasticsearch Yourself

From e514ad0fea56b3547693c823c932c53c1eb151a0 Mon Sep 17 00:00:00 2001
From: debadair 
Date: Mon, 16 Jul 2018 12:58:08 -0700
Subject: [PATCH 055/107] Tweaked Elasticsearch Service links for SEO

---
 docs/reference/getting-started.asciidoc | 9 +++++----
 1 file changed, 5 insertions(+), 4 deletions(-)

diff --git a/docs/reference/getting-started.asciidoc b/docs/reference/getting-started.asciidoc
index ff00c310a4308..e2dae2856886e 100755
--- a/docs/reference/getting-started.asciidoc
+++ b/docs/reference/getting-started.asciidoc
@@ -106,10 +106,11 @@ With that out of the way, let's get started with the fun part...
 
 [TIP]
 ==============
-You can skip installation completely by using our hosted
-Elasticsearch Service on https://www.elastic.co/cloud[Elastic Cloud], which is
-available on AWS and GCP. You can
-https://www.elastic.co/cloud/elasticsearch-service/signup[try out the hosted service] for free.
+You can skip having to install Elasticsearch by using our
+https://www.elastic.co/cloud/elasticsearch-service[hosted Elasticsearch Service]
+on Elastic Cloud. The Elasticsearch Service is available on both AWS and GCP.
+https://www.elastic.co/cloud/elasticsearch-service/signup[Try out the
+Elasticsearch Service for free].
 ==============
 
 Elasticsearch requires at least Java 8. Specifically as of this writing, it is recommended that you use the Oracle JDK version {jdk}. Java installation varies from platform to platform so we won't go into those details here. Oracle's recommended installation documentation can be found on http://docs.oracle.com/javase/8/docs/technotes/guides/install/install_overview.html[Oracle's website]. Suffice to say, before you install Elasticsearch, please check your Java version first by running (and then install/upgrade accordingly if needed):

From 50d8fa851af9527f4be8fedc3ace8b6996ef2d2a Mon Sep 17 00:00:00 2001
From: Andy Bristol 
Date: Mon, 16 Jul 2018 13:02:53 -0700
Subject: [PATCH 056/107] [test] turn on host io cache for opensuse (#32053)

The hope is that this will resolve the problems with very slow I/O we're
seeing on this box in #30295.
---
 Vagrantfile | 5 +++++
 1 file changed, 5 insertions(+)

diff --git a/Vagrantfile b/Vagrantfile
index 7322399fed576..de344e1818360 100644
--- a/Vagrantfile
+++ b/Vagrantfile
@@ -115,6 +115,11 @@ Vagrant.configure(2) do |config|
   'opensuse-42'.tap do |box|
     config.vm.define box, define_opts do |config|
       config.vm.box = 'elastic/opensuse-42-x86_64'
+
+      # https://github.com/elastic/elasticsearch/issues/30295
+      config.vm.provider 'virtualbox' do |vbox|
+        vbox.customize ['storagectl', :id, '--name', 'SATA Controller', '--hostiocache', 'on']
+      end
       suse_common config, box
     end
   end

From c0ffec77c305a7a17680e2550b9a2e4deba11af5 Mon Sep 17 00:00:00 2001
From: ahmedakef 
Date: Mon, 16 Jul 2018 22:00:57 +0200
Subject: [PATCH 057/107] DOCS: add LIMIT 10 to the SQL query (#32065)

Provides a more precise SQL equivalent for the aggregation example in the
getting started guide: the terms aggregation returns only its top 10 buckets
by default, so the SQL query needs a matching LIMIT 10.
---
 docs/reference/getting-started.asciidoc | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/docs/reference/getting-started.asciidoc b/docs/reference/getting-started.asciidoc
index e2dae2856886e..b89021e1cfe59 100755
--- a/docs/reference/getting-started.asciidoc
+++ b/docs/reference/getting-started.asciidoc
@@ -1122,7 +1122,7 @@ In SQL, the above aggregation is similar in concept to:
 
 [source,sh]
 --------------------------------------------------
-SELECT state, COUNT(*) FROM bank GROUP BY state ORDER BY COUNT(*) DESC
+SELECT state, COUNT(*) FROM bank GROUP BY state ORDER BY COUNT(*) DESC LIMIT 10;
 --------------------------------------------------
 
 And the response (partially shown):

From ee4ef861b568d392c0fc125b41d6fb5104603366 Mon Sep 17 00:00:00 2001
From: Costin Leau 
Date: Mon, 16 Jul 2018 23:42:44 +0300
Subject: [PATCH 058/107] SQL: allow LEFT and RIGHT as function names (#32066)

Due to the way ANTLR works, any declared tokens (such as LEFT and RIGHT)
need to be accounted for manually inside the function name rule; otherwise
a different rule gets applied and the token cannot be used as a function name.

Fix #32046
---
 x-pack/plugin/sql/src/main/antlr/SqlBase.g4   |    7 +-
 .../xpack/sql/parser/ExpressionBuilder.java   |    3 +-
 .../xpack/sql/parser/SqlBaseBaseListener.java |   12 +
 .../xpack/sql/parser/SqlBaseBaseVisitor.java  |    7 +
 .../xpack/sql/parser/SqlBaseListener.java     |   10 +
 .../xpack/sql/parser/SqlBaseParser.java       | 1628 +++++++++--------
 .../xpack/sql/parser/SqlBaseVisitor.java      |    6 +
 .../xpack/sql/parser/ExpressionTests.java     |    8 +
 .../xpack/sql/parser/SqlParserTests.java      |    6 +
 9 files changed, 922 insertions(+), 765 deletions(-)

diff --git a/x-pack/plugin/sql/src/main/antlr/SqlBase.g4 b/x-pack/plugin/sql/src/main/antlr/SqlBase.g4
index 2c3288babd6d8..490a5486b42c7 100644
--- a/x-pack/plugin/sql/src/main/antlr/SqlBase.g4
+++ b/x-pack/plugin/sql/src/main/antlr/SqlBase.g4
@@ -243,7 +243,12 @@ functionExpression
     ;
     
 functionTemplate
-    : identifier '(' (setQuantifier? expression (',' expression)*)? ')'
+    : functionName '(' (setQuantifier? expression (',' expression)*)? ')'
+    ;
+functionName
+    : LEFT 
+    | RIGHT 
+    | identifier
     ;
     
 constant
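
To make the effect of the new functionName rule concrete, a hedged sketch of what now parses, in the spirit of the amended ExpressionTests (SqlParser#createExpression is assumed to be the entry point those tests use; assertions omitted):

    import org.elasticsearch.xpack.sql.expression.Expression;
    import org.elasticsearch.xpack.sql.parser.SqlParser;

    // Before this grammar change LEFT and RIGHT only matched their JOIN-keyword
    // tokens, so both expressions below failed to parse as function calls.
    public final class LeftRightAsFunctionNames {
        public static void main(String[] args) {
            SqlParser parser = new SqlParser();
            Expression left = parser.createExpression("LEFT(first_name, 3)");
            Expression right = parser.createExpression("RIGHT(last_name, 2)");
            System.out.println(left + " | " + right);
        }
    }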
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java
index 66ec98ea53c82..e202803b2610a 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/ExpressionBuilder.java
@@ -396,8 +396,7 @@ public Function visitExtractExpression(ExtractExpressionContext ctx) {
     @Override
     public Function visitFunctionExpression(FunctionExpressionContext ctx) {
         FunctionTemplateContext template = ctx.functionTemplate();
-
-        String name = visitIdentifier(template.identifier());
+        String name = template.functionName().getText();
         boolean isDistinct = template.setQuantifier() != null && template.setQuantifier().DISTINCT() != null;
         UnresolvedFunction.ResolutionType resolutionType =
                 isDistinct ? UnresolvedFunction.ResolutionType.DISTINCT : UnresolvedFunction.ResolutionType.STANDARD;
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseListener.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseListener.java
index b353bcf6521f9..72c417992e343 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseListener.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseListener.java
@@ -803,6 +803,18 @@ class SqlBaseBaseListener implements SqlBaseListener {
    * <p>The default implementation does nothing.</p>
    */
   @Override public void exitFunctionTemplate(SqlBaseParser.FunctionTemplateContext ctx) { }
+  /**
+   * {@inheritDoc}
+   *
+   * <p>The default implementation does nothing.</p>
+   */
+  @Override public void enterFunctionName(SqlBaseParser.FunctionNameContext ctx) { }
+  /**
+   * {@inheritDoc}
+   *
+   * <p>The default implementation does nothing.</p>
+   */
+  @Override public void exitFunctionName(SqlBaseParser.FunctionNameContext ctx) { }
   /**
    * {@inheritDoc}
    *
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseVisitor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseVisitor.java
index d40ae6daa6e34..fd35ec421f66c 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseVisitor.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseBaseVisitor.java
@@ -473,6 +473,13 @@ class SqlBaseBaseVisitor<T> extends AbstractParseTreeVisitor<T> implements SqlBa
    * {@link #visitChildren} on {@code ctx}.</p>
    */
   @Override public T visitFunctionTemplate(SqlBaseParser.FunctionTemplateContext ctx) { return visitChildren(ctx); }
+  /**
+   * {@inheritDoc}
+   *
+   * <p>The default implementation returns the result of calling
+   * {@link #visitChildren} on {@code ctx}.</p>
+   */
+  @Override public T visitFunctionName(SqlBaseParser.FunctionNameContext ctx) { return visitChildren(ctx); }
   /**
    * {@inheritDoc}
    *
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseListener.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseListener.java
index 70fe777384c85..18b2a21c02efd 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseListener.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseListener.java
@@ -745,6 +745,16 @@ interface SqlBaseListener extends ParseTreeListener {
    * @param ctx the parse tree
    */
   void exitFunctionTemplate(SqlBaseParser.FunctionTemplateContext ctx);
+  /**
+   * Enter a parse tree produced by {@link SqlBaseParser#functionName}.
+   * @param ctx the parse tree
+   */
+  void enterFunctionName(SqlBaseParser.FunctionNameContext ctx);
+  /**
+   * Exit a parse tree produced by {@link SqlBaseParser#functionName}.
+   * @param ctx the parse tree
+   */
+  void exitFunctionName(SqlBaseParser.FunctionNameContext ctx);
   /**
    * Enter a parse tree produced by the {@code nullLiteral}
    * labeled alternative in {@link SqlBaseParser#constant}.
diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseParser.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseParser.java
index 32a1b062feebf..c91758dadbc3a 100644
--- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseParser.java
+++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseParser.java
@@ -44,10 +44,11 @@ class SqlBaseParser extends Parser {
     RULE_predicate = 24, RULE_pattern = 25, RULE_patternEscape = 26, RULE_valueExpression = 27,
     RULE_primaryExpression = 28, RULE_castExpression = 29, RULE_castTemplate = 30,
     RULE_extractExpression = 31, RULE_extractTemplate = 32, RULE_functionExpression = 33,
-    RULE_functionTemplate = 34, RULE_constant = 35, RULE_comparisonOperator = 36,
-    RULE_booleanValue = 37, RULE_dataType = 38, RULE_qualifiedName = 39, RULE_identifier = 40,
-    RULE_tableIdentifier = 41, RULE_quoteIdentifier = 42, RULE_unquoteIdentifier = 43,
-    RULE_number = 44, RULE_string = 45, RULE_nonReserved = 46;
+    RULE_functionTemplate = 34, RULE_functionName = 35, RULE_constant = 36,
+    RULE_comparisonOperator = 37, RULE_booleanValue = 38, RULE_dataType = 39,
+    RULE_qualifiedName = 40, RULE_identifier = 41, RULE_tableIdentifier = 42,
+    RULE_quoteIdentifier = 43, RULE_unquoteIdentifier = 44, RULE_number = 45,
+    RULE_string = 46, RULE_nonReserved = 47;
   public static final String[] ruleNames = {
     "singleStatement", "singleExpression", "statement", "query", "queryNoWith",
     "limitClause", "queryTerm", "orderBy", "querySpecification", "fromClause",
@@ -56,9 +57,9 @@ class SqlBaseParser extends Parser {
     "relationPrimary", "expression", "booleanExpression", "predicated", "predicate",
     "pattern", "patternEscape", "valueExpression", "primaryExpression", "castExpression",
     "castTemplate", "extractExpression", "extractTemplate", "functionExpression",
-    "functionTemplate", "constant", "comparisonOperator", "booleanValue",
-    "dataType", "qualifiedName", "identifier", "tableIdentifier", "quoteIdentifier",
-    "unquoteIdentifier", "number", "string", "nonReserved"
+    "functionTemplate", "functionName", "constant", "comparisonOperator",
+    "booleanValue", "dataType", "qualifiedName", "identifier", "tableIdentifier",
+    "quoteIdentifier", "unquoteIdentifier", "number", "string", "nonReserved"
   };

[... remainder of the regenerated SqlBaseParser.java: every setState()/adaptivePredict() number shifts by 2 for the inserted rule, and LEFT and RIGHT are added to the token bitsets and switch cases (groupingExpressions, selectItem, relationPrimary, valueExpression, primaryExpression) so both tokens are accepted wherever an identifier-based function name is ...]
(VERIFY - 64)) | (1L << (IDENTIFIER - 64)) | (1L << (DIGIT_IDENTIFIER - 64)) | (1L << (QUOTED_IDENTIFIER - 64)) | (1L << (BACKQUOTED_IDENTIFIER - 64)))) != 0)) { { - setState(571); + setState(573); qualifiedName(); - setState(572); + setState(574); match(DOT); } } - setState(576); + setState(578); match(ASTERISK); } break; @@ -3959,7 +3962,7 @@ public final PrimaryExpressionContext primaryExpression() throws RecognitionExce _localctx = new FunctionContext(_localctx); enterOuterAlt(_localctx, 6); { - setState(577); + setState(579); functionExpression(); } break; @@ -3967,11 +3970,11 @@ public final PrimaryExpressionContext primaryExpression() throws RecognitionExce _localctx = new SubqueryExpressionContext(_localctx); enterOuterAlt(_localctx, 7); { - setState(578); + setState(580); match(T__0); - setState(579); + setState(581); query(); - setState(580); + setState(582); match(T__1); } break; @@ -3979,7 +3982,7 @@ public final PrimaryExpressionContext primaryExpression() throws RecognitionExce _localctx = new ColumnReferenceContext(_localctx); enterOuterAlt(_localctx, 8); { - setState(582); + setState(584); identifier(); } break; @@ -3987,7 +3990,7 @@ public final PrimaryExpressionContext primaryExpression() throws RecognitionExce _localctx = new DereferenceContext(_localctx); enterOuterAlt(_localctx, 9); { - setState(583); + setState(585); qualifiedName(); } break; @@ -3995,11 +3998,11 @@ public final PrimaryExpressionContext primaryExpression() throws RecognitionExce _localctx = new ParenthesizedExpressionContext(_localctx); enterOuterAlt(_localctx, 10); { - setState(584); + setState(586); match(T__0); - setState(585); + setState(587); expression(); - setState(586); + setState(588); match(T__1); } break; @@ -4045,23 +4048,23 @@ public final CastExpressionContext castExpression() throws RecognitionException CastExpressionContext _localctx = new CastExpressionContext(_ctx, getState()); enterRule(_localctx, 58, RULE_castExpression); try { - setState(595); + setState(597); switch (_input.LA(1)) { case CAST: enterOuterAlt(_localctx, 1); { - setState(590); + setState(592); castTemplate(); } break; case FUNCTION_ESC: enterOuterAlt(_localctx, 2); { - setState(591); + setState(593); match(FUNCTION_ESC); - setState(592); + setState(594); castTemplate(); - setState(593); + setState(595); match(ESC_END); } break; @@ -4114,17 +4117,17 @@ public final CastTemplateContext castTemplate() throws RecognitionException { try { enterOuterAlt(_localctx, 1); { - setState(597); + setState(599); match(CAST); - setState(598); + setState(600); match(T__0); - setState(599); + setState(601); expression(); - setState(600); + setState(602); match(AS); - setState(601); + setState(603); dataType(); - setState(602); + setState(604); match(T__1); } } @@ -4168,23 +4171,23 @@ public final ExtractExpressionContext extractExpression() throws RecognitionExce ExtractExpressionContext _localctx = new ExtractExpressionContext(_ctx, getState()); enterRule(_localctx, 62, RULE_extractExpression); try { - setState(609); + setState(611); switch (_input.LA(1)) { case EXTRACT: enterOuterAlt(_localctx, 1); { - setState(604); + setState(606); extractTemplate(); } break; case FUNCTION_ESC: enterOuterAlt(_localctx, 2); { - setState(605); + setState(607); match(FUNCTION_ESC); - setState(606); + setState(608); extractTemplate(); - setState(607); + setState(609); match(ESC_END); } break; @@ -4238,17 +4241,17 @@ public final ExtractTemplateContext extractTemplate() throws RecognitionExceptio try { enterOuterAlt(_localctx, 1); { - setState(611); + 
setState(613); match(EXTRACT); - setState(612); + setState(614); match(T__0); - setState(613); + setState(615); ((ExtractTemplateContext)_localctx).field = identifier(); - setState(614); + setState(616); match(FROM); - setState(615); + setState(617); valueExpression(0); - setState(616); + setState(618); match(T__1); } } @@ -4291,7 +4294,7 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx FunctionExpressionContext _localctx = new FunctionExpressionContext(_ctx, getState()); enterRule(_localctx, 66, RULE_functionExpression); try { - setState(623); + setState(625); switch (_input.LA(1)) { case ANALYZE: case ANALYZED: @@ -4303,11 +4306,13 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx case FORMAT: case FUNCTIONS: case GRAPHVIZ: + case LEFT: case MAPPED: case OPTIMIZED: case PARSED: case PHYSICAL: case PLAN: + case RIGHT: case RLIKE: case QUERY: case SCHEMAS: @@ -4324,18 +4329,18 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx case BACKQUOTED_IDENTIFIER: enterOuterAlt(_localctx, 1); { - setState(618); + setState(620); functionTemplate(); } break; case FUNCTION_ESC: enterOuterAlt(_localctx, 2); { - setState(619); + setState(621); match(FUNCTION_ESC); - setState(620); + setState(622); functionTemplate(); - setState(621); + setState(623); match(ESC_END); } break; @@ -4355,8 +4360,8 @@ public final FunctionExpressionContext functionExpression() throws RecognitionEx } public static class FunctionTemplateContext extends ParserRuleContext { - public IdentifierContext identifier() { - return getRuleContext(IdentifierContext.class,0); + public FunctionNameContext functionName() { + return getRuleContext(FunctionNameContext.class,0); } public List expression() { return getRuleContexts(ExpressionContext.class); @@ -4393,45 +4398,45 @@ public final FunctionTemplateContext functionTemplate() throws RecognitionExcept try { enterOuterAlt(_localctx, 1); { - setState(625); - identifier(); - setState(626); + setState(627); + functionName(); + setState(628); match(T__0); - setState(638); + setState(640); _la = _input.LA(1); - if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << T__0) | (1L << ALL) | (1L << ANALYZE) | (1L << ANALYZED) | (1L << CAST) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << DEBUG) | (1L << DISTINCT) | (1L << EXECUTABLE) | (1L << EXISTS) | (1L << EXPLAIN) | (1L << EXTRACT) | (1L << FALSE) | (1L << FORMAT) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << MAPPED) | (1L << MATCH) | (1L << NOT) | (1L << NULL) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << RLIKE) | (1L << QUERY) | (1L << SCHEMAS) | (1L << SHOW) | (1L << SYS) | (1L << TABLES))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (TEXT - 64)) | (1L << (TRUE - 64)) | (1L << (TYPE - 64)) | (1L << (TYPES - 64)) | (1L << (VERIFY - 64)) | (1L << (FUNCTION_ESC - 64)) | (1L << (DATE_ESC - 64)) | (1L << (TIME_ESC - 64)) | (1L << (TIMESTAMP_ESC - 64)) | (1L << (GUID_ESC - 64)) | (1L << (PLUS - 64)) | (1L << (MINUS - 64)) | (1L << (ASTERISK - 64)) | (1L << (PARAM - 64)) | (1L << (STRING - 64)) | (1L << (INTEGER_VALUE - 64)) | (1L << (DECIMAL_VALUE - 64)) | (1L << (IDENTIFIER - 64)) | (1L << (DIGIT_IDENTIFIER - 64)) | (1L << (QUOTED_IDENTIFIER - 64)) | (1L << (BACKQUOTED_IDENTIFIER - 64)))) != 0)) { + if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << T__0) | (1L << ALL) | (1L << ANALYZE) | (1L << ANALYZED) | (1L << CAST) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << DEBUG) | (1L << 
DISTINCT) | (1L << EXECUTABLE) | (1L << EXISTS) | (1L << EXPLAIN) | (1L << EXTRACT) | (1L << FALSE) | (1L << FORMAT) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << LEFT) | (1L << MAPPED) | (1L << MATCH) | (1L << NOT) | (1L << NULL) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << RIGHT) | (1L << RLIKE) | (1L << QUERY) | (1L << SCHEMAS) | (1L << SHOW) | (1L << SYS) | (1L << TABLES))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (TEXT - 64)) | (1L << (TRUE - 64)) | (1L << (TYPE - 64)) | (1L << (TYPES - 64)) | (1L << (VERIFY - 64)) | (1L << (FUNCTION_ESC - 64)) | (1L << (DATE_ESC - 64)) | (1L << (TIME_ESC - 64)) | (1L << (TIMESTAMP_ESC - 64)) | (1L << (GUID_ESC - 64)) | (1L << (PLUS - 64)) | (1L << (MINUS - 64)) | (1L << (ASTERISK - 64)) | (1L << (PARAM - 64)) | (1L << (STRING - 64)) | (1L << (INTEGER_VALUE - 64)) | (1L << (DECIMAL_VALUE - 64)) | (1L << (IDENTIFIER - 64)) | (1L << (DIGIT_IDENTIFIER - 64)) | (1L << (QUOTED_IDENTIFIER - 64)) | (1L << (BACKQUOTED_IDENTIFIER - 64)))) != 0)) { { - setState(628); + setState(630); _la = _input.LA(1); if (_la==ALL || _la==DISTINCT) { { - setState(627); + setState(629); setQuantifier(); } } - setState(630); + setState(632); expression(); - setState(635); + setState(637); _errHandler.sync(this); _la = _input.LA(1); while (_la==T__2) { { { - setState(631); + setState(633); match(T__2); - setState(632); + setState(634); expression(); } } - setState(637); + setState(639); _errHandler.sync(this); _la = _input.LA(1); } } } - setState(640); + setState(642); match(T__1); } } @@ -4446,6 +4451,101 @@ public final FunctionTemplateContext functionTemplate() throws RecognitionExcept return _localctx; } + public static class FunctionNameContext extends ParserRuleContext { + public TerminalNode LEFT() { return getToken(SqlBaseParser.LEFT, 0); } + public TerminalNode RIGHT() { return getToken(SqlBaseParser.RIGHT, 0); } + public IdentifierContext identifier() { + return getRuleContext(IdentifierContext.class,0); + } + public FunctionNameContext(ParserRuleContext parent, int invokingState) { + super(parent, invokingState); + } + @Override public int getRuleIndex() { return RULE_functionName; } + @Override + public void enterRule(ParseTreeListener listener) { + if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).enterFunctionName(this); + } + @Override + public void exitRule(ParseTreeListener listener) { + if ( listener instanceof SqlBaseListener ) ((SqlBaseListener)listener).exitFunctionName(this); + } + @Override + public T accept(ParseTreeVisitor visitor) { + if ( visitor instanceof SqlBaseVisitor ) return ((SqlBaseVisitor)visitor).visitFunctionName(this); + else return visitor.visitChildren(this); + } + } + + public final FunctionNameContext functionName() throws RecognitionException { + FunctionNameContext _localctx = new FunctionNameContext(_ctx, getState()); + enterRule(_localctx, 70, RULE_functionName); + try { + setState(647); + switch (_input.LA(1)) { + case LEFT: + enterOuterAlt(_localctx, 1); + { + setState(644); + match(LEFT); + } + break; + case RIGHT: + enterOuterAlt(_localctx, 2); + { + setState(645); + match(RIGHT); + } + break; + case ANALYZE: + case ANALYZED: + case CATALOGS: + case COLUMNS: + case DEBUG: + case EXECUTABLE: + case EXPLAIN: + case FORMAT: + case FUNCTIONS: + case GRAPHVIZ: + case MAPPED: + case OPTIMIZED: + case PARSED: + case PHYSICAL: + case PLAN: + case RLIKE: + case QUERY: + case SCHEMAS: + case SHOW: + case SYS: + case TABLES: + case TEXT: + case TYPE: + 
case TYPES: + case VERIFY: + case IDENTIFIER: + case DIGIT_IDENTIFIER: + case QUOTED_IDENTIFIER: + case BACKQUOTED_IDENTIFIER: + enterOuterAlt(_localctx, 3); + { + setState(646); + identifier(); + } + break; + default: + throw new NoViableAltException(this); + } + } + catch (RecognitionException re) { + _localctx.exception = re; + _errHandler.reportError(this, re); + _errHandler.recover(this, re); + } + finally { + exitRule(); + } + return _localctx; + } + public static class ConstantContext extends ParserRuleContext { public ConstantContext(ParserRuleContext parent, int invokingState) { super(parent, invokingState); @@ -4636,16 +4736,16 @@ public T accept(ParseTreeVisitor visitor) { public final ConstantContext constant() throws RecognitionException { ConstantContext _localctx = new ConstantContext(_ctx, getState()); - enterRule(_localctx, 70, RULE_constant); + enterRule(_localctx, 72, RULE_constant); try { int _alt; - setState(667); + setState(674); switch (_input.LA(1)) { case NULL: _localctx = new NullLiteralContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(642); + setState(649); match(NULL); } break; @@ -4654,7 +4754,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new NumericLiteralContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(643); + setState(650); number(); } break; @@ -4663,7 +4763,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new BooleanLiteralContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(644); + setState(651); booleanValue(); } break; @@ -4671,7 +4771,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new StringLiteralContext(_localctx); enterOuterAlt(_localctx, 4); { - setState(646); + setState(653); _errHandler.sync(this); _alt = 1; do { @@ -4679,7 +4779,7 @@ public final ConstantContext constant() throws RecognitionException { case 1: { { - setState(645); + setState(652); match(STRING); } } @@ -4687,9 +4787,9 @@ public final ConstantContext constant() throws RecognitionException { default: throw new NoViableAltException(this); } - setState(648); + setState(655); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,89,_ctx); + _alt = getInterpreter().adaptivePredict(_input,90,_ctx); } while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ); } break; @@ -4697,7 +4797,7 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new ParamLiteralContext(_localctx); enterOuterAlt(_localctx, 5); { - setState(650); + setState(657); match(PARAM); } break; @@ -4705,11 +4805,11 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new DateEscapedLiteralContext(_localctx); enterOuterAlt(_localctx, 6); { - setState(651); + setState(658); match(DATE_ESC); - setState(652); + setState(659); string(); - setState(653); + setState(660); match(ESC_END); } break; @@ -4717,11 +4817,11 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new TimeEscapedLiteralContext(_localctx); enterOuterAlt(_localctx, 7); { - setState(655); + setState(662); match(TIME_ESC); - setState(656); + setState(663); string(); - setState(657); + setState(664); match(ESC_END); } break; @@ -4729,11 +4829,11 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new TimestampEscapedLiteralContext(_localctx); enterOuterAlt(_localctx, 8); { - setState(659); + setState(666); match(TIMESTAMP_ESC); - setState(660); + 
setState(667); string(); - setState(661); + setState(668); match(ESC_END); } break; @@ -4741,11 +4841,11 @@ public final ConstantContext constant() throws RecognitionException { _localctx = new GuidEscapedLiteralContext(_localctx); enterOuterAlt(_localctx, 9); { - setState(663); + setState(670); match(GUID_ESC); - setState(664); + setState(671); string(); - setState(665); + setState(672); match(ESC_END); } break; @@ -4792,12 +4892,12 @@ public T accept(ParseTreeVisitor visitor) { public final ComparisonOperatorContext comparisonOperator() throws RecognitionException { ComparisonOperatorContext _localctx = new ComparisonOperatorContext(_ctx, getState()); - enterRule(_localctx, 72, RULE_comparisonOperator); + enterRule(_localctx, 74, RULE_comparisonOperator); int _la; try { enterOuterAlt(_localctx, 1); { - setState(669); + setState(676); _la = _input.LA(1); if ( !(((((_la - 80)) & ~0x3f) == 0 && ((1L << (_la - 80)) & ((1L << (EQ - 80)) | (1L << (NEQ - 80)) | (1L << (LT - 80)) | (1L << (LTE - 80)) | (1L << (GT - 80)) | (1L << (GTE - 80)))) != 0)) ) { _errHandler.recoverInline(this); @@ -4841,12 +4941,12 @@ public T accept(ParseTreeVisitor visitor) { public final BooleanValueContext booleanValue() throws RecognitionException { BooleanValueContext _localctx = new BooleanValueContext(_ctx, getState()); - enterRule(_localctx, 74, RULE_booleanValue); + enterRule(_localctx, 76, RULE_booleanValue); int _la; try { enterOuterAlt(_localctx, 1); { - setState(671); + setState(678); _la = _input.LA(1); if ( !(_la==FALSE || _la==TRUE) ) { _errHandler.recoverInline(this); @@ -4899,12 +4999,12 @@ public T accept(ParseTreeVisitor visitor) { public final DataTypeContext dataType() throws RecognitionException { DataTypeContext _localctx = new DataTypeContext(_ctx, getState()); - enterRule(_localctx, 76, RULE_dataType); + enterRule(_localctx, 78, RULE_dataType); try { _localctx = new PrimitiveDataTypeContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(673); + setState(680); identifier(); } } @@ -4951,30 +5051,30 @@ public T accept(ParseTreeVisitor visitor) { public final QualifiedNameContext qualifiedName() throws RecognitionException { QualifiedNameContext _localctx = new QualifiedNameContext(_ctx, getState()); - enterRule(_localctx, 78, RULE_qualifiedName); + enterRule(_localctx, 80, RULE_qualifiedName); try { int _alt; enterOuterAlt(_localctx, 1); { - setState(680); + setState(687); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,91,_ctx); + _alt = getInterpreter().adaptivePredict(_input,92,_ctx); while ( _alt!=2 && _alt!=org.antlr.v4.runtime.atn.ATN.INVALID_ALT_NUMBER ) { if ( _alt==1 ) { { { - setState(675); + setState(682); identifier(); - setState(676); + setState(683); match(DOT); } } } - setState(682); + setState(689); _errHandler.sync(this); - _alt = getInterpreter().adaptivePredict(_input,91,_ctx); + _alt = getInterpreter().adaptivePredict(_input,92,_ctx); } - setState(683); + setState(690); identifier(); } } @@ -5017,15 +5117,15 @@ public T accept(ParseTreeVisitor visitor) { public final IdentifierContext identifier() throws RecognitionException { IdentifierContext _localctx = new IdentifierContext(_ctx, getState()); - enterRule(_localctx, 80, RULE_identifier); + enterRule(_localctx, 82, RULE_identifier); try { - setState(687); + setState(694); switch (_input.LA(1)) { case QUOTED_IDENTIFIER: case BACKQUOTED_IDENTIFIER: enterOuterAlt(_localctx, 1); { - setState(685); + setState(692); quoteIdentifier(); } break; @@ -5058,7 +5158,7 @@ public final IdentifierContext 
identifier() throws RecognitionException { case DIGIT_IDENTIFIER: enterOuterAlt(_localctx, 2); { - setState(686); + setState(693); unquoteIdentifier(); } break; @@ -5108,46 +5208,46 @@ public T accept(ParseTreeVisitor visitor) { public final TableIdentifierContext tableIdentifier() throws RecognitionException { TableIdentifierContext _localctx = new TableIdentifierContext(_ctx, getState()); - enterRule(_localctx, 82, RULE_tableIdentifier); + enterRule(_localctx, 84, RULE_tableIdentifier); int _la; try { - setState(701); + setState(708); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,95,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,96,_ctx) ) { case 1: enterOuterAlt(_localctx, 1); { - setState(692); + setState(699); _la = _input.LA(1); if ((((_la) & ~0x3f) == 0 && ((1L << _la) & ((1L << ANALYZE) | (1L << ANALYZED) | (1L << CATALOGS) | (1L << COLUMNS) | (1L << DEBUG) | (1L << EXECUTABLE) | (1L << EXPLAIN) | (1L << FORMAT) | (1L << FUNCTIONS) | (1L << GRAPHVIZ) | (1L << MAPPED) | (1L << OPTIMIZED) | (1L << PARSED) | (1L << PHYSICAL) | (1L << PLAN) | (1L << RLIKE) | (1L << QUERY) | (1L << SCHEMAS) | (1L << SHOW) | (1L << SYS) | (1L << TABLES))) != 0) || ((((_la - 64)) & ~0x3f) == 0 && ((1L << (_la - 64)) & ((1L << (TEXT - 64)) | (1L << (TYPE - 64)) | (1L << (TYPES - 64)) | (1L << (VERIFY - 64)) | (1L << (IDENTIFIER - 64)) | (1L << (DIGIT_IDENTIFIER - 64)) | (1L << (QUOTED_IDENTIFIER - 64)) | (1L << (BACKQUOTED_IDENTIFIER - 64)))) != 0)) { { - setState(689); + setState(696); ((TableIdentifierContext)_localctx).catalog = identifier(); - setState(690); + setState(697); match(T__3); } } - setState(694); + setState(701); match(TABLE_IDENTIFIER); } break; case 2: enterOuterAlt(_localctx, 2); { - setState(698); + setState(705); _errHandler.sync(this); - switch ( getInterpreter().adaptivePredict(_input,94,_ctx) ) { + switch ( getInterpreter().adaptivePredict(_input,95,_ctx) ) { case 1: { - setState(695); + setState(702); ((TableIdentifierContext)_localctx).catalog = identifier(); - setState(696); + setState(703); match(T__3); } break; } - setState(700); + setState(707); ((TableIdentifierContext)_localctx).name = identifier(); } break; @@ -5212,15 +5312,15 @@ public T accept(ParseTreeVisitor visitor) { public final QuoteIdentifierContext quoteIdentifier() throws RecognitionException { QuoteIdentifierContext _localctx = new QuoteIdentifierContext(_ctx, getState()); - enterRule(_localctx, 84, RULE_quoteIdentifier); + enterRule(_localctx, 86, RULE_quoteIdentifier); try { - setState(705); + setState(712); switch (_input.LA(1)) { case QUOTED_IDENTIFIER: _localctx = new QuotedIdentifierContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(703); + setState(710); match(QUOTED_IDENTIFIER); } break; @@ -5228,7 +5328,7 @@ public final QuoteIdentifierContext quoteIdentifier() throws RecognitionExceptio _localctx = new BackQuotedIdentifierContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(704); + setState(711); match(BACKQUOTED_IDENTIFIER); } break; @@ -5298,15 +5398,15 @@ public T accept(ParseTreeVisitor visitor) { public final UnquoteIdentifierContext unquoteIdentifier() throws RecognitionException { UnquoteIdentifierContext _localctx = new UnquoteIdentifierContext(_ctx, getState()); - enterRule(_localctx, 86, RULE_unquoteIdentifier); + enterRule(_localctx, 88, RULE_unquoteIdentifier); try { - setState(710); + setState(717); switch (_input.LA(1)) { case IDENTIFIER: _localctx = new UnquotedIdentifierContext(_localctx); enterOuterAlt(_localctx, 1); { - 
setState(707); + setState(714); match(IDENTIFIER); } break; @@ -5338,7 +5438,7 @@ public final UnquoteIdentifierContext unquoteIdentifier() throws RecognitionExce _localctx = new UnquotedIdentifierContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(708); + setState(715); nonReserved(); } break; @@ -5346,7 +5446,7 @@ public final UnquoteIdentifierContext unquoteIdentifier() throws RecognitionExce _localctx = new DigitIdentifierContext(_localctx); enterOuterAlt(_localctx, 3); { - setState(709); + setState(716); match(DIGIT_IDENTIFIER); } break; @@ -5413,15 +5513,15 @@ public T accept(ParseTreeVisitor visitor) { public final NumberContext number() throws RecognitionException { NumberContext _localctx = new NumberContext(_ctx, getState()); - enterRule(_localctx, 88, RULE_number); + enterRule(_localctx, 90, RULE_number); try { - setState(714); + setState(721); switch (_input.LA(1)) { case DECIMAL_VALUE: _localctx = new DecimalLiteralContext(_localctx); enterOuterAlt(_localctx, 1); { - setState(712); + setState(719); match(DECIMAL_VALUE); } break; @@ -5429,7 +5529,7 @@ public final NumberContext number() throws RecognitionException { _localctx = new IntegerLiteralContext(_localctx); enterOuterAlt(_localctx, 2); { - setState(713); + setState(720); match(INTEGER_VALUE); } break; @@ -5472,12 +5572,12 @@ public T accept(ParseTreeVisitor visitor) { public final StringContext string() throws RecognitionException { StringContext _localctx = new StringContext(_ctx, getState()); - enterRule(_localctx, 90, RULE_string); + enterRule(_localctx, 92, RULE_string); int _la; try { enterOuterAlt(_localctx, 1); { - setState(716); + setState(723); _la = _input.LA(1); if ( !(_la==PARAM || _la==STRING) ) { _errHandler.recoverInline(this); @@ -5544,12 +5644,12 @@ public T accept(ParseTreeVisitor visitor) { public final NonReservedContext nonReserved() throws RecognitionException { NonReservedContext _localctx = new NonReservedContext(_ctx, getState()); - enterRule(_localctx, 92, RULE_nonReserved); + enterRule(_localctx, 94, RULE_nonReserved); int _la; try { enterOuterAlt(_localctx, 1); { - setState(718); + setState(725); _la = _input.LA(1); if ( !(((((_la - 6)) & ~0x3f) == 0 && ((1L << (_la - 6)) & ((1L << (ANALYZE - 6)) | (1L << (ANALYZED - 6)) | (1L << (CATALOGS - 6)) | (1L << (COLUMNS - 6)) | (1L << (DEBUG - 6)) | (1L << (EXECUTABLE - 6)) | (1L << (EXPLAIN - 6)) | (1L << (FORMAT - 6)) | (1L << (FUNCTIONS - 6)) | (1L << (GRAPHVIZ - 6)) | (1L << (MAPPED - 6)) | (1L << (OPTIMIZED - 6)) | (1L << (PARSED - 6)) | (1L << (PHYSICAL - 6)) | (1L << (PLAN - 6)) | (1L << (RLIKE - 6)) | (1L << (QUERY - 6)) | (1L << (SCHEMAS - 6)) | (1L << (SHOW - 6)) | (1L << (SYS - 6)) | (1L << (TABLES - 6)) | (1L << (TEXT - 6)) | (1L << (TYPE - 6)) | (1L << (TYPES - 6)) | (1L << (VERIFY - 6)))) != 0)) ) { _errHandler.recoverInline(this); @@ -5600,291 +5700,295 @@ private boolean valueExpression_sempred(ValueExpressionContext _localctx, int pr } public static final String _serializedATN = - "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\3l\u02d3\4\2\t\2\4"+ + "\3\u0430\ud6d1\u8206\uad2d\u4417\uaef1\u8d80\uaadd\3l\u02da\4\2\t\2\4"+ "\3\t\3\4\4\t\4\4\5\t\5\4\6\t\6\4\7\t\7\4\b\t\b\4\t\t\t\4\n\t\n\4\13\t"+ "\13\4\f\t\f\4\r\t\r\4\16\t\16\4\17\t\17\4\20\t\20\4\21\t\21\4\22\t\22"+ "\4\23\t\23\4\24\t\24\4\25\t\25\4\26\t\26\4\27\t\27\4\30\t\30\4\31\t\31"+ "\4\32\t\32\4\33\t\33\4\34\t\34\4\35\t\35\4\36\t\36\4\37\t\37\4 \t \4!"+ "\t!\4\"\t\"\4#\t#\4$\t$\4%\t%\4&\t&\4\'\t\'\4(\t(\4)\t)\4*\t*\4+\t+\4"+ - 
",\t,\4-\t-\4.\t.\4/\t/\4\60\t\60\3\2\3\2\3\2\3\3\3\3\3\3\3\4\3\4\3\4\3"+ - "\4\3\4\3\4\3\4\3\4\3\4\7\4p\n\4\f\4\16\4s\13\4\3\4\5\4v\n\4\3\4\3\4\3"+ - "\4\3\4\3\4\3\4\3\4\7\4\177\n\4\f\4\16\4\u0082\13\4\3\4\5\4\u0085\n\4\3"+ - "\4\3\4\3\4\3\4\5\4\u008b\n\4\3\4\5\4\u008e\n\4\3\4\3\4\3\4\3\4\3\4\3\4"+ - "\3\4\3\4\3\4\5\4\u0099\n\4\3\4\5\4\u009c\n\4\3\4\3\4\3\4\3\4\3\4\3\4\3"+ - "\4\3\4\5\4\u00a6\n\4\3\4\5\4\u00a9\n\4\3\4\5\4\u00ac\n\4\3\4\5\4\u00af"+ - "\n\4\3\4\3\4\3\4\3\4\7\4\u00b5\n\4\f\4\16\4\u00b8\13\4\5\4\u00ba\n\4\3"+ - "\4\3\4\3\4\3\4\5\4\u00c0\n\4\3\4\3\4\5\4\u00c4\n\4\3\4\5\4\u00c7\n\4\3"+ - "\4\5\4\u00ca\n\4\3\4\5\4\u00cd\n\4\3\4\3\4\3\4\3\4\3\4\5\4\u00d4\n\4\3"+ - "\5\3\5\3\5\3\5\7\5\u00da\n\5\f\5\16\5\u00dd\13\5\5\5\u00df\n\5\3\5\3\5"+ - "\3\6\3\6\3\6\3\6\3\6\3\6\7\6\u00e9\n\6\f\6\16\6\u00ec\13\6\5\6\u00ee\n"+ - "\6\3\6\5\6\u00f1\n\6\3\7\3\7\3\7\3\7\3\7\5\7\u00f8\n\7\3\b\3\b\3\b\3\b"+ - "\3\b\5\b\u00ff\n\b\3\t\3\t\5\t\u0103\n\t\3\n\3\n\5\n\u0107\n\n\3\n\3\n"+ - "\3\n\7\n\u010c\n\n\f\n\16\n\u010f\13\n\3\n\5\n\u0112\n\n\3\n\3\n\5\n\u0116"+ - "\n\n\3\n\3\n\3\n\5\n\u011b\n\n\3\n\3\n\5\n\u011f\n\n\3\13\3\13\3\13\3"+ - "\13\7\13\u0125\n\13\f\13\16\13\u0128\13\13\3\f\5\f\u012b\n\f\3\f\3\f\3"+ - "\f\7\f\u0130\n\f\f\f\16\f\u0133\13\f\3\r\3\r\3\16\3\16\3\16\3\16\7\16"+ - "\u013b\n\16\f\16\16\16\u013e\13\16\5\16\u0140\n\16\3\16\3\16\5\16\u0144"+ - "\n\16\3\17\3\17\3\17\3\17\3\17\3\17\3\20\3\20\3\21\3\21\5\21\u0150\n\21"+ - "\3\21\5\21\u0153\n\21\3\22\3\22\7\22\u0157\n\22\f\22\16\22\u015a\13\22"+ - "\3\23\3\23\3\23\3\23\5\23\u0160\n\23\3\23\3\23\3\23\3\23\3\23\5\23\u0167"+ - "\n\23\3\24\5\24\u016a\n\24\3\24\3\24\5\24\u016e\n\24\3\24\3\24\5\24\u0172"+ - "\n\24\3\24\3\24\5\24\u0176\n\24\5\24\u0178\n\24\3\25\3\25\3\25\3\25\3"+ - "\25\3\25\3\25\7\25\u0181\n\25\f\25\16\25\u0184\13\25\3\25\3\25\5\25\u0188"+ - "\n\25\3\26\3\26\5\26\u018c\n\26\3\26\5\26\u018f\n\26\3\26\3\26\3\26\3"+ - "\26\5\26\u0195\n\26\3\26\5\26\u0198\n\26\3\26\3\26\3\26\3\26\5\26\u019e"+ - "\n\26\3\26\5\26\u01a1\n\26\5\26\u01a3\n\26\3\27\3\27\3\30\3\30\3\30\3"+ - "\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\7\30\u01b4\n\30\f\30"+ - "\16\30\u01b7\13\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\7\30\u01c2"+ - "\n\30\f\30\16\30\u01c5\13\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3"+ - "\30\7\30\u01d0\n\30\f\30\16\30\u01d3\13\30\3\30\3\30\3\30\5\30\u01d8\n"+ - "\30\3\30\3\30\3\30\3\30\3\30\3\30\7\30\u01e0\n\30\f\30\16\30\u01e3\13"+ - "\30\3\31\3\31\5\31\u01e7\n\31\3\32\5\32\u01ea\n\32\3\32\3\32\3\32\3\32"+ - "\3\32\3\32\5\32\u01f2\n\32\3\32\3\32\3\32\3\32\3\32\7\32\u01f9\n\32\f"+ - "\32\16\32\u01fc\13\32\3\32\3\32\3\32\5\32\u0201\n\32\3\32\3\32\3\32\3"+ - "\32\3\32\3\32\5\32\u0209\n\32\3\32\3\32\3\32\5\32\u020e\n\32\3\32\3\32"+ - "\3\32\3\32\5\32\u0214\n\32\3\32\5\32\u0217\n\32\3\33\3\33\5\33\u021b\n"+ - "\33\3\34\3\34\3\34\3\34\3\34\3\34\5\34\u0223\n\34\3\35\3\35\3\35\3\35"+ - "\5\35\u0229\n\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35\7\35"+ - "\u0235\n\35\f\35\16\35\u0238\13\35\3\36\3\36\3\36\3\36\3\36\3\36\3\36"+ - "\5\36\u0241\n\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36"+ - "\3\36\5\36\u024f\n\36\3\37\3\37\3\37\3\37\3\37\5\37\u0256\n\37\3 \3 \3"+ - " \3 \3 \3 \3 \3!\3!\3!\3!\3!\5!\u0264\n!\3\"\3\"\3\"\3\"\3\"\3\"\3\"\3"+ - "#\3#\3#\3#\3#\5#\u0272\n#\3$\3$\3$\5$\u0277\n$\3$\3$\3$\7$\u027c\n$\f"+ - "$\16$\u027f\13$\5$\u0281\n$\3$\3$\3%\3%\3%\3%\6%\u0289\n%\r%\16%\u028a"+ - "\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\3%\5%\u029e\n%\3&\3&"+ - 
"\3\'\3\'\3(\3(\3)\3)\3)\7)\u02a9\n)\f)\16)\u02ac\13)\3)\3)\3*\3*\5*\u02b2"+ - "\n*\3+\3+\3+\5+\u02b7\n+\3+\3+\3+\3+\5+\u02bd\n+\3+\5+\u02c0\n+\3,\3,"+ - "\5,\u02c4\n,\3-\3-\3-\5-\u02c9\n-\3.\3.\5.\u02cd\n.\3/\3/\3\60\3\60\3"+ - "\60\2\4.8\61\2\4\6\b\n\f\16\20\22\24\26\30\32\34\36 \"$&(*,.\60\62\64"+ - "\668:<>@BDFHJLNPRTVXZ\\^\2\20\b\2\7\7\t\t\31\31,,\62\62\66\66\4\2\"\""+ - "BB\4\2\t\t\62\62\4\2\37\37%%\3\2\25\26\4\2\7\7aa\4\2\r\r\25\25\4\2\7\7"+ - "\27\27\3\2XY\3\2Z\\\3\2RW\4\2\35\35CC\3\2_`\20\2\b\t\22\24\31\31\33\33"+ - "\36\36!\",,\62\62\668:<>?ABDEGG\u032e\2`\3\2\2\2\4c\3\2\2\2\6\u00d3\3"+ - "\2\2\2\b\u00de\3\2\2\2\n\u00e2\3\2\2\2\f\u00f7\3\2\2\2\16\u00fe\3\2\2"+ - "\2\20\u0100\3\2\2\2\22\u0104\3\2\2\2\24\u0120\3\2\2\2\26\u012a\3\2\2\2"+ - "\30\u0134\3\2\2\2\32\u0143\3\2\2\2\34\u0145\3\2\2\2\36\u014b\3\2\2\2 "+ - "\u014d\3\2\2\2\"\u0154\3\2\2\2$\u0166\3\2\2\2&\u0177\3\2\2\2(\u0187\3"+ - "\2\2\2*\u01a2\3\2\2\2,\u01a4\3\2\2\2.\u01d7\3\2\2\2\60\u01e4\3\2\2\2\62"+ - "\u0216\3\2\2\2\64\u0218\3\2\2\2\66\u0222\3\2\2\28\u0228\3\2\2\2:\u024e"+ - "\3\2\2\2<\u0255\3\2\2\2>\u0257\3\2\2\2@\u0263\3\2\2\2B\u0265\3\2\2\2D"+ - "\u0271\3\2\2\2F\u0273\3\2\2\2H\u029d\3\2\2\2J\u029f\3\2\2\2L\u02a1\3\2"+ - "\2\2N\u02a3\3\2\2\2P\u02aa\3\2\2\2R\u02b1\3\2\2\2T\u02bf\3\2\2\2V\u02c3"+ - "\3\2\2\2X\u02c8\3\2\2\2Z\u02cc\3\2\2\2\\\u02ce\3\2\2\2^\u02d0\3\2\2\2"+ - "`a\5\6\4\2ab\7\2\2\3b\3\3\2\2\2cd\5,\27\2de\7\2\2\3e\5\3\2\2\2f\u00d4"+ - "\5\b\5\2gu\7\33\2\2hq\7\3\2\2ij\78\2\2jp\t\2\2\2kl\7\36\2\2lp\t\3\2\2"+ - "mn\7G\2\2np\5L\'\2oi\3\2\2\2ok\3\2\2\2om\3\2\2\2ps\3\2\2\2qo\3\2\2\2q"+ - "r\3\2\2\2rt\3\2\2\2sq\3\2\2\2tv\7\4\2\2uh\3\2\2\2uv\3\2\2\2vw\3\2\2\2"+ - "w\u00d4\5\6\4\2x\u0084\7\24\2\2y\u0080\7\3\2\2z{\78\2\2{\177\t\4\2\2|"+ - "}\7\36\2\2}\177\t\3\2\2~z\3\2\2\2~|\3\2\2\2\177\u0082\3\2\2\2\u0080~\3"+ - "\2\2\2\u0080\u0081\3\2\2\2\u0081\u0083\3\2\2\2\u0082\u0080\3\2\2\2\u0083"+ - "\u0085\7\4\2\2\u0084y\3\2\2\2\u0084\u0085\3\2\2\2\u0085\u0086\3\2\2\2"+ - "\u0086\u00d4\5\6\4\2\u0087\u0088\7>\2\2\u0088\u008d\7A\2\2\u0089\u008b"+ - "\7*\2\2\u008a\u0089\3\2\2\2\u008a\u008b\3\2\2\2\u008b\u008c\3\2\2\2\u008c"+ - "\u008e\5\64\33\2\u008d\u008a\3\2\2\2\u008d\u008e\3\2\2\2\u008e\u00d4\3"+ - "\2\2\2\u008f\u0090\7>\2\2\u0090\u0091\7\23\2\2\u0091\u0092\t\5\2\2\u0092"+ - "\u00d4\5T+\2\u0093\u0094\t\6\2\2\u0094\u00d4\5T+\2\u0095\u0096\7>\2\2"+ - "\u0096\u009b\7!\2\2\u0097\u0099\7*\2\2\u0098\u0097\3\2\2\2\u0098\u0099"+ - "\3\2\2\2\u0099\u009a\3\2\2\2\u009a\u009c\5\64\33\2\u009b\u0098\3\2\2\2"+ - "\u009b\u009c\3\2\2\2\u009c\u00d4\3\2\2\2\u009d\u009e\7>\2\2\u009e\u00d4"+ - "\7<\2\2\u009f\u00a0\7?\2\2\u00a0\u00d4\7\22\2\2\u00a1\u00a2\7?\2\2\u00a2"+ - "\u00a8\7A\2\2\u00a3\u00a5\7\21\2\2\u00a4\u00a6\7*\2\2\u00a5\u00a4\3\2"+ - "\2\2\u00a5\u00a6\3\2\2\2\u00a6\u00a7\3\2\2\2\u00a7\u00a9\5\64\33\2\u00a8"+ - "\u00a3\3\2\2\2\u00a8\u00a9\3\2\2\2\u00a9\u00ae\3\2\2\2\u00aa\u00ac\7*"+ - "\2\2\u00ab\u00aa\3\2\2\2\u00ab\u00ac\3\2\2\2\u00ac\u00ad\3\2\2\2\u00ad"+ - "\u00af\5\64\33\2\u00ae\u00ab\3\2\2\2\u00ae\u00af\3\2\2\2\u00af\u00b9\3"+ - "\2\2\2\u00b0\u00b1\7D\2\2\u00b1\u00b6\5\\/\2\u00b2\u00b3\7\5\2\2\u00b3"+ - "\u00b5\5\\/\2\u00b4\u00b2\3\2\2\2\u00b5\u00b8\3\2\2\2\u00b6\u00b4\3\2"+ - "\2\2\u00b6\u00b7\3\2\2\2\u00b7\u00ba\3\2\2\2\u00b8\u00b6\3\2\2\2\u00b9"+ - "\u00b0\3\2\2\2\u00b9\u00ba\3\2\2\2\u00ba\u00d4\3\2\2\2\u00bb\u00bc\7?"+ - "\2\2\u00bc\u00bf\7\23\2\2\u00bd\u00be\7\21\2\2\u00be\u00c0\5\\/\2\u00bf"+ - "\u00bd\3\2\2\2\u00bf\u00c0\3\2\2\2\u00c0\u00c6\3\2\2\2\u00c1\u00c3\7@"+ - 
"\2\2\u00c2\u00c4\7*\2\2\u00c3\u00c2\3\2\2\2\u00c3\u00c4\3\2\2\2\u00c4"+ - "\u00c5\3\2\2\2\u00c5\u00c7\5\64\33\2\u00c6\u00c1\3\2\2\2\u00c6\u00c7\3"+ - "\2\2\2\u00c7\u00cc\3\2\2\2\u00c8\u00ca\7*\2\2\u00c9\u00c8\3\2\2\2\u00c9"+ - "\u00ca\3\2\2\2\u00ca\u00cb\3\2\2\2\u00cb\u00cd\5\64\33\2\u00cc\u00c9\3"+ - "\2\2\2\u00cc\u00cd\3\2\2\2\u00cd\u00d4\3\2\2\2\u00ce\u00cf\7?\2\2\u00cf"+ - "\u00d4\7E\2\2\u00d0\u00d1\7?\2\2\u00d1\u00d2\7@\2\2\u00d2\u00d4\7E\2\2"+ - "\u00d3f\3\2\2\2\u00d3g\3\2\2\2\u00d3x\3\2\2\2\u00d3\u0087\3\2\2\2\u00d3"+ - "\u008f\3\2\2\2\u00d3\u0093\3\2\2\2\u00d3\u0095\3\2\2\2\u00d3\u009d\3\2"+ - "\2\2\u00d3\u009f\3\2\2\2\u00d3\u00a1\3\2\2\2\u00d3\u00bb\3\2\2\2\u00d3"+ - "\u00ce\3\2\2\2\u00d3\u00d0\3\2\2\2\u00d4\7\3\2\2\2\u00d5\u00d6\7I\2\2"+ - "\u00d6\u00db\5\34\17\2\u00d7\u00d8\7\5\2\2\u00d8\u00da\5\34\17\2\u00d9"+ - "\u00d7\3\2\2\2\u00da\u00dd\3\2\2\2\u00db\u00d9\3\2\2\2\u00db\u00dc\3\2"+ - "\2\2\u00dc\u00df\3\2\2\2\u00dd\u00db\3\2\2\2\u00de\u00d5\3\2\2\2\u00de"+ - "\u00df\3\2\2\2\u00df\u00e0\3\2\2\2\u00e0\u00e1\5\n\6\2\u00e1\t\3\2\2\2"+ - "\u00e2\u00ed\5\16\b\2\u00e3\u00e4\7\64\2\2\u00e4\u00e5\7\17\2\2\u00e5"+ - "\u00ea\5\20\t\2\u00e6\u00e7\7\5\2\2\u00e7\u00e9\5\20\t\2\u00e8\u00e6\3"+ - "\2\2\2\u00e9\u00ec\3\2\2\2\u00ea\u00e8\3\2\2\2\u00ea\u00eb\3\2\2\2\u00eb"+ - "\u00ee\3\2\2\2\u00ec\u00ea\3\2\2\2\u00ed\u00e3\3\2\2\2\u00ed\u00ee\3\2"+ - "\2\2\u00ee\u00f0\3\2\2\2\u00ef\u00f1\5\f\7\2\u00f0\u00ef\3\2\2\2\u00f0"+ - "\u00f1\3\2\2\2\u00f1\13\3\2\2\2\u00f2\u00f3\7+\2\2\u00f3\u00f8\t\7\2\2"+ - "\u00f4\u00f5\7L\2\2\u00f5\u00f6\t\7\2\2\u00f6\u00f8\7Q\2\2\u00f7\u00f2"+ - "\3\2\2\2\u00f7\u00f4\3\2\2\2\u00f8\r\3\2\2\2\u00f9\u00ff\5\22\n\2\u00fa"+ - "\u00fb\7\3\2\2\u00fb\u00fc\5\n\6\2\u00fc\u00fd\7\4\2\2\u00fd\u00ff\3\2"+ - "\2\2\u00fe\u00f9\3\2\2\2\u00fe\u00fa\3\2\2\2\u00ff\17\3\2\2\2\u0100\u0102"+ - "\5,\27\2\u0101\u0103\t\b\2\2\u0102\u0101\3\2\2\2\u0102\u0103\3\2\2\2\u0103"+ - "\21\3\2\2\2\u0104\u0106\7=\2\2\u0105\u0107\5\36\20\2\u0106\u0105\3\2\2"+ - "\2\u0106\u0107\3\2\2\2\u0107\u0108\3\2\2\2\u0108\u010d\5 \21\2\u0109\u010a"+ - "\7\5\2\2\u010a\u010c\5 \21\2\u010b\u0109\3\2\2\2\u010c\u010f\3\2\2\2\u010d"+ - "\u010b\3\2\2\2\u010d\u010e\3\2\2\2\u010e\u0111\3\2\2\2\u010f\u010d\3\2"+ - "\2\2\u0110\u0112\5\24\13\2\u0111\u0110\3\2\2\2\u0111\u0112\3\2\2\2\u0112"+ - "\u0115\3\2\2\2\u0113\u0114\7H\2\2\u0114\u0116\5.\30\2\u0115\u0113\3\2"+ - "\2\2\u0115\u0116\3\2\2\2\u0116\u011a\3\2\2\2\u0117\u0118\7#\2\2\u0118"+ - "\u0119\7\17\2\2\u0119\u011b\5\26\f\2\u011a\u0117\3\2\2\2\u011a\u011b\3"+ - "\2\2\2\u011b\u011e\3\2\2\2\u011c\u011d\7$\2\2\u011d\u011f\5.\30\2\u011e"+ - "\u011c\3\2\2\2\u011e\u011f\3\2\2\2\u011f\23\3\2\2\2\u0120\u0121\7\37\2"+ - "\2\u0121\u0126\5\"\22\2\u0122\u0123\7\5\2\2\u0123\u0125\5\"\22\2\u0124"+ - "\u0122\3\2\2\2\u0125\u0128\3\2\2\2\u0126\u0124\3\2\2\2\u0126\u0127\3\2"+ - "\2\2\u0127\25\3\2\2\2\u0128\u0126\3\2\2\2\u0129\u012b\5\36\20\2\u012a"+ - "\u0129\3\2\2\2\u012a\u012b\3\2\2\2\u012b\u012c\3\2\2\2\u012c\u0131\5\30"+ - "\r\2\u012d\u012e\7\5\2\2\u012e\u0130\5\30\r\2\u012f\u012d\3\2\2\2\u0130"+ - "\u0133\3\2\2\2\u0131\u012f\3\2\2\2\u0131\u0132\3\2\2\2\u0132\27\3\2\2"+ - "\2\u0133\u0131\3\2\2\2\u0134\u0135\5\32\16\2\u0135\31\3\2\2\2\u0136\u013f"+ - "\7\3\2\2\u0137\u013c\5,\27\2\u0138\u0139\7\5\2\2\u0139\u013b\5,\27\2\u013a"+ - "\u0138\3\2\2\2\u013b\u013e\3\2\2\2\u013c\u013a\3\2\2\2\u013c\u013d\3\2"+ - "\2\2\u013d\u0140\3\2\2\2\u013e\u013c\3\2\2\2\u013f\u0137\3\2\2\2\u013f"+ - "\u0140\3\2\2\2\u0140\u0141\3\2\2\2\u0141\u0144\7\4\2\2\u0142\u0144\5,"+ - 
"\27\2\u0143\u0136\3\2\2\2\u0143\u0142\3\2\2\2\u0144\33\3\2\2\2\u0145\u0146"+ - "\5R*\2\u0146\u0147\7\f\2\2\u0147\u0148\7\3\2\2\u0148\u0149\5\n\6\2\u0149"+ - "\u014a\7\4\2\2\u014a\35\3\2\2\2\u014b\u014c\t\t\2\2\u014c\37\3\2\2\2\u014d"+ - "\u0152\5,\27\2\u014e\u0150\7\f\2\2\u014f\u014e\3\2\2\2\u014f\u0150\3\2"+ - "\2\2\u0150\u0151\3\2\2\2\u0151\u0153\5R*\2\u0152\u014f\3\2\2\2\u0152\u0153"+ - "\3\2\2\2\u0153!\3\2\2\2\u0154\u0158\5*\26\2\u0155\u0157\5$\23\2\u0156"+ - "\u0155\3\2\2\2\u0157\u015a\3\2\2\2\u0158\u0156\3\2\2\2\u0158\u0159\3\2"+ - "\2\2\u0159#\3\2\2\2\u015a\u0158\3\2\2\2\u015b\u015c\5&\24\2\u015c\u015d"+ - "\7(\2\2\u015d\u015f\5*\26\2\u015e\u0160\5(\25\2\u015f\u015e\3\2\2\2\u015f"+ - "\u0160\3\2\2\2\u0160\u0167\3\2\2\2\u0161\u0162\7.\2\2\u0162\u0163\5&\24"+ - "\2\u0163\u0164\7(\2\2\u0164\u0165\5*\26\2\u0165\u0167\3\2\2\2\u0166\u015b"+ - "\3\2\2\2\u0166\u0161\3\2\2\2\u0167%\3\2\2\2\u0168\u016a\7&\2\2\u0169\u0168"+ - "\3\2\2\2\u0169\u016a\3\2\2\2\u016a\u0178\3\2\2\2\u016b\u016d\7)\2\2\u016c"+ - "\u016e\7\65\2\2\u016d\u016c\3\2\2\2\u016d\u016e\3\2\2\2\u016e\u0178\3"+ - "\2\2\2\u016f\u0171\79\2\2\u0170\u0172\7\65\2\2\u0171\u0170\3\2\2\2\u0171"+ - "\u0172\3\2\2\2\u0172\u0178\3\2\2\2\u0173\u0175\7 \2\2\u0174\u0176\7\65"+ - "\2\2\u0175\u0174\3\2\2\2\u0175\u0176\3\2\2\2\u0176\u0178\3\2\2\2\u0177"+ - "\u0169\3\2\2\2\u0177\u016b\3\2\2\2\u0177\u016f\3\2\2\2\u0177\u0173\3\2"+ - "\2\2\u0178\'\3\2\2\2\u0179\u017a\7\61\2\2\u017a\u0188\5.\30\2\u017b\u017c"+ - "\7F\2\2\u017c\u017d\7\3\2\2\u017d\u0182\5R*\2\u017e\u017f\7\5\2\2\u017f"+ - "\u0181\5R*\2\u0180\u017e\3\2\2\2\u0181\u0184\3\2\2\2\u0182\u0180\3\2\2"+ - "\2\u0182\u0183\3\2\2\2\u0183\u0185\3\2\2\2\u0184\u0182\3\2\2\2\u0185\u0186"+ - "\7\4\2\2\u0186\u0188\3\2\2\2\u0187\u0179\3\2\2\2\u0187\u017b\3\2\2\2\u0188"+ - ")\3\2\2\2\u0189\u018e\5T+\2\u018a\u018c\7\f\2\2\u018b\u018a\3\2\2\2\u018b"+ - "\u018c\3\2\2\2\u018c\u018d\3\2\2\2\u018d\u018f\5P)\2\u018e\u018b\3\2\2"+ - "\2\u018e\u018f\3\2\2\2\u018f\u01a3\3\2\2\2\u0190\u0191\7\3\2\2\u0191\u0192"+ - "\5\n\6\2\u0192\u0197\7\4\2\2\u0193\u0195\7\f\2\2\u0194\u0193\3\2\2\2\u0194"+ - "\u0195\3\2\2\2\u0195\u0196\3\2\2\2\u0196\u0198\5P)\2\u0197\u0194\3\2\2"+ - "\2\u0197\u0198\3\2\2\2\u0198\u01a3\3\2\2\2\u0199\u019a\7\3\2\2\u019a\u019b"+ - "\5\"\22\2\u019b\u01a0\7\4\2\2\u019c\u019e\7\f\2\2\u019d\u019c\3\2\2\2"+ - "\u019d\u019e\3\2\2\2\u019e\u019f\3\2\2\2\u019f\u01a1\5P)\2\u01a0\u019d"+ - "\3\2\2\2\u01a0\u01a1\3\2\2\2\u01a1\u01a3\3\2\2\2\u01a2\u0189\3\2\2\2\u01a2"+ - "\u0190\3\2\2\2\u01a2\u0199\3\2\2\2\u01a3+\3\2\2\2\u01a4\u01a5\5.\30\2"+ - "\u01a5-\3\2\2\2\u01a6\u01a7\b\30\1\2\u01a7\u01a8\7/\2\2\u01a8\u01d8\5"+ - ".\30\n\u01a9\u01aa\7\32\2\2\u01aa\u01ab\7\3\2\2\u01ab\u01ac\5\b\5\2\u01ac"+ - "\u01ad\7\4\2\2\u01ad\u01d8\3\2\2\2\u01ae\u01af\7;\2\2\u01af\u01b0\7\3"+ - "\2\2\u01b0\u01b5\5\\/\2\u01b1\u01b2\7\5\2\2\u01b2\u01b4\5\\/\2\u01b3\u01b1"+ - "\3\2\2\2\u01b4\u01b7\3\2\2\2\u01b5\u01b3\3\2\2\2\u01b5\u01b6\3\2\2\2\u01b6"+ - "\u01b8\3\2\2\2\u01b7\u01b5\3\2\2\2\u01b8\u01b9\7\4\2\2\u01b9\u01d8\3\2"+ - "\2\2\u01ba\u01bb\7-\2\2\u01bb\u01bc\7\3\2\2\u01bc\u01bd\5P)\2\u01bd\u01be"+ - "\7\5\2\2\u01be\u01c3\5\\/\2\u01bf\u01c0\7\5\2\2\u01c0\u01c2\5\\/\2\u01c1"+ - "\u01bf\3\2\2\2\u01c2\u01c5\3\2\2\2\u01c3\u01c1\3\2\2\2\u01c3\u01c4\3\2"+ - "\2\2\u01c4\u01c6\3\2\2\2\u01c5\u01c3\3\2\2\2\u01c6\u01c7\7\4\2\2\u01c7"+ - "\u01d8\3\2\2\2\u01c8\u01c9\7-\2\2\u01c9\u01ca\7\3\2\2\u01ca\u01cb\5\\"+ - "/\2\u01cb\u01cc\7\5\2\2\u01cc\u01d1\5\\/\2\u01cd\u01ce\7\5\2\2\u01ce\u01d0"+ - 
"\5\\/\2\u01cf\u01cd\3\2\2\2\u01d0\u01d3\3\2\2\2\u01d1\u01cf\3\2\2\2\u01d1"+ - "\u01d2\3\2\2\2\u01d2\u01d4\3\2\2\2\u01d3\u01d1\3\2\2\2\u01d4\u01d5\7\4"+ - "\2\2\u01d5\u01d8\3\2\2\2\u01d6\u01d8\5\60\31\2\u01d7\u01a6\3\2\2\2\u01d7"+ - "\u01a9\3\2\2\2\u01d7\u01ae\3\2\2\2\u01d7\u01ba\3\2\2\2\u01d7\u01c8\3\2"+ - "\2\2\u01d7\u01d6\3\2\2\2\u01d8\u01e1\3\2\2\2\u01d9\u01da\f\4\2\2\u01da"+ - "\u01db\7\n\2\2\u01db\u01e0\5.\30\5\u01dc\u01dd\f\3\2\2\u01dd\u01de\7\63"+ - "\2\2\u01de\u01e0\5.\30\4\u01df\u01d9\3\2\2\2\u01df\u01dc\3\2\2\2\u01e0"+ - "\u01e3\3\2\2\2\u01e1\u01df\3\2\2\2\u01e1\u01e2\3\2\2\2\u01e2/\3\2\2\2"+ - "\u01e3\u01e1\3\2\2\2\u01e4\u01e6\58\35\2\u01e5\u01e7\5\62\32\2\u01e6\u01e5"+ - "\3\2\2\2\u01e6\u01e7\3\2\2\2\u01e7\61\3\2\2\2\u01e8\u01ea\7/\2\2\u01e9"+ - "\u01e8\3\2\2\2\u01e9\u01ea\3\2\2\2\u01ea\u01eb\3\2\2\2\u01eb\u01ec\7\16"+ - "\2\2\u01ec\u01ed\58\35\2\u01ed\u01ee\7\n\2\2\u01ee\u01ef\58\35\2\u01ef"+ - "\u0217\3\2\2\2\u01f0\u01f2\7/\2\2\u01f1\u01f0\3\2\2\2\u01f1\u01f2\3\2"+ - "\2\2\u01f2\u01f3\3\2\2\2\u01f3\u01f4\7%\2\2\u01f4\u01f5\7\3\2\2\u01f5"+ - "\u01fa\5,\27\2\u01f6\u01f7\7\5\2\2\u01f7\u01f9\5,\27\2\u01f8\u01f6\3\2"+ - "\2\2\u01f9\u01fc\3\2\2\2\u01fa\u01f8\3\2\2\2\u01fa\u01fb\3\2\2\2\u01fb"+ - "\u01fd\3\2\2\2\u01fc\u01fa\3\2\2\2\u01fd\u01fe\7\4\2\2\u01fe\u0217\3\2"+ - "\2\2\u01ff\u0201\7/\2\2\u0200\u01ff\3\2\2\2\u0200\u0201\3\2\2\2\u0201"+ - "\u0202\3\2\2\2\u0202\u0203\7%\2\2\u0203\u0204\7\3\2\2\u0204\u0205\5\b"+ - "\5\2\u0205\u0206\7\4\2\2\u0206\u0217\3\2\2\2\u0207\u0209\7/\2\2\u0208"+ - "\u0207\3\2\2\2\u0208\u0209\3\2\2\2\u0209\u020a\3\2\2\2\u020a\u020b\7*"+ - "\2\2\u020b\u0217\5\64\33\2\u020c\u020e\7/\2\2\u020d\u020c\3\2\2\2\u020d"+ - "\u020e\3\2\2\2\u020e\u020f\3\2\2\2\u020f\u0210\7:\2\2\u0210\u0217\5\\"+ - "/\2\u0211\u0213\7\'\2\2\u0212\u0214\7/\2\2\u0213\u0212\3\2\2\2\u0213\u0214"+ - "\3\2\2\2\u0214\u0215\3\2\2\2\u0215\u0217\7\60\2\2\u0216\u01e9\3\2\2\2"+ - "\u0216\u01f1\3\2\2\2\u0216\u0200\3\2\2\2\u0216\u0208\3\2\2\2\u0216\u020d"+ - "\3\2\2\2\u0216\u0211\3\2\2\2\u0217\63\3\2\2\2\u0218\u021a\5\\/\2\u0219"+ - "\u021b\5\66\34\2\u021a\u0219\3\2\2\2\u021a\u021b\3\2\2\2\u021b\65\3\2"+ - "\2\2\u021c\u021d\7\30\2\2\u021d\u0223\5\\/\2\u021e\u021f\7J\2\2\u021f"+ - "\u0220\5\\/\2\u0220\u0221\7Q\2\2\u0221\u0223\3\2\2\2\u0222\u021c\3\2\2"+ - "\2\u0222\u021e\3\2\2\2\u0223\67\3\2\2\2\u0224\u0225\b\35\1\2\u0225\u0229"+ - "\5:\36\2\u0226\u0227\t\n\2\2\u0227\u0229\58\35\6\u0228\u0224\3\2\2\2\u0228"+ - "\u0226\3\2\2\2\u0229\u0236\3\2\2\2\u022a\u022b\f\5\2\2\u022b\u022c\t\13"+ - "\2\2\u022c\u0235\58\35\6\u022d\u022e\f\4\2\2\u022e\u022f\t\n\2\2\u022f"+ - "\u0235\58\35\5\u0230\u0231\f\3\2\2\u0231\u0232\5J&\2\u0232\u0233\58\35"+ - "\4\u0233\u0235\3\2\2\2\u0234\u022a\3\2\2\2\u0234\u022d\3\2\2\2\u0234\u0230"+ - "\3\2\2\2\u0235\u0238\3\2\2\2\u0236\u0234\3\2\2\2\u0236\u0237\3\2\2\2\u0237"+ - "9\3\2\2\2\u0238\u0236\3\2\2\2\u0239\u024f\5<\37\2\u023a\u024f\5@!\2\u023b"+ - "\u024f\5H%\2\u023c\u024f\7Z\2\2\u023d\u023e\5P)\2\u023e\u023f\7^\2\2\u023f"+ - "\u0241\3\2\2\2\u0240\u023d\3\2\2\2\u0240\u0241\3\2\2\2\u0241\u0242\3\2"+ - "\2\2\u0242\u024f\7Z\2\2\u0243\u024f\5D#\2\u0244\u0245\7\3\2\2\u0245\u0246"+ - "\5\b\5\2\u0246\u0247\7\4\2\2\u0247\u024f\3\2\2\2\u0248\u024f\5R*\2\u0249"+ - "\u024f\5P)\2\u024a\u024b\7\3\2\2\u024b\u024c\5,\27\2\u024c\u024d\7\4\2"+ - "\2\u024d\u024f\3\2\2\2\u024e\u0239\3\2\2\2\u024e\u023a\3\2\2\2\u024e\u023b"+ - "\3\2\2\2\u024e\u023c\3\2\2\2\u024e\u0240\3\2\2\2\u024e\u0243\3\2\2\2\u024e"+ - "\u0244\3\2\2\2\u024e\u0248\3\2\2\2\u024e\u0249\3\2\2\2\u024e\u024a\3\2"+ - 
"\2\2\u024f;\3\2\2\2\u0250\u0256\5> \2\u0251\u0252\7K\2\2\u0252\u0253\5"+ - "> \2\u0253\u0254\7Q\2\2\u0254\u0256\3\2\2\2\u0255\u0250\3\2\2\2\u0255"+ - "\u0251\3\2\2\2\u0256=\3\2\2\2\u0257\u0258\7\20\2\2\u0258\u0259\7\3\2\2"+ - "\u0259\u025a\5,\27\2\u025a\u025b\7\f\2\2\u025b\u025c\5N(\2\u025c\u025d"+ - "\7\4\2\2\u025d?\3\2\2\2\u025e\u0264\5B\"\2\u025f\u0260\7K\2\2\u0260\u0261"+ - "\5B\"\2\u0261\u0262\7Q\2\2\u0262\u0264\3\2\2\2\u0263\u025e\3\2\2\2\u0263"+ - "\u025f\3\2\2\2\u0264A\3\2\2\2\u0265\u0266\7\34\2\2\u0266\u0267\7\3\2\2"+ - "\u0267\u0268\5R*\2\u0268\u0269\7\37\2\2\u0269\u026a\58\35\2\u026a\u026b"+ - "\7\4\2\2\u026bC\3\2\2\2\u026c\u0272\5F$\2\u026d\u026e\7K\2\2\u026e\u026f"+ - "\5F$\2\u026f\u0270\7Q\2\2\u0270\u0272\3\2\2\2\u0271\u026c\3\2\2\2\u0271"+ - "\u026d\3\2\2\2\u0272E\3\2\2\2\u0273\u0274\5R*\2\u0274\u0280\7\3\2\2\u0275"+ - "\u0277\5\36\20\2\u0276\u0275\3\2\2\2\u0276\u0277\3\2\2\2\u0277\u0278\3"+ - "\2\2\2\u0278\u027d\5,\27\2\u0279\u027a\7\5\2\2\u027a\u027c\5,\27\2\u027b"+ - "\u0279\3\2\2\2\u027c\u027f\3\2\2\2\u027d\u027b\3\2\2\2\u027d\u027e\3\2"+ - "\2\2\u027e\u0281\3\2\2\2\u027f\u027d\3\2\2\2\u0280\u0276\3\2\2\2\u0280"+ - "\u0281\3\2\2\2\u0281\u0282\3\2\2\2\u0282\u0283\7\4\2\2\u0283G\3\2\2\2"+ - "\u0284\u029e\7\60\2\2\u0285\u029e\5Z.\2\u0286\u029e\5L\'\2\u0287\u0289"+ - "\7`\2\2\u0288\u0287\3\2\2\2\u0289\u028a\3\2\2\2\u028a\u0288\3\2\2\2\u028a"+ - "\u028b\3\2\2\2\u028b\u029e\3\2\2\2\u028c\u029e\7_\2\2\u028d\u028e\7M\2"+ - "\2\u028e\u028f\5\\/\2\u028f\u0290\7Q\2\2\u0290\u029e\3\2\2\2\u0291\u0292"+ - "\7N\2\2\u0292\u0293\5\\/\2\u0293\u0294\7Q\2\2\u0294\u029e\3\2\2\2\u0295"+ - "\u0296\7O\2\2\u0296\u0297\5\\/\2\u0297\u0298\7Q\2\2\u0298\u029e\3\2\2"+ - "\2\u0299\u029a\7P\2\2\u029a\u029b\5\\/\2\u029b\u029c\7Q\2\2\u029c\u029e"+ - "\3\2\2\2\u029d\u0284\3\2\2\2\u029d\u0285\3\2\2\2\u029d\u0286\3\2\2\2\u029d"+ - "\u0288\3\2\2\2\u029d\u028c\3\2\2\2\u029d\u028d\3\2\2\2\u029d\u0291\3\2"+ - "\2\2\u029d\u0295\3\2\2\2\u029d\u0299\3\2\2\2\u029eI\3\2\2\2\u029f\u02a0"+ - "\t\f\2\2\u02a0K\3\2\2\2\u02a1\u02a2\t\r\2\2\u02a2M\3\2\2\2\u02a3\u02a4"+ - "\5R*\2\u02a4O\3\2\2\2\u02a5\u02a6\5R*\2\u02a6\u02a7\7^\2\2\u02a7\u02a9"+ - "\3\2\2\2\u02a8\u02a5\3\2\2\2\u02a9\u02ac\3\2\2\2\u02aa\u02a8\3\2\2\2\u02aa"+ - "\u02ab\3\2\2\2\u02ab\u02ad\3\2\2\2\u02ac\u02aa\3\2\2\2\u02ad\u02ae\5R"+ - "*\2\u02aeQ\3\2\2\2\u02af\u02b2\5V,\2\u02b0\u02b2\5X-\2\u02b1\u02af\3\2"+ - "\2\2\u02b1\u02b0\3\2\2\2\u02b2S\3\2\2\2\u02b3\u02b4\5R*\2\u02b4\u02b5"+ - "\7\6\2\2\u02b5\u02b7\3\2\2\2\u02b6\u02b3\3\2\2\2\u02b6\u02b7\3\2\2\2\u02b7"+ - "\u02b8\3\2\2\2\u02b8\u02c0\7e\2\2\u02b9\u02ba\5R*\2\u02ba\u02bb\7\6\2"+ - "\2\u02bb\u02bd\3\2\2\2\u02bc\u02b9\3\2\2\2\u02bc\u02bd\3\2\2\2\u02bd\u02be"+ - "\3\2\2\2\u02be\u02c0\5R*\2\u02bf\u02b6\3\2\2\2\u02bf\u02bc\3\2\2\2\u02c0"+ - "U\3\2\2\2\u02c1\u02c4\7f\2\2\u02c2\u02c4\7g\2\2\u02c3\u02c1\3\2\2\2\u02c3"+ - "\u02c2\3\2\2\2\u02c4W\3\2\2\2\u02c5\u02c9\7c\2\2\u02c6\u02c9\5^\60\2\u02c7"+ - "\u02c9\7d\2\2\u02c8\u02c5\3\2\2\2\u02c8\u02c6\3\2\2\2\u02c8\u02c7\3\2"+ - "\2\2\u02c9Y\3\2\2\2\u02ca\u02cd\7b\2\2\u02cb\u02cd\7a\2\2\u02cc\u02ca"+ - "\3\2\2\2\u02cc\u02cb\3\2\2\2\u02cd[\3\2\2\2\u02ce\u02cf\t\16\2\2\u02cf"+ - "]\3\2\2\2\u02d0\u02d1\t\17\2\2\u02d1_\3\2\2\2eoqu~\u0080\u0084\u008a\u008d"+ - "\u0098\u009b\u00a5\u00a8\u00ab\u00ae\u00b6\u00b9\u00bf\u00c3\u00c6\u00c9"+ - "\u00cc\u00d3\u00db\u00de\u00ea\u00ed\u00f0\u00f7\u00fe\u0102\u0106\u010d"+ - "\u0111\u0115\u011a\u011e\u0126\u012a\u0131\u013c\u013f\u0143\u014f\u0152"+ - "\u0158\u015f\u0166\u0169\u016d\u0171\u0175\u0177\u0182\u0187\u018b\u018e"+ - 
"\u0194\u0197\u019d\u01a0\u01a2\u01b5\u01c3\u01d1\u01d7\u01df\u01e1\u01e6"+ - "\u01e9\u01f1\u01fa\u0200\u0208\u020d\u0213\u0216\u021a\u0222\u0228\u0234"+ - "\u0236\u0240\u024e\u0255\u0263\u0271\u0276\u027d\u0280\u028a\u029d\u02aa"+ - "\u02b1\u02b6\u02bc\u02bf\u02c3\u02c8\u02cc"; + ",\t,\4-\t-\4.\t.\4/\t/\4\60\t\60\4\61\t\61\3\2\3\2\3\2\3\3\3\3\3\3\3\4"+ + "\3\4\3\4\3\4\3\4\3\4\3\4\3\4\3\4\7\4r\n\4\f\4\16\4u\13\4\3\4\5\4x\n\4"+ + "\3\4\3\4\3\4\3\4\3\4\3\4\3\4\7\4\u0081\n\4\f\4\16\4\u0084\13\4\3\4\5\4"+ + "\u0087\n\4\3\4\3\4\3\4\3\4\5\4\u008d\n\4\3\4\5\4\u0090\n\4\3\4\3\4\3\4"+ + "\3\4\3\4\3\4\3\4\3\4\3\4\5\4\u009b\n\4\3\4\5\4\u009e\n\4\3\4\3\4\3\4\3"+ + "\4\3\4\3\4\3\4\3\4\5\4\u00a8\n\4\3\4\5\4\u00ab\n\4\3\4\5\4\u00ae\n\4\3"+ + "\4\5\4\u00b1\n\4\3\4\3\4\3\4\3\4\7\4\u00b7\n\4\f\4\16\4\u00ba\13\4\5\4"+ + "\u00bc\n\4\3\4\3\4\3\4\3\4\5\4\u00c2\n\4\3\4\3\4\5\4\u00c6\n\4\3\4\5\4"+ + "\u00c9\n\4\3\4\5\4\u00cc\n\4\3\4\5\4\u00cf\n\4\3\4\3\4\3\4\3\4\3\4\5\4"+ + "\u00d6\n\4\3\5\3\5\3\5\3\5\7\5\u00dc\n\5\f\5\16\5\u00df\13\5\5\5\u00e1"+ + "\n\5\3\5\3\5\3\6\3\6\3\6\3\6\3\6\3\6\7\6\u00eb\n\6\f\6\16\6\u00ee\13\6"+ + "\5\6\u00f0\n\6\3\6\5\6\u00f3\n\6\3\7\3\7\3\7\3\7\3\7\5\7\u00fa\n\7\3\b"+ + "\3\b\3\b\3\b\3\b\5\b\u0101\n\b\3\t\3\t\5\t\u0105\n\t\3\n\3\n\5\n\u0109"+ + "\n\n\3\n\3\n\3\n\7\n\u010e\n\n\f\n\16\n\u0111\13\n\3\n\5\n\u0114\n\n\3"+ + "\n\3\n\5\n\u0118\n\n\3\n\3\n\3\n\5\n\u011d\n\n\3\n\3\n\5\n\u0121\n\n\3"+ + "\13\3\13\3\13\3\13\7\13\u0127\n\13\f\13\16\13\u012a\13\13\3\f\5\f\u012d"+ + "\n\f\3\f\3\f\3\f\7\f\u0132\n\f\f\f\16\f\u0135\13\f\3\r\3\r\3\16\3\16\3"+ + "\16\3\16\7\16\u013d\n\16\f\16\16\16\u0140\13\16\5\16\u0142\n\16\3\16\3"+ + "\16\5\16\u0146\n\16\3\17\3\17\3\17\3\17\3\17\3\17\3\20\3\20\3\21\3\21"+ + "\5\21\u0152\n\21\3\21\5\21\u0155\n\21\3\22\3\22\7\22\u0159\n\22\f\22\16"+ + "\22\u015c\13\22\3\23\3\23\3\23\3\23\5\23\u0162\n\23\3\23\3\23\3\23\3\23"+ + "\3\23\5\23\u0169\n\23\3\24\5\24\u016c\n\24\3\24\3\24\5\24\u0170\n\24\3"+ + "\24\3\24\5\24\u0174\n\24\3\24\3\24\5\24\u0178\n\24\5\24\u017a\n\24\3\25"+ + "\3\25\3\25\3\25\3\25\3\25\3\25\7\25\u0183\n\25\f\25\16\25\u0186\13\25"+ + "\3\25\3\25\5\25\u018a\n\25\3\26\3\26\5\26\u018e\n\26\3\26\5\26\u0191\n"+ + "\26\3\26\3\26\3\26\3\26\5\26\u0197\n\26\3\26\5\26\u019a\n\26\3\26\3\26"+ + "\3\26\3\26\5\26\u01a0\n\26\3\26\5\26\u01a3\n\26\5\26\u01a5\n\26\3\27\3"+ + "\27\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\3\30\7"+ + "\30\u01b6\n\30\f\30\16\30\u01b9\13\30\3\30\3\30\3\30\3\30\3\30\3\30\3"+ + "\30\3\30\3\30\7\30\u01c4\n\30\f\30\16\30\u01c7\13\30\3\30\3\30\3\30\3"+ + "\30\3\30\3\30\3\30\3\30\3\30\7\30\u01d2\n\30\f\30\16\30\u01d5\13\30\3"+ + "\30\3\30\3\30\5\30\u01da\n\30\3\30\3\30\3\30\3\30\3\30\3\30\7\30\u01e2"+ + "\n\30\f\30\16\30\u01e5\13\30\3\31\3\31\5\31\u01e9\n\31\3\32\5\32\u01ec"+ + "\n\32\3\32\3\32\3\32\3\32\3\32\3\32\5\32\u01f4\n\32\3\32\3\32\3\32\3\32"+ + "\3\32\7\32\u01fb\n\32\f\32\16\32\u01fe\13\32\3\32\3\32\3\32\5\32\u0203"+ + "\n\32\3\32\3\32\3\32\3\32\3\32\3\32\5\32\u020b\n\32\3\32\3\32\3\32\5\32"+ + "\u0210\n\32\3\32\3\32\3\32\3\32\5\32\u0216\n\32\3\32\5\32\u0219\n\32\3"+ + "\33\3\33\5\33\u021d\n\33\3\34\3\34\3\34\3\34\3\34\3\34\5\34\u0225\n\34"+ + "\3\35\3\35\3\35\3\35\5\35\u022b\n\35\3\35\3\35\3\35\3\35\3\35\3\35\3\35"+ + "\3\35\3\35\3\35\7\35\u0237\n\35\f\35\16\35\u023a\13\35\3\36\3\36\3\36"+ + "\3\36\3\36\3\36\3\36\5\36\u0243\n\36\3\36\3\36\3\36\3\36\3\36\3\36\3\36"+ + "\3\36\3\36\3\36\3\36\3\36\5\36\u0251\n\36\3\37\3\37\3\37\3\37\3\37\5\37"+ + "\u0258\n\37\3 \3 \3 \3 \3 \3 \3 \3!\3!\3!\3!\3!\5!\u0266\n!\3\"\3\"\3"+ + 
"\"\3\"\3\"\3\"\3\"\3#\3#\3#\3#\3#\5#\u0274\n#\3$\3$\3$\5$\u0279\n$\3$"+ + "\3$\3$\7$\u027e\n$\f$\16$\u0281\13$\5$\u0283\n$\3$\3$\3%\3%\3%\5%\u028a"+ + "\n%\3&\3&\3&\3&\6&\u0290\n&\r&\16&\u0291\3&\3&\3&\3&\3&\3&\3&\3&\3&\3"+ + "&\3&\3&\3&\3&\3&\3&\3&\5&\u02a5\n&\3\'\3\'\3(\3(\3)\3)\3*\3*\3*\7*\u02b0"+ + "\n*\f*\16*\u02b3\13*\3*\3*\3+\3+\5+\u02b9\n+\3,\3,\3,\5,\u02be\n,\3,\3"+ + ",\3,\3,\5,\u02c4\n,\3,\5,\u02c7\n,\3-\3-\5-\u02cb\n-\3.\3.\3.\5.\u02d0"+ + "\n.\3/\3/\5/\u02d4\n/\3\60\3\60\3\61\3\61\3\61\2\4.8\62\2\4\6\b\n\f\16"+ + "\20\22\24\26\30\32\34\36 \"$&(*,.\60\62\64\668:<>@BDFHJLNPRTVXZ\\^`\2"+ + "\20\b\2\7\7\t\t\31\31,,\62\62\66\66\4\2\"\"BB\4\2\t\t\62\62\4\2\37\37"+ + "%%\3\2\25\26\4\2\7\7aa\4\2\r\r\25\25\4\2\7\7\27\27\3\2XY\3\2Z\\\3\2RW"+ + "\4\2\35\35CC\3\2_`\20\2\b\t\22\24\31\31\33\33\36\36!\",,\62\62\668:<>"+ + "?ABDEGG\u0336\2b\3\2\2\2\4e\3\2\2\2\6\u00d5\3\2\2\2\b\u00e0\3\2\2\2\n"+ + "\u00e4\3\2\2\2\f\u00f9\3\2\2\2\16\u0100\3\2\2\2\20\u0102\3\2\2\2\22\u0106"+ + "\3\2\2\2\24\u0122\3\2\2\2\26\u012c\3\2\2\2\30\u0136\3\2\2\2\32\u0145\3"+ + "\2\2\2\34\u0147\3\2\2\2\36\u014d\3\2\2\2 \u014f\3\2\2\2\"\u0156\3\2\2"+ + "\2$\u0168\3\2\2\2&\u0179\3\2\2\2(\u0189\3\2\2\2*\u01a4\3\2\2\2,\u01a6"+ + "\3\2\2\2.\u01d9\3\2\2\2\60\u01e6\3\2\2\2\62\u0218\3\2\2\2\64\u021a\3\2"+ + "\2\2\66\u0224\3\2\2\28\u022a\3\2\2\2:\u0250\3\2\2\2<\u0257\3\2\2\2>\u0259"+ + "\3\2\2\2@\u0265\3\2\2\2B\u0267\3\2\2\2D\u0273\3\2\2\2F\u0275\3\2\2\2H"+ + "\u0289\3\2\2\2J\u02a4\3\2\2\2L\u02a6\3\2\2\2N\u02a8\3\2\2\2P\u02aa\3\2"+ + "\2\2R\u02b1\3\2\2\2T\u02b8\3\2\2\2V\u02c6\3\2\2\2X\u02ca\3\2\2\2Z\u02cf"+ + "\3\2\2\2\\\u02d3\3\2\2\2^\u02d5\3\2\2\2`\u02d7\3\2\2\2bc\5\6\4\2cd\7\2"+ + "\2\3d\3\3\2\2\2ef\5,\27\2fg\7\2\2\3g\5\3\2\2\2h\u00d6\5\b\5\2iw\7\33\2"+ + "\2js\7\3\2\2kl\78\2\2lr\t\2\2\2mn\7\36\2\2nr\t\3\2\2op\7G\2\2pr\5N(\2"+ + "qk\3\2\2\2qm\3\2\2\2qo\3\2\2\2ru\3\2\2\2sq\3\2\2\2st\3\2\2\2tv\3\2\2\2"+ + "us\3\2\2\2vx\7\4\2\2wj\3\2\2\2wx\3\2\2\2xy\3\2\2\2y\u00d6\5\6\4\2z\u0086"+ + "\7\24\2\2{\u0082\7\3\2\2|}\78\2\2}\u0081\t\4\2\2~\177\7\36\2\2\177\u0081"+ + "\t\3\2\2\u0080|\3\2\2\2\u0080~\3\2\2\2\u0081\u0084\3\2\2\2\u0082\u0080"+ + "\3\2\2\2\u0082\u0083\3\2\2\2\u0083\u0085\3\2\2\2\u0084\u0082\3\2\2\2\u0085"+ + "\u0087\7\4\2\2\u0086{\3\2\2\2\u0086\u0087\3\2\2\2\u0087\u0088\3\2\2\2"+ + "\u0088\u00d6\5\6\4\2\u0089\u008a\7>\2\2\u008a\u008f\7A\2\2\u008b\u008d"+ + "\7*\2\2\u008c\u008b\3\2\2\2\u008c\u008d\3\2\2\2\u008d\u008e\3\2\2\2\u008e"+ + "\u0090\5\64\33\2\u008f\u008c\3\2\2\2\u008f\u0090\3\2\2\2\u0090\u00d6\3"+ + "\2\2\2\u0091\u0092\7>\2\2\u0092\u0093\7\23\2\2\u0093\u0094\t\5\2\2\u0094"+ + "\u00d6\5V,\2\u0095\u0096\t\6\2\2\u0096\u00d6\5V,\2\u0097\u0098\7>\2\2"+ + "\u0098\u009d\7!\2\2\u0099\u009b\7*\2\2\u009a\u0099\3\2\2\2\u009a\u009b"+ + "\3\2\2\2\u009b\u009c\3\2\2\2\u009c\u009e\5\64\33\2\u009d\u009a\3\2\2\2"+ + "\u009d\u009e\3\2\2\2\u009e\u00d6\3\2\2\2\u009f\u00a0\7>\2\2\u00a0\u00d6"+ + "\7<\2\2\u00a1\u00a2\7?\2\2\u00a2\u00d6\7\22\2\2\u00a3\u00a4\7?\2\2\u00a4"+ + "\u00aa\7A\2\2\u00a5\u00a7\7\21\2\2\u00a6\u00a8\7*\2\2\u00a7\u00a6\3\2"+ + "\2\2\u00a7\u00a8\3\2\2\2\u00a8\u00a9\3\2\2\2\u00a9\u00ab\5\64\33\2\u00aa"+ + "\u00a5\3\2\2\2\u00aa\u00ab\3\2\2\2\u00ab\u00b0\3\2\2\2\u00ac\u00ae\7*"+ + "\2\2\u00ad\u00ac\3\2\2\2\u00ad\u00ae\3\2\2\2\u00ae\u00af\3\2\2\2\u00af"+ + "\u00b1\5\64\33\2\u00b0\u00ad\3\2\2\2\u00b0\u00b1\3\2\2\2\u00b1\u00bb\3"+ + "\2\2\2\u00b2\u00b3\7D\2\2\u00b3\u00b8\5^\60\2\u00b4\u00b5\7\5\2\2\u00b5"+ + "\u00b7\5^\60\2\u00b6\u00b4\3\2\2\2\u00b7\u00ba\3\2\2\2\u00b8\u00b6\3\2"+ + 
"\2\2\u00b8\u00b9\3\2\2\2\u00b9\u00bc\3\2\2\2\u00ba\u00b8\3\2\2\2\u00bb"+ + "\u00b2\3\2\2\2\u00bb\u00bc\3\2\2\2\u00bc\u00d6\3\2\2\2\u00bd\u00be\7?"+ + "\2\2\u00be\u00c1\7\23\2\2\u00bf\u00c0\7\21\2\2\u00c0\u00c2\5^\60\2\u00c1"+ + "\u00bf\3\2\2\2\u00c1\u00c2\3\2\2\2\u00c2\u00c8\3\2\2\2\u00c3\u00c5\7@"+ + "\2\2\u00c4\u00c6\7*\2\2\u00c5\u00c4\3\2\2\2\u00c5\u00c6\3\2\2\2\u00c6"+ + "\u00c7\3\2\2\2\u00c7\u00c9\5\64\33\2\u00c8\u00c3\3\2\2\2\u00c8\u00c9\3"+ + "\2\2\2\u00c9\u00ce\3\2\2\2\u00ca\u00cc\7*\2\2\u00cb\u00ca\3\2\2\2\u00cb"+ + "\u00cc\3\2\2\2\u00cc\u00cd\3\2\2\2\u00cd\u00cf\5\64\33\2\u00ce\u00cb\3"+ + "\2\2\2\u00ce\u00cf\3\2\2\2\u00cf\u00d6\3\2\2\2\u00d0\u00d1\7?\2\2\u00d1"+ + "\u00d6\7E\2\2\u00d2\u00d3\7?\2\2\u00d3\u00d4\7@\2\2\u00d4\u00d6\7E\2\2"+ + "\u00d5h\3\2\2\2\u00d5i\3\2\2\2\u00d5z\3\2\2\2\u00d5\u0089\3\2\2\2\u00d5"+ + "\u0091\3\2\2\2\u00d5\u0095\3\2\2\2\u00d5\u0097\3\2\2\2\u00d5\u009f\3\2"+ + "\2\2\u00d5\u00a1\3\2\2\2\u00d5\u00a3\3\2\2\2\u00d5\u00bd\3\2\2\2\u00d5"+ + "\u00d0\3\2\2\2\u00d5\u00d2\3\2\2\2\u00d6\7\3\2\2\2\u00d7\u00d8\7I\2\2"+ + "\u00d8\u00dd\5\34\17\2\u00d9\u00da\7\5\2\2\u00da\u00dc\5\34\17\2\u00db"+ + "\u00d9\3\2\2\2\u00dc\u00df\3\2\2\2\u00dd\u00db\3\2\2\2\u00dd\u00de\3\2"+ + "\2\2\u00de\u00e1\3\2\2\2\u00df\u00dd\3\2\2\2\u00e0\u00d7\3\2\2\2\u00e0"+ + "\u00e1\3\2\2\2\u00e1\u00e2\3\2\2\2\u00e2\u00e3\5\n\6\2\u00e3\t\3\2\2\2"+ + "\u00e4\u00ef\5\16\b\2\u00e5\u00e6\7\64\2\2\u00e6\u00e7\7\17\2\2\u00e7"+ + "\u00ec\5\20\t\2\u00e8\u00e9\7\5\2\2\u00e9\u00eb\5\20\t\2\u00ea\u00e8\3"+ + "\2\2\2\u00eb\u00ee\3\2\2\2\u00ec\u00ea\3\2\2\2\u00ec\u00ed\3\2\2\2\u00ed"+ + "\u00f0\3\2\2\2\u00ee\u00ec\3\2\2\2\u00ef\u00e5\3\2\2\2\u00ef\u00f0\3\2"+ + "\2\2\u00f0\u00f2\3\2\2\2\u00f1\u00f3\5\f\7\2\u00f2\u00f1\3\2\2\2\u00f2"+ + "\u00f3\3\2\2\2\u00f3\13\3\2\2\2\u00f4\u00f5\7+\2\2\u00f5\u00fa\t\7\2\2"+ + "\u00f6\u00f7\7L\2\2\u00f7\u00f8\t\7\2\2\u00f8\u00fa\7Q\2\2\u00f9\u00f4"+ + "\3\2\2\2\u00f9\u00f6\3\2\2\2\u00fa\r\3\2\2\2\u00fb\u0101\5\22\n\2\u00fc"+ + "\u00fd\7\3\2\2\u00fd\u00fe\5\n\6\2\u00fe\u00ff\7\4\2\2\u00ff\u0101\3\2"+ + "\2\2\u0100\u00fb\3\2\2\2\u0100\u00fc\3\2\2\2\u0101\17\3\2\2\2\u0102\u0104"+ + "\5,\27\2\u0103\u0105\t\b\2\2\u0104\u0103\3\2\2\2\u0104\u0105\3\2\2\2\u0105"+ + "\21\3\2\2\2\u0106\u0108\7=\2\2\u0107\u0109\5\36\20\2\u0108\u0107\3\2\2"+ + "\2\u0108\u0109\3\2\2\2\u0109\u010a\3\2\2\2\u010a\u010f\5 \21\2\u010b\u010c"+ + "\7\5\2\2\u010c\u010e\5 \21\2\u010d\u010b\3\2\2\2\u010e\u0111\3\2\2\2\u010f"+ + "\u010d\3\2\2\2\u010f\u0110\3\2\2\2\u0110\u0113\3\2\2\2\u0111\u010f\3\2"+ + "\2\2\u0112\u0114\5\24\13\2\u0113\u0112\3\2\2\2\u0113\u0114\3\2\2\2\u0114"+ + "\u0117\3\2\2\2\u0115\u0116\7H\2\2\u0116\u0118\5.\30\2\u0117\u0115\3\2"+ + "\2\2\u0117\u0118\3\2\2\2\u0118\u011c\3\2\2\2\u0119\u011a\7#\2\2\u011a"+ + "\u011b\7\17\2\2\u011b\u011d\5\26\f\2\u011c\u0119\3\2\2\2\u011c\u011d\3"+ + "\2\2\2\u011d\u0120\3\2\2\2\u011e\u011f\7$\2\2\u011f\u0121\5.\30\2\u0120"+ + "\u011e\3\2\2\2\u0120\u0121\3\2\2\2\u0121\23\3\2\2\2\u0122\u0123\7\37\2"+ + "\2\u0123\u0128\5\"\22\2\u0124\u0125\7\5\2\2\u0125\u0127\5\"\22\2\u0126"+ + "\u0124\3\2\2\2\u0127\u012a\3\2\2\2\u0128\u0126\3\2\2\2\u0128\u0129\3\2"+ + "\2\2\u0129\25\3\2\2\2\u012a\u0128\3\2\2\2\u012b\u012d\5\36\20\2\u012c"+ + "\u012b\3\2\2\2\u012c\u012d\3\2\2\2\u012d\u012e\3\2\2\2\u012e\u0133\5\30"+ + "\r\2\u012f\u0130\7\5\2\2\u0130\u0132\5\30\r\2\u0131\u012f\3\2\2\2\u0132"+ + "\u0135\3\2\2\2\u0133\u0131\3\2\2\2\u0133\u0134\3\2\2\2\u0134\27\3\2\2"+ + "\2\u0135\u0133\3\2\2\2\u0136\u0137\5\32\16\2\u0137\31\3\2\2\2\u0138\u0141"+ + 
"\7\3\2\2\u0139\u013e\5,\27\2\u013a\u013b\7\5\2\2\u013b\u013d\5,\27\2\u013c"+ + "\u013a\3\2\2\2\u013d\u0140\3\2\2\2\u013e\u013c\3\2\2\2\u013e\u013f\3\2"+ + "\2\2\u013f\u0142\3\2\2\2\u0140\u013e\3\2\2\2\u0141\u0139\3\2\2\2\u0141"+ + "\u0142\3\2\2\2\u0142\u0143\3\2\2\2\u0143\u0146\7\4\2\2\u0144\u0146\5,"+ + "\27\2\u0145\u0138\3\2\2\2\u0145\u0144\3\2\2\2\u0146\33\3\2\2\2\u0147\u0148"+ + "\5T+\2\u0148\u0149\7\f\2\2\u0149\u014a\7\3\2\2\u014a\u014b\5\n\6\2\u014b"+ + "\u014c\7\4\2\2\u014c\35\3\2\2\2\u014d\u014e\t\t\2\2\u014e\37\3\2\2\2\u014f"+ + "\u0154\5,\27\2\u0150\u0152\7\f\2\2\u0151\u0150\3\2\2\2\u0151\u0152\3\2"+ + "\2\2\u0152\u0153\3\2\2\2\u0153\u0155\5T+\2\u0154\u0151\3\2\2\2\u0154\u0155"+ + "\3\2\2\2\u0155!\3\2\2\2\u0156\u015a\5*\26\2\u0157\u0159\5$\23\2\u0158"+ + "\u0157\3\2\2\2\u0159\u015c\3\2\2\2\u015a\u0158\3\2\2\2\u015a\u015b\3\2"+ + "\2\2\u015b#\3\2\2\2\u015c\u015a\3\2\2\2\u015d\u015e\5&\24\2\u015e\u015f"+ + "\7(\2\2\u015f\u0161\5*\26\2\u0160\u0162\5(\25\2\u0161\u0160\3\2\2\2\u0161"+ + "\u0162\3\2\2\2\u0162\u0169\3\2\2\2\u0163\u0164\7.\2\2\u0164\u0165\5&\24"+ + "\2\u0165\u0166\7(\2\2\u0166\u0167\5*\26\2\u0167\u0169\3\2\2\2\u0168\u015d"+ + "\3\2\2\2\u0168\u0163\3\2\2\2\u0169%\3\2\2\2\u016a\u016c\7&\2\2\u016b\u016a"+ + "\3\2\2\2\u016b\u016c\3\2\2\2\u016c\u017a\3\2\2\2\u016d\u016f\7)\2\2\u016e"+ + "\u0170\7\65\2\2\u016f\u016e\3\2\2\2\u016f\u0170\3\2\2\2\u0170\u017a\3"+ + "\2\2\2\u0171\u0173\79\2\2\u0172\u0174\7\65\2\2\u0173\u0172\3\2\2\2\u0173"+ + "\u0174\3\2\2\2\u0174\u017a\3\2\2\2\u0175\u0177\7 \2\2\u0176\u0178\7\65"+ + "\2\2\u0177\u0176\3\2\2\2\u0177\u0178\3\2\2\2\u0178\u017a\3\2\2\2\u0179"+ + "\u016b\3\2\2\2\u0179\u016d\3\2\2\2\u0179\u0171\3\2\2\2\u0179\u0175\3\2"+ + "\2\2\u017a\'\3\2\2\2\u017b\u017c\7\61\2\2\u017c\u018a\5.\30\2\u017d\u017e"+ + "\7F\2\2\u017e\u017f\7\3\2\2\u017f\u0184\5T+\2\u0180\u0181\7\5\2\2\u0181"+ + "\u0183\5T+\2\u0182\u0180\3\2\2\2\u0183\u0186\3\2\2\2\u0184\u0182\3\2\2"+ + "\2\u0184\u0185\3\2\2\2\u0185\u0187\3\2\2\2\u0186\u0184\3\2\2\2\u0187\u0188"+ + "\7\4\2\2\u0188\u018a\3\2\2\2\u0189\u017b\3\2\2\2\u0189\u017d\3\2\2\2\u018a"+ + ")\3\2\2\2\u018b\u0190\5V,\2\u018c\u018e\7\f\2\2\u018d\u018c\3\2\2\2\u018d"+ + "\u018e\3\2\2\2\u018e\u018f\3\2\2\2\u018f\u0191\5R*\2\u0190\u018d\3\2\2"+ + "\2\u0190\u0191\3\2\2\2\u0191\u01a5\3\2\2\2\u0192\u0193\7\3\2\2\u0193\u0194"+ + "\5\n\6\2\u0194\u0199\7\4\2\2\u0195\u0197\7\f\2\2\u0196\u0195\3\2\2\2\u0196"+ + "\u0197\3\2\2\2\u0197\u0198\3\2\2\2\u0198\u019a\5R*\2\u0199\u0196\3\2\2"+ + "\2\u0199\u019a\3\2\2\2\u019a\u01a5\3\2\2\2\u019b\u019c\7\3\2\2\u019c\u019d"+ + "\5\"\22\2\u019d\u01a2\7\4\2\2\u019e\u01a0\7\f\2\2\u019f\u019e\3\2\2\2"+ + "\u019f\u01a0\3\2\2\2\u01a0\u01a1\3\2\2\2\u01a1\u01a3\5R*\2\u01a2\u019f"+ + "\3\2\2\2\u01a2\u01a3\3\2\2\2\u01a3\u01a5\3\2\2\2\u01a4\u018b\3\2\2\2\u01a4"+ + "\u0192\3\2\2\2\u01a4\u019b\3\2\2\2\u01a5+\3\2\2\2\u01a6\u01a7\5.\30\2"+ + "\u01a7-\3\2\2\2\u01a8\u01a9\b\30\1\2\u01a9\u01aa\7/\2\2\u01aa\u01da\5"+ + ".\30\n\u01ab\u01ac\7\32\2\2\u01ac\u01ad\7\3\2\2\u01ad\u01ae\5\b\5\2\u01ae"+ + "\u01af\7\4\2\2\u01af\u01da\3\2\2\2\u01b0\u01b1\7;\2\2\u01b1\u01b2\7\3"+ + "\2\2\u01b2\u01b7\5^\60\2\u01b3\u01b4\7\5\2\2\u01b4\u01b6\5^\60\2\u01b5"+ + "\u01b3\3\2\2\2\u01b6\u01b9\3\2\2\2\u01b7\u01b5\3\2\2\2\u01b7\u01b8\3\2"+ + "\2\2\u01b8\u01ba\3\2\2\2\u01b9\u01b7\3\2\2\2\u01ba\u01bb\7\4\2\2\u01bb"+ + "\u01da\3\2\2\2\u01bc\u01bd\7-\2\2\u01bd\u01be\7\3\2\2\u01be\u01bf\5R*"+ + "\2\u01bf\u01c0\7\5\2\2\u01c0\u01c5\5^\60\2\u01c1\u01c2\7\5\2\2\u01c2\u01c4"+ + 
"\5^\60\2\u01c3\u01c1\3\2\2\2\u01c4\u01c7\3\2\2\2\u01c5\u01c3\3\2\2\2\u01c5"+ + "\u01c6\3\2\2\2\u01c6\u01c8\3\2\2\2\u01c7\u01c5\3\2\2\2\u01c8\u01c9\7\4"+ + "\2\2\u01c9\u01da\3\2\2\2\u01ca\u01cb\7-\2\2\u01cb\u01cc\7\3\2\2\u01cc"+ + "\u01cd\5^\60\2\u01cd\u01ce\7\5\2\2\u01ce\u01d3\5^\60\2\u01cf\u01d0\7\5"+ + "\2\2\u01d0\u01d2\5^\60\2\u01d1\u01cf\3\2\2\2\u01d2\u01d5\3\2\2\2\u01d3"+ + "\u01d1\3\2\2\2\u01d3\u01d4\3\2\2\2\u01d4\u01d6\3\2\2\2\u01d5\u01d3\3\2"+ + "\2\2\u01d6\u01d7\7\4\2\2\u01d7\u01da\3\2\2\2\u01d8\u01da\5\60\31\2\u01d9"+ + "\u01a8\3\2\2\2\u01d9\u01ab\3\2\2\2\u01d9\u01b0\3\2\2\2\u01d9\u01bc\3\2"+ + "\2\2\u01d9\u01ca\3\2\2\2\u01d9\u01d8\3\2\2\2\u01da\u01e3\3\2\2\2\u01db"+ + "\u01dc\f\4\2\2\u01dc\u01dd\7\n\2\2\u01dd\u01e2\5.\30\5\u01de\u01df\f\3"+ + "\2\2\u01df\u01e0\7\63\2\2\u01e0\u01e2\5.\30\4\u01e1\u01db\3\2\2\2\u01e1"+ + "\u01de\3\2\2\2\u01e2\u01e5\3\2\2\2\u01e3\u01e1\3\2\2\2\u01e3\u01e4\3\2"+ + "\2\2\u01e4/\3\2\2\2\u01e5\u01e3\3\2\2\2\u01e6\u01e8\58\35\2\u01e7\u01e9"+ + "\5\62\32\2\u01e8\u01e7\3\2\2\2\u01e8\u01e9\3\2\2\2\u01e9\61\3\2\2\2\u01ea"+ + "\u01ec\7/\2\2\u01eb\u01ea\3\2\2\2\u01eb\u01ec\3\2\2\2\u01ec\u01ed\3\2"+ + "\2\2\u01ed\u01ee\7\16\2\2\u01ee\u01ef\58\35\2\u01ef\u01f0\7\n\2\2\u01f0"+ + "\u01f1\58\35\2\u01f1\u0219\3\2\2\2\u01f2\u01f4\7/\2\2\u01f3\u01f2\3\2"+ + "\2\2\u01f3\u01f4\3\2\2\2\u01f4\u01f5\3\2\2\2\u01f5\u01f6\7%\2\2\u01f6"+ + "\u01f7\7\3\2\2\u01f7\u01fc\5,\27\2\u01f8\u01f9\7\5\2\2\u01f9\u01fb\5,"+ + "\27\2\u01fa\u01f8\3\2\2\2\u01fb\u01fe\3\2\2\2\u01fc\u01fa\3\2\2\2\u01fc"+ + "\u01fd\3\2\2\2\u01fd\u01ff\3\2\2\2\u01fe\u01fc\3\2\2\2\u01ff\u0200\7\4"+ + "\2\2\u0200\u0219\3\2\2\2\u0201\u0203\7/\2\2\u0202\u0201\3\2\2\2\u0202"+ + "\u0203\3\2\2\2\u0203\u0204\3\2\2\2\u0204\u0205\7%\2\2\u0205\u0206\7\3"+ + "\2\2\u0206\u0207\5\b\5\2\u0207\u0208\7\4\2\2\u0208\u0219\3\2\2\2\u0209"+ + "\u020b\7/\2\2\u020a\u0209\3\2\2\2\u020a\u020b\3\2\2\2\u020b\u020c\3\2"+ + "\2\2\u020c\u020d\7*\2\2\u020d\u0219\5\64\33\2\u020e\u0210\7/\2\2\u020f"+ + "\u020e\3\2\2\2\u020f\u0210\3\2\2\2\u0210\u0211\3\2\2\2\u0211\u0212\7:"+ + "\2\2\u0212\u0219\5^\60\2\u0213\u0215\7\'\2\2\u0214\u0216\7/\2\2\u0215"+ + "\u0214\3\2\2\2\u0215\u0216\3\2\2\2\u0216\u0217\3\2\2\2\u0217\u0219\7\60"+ + "\2\2\u0218\u01eb\3\2\2\2\u0218\u01f3\3\2\2\2\u0218\u0202\3\2\2\2\u0218"+ + "\u020a\3\2\2\2\u0218\u020f\3\2\2\2\u0218\u0213\3\2\2\2\u0219\63\3\2\2"+ + "\2\u021a\u021c\5^\60\2\u021b\u021d\5\66\34\2\u021c\u021b\3\2\2\2\u021c"+ + "\u021d\3\2\2\2\u021d\65\3\2\2\2\u021e\u021f\7\30\2\2\u021f\u0225\5^\60"+ + "\2\u0220\u0221\7J\2\2\u0221\u0222\5^\60\2\u0222\u0223\7Q\2\2\u0223\u0225"+ + "\3\2\2\2\u0224\u021e\3\2\2\2\u0224\u0220\3\2\2\2\u0225\67\3\2\2\2\u0226"+ + "\u0227\b\35\1\2\u0227\u022b\5:\36\2\u0228\u0229\t\n\2\2\u0229\u022b\5"+ + "8\35\6\u022a\u0226\3\2\2\2\u022a\u0228\3\2\2\2\u022b\u0238\3\2\2\2\u022c"+ + "\u022d\f\5\2\2\u022d\u022e\t\13\2\2\u022e\u0237\58\35\6\u022f\u0230\f"+ + "\4\2\2\u0230\u0231\t\n\2\2\u0231\u0237\58\35\5\u0232\u0233\f\3\2\2\u0233"+ + "\u0234\5L\'\2\u0234\u0235\58\35\4\u0235\u0237\3\2\2\2\u0236\u022c\3\2"+ + "\2\2\u0236\u022f\3\2\2\2\u0236\u0232\3\2\2\2\u0237\u023a\3\2\2\2\u0238"+ + "\u0236\3\2\2\2\u0238\u0239\3\2\2\2\u02399\3\2\2\2\u023a\u0238\3\2\2\2"+ + "\u023b\u0251\5<\37\2\u023c\u0251\5@!\2\u023d\u0251\5J&\2\u023e\u0251\7"+ + "Z\2\2\u023f\u0240\5R*\2\u0240\u0241\7^\2\2\u0241\u0243\3\2\2\2\u0242\u023f"+ + "\3\2\2\2\u0242\u0243\3\2\2\2\u0243\u0244\3\2\2\2\u0244\u0251\7Z\2\2\u0245"+ + "\u0251\5D#\2\u0246\u0247\7\3\2\2\u0247\u0248\5\b\5\2\u0248\u0249\7\4\2"+ + 
"\2\u0249\u0251\3\2\2\2\u024a\u0251\5T+\2\u024b\u0251\5R*\2\u024c\u024d"+ + "\7\3\2\2\u024d\u024e\5,\27\2\u024e\u024f\7\4\2\2\u024f\u0251\3\2\2\2\u0250"+ + "\u023b\3\2\2\2\u0250\u023c\3\2\2\2\u0250\u023d\3\2\2\2\u0250\u023e\3\2"+ + "\2\2\u0250\u0242\3\2\2\2\u0250\u0245\3\2\2\2\u0250\u0246\3\2\2\2\u0250"+ + "\u024a\3\2\2\2\u0250\u024b\3\2\2\2\u0250\u024c\3\2\2\2\u0251;\3\2\2\2"+ + "\u0252\u0258\5> \2\u0253\u0254\7K\2\2\u0254\u0255\5> \2\u0255\u0256\7"+ + "Q\2\2\u0256\u0258\3\2\2\2\u0257\u0252\3\2\2\2\u0257\u0253\3\2\2\2\u0258"+ + "=\3\2\2\2\u0259\u025a\7\20\2\2\u025a\u025b\7\3\2\2\u025b\u025c\5,\27\2"+ + "\u025c\u025d\7\f\2\2\u025d\u025e\5P)\2\u025e\u025f\7\4\2\2\u025f?\3\2"+ + "\2\2\u0260\u0266\5B\"\2\u0261\u0262\7K\2\2\u0262\u0263\5B\"\2\u0263\u0264"+ + "\7Q\2\2\u0264\u0266\3\2\2\2\u0265\u0260\3\2\2\2\u0265\u0261\3\2\2\2\u0266"+ + "A\3\2\2\2\u0267\u0268\7\34\2\2\u0268\u0269\7\3\2\2\u0269\u026a\5T+\2\u026a"+ + "\u026b\7\37\2\2\u026b\u026c\58\35\2\u026c\u026d\7\4\2\2\u026dC\3\2\2\2"+ + "\u026e\u0274\5F$\2\u026f\u0270\7K\2\2\u0270\u0271\5F$\2\u0271\u0272\7"+ + "Q\2\2\u0272\u0274\3\2\2\2\u0273\u026e\3\2\2\2\u0273\u026f\3\2\2\2\u0274"+ + "E\3\2\2\2\u0275\u0276\5H%\2\u0276\u0282\7\3\2\2\u0277\u0279\5\36\20\2"+ + "\u0278\u0277\3\2\2\2\u0278\u0279\3\2\2\2\u0279\u027a\3\2\2\2\u027a\u027f"+ + "\5,\27\2\u027b\u027c\7\5\2\2\u027c\u027e\5,\27\2\u027d\u027b\3\2\2\2\u027e"+ + "\u0281\3\2\2\2\u027f\u027d\3\2\2\2\u027f\u0280\3\2\2\2\u0280\u0283\3\2"+ + "\2\2\u0281\u027f\3\2\2\2\u0282\u0278\3\2\2\2\u0282\u0283\3\2\2\2\u0283"+ + "\u0284\3\2\2\2\u0284\u0285\7\4\2\2\u0285G\3\2\2\2\u0286\u028a\7)\2\2\u0287"+ + "\u028a\79\2\2\u0288\u028a\5T+\2\u0289\u0286\3\2\2\2\u0289\u0287\3\2\2"+ + "\2\u0289\u0288\3\2\2\2\u028aI\3\2\2\2\u028b\u02a5\7\60\2\2\u028c\u02a5"+ + "\5\\/\2\u028d\u02a5\5N(\2\u028e\u0290\7`\2\2\u028f\u028e\3\2\2\2\u0290"+ + "\u0291\3\2\2\2\u0291\u028f\3\2\2\2\u0291\u0292\3\2\2\2\u0292\u02a5\3\2"+ + "\2\2\u0293\u02a5\7_\2\2\u0294\u0295\7M\2\2\u0295\u0296\5^\60\2\u0296\u0297"+ + "\7Q\2\2\u0297\u02a5\3\2\2\2\u0298\u0299\7N\2\2\u0299\u029a\5^\60\2\u029a"+ + "\u029b\7Q\2\2\u029b\u02a5\3\2\2\2\u029c\u029d\7O\2\2\u029d\u029e\5^\60"+ + "\2\u029e\u029f\7Q\2\2\u029f\u02a5\3\2\2\2\u02a0\u02a1\7P\2\2\u02a1\u02a2"+ + "\5^\60\2\u02a2\u02a3\7Q\2\2\u02a3\u02a5\3\2\2\2\u02a4\u028b\3\2\2\2\u02a4"+ + "\u028c\3\2\2\2\u02a4\u028d\3\2\2\2\u02a4\u028f\3\2\2\2\u02a4\u0293\3\2"+ + "\2\2\u02a4\u0294\3\2\2\2\u02a4\u0298\3\2\2\2\u02a4\u029c\3\2\2\2\u02a4"+ + "\u02a0\3\2\2\2\u02a5K\3\2\2\2\u02a6\u02a7\t\f\2\2\u02a7M\3\2\2\2\u02a8"+ + "\u02a9\t\r\2\2\u02a9O\3\2\2\2\u02aa\u02ab\5T+\2\u02abQ\3\2\2\2\u02ac\u02ad"+ + "\5T+\2\u02ad\u02ae\7^\2\2\u02ae\u02b0\3\2\2\2\u02af\u02ac\3\2\2\2\u02b0"+ + "\u02b3\3\2\2\2\u02b1\u02af\3\2\2\2\u02b1\u02b2\3\2\2\2\u02b2\u02b4\3\2"+ + "\2\2\u02b3\u02b1\3\2\2\2\u02b4\u02b5\5T+\2\u02b5S\3\2\2\2\u02b6\u02b9"+ + "\5X-\2\u02b7\u02b9\5Z.\2\u02b8\u02b6\3\2\2\2\u02b8\u02b7\3\2\2\2\u02b9"+ + "U\3\2\2\2\u02ba\u02bb\5T+\2\u02bb\u02bc\7\6\2\2\u02bc\u02be\3\2\2\2\u02bd"+ + "\u02ba\3\2\2\2\u02bd\u02be\3\2\2\2\u02be\u02bf\3\2\2\2\u02bf\u02c7\7e"+ + "\2\2\u02c0\u02c1\5T+\2\u02c1\u02c2\7\6\2\2\u02c2\u02c4\3\2\2\2\u02c3\u02c0"+ + "\3\2\2\2\u02c3\u02c4\3\2\2\2\u02c4\u02c5\3\2\2\2\u02c5\u02c7\5T+\2\u02c6"+ + "\u02bd\3\2\2\2\u02c6\u02c3\3\2\2\2\u02c7W\3\2\2\2\u02c8\u02cb\7f\2\2\u02c9"+ + "\u02cb\7g\2\2\u02ca\u02c8\3\2\2\2\u02ca\u02c9\3\2\2\2\u02cbY\3\2\2\2\u02cc"+ + "\u02d0\7c\2\2\u02cd\u02d0\5`\61\2\u02ce\u02d0\7d\2\2\u02cf\u02cc\3\2\2"+ + "\2\u02cf\u02cd\3\2\2\2\u02cf\u02ce\3\2\2\2\u02d0[\3\2\2\2\u02d1\u02d4"+ + 
"\7b\2\2\u02d2\u02d4\7a\2\2\u02d3\u02d1\3\2\2\2\u02d3\u02d2\3\2\2\2\u02d4"+ + "]\3\2\2\2\u02d5\u02d6\t\16\2\2\u02d6_\3\2\2\2\u02d7\u02d8\t\17\2\2\u02d8"+ + "a\3\2\2\2fqsw\u0080\u0082\u0086\u008c\u008f\u009a\u009d\u00a7\u00aa\u00ad"+ + "\u00b0\u00b8\u00bb\u00c1\u00c5\u00c8\u00cb\u00ce\u00d5\u00dd\u00e0\u00ec"+ + "\u00ef\u00f2\u00f9\u0100\u0104\u0108\u010f\u0113\u0117\u011c\u0120\u0128"+ + "\u012c\u0133\u013e\u0141\u0145\u0151\u0154\u015a\u0161\u0168\u016b\u016f"+ + "\u0173\u0177\u0179\u0184\u0189\u018d\u0190\u0196\u0199\u019f\u01a2\u01a4"+ + "\u01b7\u01c5\u01d3\u01d9\u01e1\u01e3\u01e8\u01eb\u01f3\u01fc\u0202\u020a"+ + "\u020f\u0215\u0218\u021c\u0224\u022a\u0236\u0238\u0242\u0250\u0257\u0265"+ + "\u0273\u0278\u027f\u0282\u0289\u0291\u02a4\u02b1\u02b8\u02bd\u02c3\u02c6"+ + "\u02ca\u02cf\u02d3"; public static final ATN _ATN = new ATNDeserializer().deserialize(_serializedATN.toCharArray()); static { diff --git a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseVisitor.java b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseVisitor.java index 6745b3fa89ba9..b2ad5c8f770f9 100644 --- a/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseVisitor.java +++ b/x-pack/plugin/sql/src/main/java/org/elasticsearch/xpack/sql/parser/SqlBaseVisitor.java @@ -445,6 +445,12 @@ interface SqlBaseVisitor extends ParseTreeVisitor { * @return the visitor result */ T visitFunctionTemplate(SqlBaseParser.FunctionTemplateContext ctx); + /** + * Visit a parse tree produced by {@link SqlBaseParser#functionName}. + * @param ctx the parse tree + * @return the visitor result + */ + T visitFunctionName(SqlBaseParser.FunctionNameContext ctx); /** * Visit a parse tree produced by the {@code nullLiteral} * labeled alternative in {@link SqlBaseParser#constant}. 
diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/ExpressionTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/ExpressionTests.java index 3702939dd377e..ecb5b83896eb2 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/ExpressionTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/ExpressionTests.java @@ -8,6 +8,7 @@ import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.sql.expression.Expression; import org.elasticsearch.xpack.sql.expression.Literal; +import org.elasticsearch.xpack.sql.expression.function.UnresolvedFunction; import org.elasticsearch.xpack.sql.expression.function.scalar.arithmetic.Neg; import org.elasticsearch.xpack.sql.type.DataType; @@ -15,6 +16,13 @@ public class ExpressionTests extends ESTestCase { private final SqlParser parser = new SqlParser(); + public void testTokenFunctionName() throws Exception { + Expression lt = parser.createExpression("LEFT()"); + assertEquals(UnresolvedFunction.class, lt.getClass()); + UnresolvedFunction uf = (UnresolvedFunction) lt; + assertEquals("LEFT", uf.functionName()); + } + public void testLiteralLong() throws Exception { Expression lt = parser.createExpression(String.valueOf(Long.MAX_VALUE)); assertEquals(Literal.class, lt.getClass()); diff --git a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/SqlParserTests.java b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/SqlParserTests.java index f7f03e5e4b70f..de9c6c56da099 100644 --- a/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/SqlParserTests.java +++ b/x-pack/plugin/sql/src/test/java/org/elasticsearch/xpack/sql/parser/SqlParserTests.java @@ -23,6 +23,7 @@ import static org.hamcrest.Matchers.instanceOf; public class SqlParserTests extends ESTestCase { + public void testSelectStar() { singleProjection(project(parseStatement("SELECT * FROM foo")), UnresolvedStar.class); } @@ -44,6 +45,11 @@ public void testSelectScore() { assertEquals("SCORE", f.functionName()); } + public void testSelectRightFunction() { + UnresolvedFunction f = singleProjection(project(parseStatement("SELECT RIGHT()")), UnresolvedFunction.class); + assertEquals("RIGHT", f.functionName()); + } + public void testOrderByField() { Order.OrderDirection dir = randomFrom(Order.OrderDirection.values()); OrderBy ob = orderBy(parseStatement("SELECT * FROM foo ORDER BY bar" + stringForDirection(dir))); From 9371e77824f2ebc4dd000f921902ee1a81d082a9 Mon Sep 17 00:00:00 2001 From: Andy Bristol Date: Mon, 16 Jul 2018 13:57:17 -0700 Subject: [PATCH 059/107] Revert "[test] disable packaging tests for suse boxes" This reverts commit 30d6fd3ca74c44de27f8314fe7c070ac1d23819a. 
--- .../gradle/vagrant/VagrantTestPlugin.groovy | 12 ++---------- 1 file changed, 2 insertions(+), 10 deletions(-) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy index de3c0dfc3285f..d4d1d857e90d4 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy @@ -526,11 +526,7 @@ class VagrantTestPlugin implements Plugin { project.gradle.removeListener(batsPackagingReproListener) } if (project.extensions.esvagrant.boxes.contains(box)) { - // these tests are temporarily disabled for suse boxes while we debug an issue - // https://github.com/elastic/elasticsearch/issues/30295 - if (box.equals("opensuse-42") == false && box.equals("sles-12") == false) { - packagingTest.dependsOn(batsPackagingTest) - } + packagingTest.dependsOn(batsPackagingTest) } } @@ -569,11 +565,7 @@ class VagrantTestPlugin implements Plugin { project.gradle.removeListener(javaPackagingReproListener) } if (project.extensions.esvagrant.boxes.contains(box)) { - // these tests are temporarily disabled for suse boxes while we debug an issue - // https://github.com/elastic/elasticsearch/issues/30295 - if (box.equals("opensuse-42") == false && box.equals("sles-12") == false) { - packagingTest.dependsOn(javaPackagingTest) - } + packagingTest.dependsOn(javaPackagingTest) } /* From 780697fd03de446075096e1261a8fc14e8f427fd Mon Sep 17 00:00:00 2001 From: Zachary Tong Date: Mon, 16 Jul 2018 17:20:50 -0400 Subject: [PATCH 060/107] [Rollup] Add new capabilities endpoint for concrete rollup indices (#30401) This introduces a new GetRollupIndexCaps API which allows the user to retrieve rollup capabilities of a specific rollup index (or index pattern). This is distinct from the existing RollupCaps endpoint. - Multiple jobs can be stored in multiple indices and point to a single target data index pattern (logstash-*). The existing API finds capabilities/config of all jobs matching that data index pattern. - One rollup index can hold data from multiple jobs, targeting multiple data index patterns. This new API finds the capabilities based on the concrete rollup indices. 
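The distinction between the two endpoints is easiest to see side by side. Below is a minimal sketch using the low-level REST client; the host and index names are illustrative, the concrete-index path comes from this patch, and the path used for the existing caps lookup is assumed from the current RollupCaps endpoint.

[source,java]
----
import org.apache.http.HttpHost;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

public class RollupCapsComparison {
    public static void main(String[] args) throws Exception {
        // Assumes a node on localhost:9200; index names are illustrative.
        try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200, "http")).build()) {
            // Existing RollupCaps API: look up jobs by the *data* index pattern
            // they target (path assumed from the existing endpoint).
            Response byTargetPattern = client.performRequest(
                new Request("GET", "/_xpack/rollup/data/sensor-*"));

            // New GetRollupIndexCaps API (this commit): look up jobs by the
            // concrete *rollup* index that stores their output.
            Response byRollupIndex = client.performRequest(
                new Request("GET", "/sensor_rollup/_xpack/rollup/data"));

            System.out.println(byTargetPattern.getStatusLine());
            System.out.println(byRollupIndex.getStatusLine());
        }
    }
}
----

Keying the new API on the rollup index itself matters when several jobs, each targeting a different data index pattern, write into the same rollup index.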
---
 .../rollup/rollup-index-caps.asciidoc         | 161 ++++++
 .../rollup/action/GetRollupCapsAction.java    |   2 +-
 .../action/GetRollupIndexCapsAction.java      | 195 ++++++
 .../elasticsearch/xpack/rollup/Rollup.java    |  33 +-
 .../action/TransportGetRollupCapsAction.java  |   7 +-
 .../TransportGetRollupIndexCapsAction.java    |  79 ++++
 .../rest/RestGetRollupIndexCapsAction.java    |  38 ++
 .../GetRollupCapsActionRequestTests.java      |   2 +-
 .../GetRollupIndexCapsActionRequestTests.java | 177 +++++++++
 .../xpack.rollup.get_rollup_index_caps.json   |  17 +
 .../test/rollup/get_rollup_caps.yml           |  13 +-
 .../test/rollup/get_rollup_index_caps.yml     | 363 ++++++++++++++++++
 12 files changed, 1066 insertions(+), 21 deletions(-)
 create mode 100644 x-pack/docs/en/rest-api/rollup/rollup-index-caps.asciidoc
 create mode 100644 x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/GetRollupIndexCapsAction.java
 create mode 100644 x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupIndexCapsAction.java
 create mode 100644 x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestGetRollupIndexCapsAction.java
 create mode 100644 x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/GetRollupIndexCapsActionRequestTests.java
 create mode 100644 x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.rollup.get_rollup_index_caps.json
 create mode 100644 x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/get_rollup_index_caps.yml

diff --git a/x-pack/docs/en/rest-api/rollup/rollup-index-caps.asciidoc b/x-pack/docs/en/rest-api/rollup/rollup-index-caps.asciidoc
new file mode 100644
index 0000000000000..4636d9775e9d3
--- /dev/null
+++ b/x-pack/docs/en/rest-api/rollup/rollup-index-caps.asciidoc
@@ -0,0 +1,161 @@
+[role="xpack"]
+[[rollup-get-rollup-index-caps]]
+=== Get Rollup Index Capabilities
+++++
+Get Rollup Index Caps
+++++
+
+experimental[]
+
+This API returns the rollup capabilities of all jobs inside a rollup index (i.e. the index where rollup data is stored).
+A single rollup index may store the data for multiple rollup jobs, and may have a variety of capabilities depending on those jobs.
+
+This API allows you to determine:
+
+1. What jobs are stored in an index (or indices specified via a pattern)?
+2. What target indices were rolled up, what fields were used in those rollups and what aggregations can be performed on each job?
+
+==== Request
+
+`GET {index}/_xpack/rollup/data`
+
+//===== Description
+
+==== Path Parameters
+
+`index`::
+  (string) Index or index-pattern of concrete rollup indices to check for capabilities.
+
+
+
+==== Request Body
+
+There is no request body for the Get Rollup Index Caps API.
+
+==== Authorization
+
+You must have `monitor`, `monitor_rollup`, `manage` or `manage_rollup` cluster privileges to use this API.
+For more information, see
+{xpack-ref}/security-privileges.html[Security Privileges].
+
+==== Examples
+
+Imagine we have an index named `sensor-1` full of raw data. We know that the data will grow over time, so there
+will be a `sensor-2`, `sensor-3`, etc.
+Let's create a Rollup job, which stores its data in `sensor_rollup`:
+
+[source,js]
+--------------------------------------------------
+PUT _xpack/rollup/job/sensor
+{
+    "index_pattern": "sensor-*",
+    "rollup_index": "sensor_rollup",
+    "cron": "*/30 * * * * ?",
+    "page_size" :1000,
+    "groups" : {
+      "date_histogram": {
+        "field": "timestamp",
+        "interval": "1h",
+        "delay": "7d"
+      },
+      "terms": {
+        "fields": ["node"]
+      }
+    },
+    "metrics": [
+        {
+            "field": "temperature",
+            "metrics": ["min", "max", "sum"]
+        },
+        {
+            "field": "voltage",
+            "metrics": ["avg"]
+        }
+    ]
+}
+--------------------------------------------------
+// CONSOLE
+// TEST[setup:sensor_index]
+
+If, at a later date, we'd like to determine what jobs and capabilities were stored in the `sensor_rollup` index, we can use the Get Rollup
+Index Caps API:
+
+[source,js]
+--------------------------------------------------
+GET /sensor_rollup/_xpack/rollup/data
+--------------------------------------------------
+// CONSOLE
+// TEST[continued]
+
+Note how we are requesting the concrete rollup index name (`sensor_rollup`) as the first part of the URL.
+This will yield the following response:
+
+[source,js]
+----
+{
+  "sensor_rollup" : {
+    "rollup_jobs" : [
+      {
+        "job_id" : "sensor",
+        "rollup_index" : "sensor_rollup",
+        "index_pattern" : "sensor-*",
+        "fields" : {
+          "node" : [
+            {
+              "agg" : "terms"
+            }
+          ],
+          "temperature" : [
+            {
+              "agg" : "min"
+            },
+            {
+              "agg" : "max"
+            },
+            {
+              "agg" : "sum"
+            }
+          ],
+          "timestamp" : [
+            {
+              "agg" : "date_histogram",
+              "time_zone" : "UTC",
+              "interval" : "1h",
+              "delay": "7d"
+            }
+          ],
+          "voltage" : [
+            {
+              "agg" : "avg"
+            }
+          ]
+        }
+      }
+    ]
+  }
+}
+----
+// TESTRESPONSE
+
+
+The response that is returned contains information that is similar to the original Rollup configuration, but formatted
+differently. First, there are some house-keeping details: the Rollup job's ID, the index that holds the rolled data,
+and the index pattern that the job was targeting.
+
+Next it shows a list of fields that contain data eligible for rollup searches. Here we see four fields: `node`, `temperature`,
+`timestamp` and `voltage`. Each of these fields lists the aggregations that are possible. For example, you can use a min, max
+or sum aggregation on the `temperature` field, but only a `date_histogram` on `timestamp`.
+
+Note that the `rollup_jobs` element is an array; there can be multiple, independent jobs configured for a single index
+or index pattern. Each of these jobs may have different configurations, so the API returns a list of all the various
+configurations available.
+
+
+Like other APIs that interact with indices, you can specify index patterns instead of explicit indices:
+
+[source,js]
+--------------------------------------------------
+GET /*_rollup/_xpack/rollup/data
+--------------------------------------------------
+// CONSOLE
+// TEST[continued]
+
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/GetRollupCapsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/GetRollupCapsAction.java
index ea98c2f4628e2..128874a6c8c87 100644
--- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/GetRollupCapsAction.java
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/GetRollupCapsAction.java
@@ -139,7 +139,7 @@ public void writeTo(StreamOutput out) throws IOException {
         public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
             builder.startObject();
             for (Map.Entry<String, RollableIndexCaps> entry : jobs.entrySet()) {
-                entry.getValue().toXContent(builder, params);
+                 entry.getValue().toXContent(builder, params);
             }
             builder.endObject();
             return builder;
diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/GetRollupIndexCapsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/GetRollupIndexCapsAction.java
new file mode 100644
index 0000000000000..4f95919c4986b
--- /dev/null
+++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/action/GetRollupIndexCapsAction.java
@@ -0,0 +1,195 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.core.rollup.action;
+
+
+import org.elasticsearch.action.Action;
+import org.elasticsearch.action.ActionRequest;
+import org.elasticsearch.action.ActionRequestBuilder;
+import org.elasticsearch.action.ActionRequestValidationException;
+import org.elasticsearch.action.ActionResponse;
+import org.elasticsearch.action.IndicesRequest;
+import org.elasticsearch.action.support.IndicesOptions;
+import org.elasticsearch.client.ElasticsearchClient;
+import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.Strings;
+import org.elasticsearch.common.io.stream.StreamInput;
+import org.elasticsearch.common.io.stream.StreamOutput;
+import org.elasticsearch.common.io.stream.Writeable;
+import org.elasticsearch.common.xcontent.ToXContent;
+import org.elasticsearch.common.xcontent.ToXContentObject;
+import org.elasticsearch.common.xcontent.XContentBuilder;
+import org.elasticsearch.xpack.core.rollup.RollupField;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.Map;
+import java.util.Objects;
+
+public class GetRollupIndexCapsAction extends Action<GetRollupIndexCapsAction.Response> {
+
+    public static final GetRollupIndexCapsAction INSTANCE = new GetRollupIndexCapsAction();
+    public static final String NAME = "indices:data/read/xpack/rollup/get/index/caps";
+    public static final ParseField CONFIG = new ParseField("config");
+    public static final ParseField STATUS = new ParseField("status");
+    private static final ParseField INDICES_OPTIONS = new ParseField("indices_options");
+
+    private GetRollupIndexCapsAction() {
+        super(NAME);
+    }
+
+    @Override
+    public Response newResponse() {
+        return new Response();
+    }
+
+    public static class Request extends ActionRequest implements IndicesRequest.Replaceable, ToXContent {
+        private String[] indices;
+        private IndicesOptions options;
+
+        public Request(String[] indices) {
+            this(indices, IndicesOptions.STRICT_EXPAND_OPEN_FORBID_CLOSED);
+        }
+
+        public Request(String[] indices, IndicesOptions options) {
+            this.indices = indices;
+            this.options = options;
+        }
+
+        public Request() {}
+
+        @Override
+        public IndicesOptions indicesOptions() {
+            return options;
+        }
+
+        @Override
+        public String[] indices() {
+            return indices;
+        }
+
+        @Override
+        public IndicesRequest indices(String... indices) {
+            Objects.requireNonNull(indices, "indices must not be null");
+            for (String index : indices) {
+                Objects.requireNonNull(index, "index must not be null");
+            }
+            this.indices = indices;
+            return this;
+        }
+
+        @Override
+        public void readFrom(StreamInput in) throws IOException {
+            super.readFrom(in);
+            this.indices = in.readStringArray();
+            this.options = IndicesOptions.readIndicesOptions(in);
+        }
+
+        @Override
+        public void writeTo(StreamOutput out) throws IOException {
+            super.writeTo(out);
+            out.writeStringArray(indices);
+            options.writeIndicesOptions(out);
+        }
+
+        @Override
+        public ActionRequestValidationException validate() {
+            return null;
+        }
+
+        @Override
+        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+            builder.array(RollupField.ID.getPreferredName(), indices);
+            builder.field(INDICES_OPTIONS.getPreferredName(), options);
+            return builder;
+        }
+
+        @Override
+        public int hashCode() {
+            return Objects.hash(Arrays.hashCode(indices), options);
+        }
+
+        @Override
+        public boolean equals(Object obj) {
+            if (obj == null) {
+                return false;
+            }
+            if (getClass() != obj.getClass()) {
+                return false;
+            }
+            Request other = (Request) obj;
+            return Arrays.equals(indices, other.indices)
+                && Objects.equals(options, other.options);
+        }
+    }
+
+    public static class RequestBuilder extends ActionRequestBuilder<Request, Response> {
+
+        protected RequestBuilder(ElasticsearchClient client, GetRollupIndexCapsAction action) {
+            super(client, action, new Request());
+        }
+    }
+
+    public static class Response extends ActionResponse implements Writeable, ToXContentObject {
+
+        private Map<String, RollableIndexCaps> jobs = Collections.emptyMap();
+
+        public Response() {
+
+        }
+
+        public Response(Map<String, RollableIndexCaps> jobs) {
+            this.jobs = Objects.requireNonNull(jobs);
+        }
+
+        Response(StreamInput in) throws IOException {
+            jobs = in.readMap(StreamInput::readString, RollableIndexCaps::new);
+        }
+
+        public Map<String, RollableIndexCaps> getJobs() {
+            return jobs;
+        }
+
+        @Override
+        public void writeTo(StreamOutput out) throws IOException {
+            super.writeTo(out);
+            out.writeMap(jobs, StreamOutput::writeString, (out1, value) -> value.writeTo(out1));
+        }
+
+        @Override
+        public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
+            builder.startObject();
+            for (Map.Entry<String, RollableIndexCaps> entry : jobs.entrySet()) {
+                entry.getValue().toXContent(builder, params);
+            }
+            builder.endObject();
+            return builder;
+        }
+
+        @Override
+        public int hashCode() {
+            return Objects.hash(jobs);
+        }
+
+        @Override
+        public boolean equals(Object obj) {
+            if (obj == null) {
+                return false;
+            }
+            if (getClass() != obj.getClass()) {
+                return false;
+            }
+            Response other = (Response) obj;
+            return Objects.equals(jobs, other.jobs);
+        }
+
+        @Override
+        public final String toString() {
+            return Strings.toString(this);
+        }
+    }
+}
diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/Rollup.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/Rollup.java
index cc24a0b4ab944..546103df5dd46 100644
--- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/Rollup.java
+++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/Rollup.java
@@ -38,6 +38,7 @@ import org.elasticsearch.xpack.core.rollup.RollupField;
 import org.elasticsearch.xpack.core.rollup.action.DeleteRollupJobAction;
 import org.elasticsearch.xpack.core.rollup.action.GetRollupCapsAction;
+import org.elasticsearch.xpack.core.rollup.action.GetRollupIndexCapsAction;
 import org.elasticsearch.xpack.core.rollup.action.GetRollupJobsAction;
import org.elasticsearch.xpack.core.rollup.action.PutRollupJobAction; import org.elasticsearch.xpack.core.rollup.action.RollupSearchAction; @@ -47,6 +48,7 @@ import org.elasticsearch.xpack.core.template.TemplateUtils; import org.elasticsearch.xpack.rollup.action.TransportDeleteRollupJobAction; import org.elasticsearch.xpack.rollup.action.TransportGetRollupCapsAction; +import org.elasticsearch.xpack.rollup.action.TransportGetRollupIndexCapsAction; import org.elasticsearch.xpack.rollup.action.TransportGetRollupJobAction; import org.elasticsearch.xpack.rollup.action.TransportPutRollupJobAction; import org.elasticsearch.xpack.rollup.action.TransportRollupSearchAction; @@ -55,6 +57,7 @@ import org.elasticsearch.xpack.rollup.job.RollupJobTask; import org.elasticsearch.xpack.rollup.rest.RestDeleteRollupJobAction; import org.elasticsearch.xpack.rollup.rest.RestGetRollupCapsAction; +import org.elasticsearch.xpack.rollup.rest.RestGetRollupIndexCapsAction; import org.elasticsearch.xpack.rollup.rest.RestGetRollupJobsAction; import org.elasticsearch.xpack.rollup.rest.RestPutRollupJobAction; import org.elasticsearch.xpack.rollup.rest.RestRollupSearchAction; @@ -136,13 +139,14 @@ public List getRestHandlers(Settings settings, RestController restC } return Arrays.asList( - new RestRollupSearchAction(settings, restController), - new RestPutRollupJobAction(settings, restController), - new RestStartRollupJobAction(settings, restController), - new RestStopRollupJobAction(settings, restController), - new RestDeleteRollupJobAction(settings, restController), - new RestGetRollupJobsAction(settings, restController), - new RestGetRollupCapsAction(settings, restController) + new RestRollupSearchAction(settings, restController), + new RestPutRollupJobAction(settings, restController), + new RestStartRollupJobAction(settings, restController), + new RestStopRollupJobAction(settings, restController), + new RestDeleteRollupJobAction(settings, restController), + new RestGetRollupJobsAction(settings, restController), + new RestGetRollupCapsAction(settings, restController), + new RestGetRollupIndexCapsAction(settings, restController) ); } @@ -153,13 +157,14 @@ public List getRestHandlers(Settings settings, RestController restC return emptyList(); } return Arrays.asList( - new ActionHandler<>(RollupSearchAction.INSTANCE, TransportRollupSearchAction.class), - new ActionHandler<>(PutRollupJobAction.INSTANCE, TransportPutRollupJobAction.class), - new ActionHandler<>(StartRollupJobAction.INSTANCE, TransportStartRollupAction.class), - new ActionHandler<>(StopRollupJobAction.INSTANCE, TransportStopRollupAction.class), - new ActionHandler<>(DeleteRollupJobAction.INSTANCE, TransportDeleteRollupJobAction.class), - new ActionHandler<>(GetRollupJobsAction.INSTANCE, TransportGetRollupJobAction.class), - new ActionHandler<>(GetRollupCapsAction.INSTANCE, TransportGetRollupCapsAction.class) + new ActionHandler<>(RollupSearchAction.INSTANCE, TransportRollupSearchAction.class), + new ActionHandler<>(PutRollupJobAction.INSTANCE, TransportPutRollupJobAction.class), + new ActionHandler<>(StartRollupJobAction.INSTANCE, TransportStartRollupAction.class), + new ActionHandler<>(StopRollupJobAction.INSTANCE, TransportStopRollupAction.class), + new ActionHandler<>(DeleteRollupJobAction.INSTANCE, TransportDeleteRollupJobAction.class), + new ActionHandler<>(GetRollupJobsAction.INSTANCE, TransportGetRollupJobAction.class), + new ActionHandler<>(GetRollupCapsAction.INSTANCE, TransportGetRollupCapsAction.class), + new 
ActionHandler<>(GetRollupIndexCapsAction.INSTANCE, TransportGetRollupIndexCapsAction.class)
         );
     }

diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupCapsAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupCapsAction.java
index 5f013e8897bde..6d565e43b8644 100644
--- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupCapsAction.java
+++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupCapsAction.java
@@ -44,7 +44,7 @@ public TransportGetRollupCapsAction(Settings settings, TransportService transpor
 
     @Override
     protected void doExecute(Task task, GetRollupCapsAction.Request request, ActionListener<GetRollupCapsAction.Response> listener) {
-
         Map<String, RollableIndexCaps> allCaps = getCaps(request.getIndexPattern(), clusterService.state().getMetaData().indices());
         listener.onResponse(new GetRollupCapsAction.Response(allCaps));
     }
@@ -67,7 +66,7 @@ static Map<String, RollableIndexCaps> getCaps(String indexPattern, ImmutableOpen
 
         jobCaps.forEach(jobCap -> {
             String pattern = indexPattern.equals(MetaData.ALL)
-                ? jobCap.getIndexPattern() : indexPattern;
+                    ? jobCap.getIndexPattern() : indexPattern;
 
             // Do we already have an entry for this index pattern?
             RollableIndexCaps indexCaps = allCaps.get(pattern);
@@ -98,11 +97,11 @@ static Optional<RollupIndexCaps> findRollupIndexCaps(String indexName, IndexMeta
         }
 
         RollupIndexCaps caps = RollupIndexCaps.parseMetadataXContent(
-            new BytesArray(rollupMapping.source().uncompressed()), indexName);
+                new BytesArray(rollupMapping.source().uncompressed()), indexName);
 
         if (caps.hasCaps()) {
             return Optional.of(caps);
         }
         return Optional.empty();
     }
-}
\ No newline at end of file
+}
diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupIndexCapsAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupIndexCapsAction.java
new file mode 100644
index 0000000000000..5d81f4046ebd8
--- /dev/null
+++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/action/TransportGetRollupIndexCapsAction.java
@@ -0,0 +1,79 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */
+package org.elasticsearch.xpack.rollup.action;
+
+import org.elasticsearch.action.ActionListener;
+import org.elasticsearch.action.support.ActionFilters;
+import org.elasticsearch.action.support.HandledTransportAction;
+import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver;
+import org.elasticsearch.cluster.service.ClusterService;
+import org.elasticsearch.common.collect.ImmutableOpenMap;
+import org.elasticsearch.common.inject.Inject;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.tasks.Task;
+import org.elasticsearch.transport.TransportService;
+import org.elasticsearch.xpack.core.rollup.action.GetRollupIndexCapsAction;
+import org.elasticsearch.xpack.core.rollup.action.RollableIndexCaps;
+
+import java.util.Arrays;
+import java.util.List;
+import java.util.Map;
+import java.util.TreeMap;
+import java.util.function.Supplier;
+import java.util.stream.StreamSupport;
+
+public class TransportGetRollupIndexCapsAction extends HandledTransportAction<GetRollupIndexCapsAction.Request,
+        GetRollupIndexCapsAction.Response> {
+
+    private final ClusterService clusterService;
+
+    @Inject
+    public TransportGetRollupIndexCapsAction(Settings settings, TransportService transportService,
+                                             ClusterService clusterService, ActionFilters actionFilters) {
+        super(settings, GetRollupIndexCapsAction.NAME, transportService, actionFilters,
+            (Supplier<GetRollupIndexCapsAction.Request>) GetRollupIndexCapsAction.Request::new);
+        this.clusterService = clusterService;
+    }
+
+    @Override
+    protected void doExecute(Task task, GetRollupIndexCapsAction.Request request,
+                             ActionListener<GetRollupIndexCapsAction.Response> listener) {
+
+        IndexNameExpressionResolver resolver = new IndexNameExpressionResolver(clusterService.getSettings());
+        String[] indices = resolver.concreteIndexNames(clusterService.state(),
+            request.indicesOptions(), request.indices());
+        Map<String, RollableIndexCaps> allCaps = getCapsByRollupIndex(Arrays.asList(indices),
+            clusterService.state().getMetaData().indices());
+        listener.onResponse(new GetRollupIndexCapsAction.Response(allCaps));
+    }
+
+    static Map<String, RollableIndexCaps> getCapsByRollupIndex(List<String> resolvedIndexNames,
+                                                               ImmutableOpenMap<String, IndexMetaData> indices) {
+        Map<String, RollableIndexCaps> allCaps = new TreeMap<>();
+
+        StreamSupport.stream(indices.spliterator(), false)
+            .filter(entry -> resolvedIndexNames.contains(entry.key))
+            .forEach(entry -> {
+                // Does this index have rollup metadata?
+                TransportGetRollupCapsAction.findRollupIndexCaps(entry.key, entry.value)
+                    .ifPresent(cap -> {
+                        cap.getJobCaps().forEach(jobCap -> {
+                            // Do we already have an entry for this index?
+                            RollableIndexCaps indexCaps = allCaps.get(jobCap.getRollupIndex());
+                            if (indexCaps == null) {
+                                indexCaps = new RollableIndexCaps(jobCap.getRollupIndex());
+                            }
+                            indexCaps.addJobCap(jobCap);
+                            allCaps.put(jobCap.getRollupIndex(), indexCaps);
+                        });
+                    });
+            });
+
+        return allCaps;
+    }
+
+}
diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestGetRollupIndexCapsAction.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestGetRollupIndexCapsAction.java
new file mode 100644
index 0000000000000..4f4336f11abf2
--- /dev/null
+++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/rest/RestGetRollupIndexCapsAction.java
@@ -0,0 +1,38 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License;
+ * you may not use this file except in compliance with the Elastic License.
+ */ +package org.elasticsearch.xpack.rollup.rest; + +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.rest.BaseRestHandler; +import org.elasticsearch.rest.RestController; +import org.elasticsearch.rest.RestRequest; +import org.elasticsearch.rest.action.RestToXContentListener; +import org.elasticsearch.xpack.core.rollup.action.GetRollupIndexCapsAction; + +public class RestGetRollupIndexCapsAction extends BaseRestHandler { + public static final ParseField INDEX = new ParseField("index"); + + public RestGetRollupIndexCapsAction(Settings settings, RestController controller) { + super(settings); + controller.registerHandler(RestRequest.Method.GET, "/{index}/_xpack/rollup/data", this); + } + + @Override + protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) { + String index = restRequest.param(INDEX.getPreferredName()); + IndicesOptions options = IndicesOptions.fromRequest(restRequest, IndicesOptions.STRICT_EXPAND_OPEN_FORBID_CLOSED); + GetRollupIndexCapsAction.Request request = new GetRollupIndexCapsAction.Request(new String[]{index}, options); + return channel -> client.execute(GetRollupIndexCapsAction.INSTANCE, request, new RestToXContentListener<>(channel)); + } + + @Override + public String getName() { + return "rollup_get_caps_action"; + } +} diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/GetRollupCapsActionRequestTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/GetRollupCapsActionRequestTests.java index e63650397abdf..e3a45dbd66b87 100644 --- a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/GetRollupCapsActionRequestTests.java +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/GetRollupCapsActionRequestTests.java @@ -11,11 +11,11 @@ import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.test.AbstractStreamableTestCase; +import org.elasticsearch.xpack.core.rollup.ConfigTestHelpers; import org.elasticsearch.xpack.core.rollup.RollupField; import org.elasticsearch.xpack.core.rollup.action.GetRollupCapsAction; import org.elasticsearch.xpack.core.rollup.action.RollableIndexCaps; import org.elasticsearch.xpack.core.rollup.job.RollupJobConfig; -import org.elasticsearch.xpack.core.rollup.ConfigTestHelpers; import org.mockito.Mockito; import java.io.IOException; diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/GetRollupIndexCapsActionRequestTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/GetRollupIndexCapsActionRequestTests.java new file mode 100644 index 0000000000000..2066d6649965f --- /dev/null +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/action/GetRollupIndexCapsActionRequestTests.java @@ -0,0 +1,177 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */
+package org.elasticsearch.xpack.rollup.action;
+
+
+import org.elasticsearch.cluster.metadata.IndexMetaData;
+import org.elasticsearch.cluster.metadata.MappingMetaData;
+import org.elasticsearch.cluster.metadata.MetaData;
+import org.elasticsearch.common.collect.ImmutableOpenMap;
+import org.elasticsearch.test.AbstractStreamableTestCase;
+import org.elasticsearch.xpack.core.rollup.ConfigTestHelpers;
+import org.elasticsearch.xpack.core.rollup.RollupField;
+import org.elasticsearch.xpack.core.rollup.action.GetRollupIndexCapsAction;
+import org.elasticsearch.xpack.core.rollup.action.RollableIndexCaps;
+import org.mockito.Mockito;
+
+import java.io.IOException;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Map;
+
+import static org.elasticsearch.xpack.rollup.action.TransportGetRollupIndexCapsAction.getCapsByRollupIndex;
+import static org.hamcrest.Matchers.equalTo;
+
+
+public class GetRollupIndexCapsActionRequestTests extends AbstractStreamableTestCase<GetRollupIndexCapsAction.Request> {
+
+    @Override
+    protected GetRollupIndexCapsAction.Request createTestInstance() {
+        if (randomBoolean()) {
+            return new GetRollupIndexCapsAction.Request(new String[]{MetaData.ALL});
+        }
+        return new GetRollupIndexCapsAction.Request(new String[]{randomAlphaOfLengthBetween(1, 20)});
+    }
+
+    @Override
+    protected GetRollupIndexCapsAction.Request createBlankInstance() {
+        return new GetRollupIndexCapsAction.Request();
+    }
+
+
+    public void testNoIndicesByRollup() {
+        ImmutableOpenMap<String, IndexMetaData> indices = new ImmutableOpenMap.Builder<String, IndexMetaData>().build();
+        Map<String, RollableIndexCaps> caps = getCapsByRollupIndex(Collections.singletonList("foo"), indices);
+        assertThat(caps.size(), equalTo(0));
+    }
+
+    public void testAllIndicesByRollupSingleRollup() throws IOException {
+        int num = randomIntBetween(1,5);
+        ImmutableOpenMap.Builder<String, IndexMetaData> indices = new ImmutableOpenMap.Builder<>(5);
+        int indexCounter = 0;
+        for (int j = 0; j < 5; j++) {
+
+            Map<String, Object> jobs = new HashMap<>(num);
+            for (int i = 0; i < num; i++) {
+                String jobName = randomAlphaOfLength(10);
+                String indexName = Integer.toString(indexCounter);
+                indexCounter += 1;
+                jobs.put(jobName, ConfigTestHelpers.getRollupJob(jobName).setRollupIndex("foo").build());
+            }
+
+            MappingMetaData mappingMeta = new MappingMetaData(RollupField.TYPE_NAME,
+                Collections.singletonMap(RollupField.TYPE_NAME,
+                    Collections.singletonMap("_meta",
+                        Collections.singletonMap(RollupField.ROLLUP_META, jobs))));
+
+            ImmutableOpenMap.Builder<String, MappingMetaData> mappings = ImmutableOpenMap.builder(1);
+            mappings.put(RollupField.TYPE_NAME, mappingMeta);
+            IndexMetaData meta = Mockito.mock(IndexMetaData.class);
+            Mockito.when(meta.getMappings()).thenReturn(mappings.build());
+            indices.put("foo", meta);
+        }
+
+        Map<String, RollableIndexCaps> caps = getCapsByRollupIndex(Collections.singletonList("foo"),
+            indices.build());
+        assertThat(caps.size(), equalTo(1));
+    }
+
+    public void testAllIndicesByRollupManyRollup() throws IOException {
+        ImmutableOpenMap.Builder<String, IndexMetaData> indices = new ImmutableOpenMap.Builder<>(5);
+        int indexCounter = 0;
+        for (int j = 0; j < 5; j++) {
+
+            Map<String, Object> jobs = new HashMap<>(1);
+            String jobName = randomAlphaOfLength(10);
+            String indexName = Integer.toString(indexCounter);
+            indexCounter += 1;
+            jobs.put(jobName, ConfigTestHelpers.getRollupJob(jobName)
+                .setIndexPattern(indexName)
+                .setRollupIndex("rollup_" + indexName).build());
+
+
+            MappingMetaData mappingMeta = new MappingMetaData(RollupField.TYPE_NAME,
+                Collections.singletonMap(RollupField.TYPE_NAME,
+                    Collections.singletonMap("_meta",
+                        Collections.singletonMap(RollupField.ROLLUP_META, jobs))));
+
+            ImmutableOpenMap.Builder<String, MappingMetaData> mappings = ImmutableOpenMap.builder(1);
+            mappings.put(RollupField.TYPE_NAME, mappingMeta);
+            IndexMetaData meta = Mockito.mock(IndexMetaData.class);
+            Mockito.when(meta.getMappings()).thenReturn(mappings.build());
+            indices.put("rollup_" + indexName, meta);
+        }
+
+        Map<String, RollableIndexCaps> caps = getCapsByRollupIndex(Arrays.asList(indices.keys().toArray(String.class)), indices.build());
+        assertThat(caps.size(), equalTo(5));
+    }
+
+
+    public void testOneIndexByRollupManyRollup() throws IOException {
+        ImmutableOpenMap.Builder<String, IndexMetaData> indices = new ImmutableOpenMap.Builder<>(5);
+        int indexCounter = 0;
+        for (int j = 0; j < 5; j++) {
+
+            Map<String, Object> jobs = new HashMap<>(1);
+            String jobName = randomAlphaOfLength(10);
+            String indexName = Integer.toString(indexCounter);
+            indexCounter += 1;
+            jobs.put(jobName, ConfigTestHelpers.getRollupJob(jobName)
+                .setIndexPattern("foo_" + indexName)
+                .setRollupIndex("rollup_" + indexName).build());
+
+            MappingMetaData mappingMeta = new MappingMetaData(RollupField.TYPE_NAME,
+                Collections.singletonMap(RollupField.TYPE_NAME,
+                    Collections.singletonMap("_meta",
+                        Collections.singletonMap(RollupField.ROLLUP_META, jobs))));
+
+            ImmutableOpenMap.Builder<String, MappingMetaData> mappings = ImmutableOpenMap.builder(1);
+            mappings.put(RollupField.TYPE_NAME, mappingMeta);
+            IndexMetaData meta = Mockito.mock(IndexMetaData.class);
+            Mockito.when(meta.getMappings()).thenReturn(mappings.build());
+            indices.put("rollup_" + indexName, meta);
+        }
+
+        Map<String, RollableIndexCaps> caps = getCapsByRollupIndex(Collections.singletonList("rollup_1"), indices.build());
+        assertThat(caps.size(), equalTo(1));
+        assertThat(caps.get("rollup_1").getIndexName(), equalTo("rollup_1"));
+        assertThat(caps.get("rollup_1").getJobCaps().size(), equalTo(1));
+    }
+
+    public void testOneIndexByRollupOneRollup() throws IOException {
+        ImmutableOpenMap.Builder<String, IndexMetaData> indices = new ImmutableOpenMap.Builder<>(5);
+        int indexCounter = 0;
+        for (int j = 0; j < 5; j++) {
+
+            Map<String, Object> jobs = new HashMap<>(1);
+            String jobName = randomAlphaOfLength(10);
+            String indexName = Integer.toString(indexCounter);
+            indexCounter += 1;
+            jobs.put(jobName, ConfigTestHelpers.getRollupJob(jobName)
+                .setIndexPattern("foo_" + indexName)
+                .setRollupIndex("rollup_foo").build());
+
+            MappingMetaData mappingMeta = new MappingMetaData(RollupField.TYPE_NAME,
+                Collections.singletonMap(RollupField.TYPE_NAME,
+                    Collections.singletonMap("_meta",
+                        Collections.singletonMap(RollupField.ROLLUP_META, jobs))));
+
+            ImmutableOpenMap.Builder<String, MappingMetaData> mappings = ImmutableOpenMap.builder(1);
+            mappings.put(RollupField.TYPE_NAME, mappingMeta);
+            IndexMetaData meta = Mockito.mock(IndexMetaData.class);
+            Mockito.when(meta.getMappings()).thenReturn(mappings.build());
+            indices.put("rollup_foo", meta);
+        }
+
+        Map<String, RollableIndexCaps> caps = getCapsByRollupIndex(Collections.singletonList("rollup_foo"), indices.build());
+        assertThat(caps.size(), equalTo(1));
+        assertThat(caps.get("rollup_foo").getIndexName(), equalTo("rollup_foo"));
+        assertThat(caps.get("rollup_foo").getJobCaps().size(), equalTo(1));
+    }
+}
+
+
diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.rollup.get_rollup_index_caps.json b/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.rollup.get_rollup_index_caps.json
new file mode 100644
index 0000000000000..458311417d4ae
--- /dev/null
+++ b/x-pack/plugin/src/test/resources/rest-api-spec/api/xpack.rollup.get_rollup_index_caps.json
@@ -0,0 +1,17 @@
+{
+  "xpack.rollup.get_rollup_index_caps": {
+    "documentation": "",
+    "methods": [ "GET" ],
+    "url": {
+      "path":
"/{index}/_xpack/rollup/data", + "paths": [ "/{index}/_xpack/rollup/data" ], + "parts": { + "index": { + "type": "string", + "required": true, + "description": "The rollup index or index pattern to obtain rollup capabilities from." + } + } + } + } +} diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/get_rollup_caps.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/get_rollup_caps.yml index 050e49bc4b40f..f8bb401a7721e 100644 --- a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/get_rollup_caps.yml +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/get_rollup_caps.yml @@ -22,6 +22,18 @@ setup: type: date value_field: type: integer + + - do: + indices.create: + index: foo3 + include_type_name: false + body: + mappings: + properties: + the_field: + type: date + value_field: + type: integer - do: headers: Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser @@ -228,4 +240,3 @@ setup: - agg: "min" - agg: "max" - agg: "sum" - diff --git a/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/get_rollup_index_caps.yml b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/get_rollup_index_caps.yml new file mode 100644 index 0000000000000..7a539edcc6741 --- /dev/null +++ b/x-pack/plugin/src/test/resources/rest-api-spec/test/rollup/get_rollup_index_caps.yml @@ -0,0 +1,363 @@ +setup: + - do: + indices.create: + index: foo + include_type_name: false + body: + mappings: + properties: + the_field: + type: date + value_field: + type: integer + + - do: + indices.create: + index: foo2 + include_type_name: false + body: + mappings: + properties: + the_field: + type: date + value_field: + type: integer + + - do: + indices.create: + index: foo3 + include_type_name: false + body: + mappings: + properties: + the_field: + type: date + value_field: + type: integer + - do: + headers: + Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + xpack.rollup.put_job: + id: foo + body: > + { + "index_pattern": "foo", + "rollup_index": "foo_rollup", + "cron": "*/30 * * * * ?", + "page_size" :10, + "groups" : { + "date_histogram": { + "field": "the_field", + "interval": "1h" + } + }, + "metrics": [ + { + "field": "value_field", + "metrics": ["min", "max", "sum"] + } + ] + } + +--- +"Verify one job caps by rollup index": + + - do: + xpack.rollup.get_rollup_index_caps: + index: "foo_rollup" + + - match: + foo_rollup: + rollup_jobs: + - job_id: "foo" + rollup_index: "foo_rollup" + index_pattern: "foo" + fields: + the_field: + - agg: "date_histogram" + interval: "1h" + time_zone: "UTC" + value_field: + - agg: "min" + - agg: "max" + - agg: "sum" + +--- +"Verify two job caps by rollup index": + + - do: + headers: + Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. 
the test setup superuser + xpack.rollup.put_job: + id: foo2 + body: > + { + "index_pattern": "foo", + "rollup_index": "foo_rollup", + "cron": "*/30 * * * * ?", + "page_size" :10, + "groups" : { + "date_histogram": { + "field": "the_field", + "interval": "1h" + } + }, + "metrics": [ + { + "field": "value_field", + "metrics": ["min", "max", "sum"] + } + ] + } + - do: + xpack.rollup.get_rollup_index_caps: + index: "foo_rollup" + + - match: + foo_rollup: + rollup_jobs: + - job_id: "foo" + rollup_index: "foo_rollup" + index_pattern: "foo" + fields: + the_field: + - agg: "date_histogram" + interval: "1h" + time_zone: "UTC" + value_field: + - agg: "min" + - agg: "max" + - agg: "sum" + - job_id: "foo2" + rollup_index: "foo_rollup" + index_pattern: "foo" + fields: + the_field: + - agg: "date_histogram" + interval: "1h" + time_zone: "UTC" + value_field: + - agg: "min" + - agg: "max" + - agg: "sum" + + +--- +"Verify two different job caps by rollup index": + + - do: + headers: + Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + xpack.rollup.put_job: + id: foo2 + body: > + { + "index_pattern": "foo2", + "rollup_index": "foo_rollup2", + "cron": "*/30 * * * * ?", + "page_size" :10, + "groups" : { + "date_histogram": { + "field": "the_field", + "interval": "1h" + } + }, + "metrics": [ + { + "field": "value_field", + "metrics": ["min", "max", "sum"] + } + ] + } + - do: + xpack.rollup.get_rollup_index_caps: + index: "foo_rollup" + + - match: + foo_rollup: + rollup_jobs: + - job_id: "foo" + rollup_index: "foo_rollup" + index_pattern: "foo" + fields: + the_field: + - agg: "date_histogram" + interval: "1h" + time_zone: "UTC" + value_field: + - agg: "min" + - agg: "max" + - agg: "sum" + +--- +"Verify all job caps by rollup index": + + - do: + headers: + Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + xpack.rollup.put_job: + id: foo2 + body: > + { + "index_pattern": "foo2", + "rollup_index": "foo_rollup", + "cron": "*/30 * * * * ?", + "page_size" :10, + "groups" : { + "date_histogram": { + "field": "the_field", + "interval": "1h" + } + }, + "metrics": [ + { + "field": "value_field", + "metrics": ["min", "max", "sum"] + } + ] + } + - do: + headers: + Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. 
the test setup superuser + xpack.rollup.put_job: + id: foo3 + body: > + { + "index_pattern": "foo3", + "rollup_index": "foo_rollup2", + "cron": "*/30 * * * * ?", + "page_size" :10, + "groups" : { + "date_histogram": { + "field": "the_field", + "interval": "1h" + } + }, + "metrics": [ + { + "field": "value_field", + "metrics": ["min", "max", "sum"] + } + ] + } + + - do: + xpack.rollup.get_rollup_index_caps: + index: "_all" + + - match: + $body: + foo_rollup: + rollup_jobs: + - job_id: "foo" + rollup_index: "foo_rollup" + index_pattern: "foo" + fields: + the_field: + - agg: "date_histogram" + interval: "1h" + time_zone: "UTC" + value_field: + - agg: "min" + - agg: "max" + - agg: "sum" + - job_id: "foo2" + rollup_index: "foo_rollup" + index_pattern: "foo2" + fields: + the_field: + - agg: "date_histogram" + interval: "1h" + time_zone: "UTC" + value_field: + - agg: "min" + - agg: "max" + - agg: "sum" + foo_rollup2: + rollup_jobs: + - job_id: "foo3" + rollup_index: "foo_rollup2" + index_pattern: "foo3" + fields: + the_field: + - agg: "date_histogram" + interval: "1h" + time_zone: "UTC" + value_field: + - agg: "min" + - agg: "max" + - agg: "sum" + +--- +"Verify index pattern": + + - do: + headers: + Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + xpack.rollup.put_job: + id: foo2 + body: > + { + "index_pattern": "foo2", + "rollup_index": "foo_rollup", + "cron": "*/30 * * * * ?", + "page_size" :10, + "groups" : { + "date_histogram": { + "field": "the_field", + "interval": "1h" + } + }, + "metrics": [ + { + "field": "value_field", + "metrics": ["min", "max", "sum"] + } + ] + } + - do: + headers: + Authorization: "Basic eF9wYWNrX3Jlc3RfdXNlcjp4LXBhY2stdGVzdC1wYXNzd29yZA==" # run as x_pack_rest_user, i.e. the test setup superuser + xpack.rollup.put_job: + id: foo3 + body: > + { + "index_pattern": "foo3", + "rollup_index": "foo_rollup2", + "cron": "*/30 * * * * ?", + "page_size" :10, + "groups" : { + "date_histogram": { + "field": "the_field", + "interval": "1h" + } + }, + "metrics": [ + { + "field": "value_field", + "metrics": ["min", "max", "sum"] + } + ] + } + + - do: + xpack.rollup.get_rollup_index_caps: + index: "*_rollup2" + + - match: + $body: + foo_rollup2: + rollup_jobs: + - job_id: "foo3" + rollup_index: "foo_rollup2" + index_pattern: "foo3" + fields: + the_field: + - agg: "date_histogram" + interval: "1h" + time_zone: "UTC" + value_field: + - agg: "min" + - agg: "max" + - agg: "sum" + From 1106355e523f89f1ff82255f4c2a9fac9cffd6f5 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Mon, 16 Jul 2018 17:44:19 -0400 Subject: [PATCH 061/107] Switch non-x-pack to new style requests (#32106) In #29623 we added `Request` object flavored requests to the low level REST client and in #30315 we deprecated the old `performRequest`s. This changes most of the calls not in X-Pack to their new versions. 
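
For illustration, a minimal before/after sketch of the migration pattern applied
throughout this change (the endpoint and parameter values are illustrative, not
taken from any single call site):

    // Old style (now deprecated): HTTP method, endpoint, parameters and
    // entity are passed straight to performRequest.
    Response response = client().performRequest("GET", "/_cluster/settings",
            Collections.singletonMap("pretty", "true"));

    // New style: build a Request, attach parameters or a JSON entity,
    // then hand the single object to the client.
    Request request = new Request("GET", "/_cluster/settings");
    request.addParameter("pretty", "true");
    response = client().performRequest(request);

Headers now travel through RequestOptions via request.setOptions(...) rather
than vararg Header arguments, as the Netty4BadRequestIT hunk below shows.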
--- .../rest/Netty4BadRequestIT.java | 19 +++++++----- .../rest/Netty4HeadBodyIsEmptyIT.java | 30 ++++++++++++------- .../hdfs/HaHdfsFailoverTestSuiteIT.java | 20 +++++-------- .../VerifyVersionConstantsIT.java | 3 +- 4 files changed, 39 insertions(+), 33 deletions(-) diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/rest/Netty4BadRequestIT.java b/modules/transport-netty4/src/test/java/org/elasticsearch/rest/Netty4BadRequestIT.java index bc89558d3c6dc..17a62b3a440ef 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/rest/Netty4BadRequestIT.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/rest/Netty4BadRequestIT.java @@ -19,7 +19,8 @@ package org.elasticsearch.rest; -import org.apache.http.message.BasicHeader; +import org.elasticsearch.client.Request; +import org.elasticsearch.client.RequestOptions; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.common.settings.Setting; @@ -43,7 +44,7 @@ public class Netty4BadRequestIT extends ESRestTestCase { public void testBadRequest() throws IOException { - final Response response = client().performRequest("GET", "/_nodes/settings", Collections.emptyMap()); + final Response response = client().performRequest(new Request("GET", "/_nodes/settings")); final ObjectPath objectPath = ObjectPath.createFromResponse(response); final Map map = objectPath.evaluate("nodes"); int maxMaxInitialLineLength = Integer.MIN_VALUE; @@ -77,9 +78,9 @@ public void testBadRequest() throws IOException { } public void testInvalidParameterValue() throws IOException { - final ResponseException e = expectThrows( - ResponseException.class, - () -> client().performRequest("GET", "/_cluster/settings", Collections.singletonMap("pretty", "neither-true-nor-false"))); + final Request request = new Request("GET", "/_cluster/settings"); + request.addParameter("pretty", "neither-true-nor-false"); + final ResponseException e = expectThrows(ResponseException.class, () -> client().performRequest(request)); final Response response = e.getResponse(); assertThat(response.getStatusLine().getStatusCode(), equalTo(400)); final ObjectPath objectPath = ObjectPath.createFromResponse(response); @@ -89,9 +90,11 @@ public void testInvalidParameterValue() throws IOException { } public void testInvalidHeaderValue() throws IOException { - final BasicHeader header = new BasicHeader("Content-Type", "\t"); - final ResponseException e = - expectThrows(ResponseException.class, () -> client().performRequest("GET", "/_cluster/settings", header)); + final Request request = new Request("GET", "/_cluster/settings"); + final RequestOptions.Builder options = request.getOptions().toBuilder(); + options.addHeader("Content-Type", "\t"); + request.setOptions(options); + final ResponseException e = expectThrows(ResponseException.class, () -> client().performRequest(request)); final Response response = e.getResponse(); assertThat(response.getStatusLine().getStatusCode(), equalTo(400)); final ObjectPath objectPath = ObjectPath.createFromResponse(response); diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/rest/Netty4HeadBodyIsEmptyIT.java b/modules/transport-netty4/src/test/java/org/elasticsearch/rest/Netty4HeadBodyIsEmptyIT.java index 20b18ebdaddda..17b374ecb3765 100644 --- a/modules/transport-netty4/src/test/java/org/elasticsearch/rest/Netty4HeadBodyIsEmptyIT.java +++ b/modules/transport-netty4/src/test/java/org/elasticsearch/rest/Netty4HeadBodyIsEmptyIT.java @@ -19,8 +19,7 @@ 
package org.elasticsearch.rest; -import org.apache.http.entity.ContentType; -import org.apache.http.entity.StringEntity; +import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.common.Strings; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -57,8 +56,9 @@ private void createTestDoc(final String indexName, final String typeName) throws builder.field("test", "test"); } builder.endObject(); - client().performRequest("PUT", "/" + indexName + "/" + typeName + "/" + "1", emptyMap(), - new StringEntity(Strings.toString(builder), ContentType.APPLICATION_JSON)); + Request request = new Request("PUT", "/" + indexName + "/" + typeName + "/" + "1"); + request.setJsonEntity(Strings.toString(builder)); + client().performRequest(request); } } @@ -109,8 +109,9 @@ public void testAliasExists() throws IOException { } builder.endObject(); - client().performRequest("POST", "_aliases", emptyMap(), new StringEntity(Strings.toString(builder), - ContentType.APPLICATION_JSON)); + Request request = new Request("POST", "/_aliases"); + request.setJsonEntity(Strings.toString(builder)); + client().performRequest(request); headTestCase("/_alias/test_alias", emptyMap(), greaterThan(0)); headTestCase("/test/_alias/test_alias", emptyMap(), greaterThan(0)); } @@ -135,8 +136,9 @@ public void testTemplateExists() throws IOException { } builder.endObject(); - client().performRequest("PUT", "/_template/template", emptyMap(), - new StringEntity(Strings.toString(builder), ContentType.APPLICATION_JSON)); + Request request = new Request("PUT", "/_template/template"); + request.setJsonEntity(Strings.toString(builder)); + client().performRequest(request); headTestCase("/_template/template", emptyMap(), greaterThan(0)); } } @@ -164,8 +166,10 @@ public void testGetSourceAction() throws IOException { builder.endObject(); } builder.endObject(); - client().performRequest("PUT", "/test-no-source", emptyMap(), new StringEntity(Strings.toString(builder), - ContentType.APPLICATION_JSON)); + + Request request = new Request("PUT", "/test-no-source"); + request.setJsonEntity(Strings.toString(builder)); + client().performRequest(request); createTestDoc("test-no-source", "test-no-source"); headTestCase("/test-no-source/test-no-source/1/_source", emptyMap(), NOT_FOUND.getStatus(), equalTo(0)); } @@ -190,7 +194,11 @@ private void headTestCase( final Map params, final int expectedStatusCode, final Matcher matcher) throws IOException { - Response response = client().performRequest("HEAD", url, params); + Request request = new Request("HEAD", url); + for (Map.Entry param : params.entrySet()) { + request.addParameter(param.getKey(), param.getValue()); + } + Response response = client().performRequest(request); assertEquals(expectedStatusCode, response.getStatusLine().getStatusCode()); assertThat(Integer.valueOf(response.getHeader("Content-Length")), matcher); assertNull("HEAD requests shouldn't have a response body but " + url + " did", response.getEntity()); diff --git a/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HaHdfsFailoverTestSuiteIT.java b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HaHdfsFailoverTestSuiteIT.java index ce4fe9b6d3f42..0248576b57384 100644 --- a/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HaHdfsFailoverTestSuiteIT.java +++ b/plugins/repository-hdfs/src/test/java/org/elasticsearch/repositories/hdfs/HaHdfsFailoverTestSuiteIT.java @@ -21,16 +21,13 @@ import java.io.IOException; 
import java.net.InetSocketAddress; -import java.nio.charset.Charset; import java.nio.file.Files; import java.nio.file.Path; import java.security.AccessController; import java.security.PrivilegedActionException; import java.security.PrivilegedExceptionAction; import java.util.ArrayList; -import java.util.Collections; import java.util.List; -import java.util.Map; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.ha.BadFencingConfigurationException; @@ -42,9 +39,7 @@ import org.apache.hadoop.hdfs.tools.DFSHAAdmin; import org.apache.hadoop.security.SecurityUtil; import org.apache.hadoop.security.UserGroupInformation; -import org.apache.http.Header; -import org.apache.http.message.BasicHeader; -import org.apache.http.nio.entity.NStringEntity; +import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.client.RestClient; import org.elasticsearch.common.io.PathUtils; @@ -58,8 +53,6 @@ public class HaHdfsFailoverTestSuiteIT extends ESRestTestCase { public void testHAFailoverWithRepository() throws Exception { RestClient client = client(); - Map emptyParams = Collections.emptyMap(); - Header contentHeader = new BasicHeader("Content-Type", "application/json"); String esKerberosPrincipal = System.getProperty("test.krb5.principal.es"); String hdfsKerberosPrincipal = System.getProperty("test.krb5.principal.hdfs"); @@ -106,7 +99,8 @@ public void testHAFailoverWithRepository() throws Exception { // Create repository { - Response response = client.performRequest("PUT", "/_snapshot/hdfs_ha_repo_read", emptyParams, new NStringEntity( + Request request = new Request("PUT", "/_snapshot/hdfs_ha_repo_read"); + request.setJsonEntity( "{" + "\"type\":\"hdfs\"," + "\"settings\":{" + @@ -121,15 +115,15 @@ public void testHAFailoverWithRepository() throws Exception { "\"conf.dfs.client.failover.proxy.provider.ha-hdfs\": " + "\"org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider\"" + "}" + - "}", - Charset.defaultCharset()), contentHeader); + "}"); + Response response = client.performRequest(request); Assert.assertEquals(200, response.getStatusLine().getStatusCode()); } // Get repository { - Response response = client.performRequest("GET", "/_snapshot/hdfs_ha_repo_read/_all", emptyParams); + Response response = client.performRequest(new Request("GET", "/_snapshot/hdfs_ha_repo_read/_all")); Assert.assertEquals(200, response.getStatusLine().getStatusCode()); } @@ -138,7 +132,7 @@ public void testHAFailoverWithRepository() throws Exception { // Get repository again { - Response response = client.performRequest("GET", "/_snapshot/hdfs_ha_repo_read/_all", emptyParams); + Response response = client.performRequest(new Request("GET", "/_snapshot/hdfs_ha_repo_read/_all")); Assert.assertEquals(200, response.getStatusLine().getStatusCode()); } } diff --git a/qa/verify-version-constants/src/test/java/org/elasticsearch/qa/verify_version_constants/VerifyVersionConstantsIT.java b/qa/verify-version-constants/src/test/java/org/elasticsearch/qa/verify_version_constants/VerifyVersionConstantsIT.java index a26237962bf4b..71fc9d09fe3f5 100644 --- a/qa/verify-version-constants/src/test/java/org/elasticsearch/qa/verify_version_constants/VerifyVersionConstantsIT.java +++ b/qa/verify-version-constants/src/test/java/org/elasticsearch/qa/verify_version_constants/VerifyVersionConstantsIT.java @@ -20,6 +20,7 @@ package org.elasticsearch.qa.verify_version_constants; import org.elasticsearch.Version; +import org.elasticsearch.client.Request; import 
org.elasticsearch.client.Response;
 import org.elasticsearch.test.rest.ESRestTestCase;
 import org.elasticsearch.test.rest.yaml.ObjectPath;
@@ -32,7 +33,7 @@ public class VerifyVersionConstantsIT extends ESRestTestCase {
 
     public void testLuceneVersionConstant() throws IOException, ParseException {
-        final Response response = client().performRequest("GET", "/");
+        final Response response = client().performRequest(new Request("GET", "/"));
         assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
         final ObjectPath objectPath = ObjectPath.createFromResponse(response);
         final String elasticsearchVersionString = objectPath.evaluate("version.number").toString();

From 7ce992616ac42187fe3d55b5f1b763926d763e12 Mon Sep 17 00:00:00 2001
From: Jim Ferenczi
Date: Tue, 17 Jul 2018 00:26:01 +0200
Subject: [PATCH 062/107] Bypass highlight query terms extraction on empty fields (#32090)

Dealing with empty fields in the highlight phase can slow down the query
because the query terms extraction is done independently on each field.
This change shortcuts the highlighting performed by the unified highlighter
for fields that are not present in the document. In such cases there is
nothing to highlight, so we don't need to visit the query to build the
highlight builder.
---
 .../search/fetch/subphase/highlight/UnifiedHighlighter.java | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/UnifiedHighlighter.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/UnifiedHighlighter.java
index 7f209ed0586e0..2c9d482cab0b2 100644
--- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/UnifiedHighlighter.java
+++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/UnifiedHighlighter.java
@@ -76,6 +76,9 @@ public HighlightField highlight(HighlighterContext highlighterContext) {
             fieldValues = fieldValues.stream()
                 .map((s) -> convertFieldValue(fieldType, s))
                 .collect(Collectors.toList());
+            if (fieldValues.size() == 0) {
+                return null;
+            }
             final IndexSearcher searcher = new IndexSearcher(hitContext.reader());
             final CustomUnifiedHighlighter highlighter;
             final String fieldValue = mergeFieldValues(fieldValues, MULTIVAL_SEP_CHAR);

From 8ff5735204a2754034670a6f11e3cc58400b287a Mon Sep 17 00:00:00 2001
From: Jack Conradson
Date: Mon, 16 Jul 2018 16:13:48 -0700
Subject: [PATCH 063/107] Painless: Move and Rename Several Methods in the lookup package (#32105)

---
 .../painless/AnalyzerCaster.java | 7 +-
 .../java/org/elasticsearch/painless/Def.java | 7 +-
 .../elasticsearch/painless/FunctionRef.java | 3 +-
 .../org/elasticsearch/painless/Locals.java | 5 +-
 .../painless/ScriptClassInfo.java | 3 +-
 .../painless/antlr/EnhancedPainlessLexer.java | 2 +-
 .../painless/lookup/PainlessLookup.java | 208 +------------
 .../lookup/PainlessLookupBuilder.java | 69 +----
 .../lookup/PainlessLookupUtility.java | 284 ++++++++++++++++++
 .../painless/lookup/PainlessMethod.java | 14 +-
 .../painless/node/AExpression.java | 6 +-
 .../painless/node/EAssignment.java | 4 +-
 .../elasticsearch/painless/node/EBinary.java | 37 ++-
 .../painless/node/ECallLocal.java | 4 +-
 .../painless/node/ECapturingFunctionRef.java | 11 +-
 .../elasticsearch/painless/node/ECast.java | 6 +-
 .../elasticsearch/painless/node/EComp.java | 28 +-
 .../painless/node/EFunctionRef.java | 10 +-
 .../painless/node/EInstanceof.java | 7 +-
 .../elasticsearch/painless/node/ELambda.java | 20 +-
 .../painless/node/EListInit.java | 6 +-
.../elasticsearch/painless/node/EMapInit.java | 6 +- .../elasticsearch/painless/node/ENewObj.java | 4 +- .../elasticsearch/painless/node/ENull.java | 4 +- .../elasticsearch/painless/node/EUnary.java | 15 +- .../elasticsearch/painless/node/PBrace.java | 8 +- .../painless/node/PCallInvoke.java | 12 +- .../elasticsearch/painless/node/PField.java | 16 +- .../painless/node/PSubCallInvoke.java | 2 +- .../painless/node/PSubDefArray.java | 2 +- .../painless/node/PSubDefCall.java | 2 +- .../painless/node/PSubDefField.java | 2 +- .../painless/node/PSubField.java | 8 +- .../painless/node/PSubListShortcut.java | 4 +- .../painless/node/PSubMapShortcut.java | 4 +- .../painless/node/PSubShortcut.java | 2 +- .../elasticsearch/painless/node/SEach.java | 8 +- .../painless/node/SFunction.java | 11 +- .../painless/node/SSubEachArray.java | 6 +- .../painless/node/SSubEachIterable.java | 16 +- 40 files changed, 467 insertions(+), 406 deletions(-) create mode 100644 modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupUtility.java diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java index 457ec82a5e429..6cfc7ff6ebfd2 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/AnalyzerCaster.java @@ -19,8 +19,8 @@ package org.elasticsearch.painless; -import org.elasticsearch.painless.lookup.PainlessLookup; import org.elasticsearch.painless.lookup.PainlessCast; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.def; import java.util.Objects; @@ -465,8 +465,9 @@ public static PainlessCast getLegalCast(Location location, Class actual, Clas (actual.isAssignableFrom(expected) && explicit)) { return PainlessCast.standard(actual, expected, explicit); } else { - throw location.createError(new ClassCastException( - "Cannot cast from [" + PainlessLookup.ClassToName(actual) + "] to [" + PainlessLookup.ClassToName(expected) + "].")); + throw location.createError(new ClassCastException("Cannot cast from " + + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(actual) + "] to " + + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(expected) + "].")); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java index f006b57f46bad..78db712d183d2 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java @@ -19,9 +19,10 @@ package org.elasticsearch.painless; +import org.elasticsearch.painless.lookup.PainlessClass; import org.elasticsearch.painless.lookup.PainlessLookup; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.PainlessClass; import org.elasticsearch.painless.lookup.PainlessMethodKey; import java.lang.invoke.CallSite; @@ -302,7 +303,7 @@ static MethodHandle lookupMethod(PainlessLookup painlessLookup, MethodHandles.Lo nestedType, 0, DefBootstrap.REFERENCE, - PainlessLookup.ClassToName(interfaceType)); + PainlessLookupUtility.anyTypeToPainlessTypeName(interfaceType)); filter = nested.dynamicInvoker(); } else { throw new AssertionError(); @@ -347,7 +348,7 @@ private static MethodHandle 
lookupReferenceInternal(PainlessLookup painlessLooku PainlessMethod interfaceMethod = painlessLookup.getPainlessStructFromJavaClass(clazz).functionalMethod; if (interfaceMethod == null) { throw new IllegalArgumentException("Cannot convert function reference [" + type + "::" + call + "] " + - "to [" + PainlessLookup.ClassToName(clazz) + "], not a functional interface"); + "to [" + PainlessLookupUtility.anyTypeToPainlessTypeName(clazz) + "], not a functional interface"); } int arity = interfaceMethod.arguments.size() + captures.length; final MethodHandle handle; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/FunctionRef.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/FunctionRef.java index 6ab7292c7f65b..9e72dc2c83576 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/FunctionRef.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/FunctionRef.java @@ -21,6 +21,7 @@ import org.elasticsearch.painless.lookup.PainlessClass; import org.elasticsearch.painless.lookup.PainlessLookup; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; import org.elasticsearch.painless.lookup.PainlessMethodKey; import org.objectweb.asm.Type; @@ -168,7 +169,7 @@ private static PainlessMethod lookup(PainlessLookup painlessLookup, Class exp PainlessMethod method = painlessLookup.getPainlessStructFromJavaClass(expected).functionalMethod; if (method == null) { throw new IllegalArgumentException("Cannot convert function reference [" + type + "::" + call + "] " + - "to [" + PainlessLookup.ClassToName(expected) + "], not a functional interface"); + "to [" + PainlessLookupUtility.anyTypeToPainlessTypeName(expected) + "], not a functional interface"); } // lookup requested method diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Locals.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Locals.java index bab1b7942d657..e797740fed185 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Locals.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Locals.java @@ -19,10 +19,11 @@ package org.elasticsearch.painless; +import org.elasticsearch.painless.ScriptClassInfo.MethodArgument; import org.elasticsearch.painless.lookup.PainlessLookup; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; import org.elasticsearch.painless.lookup.PainlessMethodKey; -import org.elasticsearch.painless.ScriptClassInfo.MethodArgument; import java.util.Arrays; import java.util.Collection; @@ -292,7 +293,7 @@ public int getSlot() { @Override public String toString() { StringBuilder b = new StringBuilder(); - b.append("Variable[type=").append(PainlessLookup.ClassToName(clazz)); + b.append("Variable[type=").append(PainlessLookupUtility.anyTypeToPainlessTypeName(clazz)); b.append(",name=").append(name); b.append(",slot=").append(slot); if (readonly) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ScriptClassInfo.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ScriptClassInfo.java index df43dba7b3476..ff2061a9a4b92 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/ScriptClassInfo.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/ScriptClassInfo.java @@ -20,6 +20,7 @@ package org.elasticsearch.painless; import 
org.elasticsearch.painless.lookup.PainlessLookup; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; import java.lang.invoke.MethodType; import java.lang.reflect.Field; @@ -182,7 +183,7 @@ private MethodArgument methodArgument(PainlessLookup painlessLookup, Class cl private static Class definitionTypeForClass(PainlessLookup painlessLookup, Class type, Function, String> unknownErrorMessageSource) { - type = PainlessLookup.ObjectClassTodefClass(type); + type = PainlessLookupUtility.javaObjectTypeToPainlessDefType(type); Class componentType = type; while (componentType.isArray()) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/EnhancedPainlessLexer.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/EnhancedPainlessLexer.java index e6c5da057980f..f1db35636b41c 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/EnhancedPainlessLexer.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/antlr/EnhancedPainlessLexer.java @@ -23,8 +23,8 @@ import org.antlr.v4.runtime.LexerNoViableAltException; import org.antlr.v4.runtime.Token; import org.antlr.v4.runtime.misc.Interval; -import org.elasticsearch.painless.lookup.PainlessLookup; import org.elasticsearch.painless.Location; +import org.elasticsearch.painless.lookup.PainlessLookup; /** * A lexer that is customized for painless. It: diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java index feeaf4d34bcdd..6111d12317b18 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookup.java @@ -19,7 +19,6 @@ package org.elasticsearch.painless.lookup; -import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Map; @@ -30,154 +29,6 @@ */ public final class PainlessLookup { - public static Class getBoxedType(Class clazz) { - if (clazz == boolean.class) { - return Boolean.class; - } else if (clazz == byte.class) { - return Byte.class; - } else if (clazz == short.class) { - return Short.class; - } else if (clazz == char.class) { - return Character.class; - } else if (clazz == int.class) { - return Integer.class; - } else if (clazz == long.class) { - return Long.class; - } else if (clazz == float.class) { - return Float.class; - } else if (clazz == double.class) { - return Double.class; - } - - return clazz; - } - - public static Class getUnboxedype(Class clazz) { - if (clazz == Boolean.class) { - return boolean.class; - } else if (clazz == Byte.class) { - return byte.class; - } else if (clazz == Short.class) { - return short.class; - } else if (clazz == Character.class) { - return char.class; - } else if (clazz == Integer.class) { - return int.class; - } else if (clazz == Long.class) { - return long.class; - } else if (clazz == Float.class) { - return float.class; - } else if (clazz == Double.class) { - return double.class; - } - - return clazz; - } - - public static boolean isConstantType(Class clazz) { - return clazz == boolean.class || - clazz == byte.class || - clazz == short.class || - clazz == char.class || - clazz == int.class || - clazz == long.class || - clazz == float.class || - clazz == double.class || - clazz == String.class; - } - - public Class getClassFromBinaryName(String painlessType) { - return 
painlessTypesToJavaClasses.get(painlessType.replace('$', '.')); - } - - public static Class ObjectClassTodefClass(Class clazz) { - if (clazz.isArray()) { - Class component = clazz.getComponentType(); - int dimensions = 1; - - while (component.isArray()) { - component = component.getComponentType(); - ++dimensions; - } - - if (component == Object.class) { - char[] braces = new char[dimensions]; - Arrays.fill(braces, '['); - - String descriptor = new String(braces) + org.objectweb.asm.Type.getType(def.class).getDescriptor(); - org.objectweb.asm.Type type = org.objectweb.asm.Type.getType(descriptor); - - try { - return Class.forName(type.getInternalName().replace('/', '.')); - } catch (ClassNotFoundException exception) { - throw new IllegalStateException("internal error", exception); - } - } - } else if (clazz == Object.class) { - return def.class; - } - - return clazz; - } - - public static Class defClassToObjectClass(Class clazz) { - if (clazz.isArray()) { - Class component = clazz.getComponentType(); - int dimensions = 1; - - while (component.isArray()) { - component = component.getComponentType(); - ++dimensions; - } - - if (component == def.class) { - char[] braces = new char[dimensions]; - Arrays.fill(braces, '['); - - String descriptor = new String(braces) + org.objectweb.asm.Type.getType(Object.class).getDescriptor(); - org.objectweb.asm.Type type = org.objectweb.asm.Type.getType(descriptor); - - try { - return Class.forName(type.getInternalName().replace('/', '.')); - } catch (ClassNotFoundException exception) { - throw new IllegalStateException("internal error", exception); - } - } - } else if (clazz == def.class) { - return Object.class; - } - - return clazz; - } - - public static String ClassToName(Class clazz) { - if (clazz.isLocalClass() || clazz.isAnonymousClass()) { - return null; - } else if (clazz.isArray()) { - Class component = clazz.getComponentType(); - int dimensions = 1; - - while (component.isArray()) { - component = component.getComponentType(); - ++dimensions; - } - - if (component == def.class) { - StringBuilder builder = new StringBuilder(def.class.getSimpleName()); - - for (int dimension = 0; dimension < dimensions; dimension++) { - builder.append("[]"); - } - - return builder.toString(); - } - } else if (clazz == def.class) { - return def.class.getSimpleName(); - } - - return clazz.getCanonicalName().replace('$', '.'); - } - public Collection getStructs() { return javaClassesToPainlessStructs.values(); } @@ -190,6 +41,10 @@ public Collection getStructs() { this.javaClassesToPainlessStructs = Collections.unmodifiableMap(javaClassesToPainlessStructs); } + public Class getClassFromBinaryName(String painlessType) { + return painlessTypesToJavaClasses.get(painlessType.replace('$', '.')); + } + public boolean isSimplePainlessType(String painlessType) { return painlessTypesToJavaClasses.containsKey(painlessType); } @@ -199,59 +54,6 @@ public PainlessClass getPainlessStructFromJavaClass(Class clazz) { } public Class getJavaClassFromPainlessType(String painlessType) { - Class javaClass = painlessTypesToJavaClasses.get(painlessType); - - if (javaClass != null) { - return javaClass; - } - int arrayDimensions = 0; - int arrayIndex = painlessType.indexOf('['); - - if (arrayIndex != -1) { - int length = painlessType.length(); - - while (arrayIndex < length) { - if (painlessType.charAt(arrayIndex) == '[' && ++arrayIndex < length && painlessType.charAt(arrayIndex++) == ']') { - ++arrayDimensions; - } else { - throw new IllegalArgumentException("invalid painless type [" + 
painlessType + "]."); - } - } - - painlessType = painlessType.substring(0, painlessType.indexOf('[')); - javaClass = painlessTypesToJavaClasses.get(painlessType); - - char braces[] = new char[arrayDimensions]; - Arrays.fill(braces, '['); - String descriptor = new String(braces); - - if (javaClass == boolean.class) { - descriptor += "Z"; - } else if (javaClass == byte.class) { - descriptor += "B"; - } else if (javaClass == short.class) { - descriptor += "S"; - } else if (javaClass == char.class) { - descriptor += "C"; - } else if (javaClass == int.class) { - descriptor += "I"; - } else if (javaClass == long.class) { - descriptor += "J"; - } else if (javaClass == float.class) { - descriptor += "F"; - } else if (javaClass == double.class) { - descriptor += "D"; - } else { - descriptor += "L" + javaClass.getName() + ";"; - } - - try { - return Class.forName(descriptor); - } catch (ClassNotFoundException cnfe) { - throw new IllegalStateException("invalid painless type [" + painlessType + "]", cnfe); - } - } - - throw new IllegalArgumentException("invalid painless type [" + painlessType + "]"); + return PainlessLookupUtility.painlessTypeNameToPainlessType(painlessType, painlessTypesToJavaClasses); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java index 1dadce318d672..9a5e08d65a754 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java @@ -30,7 +30,6 @@ import java.lang.invoke.MethodHandles; import java.lang.reflect.Modifier; import java.util.ArrayList; -import java.util.Arrays; import java.util.Collections; import java.util.HashMap; import java.util.List; @@ -279,7 +278,7 @@ private void addConstructor(String ownerStructName, WhitelistConstructor whiteli Class painlessParameterClass = getJavaClassFromPainlessType(painlessParameterTypeName); painlessParametersTypes.add(painlessParameterClass); - javaClassParameters[parameterCount] = PainlessLookup.defClassToObjectClass(painlessParameterClass); + javaClassParameters[parameterCount] = PainlessLookupUtility.painlessDefTypeToJavaObjectType(painlessParameterClass); } catch (IllegalArgumentException iae) { throw new IllegalArgumentException("struct not defined for constructor parameter [" + painlessParameterTypeName + "] " + "with owner struct [" + ownerStructName + "] and constructor parameters " + @@ -364,7 +363,8 @@ private void addMethod(ClassLoader whitelistClassLoader, String ownerStructName, Class painlessParameterClass = getJavaClassFromPainlessType(painlessParameterTypeName); painlessParametersTypes.add(painlessParameterClass); - javaClassParameters[parameterCount + augmentedOffset] = PainlessLookup.defClassToObjectClass(painlessParameterClass); + javaClassParameters[parameterCount + augmentedOffset] = + PainlessLookupUtility.painlessDefTypeToJavaObjectType(painlessParameterClass); } catch (IllegalArgumentException iae) { throw new IllegalArgumentException("struct not defined for method parameter [" + painlessParameterTypeName + "] " + "with owner struct [" + ownerStructName + "] and method with name [" + whitelistMethod.javaMethodName + "] " + @@ -393,7 +393,7 @@ private void addMethod(ClassLoader whitelistClassLoader, String ownerStructName, "and parameters " + whitelistMethod.painlessParameterTypeNames, iae); } - if 
(javaMethod.getReturnType() != PainlessLookup.defClassToObjectClass(painlessReturnClass)) { + if (javaMethod.getReturnType() != PainlessLookupUtility.painlessDefTypeToJavaObjectType(painlessReturnClass)) { throw new IllegalArgumentException("specified return type class [" + painlessReturnClass + "] " + "does not match the return type class [" + javaMethod.getReturnType() + "] for the " + "method with name [" + whitelistMethod.javaMethodName + "] " + @@ -711,64 +711,11 @@ private PainlessMethod computeFunctionalInterfaceMethod(PainlessClass clazz) { return painless; } - public Class getJavaClassFromPainlessType(String painlessType) { - Class javaClass = painlessTypesToJavaClasses.get(painlessType); - - if (javaClass != null) { - return javaClass; - } - int arrayDimensions = 0; - int arrayIndex = painlessType.indexOf('['); - - if (arrayIndex != -1) { - int length = painlessType.length(); - - while (arrayIndex < length) { - if (painlessType.charAt(arrayIndex) == '[' && ++arrayIndex < length && painlessType.charAt(arrayIndex++) == ']') { - ++arrayDimensions; - } else { - throw new IllegalArgumentException("invalid painless type [" + painlessType + "]."); - } - } - - painlessType = painlessType.substring(0, painlessType.indexOf('[')); - javaClass = painlessTypesToJavaClasses.get(painlessType); - - char braces[] = new char[arrayDimensions]; - Arrays.fill(braces, '['); - String descriptor = new String(braces); - - if (javaClass == boolean.class) { - descriptor += "Z"; - } else if (javaClass == byte.class) { - descriptor += "B"; - } else if (javaClass == short.class) { - descriptor += "S"; - } else if (javaClass == char.class) { - descriptor += "C"; - } else if (javaClass == int.class) { - descriptor += "I"; - } else if (javaClass == long.class) { - descriptor += "J"; - } else if (javaClass == float.class) { - descriptor += "F"; - } else if (javaClass == double.class) { - descriptor += "D"; - } else { - descriptor += "L" + javaClass.getName() + ";"; - } - - try { - return Class.forName(descriptor); - } catch (ClassNotFoundException cnfe) { - throw new IllegalStateException("invalid painless type [" + painlessType + "]", cnfe); - } - } - - throw new IllegalArgumentException("invalid painless type [" + painlessType + "]"); - } - public PainlessLookup build() { return new PainlessLookup(painlessTypesToJavaClasses, javaClassesToPainlessStructs); } + + public Class getJavaClassFromPainlessType(String painlessType) { + return PainlessLookupUtility.painlessTypeNameToPainlessType(painlessType, painlessTypesToJavaClasses); + } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupUtility.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupUtility.java new file mode 100644 index 0000000000000..d1f3ee4ece3e0 --- /dev/null +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupUtility.java @@ -0,0 +1,284 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.painless.lookup;
+
+import org.objectweb.asm.Type;
+
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Map;
+
+/**
+ * This class contains methods shared by {@link PainlessLookupBuilder}, {@link PainlessLookup}, and other classes within
+ * Painless for conversion between type names and types along with various other utility methods.
+ *
+ * The following terminology is used for variable names throughout the lookup package:
+ *
+ * - javaClass          (Class)          - a java class including def and excluding array type java classes
+ * - javaClassName      (String)         - the fully qualified java class name for a javaClass
+ * - painlessClassName  (String)         - the fully qualified painless name or imported painless name for a painlessClass
+ * - anyClassName       (String)         - either a javaClassName or a painlessClassName
+ * - javaType           (Class)          - a java class excluding def and array type java classes
+ * - painlessType       (Class)          - a java class including def and array type java classes
+ * - javaTypeName       (String)         - the fully qualified java Type name for a javaType
+ * - painlessTypeName   (String)         - the fully qualified painless name or imported painless name for a painlessType
+ * - anyTypeName        (String)         - either a javaTypeName or a painlessTypeName
+ * - painlessClass      (PainlessClass)  - a painless class object
+ *
+ * Under ambiguous circumstances most variable names are prefixed with asm, java, or painless.
+ * If the variable name is the same for asm, java, and painless, no prefix is used.
+ */
+public final class PainlessLookupUtility {
+
+    public static Class<?> javaObjectTypeToPainlessDefType(Class<?> javaType) {
+        if (javaType.isArray()) {
+            Class<?> javaTypeComponent = javaType.getComponentType();
+            int arrayDimensions = 1;
+
+            while (javaTypeComponent.isArray()) {
+                javaTypeComponent = javaTypeComponent.getComponentType();
+                ++arrayDimensions;
+            }
+
+            if (javaTypeComponent == Object.class) {
+                char[] asmDescriptorBraces = new char[arrayDimensions];
+                Arrays.fill(asmDescriptorBraces, '[');
+
+                String asmDescriptor = new String(asmDescriptorBraces) + Type.getType(def.class).getDescriptor();
+                Type asmType = Type.getType(asmDescriptor);
+
+                try {
+                    return Class.forName(asmType.getInternalName().replace('/', '.'));
+                } catch (ClassNotFoundException cnfe) {
+                    throw new IllegalStateException("internal error", cnfe);
+                }
+            }
+        } else if (javaType == Object.class) {
+            return def.class;
+        }
+
+        return javaType;
+    }
+
+    public static Class<?> painlessDefTypeToJavaObjectType(Class<?> painlessType) {
+        if (painlessType.isArray()) {
+            Class<?> painlessTypeComponent = painlessType.getComponentType();
+            int arrayDimensions = 1;
+
+            while (painlessTypeComponent.isArray()) {
+                painlessTypeComponent = painlessTypeComponent.getComponentType();
+                ++arrayDimensions;
+            }
+
+            if (painlessTypeComponent == def.class) {
+                char[] asmDescriptorBraces = new char[arrayDimensions];
+                Arrays.fill(asmDescriptorBraces, '[');
+
+                String asmDescriptor = new String(asmDescriptorBraces) + Type.getType(Object.class).getDescriptor();
+                Type asmType = Type.getType(asmDescriptor);
+
+                try {
+                    return Class.forName(asmType.getInternalName().replace('/', '.'));
+                } catch (ClassNotFoundException exception) {
+                    throw new IllegalStateException("internal error", exception);
+                }
+            }
+        } else if (painlessType == def.class) {
+            return Object.class;
+        }
+
+        return painlessType;
+    }
+
+    public static String anyTypeNameToPainlessTypeName(String anyTypeName) {
+        return anyTypeName.replace(def.class.getName(), DEF_PAINLESS_CLASS_NAME).replace('$', '.');
+    }
+
+    public static String anyTypeToPainlessTypeName(Class<?> anyType) {
+        if (anyType.isLocalClass() || anyType.isAnonymousClass()) {
+            return null;
+        } else if (anyType.isArray()) {
+            Class<?> anyTypeComponent = anyType.getComponentType();
+            int arrayDimensions = 1;
+
+            while (anyTypeComponent.isArray()) {
+                anyTypeComponent = anyTypeComponent.getComponentType();
+                ++arrayDimensions;
+            }
+
+            if (anyTypeComponent == def.class) {
+                StringBuilder painlessDefTypeNameArrayBuilder = new StringBuilder(DEF_PAINLESS_CLASS_NAME);
+
+                for (int dimension = 0; dimension < arrayDimensions; dimension++) {
+                    painlessDefTypeNameArrayBuilder.append("[]");
+                }
+
+                return painlessDefTypeNameArrayBuilder.toString();
+            }
+        } else if (anyType == def.class) {
+            return DEF_PAINLESS_CLASS_NAME;
+        }
+
+        return anyType.getCanonicalName().replace('$', '.');
+    }
+
+    public static Class<?> painlessTypeNameToPainlessType(String painlessTypeName, Map<String, Class<?>> painlessClassNamesToJavaClasses) {
+        Class<?> javaClass = painlessClassNamesToJavaClasses.get(painlessTypeName);
+
+        if (javaClass != null) {
+            return javaClass;
+        }
+
+        int arrayDimensions = 0;
+        int arrayIndex = painlessTypeName.indexOf('[');
+
+        if (arrayIndex != -1) {
+            int painlessTypeNameLength = painlessTypeName.length();
+
+            while (arrayIndex < painlessTypeNameLength) {
+                if (painlessTypeName.charAt(arrayIndex) == '[' &&
+                        ++arrayIndex < painlessTypeNameLength &&
+                        painlessTypeName.charAt(arrayIndex++) == ']') {
+                    ++arrayDimensions;
+                } else {
+                    throw new IllegalArgumentException("invalid painless type [" + painlessTypeName + "].");
+                }
+            }
+
+            painlessTypeName = painlessTypeName.substring(0, painlessTypeName.indexOf('['));
+            javaClass = painlessClassNamesToJavaClasses.get(painlessTypeName);
+
+            char javaDescriptorBraces[] = new char[arrayDimensions];
+            Arrays.fill(javaDescriptorBraces, '[');
+            String javaDescriptor = new String(javaDescriptorBraces);
+
+            if (javaClass == boolean.class) {
+                javaDescriptor += "Z";
+            } else if (javaClass == byte.class) {
+                javaDescriptor += "B";
+            } else if (javaClass == short.class) {
+                javaDescriptor += "S";
+            } else if (javaClass == char.class) {
+                javaDescriptor += "C";
+            } else if (javaClass == int.class) {
+                javaDescriptor += "I";
+            } else if (javaClass == long.class) {
+                javaDescriptor += "J";
+            } else if (javaClass == float.class) {
+                javaDescriptor += "F";
+            } else if (javaClass == double.class) {
+                javaDescriptor += "D";
+            } else {
+                javaDescriptor += "L" + javaClass.getName() + ";";
+            }
+
+            try {
+                return Class.forName(javaDescriptor);
+            } catch (ClassNotFoundException cnfe) {
+                throw new IllegalStateException("painless type [" + painlessTypeName + "] not found", cnfe);
+            }
+        }
+
+        throw new IllegalArgumentException("painless type [" + painlessTypeName + "] not found");
+    }
+
+    public static void validatePainlessType(Class<?> painlessType, Collection<Class<?>> javaClasses) {
+        String painlessTypeName = anyTypeNameToPainlessTypeName(painlessType.getName());
+
+        while (painlessType.getComponentType() != null) {
+            painlessType = painlessType.getComponentType();
+        }
+
+        if (javaClasses.contains(painlessType) == false) {
+            throw new IllegalStateException("painless type [" + painlessTypeName + "] not found");
+        }
+    }
+
+    public static String buildPainlessMethodKey(String methodName, int methodArity) {
+        return methodName + "/" + methodArity;
+    }
+
+    public static String buildPainlessFieldKey(String fieldName) {
+        return fieldName;
+    }
+
+    public static Class<?> getBoxedAnyType(Class<?> anyType) {
+        if (anyType == boolean.class) {
+            return Boolean.class;
+        } else if (anyType == byte.class) {
+            return Byte.class;
+        } else if (anyType == short.class) {
+            return Short.class;
+        } else if (anyType == char.class) {
+            return Character.class;
+        } else if (anyType == int.class) {
+            return Integer.class;
+        } else if (anyType == long.class) {
+            return Long.class;
+        } else if (anyType == float.class) {
+            return Float.class;
+        } else if (anyType == double.class) {
+            return Double.class;
+        }
+
+        return anyType;
+    }
+
+    public static Class<?> getUnboxedAnyType(Class<?> anyType) {
+        if (anyType == Boolean.class) {
+            return boolean.class;
+        } else if (anyType == Byte.class) {
+            return byte.class;
+        } else if (anyType == Short.class) {
+            return short.class;
+        } else if (anyType == Character.class) {
+            return char.class;
+        } else if (anyType == Integer.class) {
+            return int.class;
+        } else if (anyType == Long.class) {
+            return long.class;
+        } else if (anyType == Float.class) {
+            return float.class;
+        } else if (anyType == Double.class) {
+            return double.class;
+        }
+
+        return anyType;
+    }
+
+    public static boolean isAnyTypeConstant(Class<?> anyType) {
+        return anyType == boolean.class ||
+               anyType == byte.class ||
+               anyType == short.class ||
+               anyType == char.class ||
+               anyType == int.class ||
+               anyType == long.class ||
+               anyType == float.class ||
+               anyType == double.class ||
+               anyType == String.class;
+    }
+
+    public static final String DEF_PAINLESS_CLASS_NAME = def.class.getSimpleName();
+    public static final String CONSTRUCTOR_ANY_NAME = "<init>";
+
+    private PainlessLookupUtility() {
+
+    }
+}
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessMethod.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessMethod.java
index e87e1d4bf38fc..8d8a7f691fecd 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessMethod.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessMethod.java
@@ -69,21 +69,21 @@ public MethodType getMethodType() {
             params = new Class<?>[1 + arguments.size()];
             params[0] = augmentation;
             for (int i = 0; i < arguments.size(); i++) {
-                params[i + 1] = PainlessLookup.defClassToObjectClass(arguments.get(i));
+                params[i + 1] = PainlessLookupUtility.painlessDefTypeToJavaObjectType(arguments.get(i));
             }
-            returnValue = PainlessLookup.defClassToObjectClass(rtn);
+            returnValue = PainlessLookupUtility.painlessDefTypeToJavaObjectType(rtn);
         } else if (Modifier.isStatic(modifiers)) {
             // static method: straightforward copy
             params = new Class<?>[arguments.size()];
             for (int i = 0; i < arguments.size(); i++) {
-                params[i] = PainlessLookup.defClassToObjectClass(arguments.get(i));
+                params[i] = PainlessLookupUtility.painlessDefTypeToJavaObjectType(arguments.get(i));
             }
-            returnValue = PainlessLookup.defClassToObjectClass(rtn);
+            returnValue = PainlessLookupUtility.painlessDefTypeToJavaObjectType(rtn);
         } else if ("<init>".equals(name)) {
             // constructor: returns the owner class
             params = new Class<?>[arguments.size()];
             for (int i = 0; i < arguments.size(); i++) {
-                params[i] = PainlessLookup.defClassToObjectClass(arguments.get(i));
+                params[i] = PainlessLookupUtility.painlessDefTypeToJavaObjectType(arguments.get(i));
             }
             returnValue = owner.clazz;
         } else {
@@ -91,9 +91,9 @@ public MethodType getMethodType() {
             params = new Class<?>[1 + arguments.size()];
             params[0] = owner.clazz;
             for (int i = 0; i < arguments.size(); i++) {
-                params[i + 1] = PainlessLookup.defClassToObjectClass(arguments.get(i));
+                params[i + 1] = PainlessLookupUtility.painlessDefTypeToJavaObjectType(arguments.get(i));
             }
-            returnValue = PainlessLookup.defClassToObjectClass(rtn);
+            returnValue = PainlessLookupUtility.painlessDefTypeToJavaObjectType(rtn);
         }
         return MethodType.methodType(returnValue, params);
     }
diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AExpression.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AExpression.java
index 0dfcf4d409335..dd813f73c3dfc 100644
--- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AExpression.java
+++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/AExpression.java
@@ -20,10 +20,10 @@
 package org.elasticsearch.painless.node;
 
 import org.elasticsearch.painless.AnalyzerCaster;
-import org.elasticsearch.painless.lookup.PainlessLookup;
-import org.elasticsearch.painless.lookup.PainlessCast;
 import org.elasticsearch.painless.Locals;
 import org.elasticsearch.painless.Location;
+import org.elasticsearch.painless.lookup.PainlessCast;
+import org.elasticsearch.painless.lookup.PainlessLookupUtility;
 
 import java.util.Objects;
 
@@ -157,7 +157,7 @@ AExpression cast(Locals locals) {
 
             return ecast;
         } else {
-            if (PainlessLookup.isConstantType(expected)) {
+            if (PainlessLookupUtility.isAnyTypeConstant(expected)) {
                 // For the case where a cast is required, a constant is set,
                 // and the constant can be immediately cast to the expected type.
// An EConstant replaces this node with the constant cast appropriately diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EAssignment.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EAssignment.java index a0a29ed59ddde..584e5df6342ab 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EAssignment.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EAssignment.java @@ -22,13 +22,13 @@ import org.elasticsearch.painless.AnalyzerCaster; import org.elasticsearch.painless.DefBootstrap; -import org.elasticsearch.painless.lookup.PainlessCast; -import org.elasticsearch.painless.lookup.def; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.Operation; +import org.elasticsearch.painless.lookup.PainlessCast; +import org.elasticsearch.painless.lookup.def; import java.util.ArrayList; import java.util.List; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBinary.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBinary.java index 422300072dc2f..65776ca76f117 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBinary.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EBinary.java @@ -21,14 +21,14 @@ import org.elasticsearch.painless.AnalyzerCaster; import org.elasticsearch.painless.DefBootstrap; -import org.elasticsearch.painless.lookup.PainlessLookup; -import org.elasticsearch.painless.lookup.def; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.Operation; import org.elasticsearch.painless.WriterConstants; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; +import org.elasticsearch.painless.lookup.def; import java.util.Objects; import java.util.Set; @@ -106,7 +106,8 @@ private void analyzeMul(Locals variables) { if (promote == null) { throw createError(new ClassCastException("Cannot apply multiply [*] to types " + - "[" + PainlessLookup.ClassToName(left.actual) + "] and [" + PainlessLookup.ClassToName(right.actual) + "].")); + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); } actual = promote; @@ -148,7 +149,8 @@ private void analyzeDiv(Locals variables) { if (promote == null) { throw createError(new ClassCastException("Cannot apply divide [/] to types " + - "[" + PainlessLookup.ClassToName(left.actual) + "] and [" + PainlessLookup.ClassToName(right.actual) + "].")); + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); } actual = promote; @@ -195,7 +197,8 @@ private void analyzeRem(Locals variables) { if (promote == null) { throw createError(new ClassCastException("Cannot apply remainder [%] to types " + - "[" + PainlessLookup.ClassToName(left.actual) + "] and [" + PainlessLookup.ClassToName(right.actual) + "].")); + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); } actual = promote; @@ -242,7 +245,8 @@ private void 
analyzeAdd(Locals variables) { if (promote == null) { throw createError(new ClassCastException("Cannot apply add [+] to types " + - "[" + PainlessLookup.ClassToName(left.actual) + "] and [" + PainlessLookup.ClassToName(right.actual) + "].")); + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); } actual = promote; @@ -300,7 +304,8 @@ private void analyzeSub(Locals variables) { if (promote == null) { throw createError(new ClassCastException("Cannot apply subtract [-] to types " + - "[" + PainlessLookup.ClassToName(left.actual) + "] and [" + PainlessLookup.ClassToName(right.actual) + "].")); + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); } actual = promote; @@ -358,7 +363,8 @@ private void analyzeLSH(Locals variables) { if (lhspromote == null || rhspromote == null) { throw createError(new ClassCastException("Cannot apply left shift [<<] to types " + - "[" + PainlessLookup.ClassToName(left.actual) + "] and [" + PainlessLookup.ClassToName(right.actual) + "].")); + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); } actual = promote = lhspromote; @@ -405,7 +411,8 @@ private void analyzeRSH(Locals variables) { if (lhspromote == null || rhspromote == null) { throw createError(new ClassCastException("Cannot apply right shift [>>] to types " + - "[" + PainlessLookup.ClassToName(left.actual) + "] and [" + PainlessLookup.ClassToName(right.actual) + "].")); + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); } actual = promote = lhspromote; @@ -455,7 +462,8 @@ private void analyzeUSH(Locals variables) { if (lhspromote == null || rhspromote == null) { throw createError(new ClassCastException("Cannot apply unsigned shift [>>>] to types " + - "[" + PainlessLookup.ClassToName(left.actual) + "] and [" + PainlessLookup.ClassToName(right.actual) + "].")); + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); } if (lhspromote == def.class || rhspromote == def.class) { @@ -498,7 +506,8 @@ private void analyzeBWAnd(Locals variables) { if (promote == null) { throw createError(new ClassCastException("Cannot apply and [&] to types " + - "[" + PainlessLookup.ClassToName(left.actual) + "] and [" + PainlessLookup.ClassToName(right.actual) + "].")); + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); } actual = promote; @@ -537,7 +546,8 @@ private void analyzeXor(Locals variables) { if (promote == null) { throw createError(new ClassCastException("Cannot apply xor [^] to types " + - "[" + PainlessLookup.ClassToName(left.actual) + "] and [" + PainlessLookup.ClassToName(right.actual) + "].")); + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); } actual = promote; @@ -577,7 +587,8 @@ private void analyzeBWOr(Locals variables) { if (promote == null) { throw createError(new ClassCastException("Cannot apply or [|] to types " + - "[" + 
PainlessLookup.ClassToName(left.actual) + "] and [" + PainlessLookup.ClassToName(right.actual) + "].")); + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); } actual = promote; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECallLocal.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECallLocal.java index ede1a2a6b9c36..dfed0ca47b482 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECallLocal.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECallLocal.java @@ -19,12 +19,12 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.PainlessMethodKey; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.lookup.PainlessMethod; +import org.elasticsearch.painless.lookup.PainlessMethodKey; import java.util.List; import java.util.Objects; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java index c0345b6308c3e..e8ad9d85ed698 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECapturingFunctionRef.java @@ -21,14 +21,14 @@ import org.elasticsearch.painless.AnalyzerCaster; import org.elasticsearch.painless.DefBootstrap; -import org.elasticsearch.painless.lookup.PainlessLookup; -import org.elasticsearch.painless.lookup.def; import org.elasticsearch.painless.FunctionRef; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Locals.Variable; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; +import org.elasticsearch.painless.lookup.def; import org.objectweb.asm.Opcodes; import org.objectweb.asm.Type; @@ -69,7 +69,7 @@ void analyze(Locals locals) { defPointer = "D" + variable + "." + call + ",1"; } else { // typed implementation - defPointer = "S" + PainlessLookup.ClassToName(captured.clazz) + "." + call + ",1"; + defPointer = "S" + PainlessLookupUtility.anyTypeToPainlessTypeName(captured.clazz) + "." 
+ call + ",1"; } actual = String.class; } else { @@ -77,7 +77,8 @@ void analyze(Locals locals) { // static case if (captured.clazz != def.class) { try { - ref = new FunctionRef(locals.getPainlessLookup(), expected, PainlessLookup.ClassToName(captured.clazz), call, 1); + ref = new FunctionRef( + locals.getPainlessLookup(), expected, PainlessLookupUtility.anyTypeToPainlessTypeName(captured.clazz), call, 1); // check casts between the interface method and the delegate method are legal for (int i = 0; i < ref.interfaceMethod.arguments.size(); ++i) { @@ -109,7 +110,7 @@ void write(MethodWriter writer, Globals globals) { // typed interface, dynamic implementation writer.visitVarInsn(MethodWriter.getType(captured.clazz).getOpcode(Opcodes.ILOAD), captured.getSlot()); Type methodType = Type.getMethodType(MethodWriter.getType(expected), MethodWriter.getType(captured.clazz)); - writer.invokeDefCall(call, methodType, DefBootstrap.REFERENCE, PainlessLookup.ClassToName(expected)); + writer.invokeDefCall(call, methodType, DefBootstrap.REFERENCE, PainlessLookupUtility.anyTypeToPainlessTypeName(expected)); } else { // typed interface, typed implementation writer.visitVarInsn(MethodWriter.getType(captured.clazz).getOpcode(Opcodes.ILOAD), captured.getSlot()); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECast.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECast.java index 55a9dbf71c891..b0451b685b57d 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECast.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECast.java @@ -19,12 +19,12 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.lookup.PainlessLookup; -import org.elasticsearch.painless.lookup.PainlessCast; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.lookup.PainlessCast; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; import java.util.Objects; import java.util.Set; @@ -63,6 +63,6 @@ void write(MethodWriter writer, Globals globals) { @Override public String toString() { - return singleLineToString(PainlessLookup.ClassToName(cast.to), child); + return singleLineToString(PainlessLookupUtility.anyTypeToPainlessTypeName(cast.to), child); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EComp.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EComp.java index 806204d051ae0..edf18f501bc77 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EComp.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EComp.java @@ -21,13 +21,13 @@ import org.elasticsearch.painless.AnalyzerCaster; import org.elasticsearch.painless.DefBootstrap; -import org.elasticsearch.painless.lookup.PainlessLookup; -import org.elasticsearch.painless.lookup.def; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.Operation; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; +import org.elasticsearch.painless.lookup.def; import org.objectweb.asm.Label; import org.objectweb.asm.Type; @@ -93,7 +93,8 @@ private void analyzeEq(Locals variables) { if (promotedType == null) { throw 
createError(new ClassCastException("Cannot apply equals [==] to types " + - "[" + PainlessLookup.ClassToName(left.actual) + "] and [" + PainlessLookup.ClassToName(right.actual) + "].")); + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); } if (promotedType == def.class) { @@ -142,7 +143,8 @@ private void analyzeEqR(Locals variables) { if (promotedType == null) { throw createError(new ClassCastException("Cannot apply reference equals [===] to types " + - "[" + PainlessLookup.ClassToName(left.actual) + "] and [" + PainlessLookup.ClassToName(right.actual) + "].")); + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); } left.expected = promotedType; @@ -182,7 +184,8 @@ private void analyzeNE(Locals variables) { if (promotedType == null) { throw createError(new ClassCastException("Cannot apply not equals [!=] to types " + - "[" + PainlessLookup.ClassToName(left.actual) + "] and [" + PainlessLookup.ClassToName(right.actual) + "].")); + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); } if (promotedType == def.class) { @@ -231,7 +234,8 @@ private void analyzeNER(Locals variables) { if (promotedType == null) { throw createError(new ClassCastException("Cannot apply reference not equals [!==] to types " + - "[" + PainlessLookup.ClassToName(left.actual) + "] and [" + PainlessLookup.ClassToName(right.actual) + "].")); + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); } left.expected = promotedType; @@ -271,7 +275,8 @@ private void analyzeGTE(Locals variables) { if (promotedType == null) { throw createError(new ClassCastException("Cannot apply greater than or equals [>=] to types " + - "[" + PainlessLookup.ClassToName(left.actual) + "] and [" + PainlessLookup.ClassToName(right.actual) + "].")); + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); } if (promotedType == def.class) { @@ -310,7 +315,8 @@ private void analyzeGT(Locals variables) { if (promotedType == null) { throw createError(new ClassCastException("Cannot apply greater than [>] to types " + - "[" + PainlessLookup.ClassToName(left.actual) + "] and [" + PainlessLookup.ClassToName(right.actual) + "].")); + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); } if (promotedType == def.class) { @@ -349,7 +355,8 @@ private void analyzeLTE(Locals variables) { if (promotedType == null) { throw createError(new ClassCastException("Cannot apply less than or equals [<=] to types " + - "[" + PainlessLookup.ClassToName(left.actual) + "] and [" + PainlessLookup.ClassToName(right.actual) + "].")); + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); } if (promotedType == def.class) { @@ -388,7 +395,8 @@ private void analyzeLT(Locals variables) { if (promotedType == null) { throw createError(new ClassCastException("Cannot apply less than [>=] to types " + - "[" + PainlessLookup.ClassToName(left.actual) 
+ "] and [" + PainlessLookup.ClassToName(right.actual) + "].")); + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(left.actual) + "] and " + + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(right.actual) + "].")); } if (promotedType == def.class) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java index 82c24e27c5d16..d4eddb059a847 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java @@ -20,14 +20,14 @@ package org.elasticsearch.painless.node; import org.elasticsearch.painless.AnalyzerCaster; -import org.elasticsearch.painless.lookup.PainlessLookup; -import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.PainlessMethodKey; import org.elasticsearch.painless.FunctionRef; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; +import org.elasticsearch.painless.lookup.PainlessMethod; +import org.elasticsearch.painless.lookup.PainlessMethodKey; import org.objectweb.asm.Type; import java.util.Objects; @@ -69,12 +69,12 @@ void analyze(Locals locals) { PainlessMethod interfaceMethod = locals.getPainlessLookup().getPainlessStructFromJavaClass(expected).functionalMethod; if (interfaceMethod == null) { throw new IllegalArgumentException("Cannot convert function reference [" + type + "::" + call + "] " + - "to [" + PainlessLookup.ClassToName(expected) + "], not a functional interface"); + "to [" + PainlessLookupUtility.anyTypeToPainlessTypeName(expected) + "], not a functional interface"); } PainlessMethod delegateMethod = locals.getMethod(new PainlessMethodKey(call, interfaceMethod.arguments.size())); if (delegateMethod == null) { throw new IllegalArgumentException("Cannot convert function reference [" + type + "::" + call + "] " + - "to [" + PainlessLookup.ClassToName(expected) + "], function not found"); + "to [" + PainlessLookupUtility.anyTypeToPainlessTypeName(expected) + "], function not found"); } ref = new FunctionRef(expected, interfaceMethod, delegateMethod, 0); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EInstanceof.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EInstanceof.java index a3835cbc21372..05564a2952e6f 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EInstanceof.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EInstanceof.java @@ -19,11 +19,11 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.lookup.PainlessLookup; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; import java.util.Objects; import java.util.Set; @@ -64,7 +64,8 @@ void analyze(Locals locals) { } // map to wrapped type for primitive types - resolvedType = clazz.isPrimitive() ? PainlessLookup.getBoxedType(clazz) : PainlessLookup.defClassToObjectClass(clazz); + resolvedType = clazz.isPrimitive() ? 
PainlessLookupUtility.getBoxedAnyType(clazz) : + PainlessLookupUtility.painlessDefTypeToJavaObjectType(clazz); // analyze and cast the expression expression.analyze(locals); @@ -75,7 +76,7 @@ void analyze(Locals locals) { primitiveExpression = expression.actual.isPrimitive(); // map to wrapped type for primitive types expressionType = expression.actual.isPrimitive() ? - PainlessLookup.getBoxedType(expression.actual) : PainlessLookup.defClassToObjectClass(clazz); + PainlessLookupUtility.getBoxedAnyType(expression.actual) : PainlessLookupUtility.painlessDefTypeToJavaObjectType(clazz); actual = boolean.class; } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java index 8977f4f0ef329..8e8d164b03d62 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ELambda.java @@ -20,15 +20,15 @@ package org.elasticsearch.painless.node; import org.elasticsearch.painless.AnalyzerCaster; -import org.elasticsearch.painless.lookup.PainlessLookup; -import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.def; import org.elasticsearch.painless.FunctionRef; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Locals.Variable; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; +import org.elasticsearch.painless.lookup.PainlessMethod; +import org.elasticsearch.painless.lookup.def; import org.elasticsearch.painless.node.SFunction.FunctionReserved; import org.objectweb.asm.Opcodes; @@ -122,13 +122,13 @@ void analyze(Locals locals) { // we know the method statically, infer return type and any unknown/def types interfaceMethod = locals.getPainlessLookup().getPainlessStructFromJavaClass(expected).functionalMethod; if (interfaceMethod == null) { - throw createError(new IllegalArgumentException("Cannot pass lambda to [" + PainlessLookup.ClassToName(expected) + - "], not a functional interface")); + throw createError(new IllegalArgumentException("Cannot pass lambda to " + + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(expected) + "], not a functional interface")); } // check arity before we manipulate parameters if (interfaceMethod.arguments.size() != paramTypeStrs.size()) throw new IllegalArgumentException("Incorrect number of parameters for [" + interfaceMethod.name + - "] in [" + PainlessLookup.ClassToName(expected) + "]"); + "] in [" + PainlessLookupUtility.anyTypeToPainlessTypeName(expected) + "]"); // for method invocation, its allowed to ignore the return value if (interfaceMethod.rtn == void.class) { returnType = def.class; @@ -140,7 +140,7 @@ void analyze(Locals locals) { for (int i = 0; i < paramTypeStrs.size(); i++) { String paramType = paramTypeStrs.get(i); if (paramType == null) { - actualParamTypeStrs.add(PainlessLookup.ClassToName(interfaceMethod.arguments.get(i))); + actualParamTypeStrs.add(PainlessLookupUtility.anyTypeToPainlessTypeName(interfaceMethod.arguments.get(i))); } else { actualParamTypeStrs.add(paramType); } @@ -162,15 +162,15 @@ void analyze(Locals locals) { List paramTypes = new ArrayList<>(captures.size() + actualParamTypeStrs.size()); List paramNames = new ArrayList<>(captures.size() + paramNameStrs.size()); for (Variable var : captures) { 
- paramTypes.add(PainlessLookup.ClassToName(var.clazz)); + paramTypes.add(PainlessLookupUtility.anyTypeToPainlessTypeName(var.clazz)); paramNames.add(var.name); } paramTypes.addAll(actualParamTypeStrs); paramNames.addAll(paramNameStrs); // desugar lambda body into a synthetic method - desugared = new SFunction(reserved, location, PainlessLookup.ClassToName(returnType), name, - paramTypes, paramNames, statements, true); + desugared = new SFunction(reserved, location, PainlessLookupUtility.anyTypeToPainlessTypeName(returnType), name, + paramTypes, paramNames, statements, true); desugared.generateSignature(locals.getPainlessLookup()); desugared.analyze(Locals.newLambdaScope(locals.getProgramScope(), returnType, desugared.parameters, captures.size(), reserved.getMaxLoopCounter())); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java index 820cce685edcd..90475419b3260 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java @@ -19,13 +19,13 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.PainlessMethodKey; -import org.elasticsearch.painless.lookup.def; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.lookup.PainlessMethod; +import org.elasticsearch.painless.lookup.PainlessMethodKey; +import org.elasticsearch.painless.lookup.def; import java.util.ArrayList; import java.util.List; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java index b6c7fb80af95f..c6474846d4c7a 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java @@ -19,13 +19,13 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.PainlessMethodKey; -import org.elasticsearch.painless.lookup.def; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.lookup.PainlessMethod; +import org.elasticsearch.painless.lookup.PainlessMethodKey; +import org.elasticsearch.painless.lookup.def; import java.util.HashMap; import java.util.List; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java index 197e2fcb7fd42..a780ea3e05be8 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java @@ -19,12 +19,12 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.PainlessClass; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import 
org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.lookup.PainlessClass; +import org.elasticsearch.painless.lookup.PainlessMethod; import org.elasticsearch.painless.lookup.PainlessMethodKey; import java.util.List; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENull.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENull.java index 983819b6b2bf9..6bc5331cb1d84 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENull.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENull.java @@ -19,11 +19,11 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.lookup.PainlessLookup; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.objectweb.asm.Opcodes; import java.util.Set; @@ -53,7 +53,7 @@ void analyze(Locals locals) { if (expected != null) { if (expected.isPrimitive()) { throw createError(new IllegalArgumentException( - "Cannot cast null to a primitive type [" + PainlessLookup.ClassToName(expected) + "].")); + "Cannot cast null to a primitive type [" + PainlessLookupUtility.anyTypeToPainlessTypeName(expected) + "].")); } actual = expected; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EUnary.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EUnary.java index 3a5102ebdc99c..d34399db779df 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EUnary.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EUnary.java @@ -21,13 +21,13 @@ import org.elasticsearch.painless.AnalyzerCaster; import org.elasticsearch.painless.DefBootstrap; -import org.elasticsearch.painless.lookup.PainlessLookup; -import org.elasticsearch.painless.lookup.def; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.Operation; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; +import org.elasticsearch.painless.lookup.def; import org.objectweb.asm.Label; import org.objectweb.asm.Opcodes; import org.objectweb.asm.Type; @@ -93,7 +93,8 @@ void analyzeBWNot(Locals variables) { promote = AnalyzerCaster.promoteNumeric(child.actual, false); if (promote == null) { - throw createError(new ClassCastException("Cannot apply not [~] to type [" + PainlessLookup.ClassToName(child.actual) + "].")); + throw createError(new ClassCastException("Cannot apply not [~] to type " + + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(child.actual) + "].")); } child.expected = promote; @@ -122,8 +123,8 @@ void analyzerAdd(Locals variables) { promote = AnalyzerCaster.promoteNumeric(child.actual, true); if (promote == null) { - throw createError( - new ClassCastException("Cannot apply positive [+] to type [" + PainlessLookup.ClassToName(child.actual) + "].")); + throw createError(new ClassCastException("Cannot apply positive [+] to type " + + "[" + PainlessLookupUtility.painlessDefTypeToJavaObjectType(child.actual) + "].")); } child.expected = promote; @@ -156,8 +157,8 @@ void analyzerSub(Locals variables) { promote = AnalyzerCaster.promoteNumeric(child.actual, true); if (promote == null) { - throw createError( - 
new ClassCastException("Cannot apply negative [-] to type [" + PainlessLookup.ClassToName(child.actual) + "].")); + throw createError(new ClassCastException("Cannot apply negative [-] to type " + + "[" + PainlessLookupUtility.painlessDefTypeToJavaObjectType(child.actual) + "].")); } child.expected = promote; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PBrace.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PBrace.java index 5b282abdce9fa..c45107a37ac21 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PBrace.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PBrace.java @@ -19,12 +19,12 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.lookup.PainlessLookup; -import org.elasticsearch.painless.lookup.def; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; +import org.elasticsearch.painless.lookup.def; import java.util.List; import java.util.Map; @@ -67,8 +67,8 @@ void analyze(Locals locals) { } else if (List.class.isAssignableFrom(prefix.actual)) { sub = new PSubListShortcut(location, locals.getPainlessLookup().getPainlessStructFromJavaClass(prefix.actual), index); } else { - throw createError( - new IllegalArgumentException("Illegal array access on type [" + PainlessLookup.ClassToName(prefix.actual) + "].")); + throw createError(new IllegalArgumentException("Illegal array access on type " + + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(prefix.actual) + "].")); } sub.write = write; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java index f23ae9f188704..445c053347ec3 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java @@ -19,15 +19,15 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.lookup.PainlessLookup; -import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.PainlessMethodKey; -import org.elasticsearch.painless.lookup.PainlessClass; -import org.elasticsearch.painless.lookup.def; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.lookup.PainlessClass; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; +import org.elasticsearch.painless.lookup.PainlessMethod; +import org.elasticsearch.painless.lookup.PainlessMethodKey; +import org.elasticsearch.painless.lookup.def; import java.util.List; import java.util.Objects; @@ -74,7 +74,7 @@ void analyze(Locals locals) { PainlessClass struct = locals.getPainlessLookup().getPainlessStructFromJavaClass(prefix.actual); if (prefix.actual.isPrimitive()) { - struct = locals.getPainlessLookup().getPainlessStructFromJavaClass(PainlessLookup.getBoxedType(prefix.actual)); + struct = locals.getPainlessLookup().getPainlessStructFromJavaClass(PainlessLookupUtility.getBoxedAnyType(prefix.actual)); } PainlessMethodKey methodKey = new PainlessMethodKey(name, arguments.size()); diff --git 
a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java index 78a18b91ab2c6..3f2f887956491 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java @@ -19,16 +19,16 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.lookup.PainlessLookup; -import org.elasticsearch.painless.lookup.PainlessField; -import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.PainlessClass; -import org.elasticsearch.painless.lookup.def; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.lookup.PainlessClass; +import org.elasticsearch.painless.lookup.PainlessField; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; +import org.elasticsearch.painless.lookup.PainlessMethod; import org.elasticsearch.painless.lookup.PainlessMethodKey; +import org.elasticsearch.painless.lookup.def; import java.util.List; import java.util.Map; @@ -64,7 +64,7 @@ void analyze(Locals locals) { prefix = prefix.cast(locals); if (prefix.actual.isArray()) { - sub = new PSubArrayLength(location, PainlessLookup.ClassToName(prefix.actual), value); + sub = new PSubArrayLength(location, PainlessLookupUtility.anyTypeToPainlessTypeName(prefix.actual), value); } else if (prefix.actual == def.class) { sub = new PSubDefField(location, value); } else { @@ -86,7 +86,7 @@ void analyze(Locals locals) { new PainlessMethodKey("set" + Character.toUpperCase(value.charAt(0)) + value.substring(1), 1)); if (getter != null || setter != null) { - sub = new PSubShortcut(location, value, PainlessLookup.ClassToName(prefix.actual), getter, setter); + sub = new PSubShortcut(location, value, PainlessLookupUtility.anyTypeToPainlessTypeName(prefix.actual), getter, setter); } else { EConstant index = new EConstant(location, value); index.analyze(locals); @@ -104,7 +104,7 @@ void analyze(Locals locals) { if (sub == null) { throw createError(new IllegalArgumentException( - "Unknown field [" + value + "] for type [" + PainlessLookup.ClassToName(prefix.actual) + "].")); + "Unknown field [" + value + "] for type [" + PainlessLookupUtility.anyTypeToPainlessTypeName(prefix.actual) + "].")); } if (nullSafe) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubCallInvoke.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubCallInvoke.java index 66ad0ecff1b89..237efa61ffa7d 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubCallInvoke.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubCallInvoke.java @@ -19,11 +19,11 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.lookup.PainlessMethod; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.lookup.PainlessMethod; import java.util.List; import java.util.Objects; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefArray.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefArray.java index 
ccbc25db4f25e..afad497dec7d9 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefArray.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefArray.java @@ -20,11 +20,11 @@ package org.elasticsearch.painless.node; import org.elasticsearch.painless.DefBootstrap; -import org.elasticsearch.painless.lookup.def; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.lookup.def; import org.objectweb.asm.Type; import java.util.Objects; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefCall.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefCall.java index a9021000e2dad..7f4e253b4090d 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefCall.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefCall.java @@ -20,11 +20,11 @@ package org.elasticsearch.painless.node; import org.elasticsearch.painless.DefBootstrap; -import org.elasticsearch.painless.lookup.def; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.lookup.def; import org.objectweb.asm.Type; import java.util.ArrayList; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefField.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefField.java index 1c081c9422ecb..bf00d8d859988 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefField.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubDefField.java @@ -20,11 +20,11 @@ package org.elasticsearch.painless.node; import org.elasticsearch.painless.DefBootstrap; -import org.elasticsearch.painless.lookup.def; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.lookup.def; import java.util.Objects; import java.util.Set; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubField.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubField.java index d6c367cfeabec..8eb154e745bf7 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubField.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubField.java @@ -19,12 +19,12 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.lookup.PainlessLookup; -import org.elasticsearch.painless.lookup.PainlessField; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.lookup.PainlessField; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; import java.lang.reflect.Modifier; import java.util.Objects; @@ -51,8 +51,8 @@ void extractVariables(Set variables) { @Override void analyze(Locals locals) { if (write && Modifier.isFinal(field.modifiers)) { - throw createError(new IllegalArgumentException( - "Cannot write to read-only field [" + field.name + "] for type [" + 
PainlessLookup.ClassToName(field.clazz) + "].")); + throw createError(new IllegalArgumentException("Cannot write to read-only field [" + field.name + "] for type " + + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(field.clazz) + "].")); } actual = field.clazz; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubListShortcut.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubListShortcut.java index 5d881b30db22d..0a3ab142ddc7c 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubListShortcut.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubListShortcut.java @@ -19,13 +19,13 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.PainlessClass; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.WriterConstants; +import org.elasticsearch.painless.lookup.PainlessClass; +import org.elasticsearch.painless.lookup.PainlessMethod; import org.elasticsearch.painless.lookup.PainlessMethodKey; import java.util.Objects; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubMapShortcut.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubMapShortcut.java index 4875d55cbeb58..f71e2ac5d1fa0 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubMapShortcut.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubMapShortcut.java @@ -19,12 +19,12 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.PainlessClass; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.lookup.PainlessClass; +import org.elasticsearch.painless.lookup.PainlessMethod; import org.elasticsearch.painless.lookup.PainlessMethodKey; import java.util.Objects; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubShortcut.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubShortcut.java index 4b2910dbc010e..eb5668c554c20 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubShortcut.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubShortcut.java @@ -19,11 +19,11 @@ package org.elasticsearch.painless.node; -import org.elasticsearch.painless.lookup.PainlessMethod; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.lookup.PainlessMethod; import java.util.Set; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SEach.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SEach.java index c402d8982d89e..de1a7062a24f2 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SEach.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SEach.java @@ -19,13 +19,13 @@ package org.elasticsearch.painless.node; -import 
org.elasticsearch.painless.lookup.PainlessLookup; -import org.elasticsearch.painless.lookup.def; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Locals.Variable; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; +import org.elasticsearch.painless.lookup.def; import java.util.Objects; import java.util.Set; @@ -84,8 +84,8 @@ void analyze(Locals locals) { } else if (expression.actual == def.class || Iterable.class.isAssignableFrom(expression.actual)) { sub = new SSubEachIterable(location, variable, expression, block); } else { - throw createError( - new IllegalArgumentException("Illegal for each type [" + PainlessLookup.ClassToName(expression.actual) + "].")); + throw createError(new IllegalArgumentException("Illegal for each type " + + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(expression.actual) + "].")); } sub.analyze(locals); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java index 628bb1d32d59e..1c801d509b581 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SFunction.java @@ -22,8 +22,6 @@ import org.elasticsearch.painless.CompilerSettings; import org.elasticsearch.painless.Constant; import org.elasticsearch.painless.Def; -import org.elasticsearch.painless.lookup.PainlessLookup; -import org.elasticsearch.painless.lookup.PainlessMethod; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Locals.Parameter; @@ -31,6 +29,9 @@ import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.WriterConstants; +import org.elasticsearch.painless.lookup.PainlessLookup; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; +import org.elasticsearch.painless.lookup.PainlessMethod; import org.elasticsearch.painless.node.SSource.Reserved; import org.objectweb.asm.ClassVisitor; import org.objectweb.asm.Handle; @@ -135,7 +136,7 @@ void generateSignature(PainlessLookup painlessLookup) { try { Class paramType = painlessLookup.getJavaClassFromPainlessType(this.paramTypeStrs.get(param)); - paramClasses[param] = PainlessLookup.defClassToObjectClass(paramType); + paramClasses[param] = PainlessLookupUtility.painlessDefTypeToJavaObjectType(paramType); paramTypes.add(paramType); parameters.add(new Parameter(location, paramNameStrs.get(param), paramType)); } catch (IllegalArgumentException exception) { @@ -144,8 +145,8 @@ void generateSignature(PainlessLookup painlessLookup) { } } - org.objectweb.asm.commons.Method method = new org.objectweb.asm.commons.Method( - name, MethodType.methodType(PainlessLookup.defClassToObjectClass(rtnType), paramClasses).toMethodDescriptorString()); + org.objectweb.asm.commons.Method method = new org.objectweb.asm.commons.Method(name, MethodType.methodType( + PainlessLookupUtility.painlessDefTypeToJavaObjectType(rtnType), paramClasses).toMethodDescriptorString()); this.method = new PainlessMethod(name, null, null, rtnType, paramTypes, method, Modifier.STATIC | Modifier.PRIVATE, null); } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachArray.java 
b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachArray.java index 5db161b8002a4..fea8c8953b67f 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachArray.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachArray.java @@ -20,13 +20,13 @@ package org.elasticsearch.painless.node; import org.elasticsearch.painless.AnalyzerCaster; -import org.elasticsearch.painless.lookup.PainlessLookup; -import org.elasticsearch.painless.lookup.PainlessCast; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Locals.Variable; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.lookup.PainlessCast; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.objectweb.asm.Label; import org.objectweb.asm.Opcodes; @@ -109,6 +109,6 @@ void write(MethodWriter writer, Globals globals) { @Override public String toString() { - return singleLineToString(PainlessLookup.ClassToName(variable.clazz), variable.name, expression, block); + return singleLineToString(PainlessLookupUtility.anyTypeToPainlessTypeName(variable.clazz), variable.name, expression, block); } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachIterable.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachIterable.java index cfc87536b6b7b..cec1297a4c41c 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachIterable.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachIterable.java @@ -21,16 +21,16 @@ import org.elasticsearch.painless.AnalyzerCaster; import org.elasticsearch.painless.DefBootstrap; -import org.elasticsearch.painless.lookup.PainlessLookup; -import org.elasticsearch.painless.lookup.PainlessCast; -import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.PainlessMethodKey; -import org.elasticsearch.painless.lookup.def; import org.elasticsearch.painless.Globals; import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Locals.Variable; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.lookup.PainlessCast; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; +import org.elasticsearch.painless.lookup.PainlessMethod; +import org.elasticsearch.painless.lookup.PainlessMethodKey; +import org.elasticsearch.painless.lookup.def; import org.objectweb.asm.Label; import org.objectweb.asm.Opcodes; @@ -81,8 +81,8 @@ void analyze(Locals locals) { getPainlessStructFromJavaClass(expression.actual).methods.get(new PainlessMethodKey("iterator", 0)); if (method == null) { - throw createError(new IllegalArgumentException( - "Unable to create iterator for the type [" + PainlessLookup.ClassToName(expression.actual) + "].")); + throw createError(new IllegalArgumentException("Unable to create iterator for the type " + + "[" + PainlessLookupUtility.anyTypeToPainlessTypeName(expression.actual) + "].")); } } @@ -133,6 +133,6 @@ void write(MethodWriter writer, Globals globals) { @Override public String toString() { - return singleLineToString(PainlessLookup.ClassToName(variable.clazz), variable.name, expression, block); + return singleLineToString(PainlessLookupUtility.anyTypeToPainlessTypeName(variable.clazz), variable.name, expression, 
block); } } From 97fbe49ee9ffc42804dde871dfd167da0193469e Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Tue, 17 Jul 2018 06:50:21 +0200 Subject: [PATCH 064/107] Add Index UUID to `/_stats` Response (#31871) * Add "uuid" field to each index's section in the `/_stats` response * closes #31791 --- .../rest-api-spec/test/indices.stats/10_index.yml | 2 ++ .../action/admin/indices/stats/IndexStats.java | 9 ++++++++- .../admin/indices/stats/IndicesStatsResponse.java | 14 +++++++++----- .../admin/indices/stats/IndicesStatsTests.java | 8 +++++++- .../indices/IndicesStatsMonitoringDocTests.java | 2 +- 5 files changed, 27 insertions(+), 8 deletions(-) diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/10_index.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/10_index.yml index b70fac8cf0480..42847b05cd149 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/10_index.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/10_index.yml @@ -47,7 +47,9 @@ setup: - match: { _shards.total: 18 } - is_true: _all - is_true: indices.test1 + - is_true: indices.test1.uuid - is_true: indices.test2 + - is_true: indices.test2.uuid --- diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndexStats.java b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndexStats.java index 7903a340628d2..d8480519e5def 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndexStats.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndexStats.java @@ -29,10 +29,13 @@ public class IndexStats implements Iterable { private final String index; + private final String uuid; + private final ShardStats shards[]; - public IndexStats(String index, ShardStats[] shards) { + public IndexStats(String index, String uuid, ShardStats[] shards) { this.index = index; + this.uuid = uuid; this.shards = shards; } @@ -40,6 +43,10 @@ public String getIndex() { return this.index; } + public String getUuid() { + return uuid; + } + public ShardStats[] getShards() { return this.shards; } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponse.java index 7406dc4f2d12c..cc563948160da 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsResponse.java @@ -26,6 +26,7 @@ import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.index.Index; import java.io.IOException; import java.util.ArrayList; @@ -84,19 +85,22 @@ public Map getIndices() { } Map indicesStats = new HashMap<>(); - Set indices = new HashSet<>(); + Set indices = new HashSet<>(); for (ShardStats shard : shards) { - indices.add(shard.getShardRouting().getIndexName()); + indices.add(shard.getShardRouting().index()); } - for (String indexName : indices) { + for (Index index : indices) { List shards = new ArrayList<>(); + String indexName = index.getName(); for (ShardStats shard : this.shards) { if (shard.getShardRouting().getIndexName().equals(indexName)) { shards.add(shard); } } - indicesStats.put(indexName, new IndexStats(indexName, shards.toArray(new ShardStats[shards.size()]))); + indicesStats.put( + 
indexName, new IndexStats(indexName, index.getUUID(), shards.toArray(new ShardStats[shards.size()])) + ); } this.indicesStats = indicesStats; return indicesStats; @@ -169,7 +173,7 @@ protected void addCustomXContentFields(XContentBuilder builder, Params params) t builder.startObject(Fields.INDICES); for (IndexStats indexStats : getIndices().values()) { builder.startObject(indexStats.getIndex()); - + builder.field("uuid", indexStats.getUuid()); builder.startObject("primaries"); indexStats.getPrimaries().toXContent(builder, params); builder.endObject(); diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsTests.java index 26785d2c8706c..264a92137be96 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/stats/IndicesStatsTests.java @@ -153,6 +153,13 @@ public void testRefreshListeners() throws Exception { assertEquals(0, common.refresh.getListeners()); } + @SuppressWarnings("unchecked") + public void testUuidOnRootStatsIndices() { + String uuid = createIndex("test").indexUUID(); + IndicesStatsResponse rsp = client().admin().indices().prepareStats().get(); + assertEquals(uuid, rsp.getIndex("test").getUuid()); + } + /** * Gives access to package private IndicesStatsResponse constructor for test purpose. **/ @@ -160,5 +167,4 @@ public static IndicesStatsResponse newIndicesStatsResponse(ShardStats[] shards, int failedShards, List shardFailures) { return new IndicesStatsResponse(shards, totalShards, successfulShards, failedShards, shardFailures); } - } diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/indices/IndicesStatsMonitoringDocTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/indices/IndicesStatsMonitoringDocTests.java index 86fa89f4c01bb..66b41d40943d0 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/indices/IndicesStatsMonitoringDocTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/indices/IndicesStatsMonitoringDocTests.java @@ -45,7 +45,7 @@ public class IndicesStatsMonitoringDocTests extends BaseFilteredMonitoringDocTes @Before public void setUp() throws Exception { super.setUp(); - indicesStats = Collections.singletonList(new IndexStats("index-0", new ShardStats[] { + indicesStats = Collections.singletonList(new IndexStats("index-0", "dcvO5uZATE-EhIKc3tk9Bg", new ShardStats[] { // Primaries new ShardStats(mockShardRouting(true), mockShardPath(), mockCommonStats(), null, null), new ShardStats(mockShardRouting(true), mockShardPath(), mockCommonStats(), null, null), From ca2844fa2b21f5a2e3ee7f73c4988a669e3d1218 Mon Sep 17 00:00:00 2001 From: Yogesh Gaikwad <902768+bizybot@users.noreply.github.com> Date: Tue, 17 Jul 2018 15:43:03 +1000 Subject: [PATCH 065/107] [Test] Modify assert statement for ssl handshake (#32072) There have been changes in error messages for `SSLHandshakeException`. This has caused a couple of failures in our tests. This commit modifies test verification to assert on exception type of class `SSLHandshakeException`. There was another issue in Java11 which caused NPE. The bug has now been fixed on Java11 - early access build 22. Bug Ref: https://bugs.java.com/bugdatabase/view_bug.do?bug_id=8206355 Enable the skipped tests due to this bug. 
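For illustration, the assertion style this commit moves to checks the exception's type rather than its JDK-dependent message. A minimal sketch, assuming a low-level REST `client` already configured against an HTTPS endpoint (the actual test change is in the diff below):

    import static org.hamcrest.Matchers.instanceOf;
    import static org.junit.Assert.assertThat;
    import static org.junit.Assert.fail;

    import javax.net.ssl.SSLHandshakeException;

    // Message checks such as containsString("General SSLEngine problem") break
    // when the JDK rewords its SSL errors; the exception type stays stable.
    try {
        client.performRequest(new Request("GET", "/"));
        fail("connection should have been rejected due to SSL handshake");
    } catch (Exception e) {
        assertThat(e, instanceOf(SSLHandshakeException.class));
    }
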
Closes #31940 --- .../elasticsearch/client/RestClientBuilderIntegTests.java | 8 +++----- .../smoketest/SmokeTestMonitoringWithSecurityIT.java | 1 - 2 files changed, 3 insertions(+), 6 deletions(-) diff --git a/client/rest/src/test/java/org/elasticsearch/client/RestClientBuilderIntegTests.java b/client/rest/src/test/java/org/elasticsearch/client/RestClientBuilderIntegTests.java index 30359ea90f666..0d1c3ffd6b8bb 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/RestClientBuilderIntegTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientBuilderIntegTests.java @@ -30,6 +30,7 @@ import javax.net.ssl.KeyManagerFactory; import javax.net.ssl.SSLContext; +import javax.net.ssl.SSLHandshakeException; import javax.net.ssl.TrustManagerFactory; import java.io.IOException; import java.io.InputStream; @@ -37,7 +38,7 @@ import java.net.InetSocketAddress; import java.security.KeyStore; -import static org.hamcrest.Matchers.containsString; +import static org.hamcrest.Matchers.instanceOf; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertThat; import static org.junit.Assert.fail; @@ -72,9 +73,6 @@ public static void stopHttpServers() throws IOException { } public void testBuilderUsesDefaultSSLContext() throws Exception { - assumeFalse("Due to bug inside jdk, this test can't momentarily run with java 11. " + - "See: https://github.com/elastic/elasticsearch/issues/31940", - System.getProperty("java.version").contains("11")); final SSLContext defaultSSLContext = SSLContext.getDefault(); try { try (RestClient client = buildRestClient()) { @@ -82,7 +80,7 @@ public void testBuilderUsesDefaultSSLContext() throws Exception { client.performRequest(new Request("GET", "/")); fail("connection should have been rejected due to SSL handshake"); } catch (Exception e) { - assertThat(e.getMessage(), containsString("General SSLEngine problem")); + assertThat(e, instanceOf(SSLHandshakeException.class)); } } diff --git a/x-pack/qa/smoke-test-plugins-ssl/src/test/java/org/elasticsearch/smoketest/SmokeTestMonitoringWithSecurityIT.java b/x-pack/qa/smoke-test-plugins-ssl/src/test/java/org/elasticsearch/smoketest/SmokeTestMonitoringWithSecurityIT.java index 52c2a7dfa2d2d..c427d8bf32c86 100644 --- a/x-pack/qa/smoke-test-plugins-ssl/src/test/java/org/elasticsearch/smoketest/SmokeTestMonitoringWithSecurityIT.java +++ b/x-pack/qa/smoke-test-plugins-ssl/src/test/java/org/elasticsearch/smoketest/SmokeTestMonitoringWithSecurityIT.java @@ -127,7 +127,6 @@ private boolean getMonitoringUsageExportersDefined() throws Exception { return monitoringUsage.get().getExporters().isEmpty() == false; } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/31940") public void testHTTPExporterWithSSL() throws Exception { // Ensures that the exporter is actually on assertBusy(() -> assertThat("[_http] exporter is not defined", getMonitoringUsageExportersDefined(), is(true))); From 8bad2c697f195404980b166992e5efdbaccb7203 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Tue, 17 Jul 2018 09:04:41 +0200 Subject: [PATCH 066/107] Add exclusion option to `keep_types` token filter (#32012) Currently the `keep_types` token filter includes all token types specified using its `types` parameter. Lucenes TypeTokenFilter also provides a second mode where instead of keeping the specified tokens (include) they are filtered out (exclude). 
This change exposes this option as a new `mode` parameter that can either take the values `include` (the default, if not specified) or `exclude`. Closes #29277 --- .../keep-types-tokenfilter.asciidoc | 74 ++++++++++++++++++- .../common/KeepTypesFilterFactory.java | 36 +++++++-- .../common/KeepTypesFilterFactoryTests.java | 48 ++++++++++-- 3 files changed, 142 insertions(+), 16 deletions(-) diff --git a/docs/reference/analysis/tokenfilters/keep-types-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/keep-types-tokenfilter.asciidoc index afaf4f8fa8c46..05687f8669155 100644 --- a/docs/reference/analysis/tokenfilters/keep-types-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/keep-types-tokenfilter.asciidoc @@ -8,8 +8,9 @@ contained in a predefined set. [float] === Options [horizontal] -types:: a list of types to keep - +types:: a list of types to include (default mode) or exclude +mode:: if set to `include` (default) the specified token types will be kept, +if set to `exclude` the specified token types will be removed from the stream [float] === Settings example @@ -53,7 +54,7 @@ POST /keep_types_example/_analyze // CONSOLE // TEST[continued] -And it'd respond: +The response will be: [source,js] -------------------------------------------------- @@ -72,3 +73,70 @@ And it'd respond: // TESTRESPONSE Note how only the `<NUM>` token is in the output. + +=== Exclude mode settings example + +If the `mode` parameter is set to `exclude` like in the following example: + +[source,js] +-------------------------------------------------- +PUT /keep_types_exclude_example +{ + "settings" : { + "analysis" : { + "analyzer" : { + "my_analyzer" : { + "tokenizer" : "standard", + "filter" : ["standard", "lowercase", "remove_numbers"] + } + }, + "filter" : { + "remove_numbers" : { + "type" : "keep_types", + "mode" : "exclude", + "types" : [ "<NUM>" ] + } + } + } + } +} +-------------------------------------------------- +// CONSOLE + +And we test it like: + +[source,js] +-------------------------------------------------- +POST /keep_types_exclude_example/_analyze +{ + "analyzer" : "my_analyzer", + "text" : "hello 101 world" +} +-------------------------------------------------- +// CONSOLE +// TEST[continued] + +The response will be: + +[source,js] +-------------------------------------------------- +{ + "tokens": [ + { + "token": "hello", + "start_offset": 0, + "end_offset": 5, + "type": "<ALPHANUM>", + "position": 0 + }, + { + "token": "world", + "start_offset": 10, + "end_offset": 15, + "type": "<ALPHANUM>", + "position": 2 + } + ] +} +-------------------------------------------------- +// TESTRESPONSE \ No newline at end of file diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/KeepTypesFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/KeepTypesFilterFactory.java index 0f94b521e4b7d..b6b8b45fabfc2 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/KeepTypesFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/KeepTypesFilterFactory.java @@ -29,21 +29,47 @@ import java.util.HashSet; import java.util.List; +import java.util.Locale; import java.util.Set; /** * A {@link TokenFilterFactory} for {@link TypeTokenFilter}. This filter only * keep tokens that are contained in the set configured via - * {@value #KEEP_TYPES_KEY} setting. + * {@value #KEEP_TYPES_MODE_KEY} setting. * <p> * Configuration options: * <ul> - * <li>{@value #KEEP_TYPES_KEY} the array of words / tokens to keep.</li> + * <li>{@value #KEEP_TYPES_KEY} the array of words / tokens.</li> + * <li>{@value #KEEP_TYPES_MODE_KEY} whether to keep ("include") or discard + * ("exclude") the specified token types.</li> * </ul>
*/ public class KeepTypesFilterFactory extends AbstractTokenFilterFactory { private final Set<String> keepTypes; - private static final String KEEP_TYPES_KEY = "types"; + private final KeepTypesMode includeMode; + static final String KEEP_TYPES_KEY = "types"; + static final String KEEP_TYPES_MODE_KEY = "mode"; + + enum KeepTypesMode { + INCLUDE, EXCLUDE; + + @Override + public String toString() { + return this.name().toLowerCase(Locale.ROOT); + } + + private static KeepTypesMode fromString(String modeString) { + String lc = modeString.toLowerCase(Locale.ROOT); + if (lc.equals("include")) { + return INCLUDE; + } else if (lc.equals("exclude")) { + return EXCLUDE; + } else { + throw new IllegalArgumentException("`keep_types` tokenfilter mode can only be [" + KeepTypesMode.INCLUDE + "] or [" + + KeepTypesMode.EXCLUDE + "] but was [" + modeString + "]."); + } + } + }; KeepTypesFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) { super(indexSettings, name, settings); @@ -52,12 +78,12 @@ public class KeepTypesFilterFactory extends AbstractTokenFilterFactory { if ((arrayKeepTypes == null)) { throw new IllegalArgumentException("keep_types requires `" + KEEP_TYPES_KEY + "` to be configured"); } - + this.includeMode = KeepTypesMode.fromString(settings.get(KEEP_TYPES_MODE_KEY, "include")); this.keepTypes = new HashSet<>(arrayKeepTypes); } @Override public TokenStream create(TokenStream tokenStream) { - return new TypeTokenFilter(tokenStream, keepTypes, true); + return new TypeTokenFilter(tokenStream, keepTypes, includeMode == KeepTypesMode.INCLUDE); } } diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/KeepTypesFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/KeepTypesFilterFactoryTests.java index a19882d6faa00..d0c7723457ff3 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/KeepTypesFilterFactoryTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/KeepTypesFilterFactoryTests.java @@ -34,19 +34,51 @@ import static org.hamcrest.Matchers.instanceOf; public class KeepTypesFilterFactoryTests extends ESTokenStreamTestCase { - public void testKeepTypes() throws IOException { - Settings settings = Settings.builder() - .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) - .put("index.analysis.filter.keep_numbers.type", "keep_types") - .putList("index.analysis.filter.keep_numbers.types", new String[] {"<NUM>", "<SOMETHINGELSE>"}) - .build(); + + private static final String BASE_SETTING = "index.analysis.filter.keep_numbers"; + + public void testKeepTypesInclude() throws IOException { + Settings.Builder settingsBuilder = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put(BASE_SETTING + ".type", "keep_types") + .putList(BASE_SETTING + "." + KeepTypesFilterFactory.KEEP_TYPES_KEY, new String[] { "<NUM>", "<SOMETHINGELSE>" }); + // either use default mode or set "include" mode explicitly + if (random().nextBoolean()) { + settingsBuilder.put(BASE_SETTING + "."
+ KeepTypesFilterFactory.KEEP_TYPES_MODE_KEY, + KeepTypesFilterFactory.KeepTypesMode.INCLUDE); + } + Settings settings = settingsBuilder.build(); + ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisPlugin()); + TokenFilterFactory tokenFilter = analysis.tokenFilter.get("keep_numbers"); + assertThat(tokenFilter, instanceOf(KeepTypesFilterFactory.class)); + String source = "Hello 123 world"; + String[] expected = new String[] { "123" }; + Tokenizer tokenizer = new StandardTokenizer(); + tokenizer.setReader(new StringReader(source)); + assertTokenStreamContents(tokenFilter.create(tokenizer), expected, new int[] { 2 }); + } + + public void testKeepTypesExclude() throws IOException { + Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put(BASE_SETTING + ".type", "keep_types") + .putList(BASE_SETTING + "." + KeepTypesFilterFactory.KEEP_TYPES_KEY, new String[] { "<NUM>", "<SOMETHINGELSE>" }) + .put(BASE_SETTING + "." + KeepTypesFilterFactory.KEEP_TYPES_MODE_KEY, KeepTypesFilterFactory.KeepTypesMode.EXCLUDE).build(); ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisPlugin()); TokenFilterFactory tokenFilter = analysis.tokenFilter.get("keep_numbers"); assertThat(tokenFilter, instanceOf(KeepTypesFilterFactory.class)); String source = "Hello 123 world"; - String[] expected = new String[]{"123"}; + String[] expected = new String[] { "Hello", "world" }; Tokenizer tokenizer = new StandardTokenizer(); tokenizer.setReader(new StringReader(source)); - assertTokenStreamContents(tokenFilter.create(tokenizer), expected, new int[]{2}); + assertTokenStreamContents(tokenFilter.create(tokenizer), expected, new int[] { 1, 2 }); + } + + public void testKeepTypesException() throws IOException { + Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) + .put(BASE_SETTING + ".type", "keep_types") + .putList(BASE_SETTING + "." + KeepTypesFilterFactory.KEEP_TYPES_KEY, new String[] { "<NUM>", "<SOMETHINGELSE>" }) + .put(BASE_SETTING + "." + KeepTypesFilterFactory.KEEP_TYPES_MODE_KEY, "bad_parameter").build(); + IllegalArgumentException ex = expectThrows(IllegalArgumentException.class, + () -> AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisPlugin())); + assertEquals("`keep_types` tokenfilter mode can only be [include] or [exclude] but was [bad_parameter].", ex.getMessage()); } } From 89bce9331ad8c22722c6c76d4dd789fe9671ef56 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Tue, 17 Jul 2018 09:09:03 +0200 Subject: [PATCH 067/107] Fix put mappings java API documentation (#31955) The current docs of the put-mapping Java API are broken. In their current form, they create an index and use the whole mapping definition, given as a JSON string, as the type name. Since we didn't check the index created in the IndicesDocumentationIT so far, this went unnoticed. This change adds a test to the documentation tests to catch this error, changes the documentation so that it works correctly, and adds input validation to PutMappingRequest#buildFromSimplifiedDef() (which was used internally) so that calls with no mapping definition are rejected.
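As a hedged sketch of the resulting behaviour (the type and field names are taken from the documentation test; the class wrapper and printing are incidental):

import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.xcontent.XContentBuilder;

public class SimplifiedDefSketch {
    public static void main(String[] args) throws Exception {
        // Varargs must come in field name / properties pairs.
        XContentBuilder mapping = PutMappingRequest.buildFromSimplifiedDef("tweet", "message", "type=text");
        System.out.println(Strings.toString(mapping));
        // An unpaired field name now fails fast instead of silently producing a broken mapping:
        // PutMappingRequest.buildFromSimplifiedDef("tweet", "only_field");
        // -> IllegalArgumentException: mapping source must be pairs of fieldnames and properties definition.
    }
}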
Closes #31906 --- .../admin/indices/put-mapping.asciidoc | 11 ++++-- .../mapping/put/PutMappingRequest.java | 11 +++--- .../mapping/put/PutMappingRequestTests.java | 6 +++- .../documentation/IndicesDocumentationIT.java | 35 ++++++++++++++----- 4 files changed, 47 insertions(+), 16 deletions(-) diff --git a/docs/java-api/admin/indices/put-mapping.asciidoc b/docs/java-api/admin/indices/put-mapping.asciidoc index 3e931dfd7b7e7..8bdcb4916976f 100644 --- a/docs/java-api/admin/indices/put-mapping.asciidoc +++ b/docs/java-api/admin/indices/put-mapping.asciidoc @@ -2,17 +2,22 @@ ==== Put Mapping -The PUT mapping API allows you to add a new type while creating an index: +You can add mappings for a new type at index creation time: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- include-tagged::{client-tests}/IndicesDocumentationIT.java[index-with-mapping] -------------------------------------------------- <1> <> called `twitter` -<2> It also adds a `tweet` mapping type. +<2> Add a `tweet` type with a field called `message` that has the datatype `text`. +There are several variants of the above `addMapping` method, some taking an +`XContentBuilder` or a `Map` with the mapping definition as arguments. Make sure +to check the javadocs to pick the simplest one for your use case. -The PUT mapping API also allows to add a new type to an existing index: +The PUT mapping API also allows to specify the mapping of a type after index +creation. In this case you can provide the mapping as a String similar to the +Rest API syntax: ["source","java",subs="attributes,callouts,macros"] -------------------------------------------------- diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequest.java index dc201b38c3bee..3429b35073ca2 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequest.java @@ -184,10 +184,13 @@ public PutMappingRequest source(Object... source) { } /** - * @param type the mapping type - * @param source consisting of field/properties pairs (e.g. "field1", - * "type=string,store=true"). If the number of arguments is not - * divisible by two an {@link IllegalArgumentException} is thrown + * @param type + * the mapping type + * @param source + * consisting of field/properties pairs (e.g. "field1", + * "type=string,store=true") + * @throws IllegalArgumentException + * if the number of the source arguments is not divisible by two * @return the mappings definition */ public static XContentBuilder buildFromSimplifiedDef(String type, Object... 
source) { diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequestTests.java index be44d790b4004..86c2b67be9c54 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequestTests.java @@ -76,8 +76,12 @@ public void testValidation() { " concrete index: [[foo/bar]] and indices: [myindex];"); } + /** + * Test that {@link PutMappingRequest#buildFromSimplifiedDef(String, Object...)} + * rejects inputs where the {@code Object...} varargs of field name and properties are not + * paired correctly + */ public void testBuildFromSimplifiedDef() { - // test that method rejects input where input varargs fieldname/properites are not paired correctly IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> PutMappingRequest.buildFromSimplifiedDef("type", "only_field")); assertEquals("mapping source must be pairs of fieldnames and properties definition.", e.getMessage()); diff --git a/server/src/test/java/org/elasticsearch/client/documentation/IndicesDocumentationIT.java b/server/src/test/java/org/elasticsearch/client/documentation/IndicesDocumentationIT.java index 064702170d5bb..e5df229cd98a9 100644 --- a/server/src/test/java/org/elasticsearch/client/documentation/IndicesDocumentationIT.java +++ b/server/src/test/java/org/elasticsearch/client/documentation/IndicesDocumentationIT.java @@ -19,10 +19,19 @@ package org.elasticsearch.client.documentation; +import org.elasticsearch.action.admin.indices.mapping.get.GetMappingsResponse; import org.elasticsearch.client.Client; +import org.elasticsearch.cluster.metadata.MappingMetaData; +import org.elasticsearch.common.collect.ImmutableOpenMap; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.test.ESIntegTestCase; +import java.util.HashMap; +import java.util.Map; + +import static java.util.Collections.singletonMap; +import static org.hamcrest.Matchers.instanceOf; + /** * This class is used to generate the Java indices administration documentation. 
* You need to wrap your code between two tags like: @@ -48,16 +57,14 @@ public void testPutMappingDocumentation() throws Exception { Client client = client(); // tag::index-with-mapping - client.admin().indices().prepareCreate("twitter") // <1> - .addMapping("\"tweet\": {\n" + // <2> - " \"properties\": {\n" + - " \"message\": {\n" + - " \"type\": \"text\"\n" + - " }\n" + - " }\n" + - "}") + client.admin().indices().prepareCreate("twitter") // <1> + .addMapping("tweet", "message", "type=text") // <2> .get(); // end::index-with-mapping + GetMappingsResponse getMappingsResponse = client.admin().indices().prepareGetMappings("twitter").get(); + assertEquals(1, getMappingsResponse.getMappings().size()); + ImmutableOpenMap<String, MappingMetaData> indexMapping = getMappingsResponse.getMappings().get("twitter"); + assertThat(indexMapping.get("tweet"), instanceOf(MappingMetaData.class)); // we need to delete in order to create a fresh new index with another type client.admin().indices().prepareDelete("twitter").get(); @@ -89,6 +96,11 @@ public void testPutMappingDocumentation() throws Exception { "}", XContentType.JSON) .get(); // end::putMapping-request-source + getMappingsResponse = client.admin().indices().prepareGetMappings("twitter").get(); + assertEquals(1, getMappingsResponse.getMappings().size()); + indexMapping = getMappingsResponse.getMappings().get("twitter"); + assertEquals(singletonMap("properties", singletonMap("name", singletonMap("type", "text"))), + indexMapping.get("user").getSourceAsMap()); // tag::putMapping-request-source-append client.admin().indices().preparePutMapping("twitter") // <1> @@ -102,6 +114,13 @@ public void testPutMappingDocumentation() throws Exception { "}", XContentType.JSON) .get(); // end::putMapping-request-source-append + getMappingsResponse = client.admin().indices().prepareGetMappings("twitter").get(); + assertEquals(1, getMappingsResponse.getMappings().size()); + indexMapping = getMappingsResponse.getMappings().get("twitter"); + Map<String, Map<String, String>> expected = new HashMap<>(); + expected.put("name", singletonMap("type", "text")); + expected.put("user_name", singletonMap("type", "text")); + assertEquals(expected, indexMapping.get("user").getSourceAsMap().get("properties")); } } From c2ee07b07c7c3ac3f601a3afdd569fb3f65458e8 Mon Sep 17 00:00:00 2001 From: Ioannis Kakavas Date: Tue, 17 Jul 2018 10:54:10 +0300 Subject: [PATCH 068/107] Enable testing in FIPS140 JVM (#31666) Ensure our tests can run in a FIPS JVM. JKS keystores cannot be used in a FIPS JVM, as attempting to use one in order to init a KeyManagerFactory or a TrustManagerFactory is not allowed (JKS keystore algorithms for private key encryption are not FIPS 140 approved). This commit replaces JKS keystores in our tests with the corresponding PEM encoded key and certificates, both for key and trust configurations. Whenever it's not possible to refactor the test, i.e. when we are testing that we can load a JKS keystore etc., we mute the test when we are running in a FIPS 140 JVM. Testing for the JVM is naive and is based on the name of the security provider, but since we control the testing infrastructure this should be reliable enough. Other cases of tests being muted are the ones that involve custom TrustStoreManagers or KeyStoreManagers, null TLS ciphers, and the SAMLAuthenticator class, as we cannot sign XML documents in the way we were doing. SAMLAuthenticator tests in a FIPS JVM can be re-enabled with precomputed and signed SAML messages at a later stage.
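The recurring pattern in this change, sketched below with placeholder paths and password rather than code lifted from any single test, is to build a keystore of the JVM's default type programmatically from a PEM certificate and a PKCS#8 (DER) key instead of loading a JKS file:

import javax.net.ssl.KeyManagerFactory;
import javax.net.ssl.SSLContext;
import javax.net.ssl.TrustManagerFactory;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.security.KeyFactory;
import java.security.KeyStore;
import java.security.cert.Certificate;
import java.security.cert.CertificateFactory;
import java.security.spec.PKCS8EncodedKeySpec;

public class FipsFriendlyKeystoreSketch {
    static SSLContext build(String certPath, String keyPath, char[] password) throws Exception {
        // Default keystore type instead of hardcoded JKS, so a FIPS provider can supply it.
        KeyStore keyStore = KeyStore.getInstance(KeyStore.getDefaultType());
        keyStore.load(null, password);
        CertificateFactory certFactory = CertificateFactory.getInstance("X.509");
        Certificate cert;
        try (InputStream in = Files.newInputStream(Paths.get(certPath))) {
            cert = certFactory.generateCertificate(in);
        }
        PKCS8EncodedKeySpec keySpec = new PKCS8EncodedKeySpec(Files.readAllBytes(Paths.get(keyPath)));
        keyStore.setKeyEntry("key", KeyFactory.getInstance("RSA").generatePrivate(keySpec), password, new Certificate[] { cert });
        // Default algorithms instead of hardcoded SunX509, which a FIPS provider may not offer.
        KeyManagerFactory kmf = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm());
        kmf.init(keyStore, password);
        TrustManagerFactory tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
        tmf.init(keyStore);
        SSLContext ctx = SSLContext.getInstance("TLS");
        ctx.init(kmf.getKeyManagers(), tmf.getTrustManagers(), null);
        return ctx;
    }
}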
IT will be covered in a subsequent PR --- client/rest/build.gradle | 4 + .../client/RestClientBuilderIntegTests.java | 24 ++- client/rest/src/test/resources/test.crt | 24 +++ client/rest/src/test/resources/test.der | Bin 0 -> 1218 bytes .../plugins/InstallPluginCommandTests.java | 6 + .../ingest/common/DateProcessorTests.java | 2 + .../AzureDiscoveryClusterFormationTests.java | 10 ++ .../common/settings/KeyStoreWrapperTests.java | 2 + .../org/elasticsearch/test/ESTestCase.java | 6 + .../xpack/core/ssl/CertParsingUtils.java | 4 +- .../license/LicenseServiceClusterTests.java | 1 + .../license/SelfGeneratedLicenseTests.java | 1 + .../xpack/core/ssl/CertGenUtilsTests.java | 6 + .../xpack/core/ssl/PemUtilsTests.java | 1 + .../ssl/SSLConfigurationReloaderTests.java | 94 +++++++---- .../xpack/core/ssl/SSLServiceTests.java | 92 ++++++----- .../xpack/core/ssl/StoreKeyConfigTests.java | 2 + .../transport/ssl/certs/simple/samba4.crt | 22 +++ .../ssl/certs/simple/testnode-ip-only.pem | 30 ++++ ...tname.cert => testnode-no-subjaltname.crt} | 0 .../certs/simple/testnode-no-subjaltname.pem | 30 ++++ .../ssl/certs/simple/testnode_updated.crt | 49 +++--- .../ssl/certs/simple/testnode_updated.jks | Bin 2333 -> 4003 bytes .../ssl/certs/simple/testnode_updated.p12 | Bin 2628 -> 4300 bytes .../ssl/certs/simple/testnode_updated.pem | 77 ++++++--- .../test/SecuritySettingsSource.java | 27 +++- .../test/SettingsFilterTests.java | 13 +- .../security/PkiRealmBootstrapCheckTests.java | 20 +-- .../esnative/ESNativeMigrateToolTests.java | 16 +- .../tool/CommandLineHttpClientTests.java | 23 ++- .../authc/ldap/GroupsResolverTestCase.java | 4 +- .../security/authc/ldap/LdapTestUtils.java | 2 +- .../LdapUserSearchSessionFactoryTests.java | 9 +- .../authc/pki/PkiAuthenticationTests.java | 75 +++++---- .../authc/saml/SamlAuthenticatorTests.java | 4 +- .../authc/saml/SamlMetadataCommandTests.java | 14 +- .../security/authc/saml/SamlRealmTests.java | 40 +++-- ...ServerTransportFilterIntegrationTests.java | 46 +++--- .../netty4/IPHostnameVerificationTests.java | 40 ++--- ...ecurityNetty4HttpServerTransportTests.java | 42 ++--- .../SecurityNetty4ServerTransportTests.java | 37 +++-- .../netty4/SslHostnameVerificationTests.java | 55 ++++--- .../transport/nio/SSLDriverTests.java | 31 ++-- .../nio/SimpleSecurityNioTransportTests.java | 16 +- .../transport/ssl/EllipticCurveSSLTests.java | 4 +- .../transport/ssl/SslIntegrationTests.java | 11 +- .../transport/ssl/SslMultiPortTests.java | 153 +++++++++--------- .../transport/ssl/SslNullCipherTests.java | 6 + .../xpack/ssl/SSLClientAuthTests.java | 65 ++++---- .../xpack/ssl/SSLReloadIntegTests.java | 103 ++++++------ .../xpack/ssl/SSLTrustRestrictionsTests.java | 1 + .../xpack/security/authc/ldap/support/ad.crt | 23 +++ .../security/authc/ldap/support/smb_ca.crt | 22 +++ .../security/authc/ldap/support/smb_cert.crt | 22 +++ .../webhook/WebhookHttpsIntegrationTests.java | 12 +- .../watcher/common/http/HttpClientTests.java | 100 ++++++------ .../xpack/security/keystore/testnode.crt | 21 +++ .../xpack/security/keystore/testnode.pem | 27 ++++ .../keystore/truststore-testnode-only.crt | 21 +++ .../certs/simple/testnode-no-subjaltname.crt | 20 +++ .../certs/simple/testnode-no-subjaltname.pem | 30 ++++ x-pack/qa/openldap-tests/build.gradle | 1 + ...OpenLdapUserSearchSessionFactoryTests.java | 14 +- .../authc/ldap/SearchGroupsResolverTests.java | 2 +- .../ssl/CertificateGenerateToolTests.java | 6 + .../xpack/core/ssl/CertificateToolTests.java | 6 + .../ADLdapUserSearchSessionFactoryTests.java | 
9 +- .../ldap/AbstractActiveDirectoryTestCase.java | 38 +++-- .../ldap/AbstractAdLdapRealmTestCase.java | 76 ++++++--- .../ActiveDirectoryGroupsResolverTests.java | 2 +- .../ActiveDirectorySessionFactoryTests.java | 24 ++- .../authc/ldap/MultipleAdRealmIT.java | 5 +- 72 files changed, 1215 insertions(+), 610 deletions(-) create mode 100644 client/rest/src/test/resources/test.crt create mode 100644 client/rest/src/test/resources/test.der create mode 100644 x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/samba4.crt create mode 100644 x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-ip-only.pem rename x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/{testnode-no-subjaltname.cert => testnode-no-subjaltname.crt} (100%) create mode 100644 x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.pem create mode 100644 x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/ldap/support/ad.crt create mode 100644 x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/ldap/support/smb_ca.crt create mode 100644 x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/ldap/support/smb_cert.crt create mode 100644 x-pack/plugin/watcher/src/test/resources/org/elasticsearch/xpack/security/keystore/testnode.crt create mode 100644 x-pack/plugin/watcher/src/test/resources/org/elasticsearch/xpack/security/keystore/testnode.pem create mode 100644 x-pack/plugin/watcher/src/test/resources/org/elasticsearch/xpack/security/keystore/truststore-testnode-only.crt create mode 100644 x-pack/plugin/watcher/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.crt create mode 100644 x-pack/plugin/watcher/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.pem diff --git a/client/rest/build.gradle b/client/rest/build.gradle index b1ed05a834213..fc2ab0bc4c05d 100644 --- a/client/rest/build.gradle +++ b/client/rest/build.gradle @@ -59,6 +59,10 @@ forbiddenApisMain { PrecommitTasks.getResource('/forbidden/http-signatures.txt')] } +forbiddenPatterns { + exclude '**/*.der' +} + forbiddenApisTest { //we are using jdk-internal instead of jdk-non-portable to allow for com.sun.net.httpserver.* usage bundledSignatures -= 'jdk-non-portable' diff --git a/client/rest/src/test/java/org/elasticsearch/client/RestClientBuilderIntegTests.java b/client/rest/src/test/java/org/elasticsearch/client/RestClientBuilderIntegTests.java index 0d1c3ffd6b8bb..49eefc527baf1 100644 --- a/client/rest/src/test/java/org/elasticsearch/client/RestClientBuilderIntegTests.java +++ b/client/rest/src/test/java/org/elasticsearch/client/RestClientBuilderIntegTests.java @@ -36,7 +36,13 @@ import java.io.InputStream; import java.net.InetAddress; import java.net.InetSocketAddress; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.security.KeyFactory; import java.security.KeyStore; +import java.security.cert.Certificate; +import java.security.cert.CertificateFactory; +import java.security.spec.PKCS8EncodedKeySpec; import static org.hamcrest.Matchers.instanceOf; import static org.junit.Assert.assertEquals; @@ -101,12 +107,20 @@ private RestClient buildRestClient() { private static SSLContext getSslContext() throws Exception { SSLContext sslContext = 
SSLContext.getInstance("TLS"); - try (InputStream in = RestClientBuilderIntegTests.class.getResourceAsStream("/testks.jks")) { - KeyStore keyStore = KeyStore.getInstance("JKS"); - keyStore.load(in, "password".toCharArray()); - KeyManagerFactory kmf = KeyManagerFactory.getInstance("SunX509"); + try (InputStream certFile = RestClientBuilderIntegTests.class.getResourceAsStream("/test.crt")) { + // Build a keystore of default type programmatically since we can't use JKS keystores to + // init a KeyManagerFactory in FIPS 140 JVMs. + KeyStore keyStore = KeyStore.getInstance(KeyStore.getDefaultType()); + keyStore.load(null, "password".toCharArray()); + CertificateFactory certFactory = CertificateFactory.getInstance("X.509"); + PKCS8EncodedKeySpec privateKeySpec = new PKCS8EncodedKeySpec(Files.readAllBytes(Paths.get(RestClientBuilderIntegTests.class + .getResource("/test.der").toURI()))); + KeyFactory keyFactory = KeyFactory.getInstance("RSA"); + keyStore.setKeyEntry("mykey", keyFactory.generatePrivate(privateKeySpec), "password".toCharArray(), + new Certificate[]{certFactory.generateCertificate(certFile)}); + KeyManagerFactory kmf = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm()); kmf.init(keyStore, "password".toCharArray()); - TrustManagerFactory tmf = TrustManagerFactory.getInstance("SunX509"); + TrustManagerFactory tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm()); tmf.init(keyStore); sslContext.init(kmf.getKeyManagers(), tmf.getTrustManagers(), null); } diff --git a/client/rest/src/test/resources/test.crt b/client/rest/src/test/resources/test.crt new file mode 100644 index 0000000000000..ab1b8c2265df0 --- /dev/null +++ b/client/rest/src/test/resources/test.crt @@ -0,0 +1,24 @@ +-----BEGIN CERTIFICATE----- +MIIEATCCAumgAwIBAgIEObhDZDANBgkqhkiG9w0BAQsFADBnMQswCQYDVQQGEwJV +UzELMAkGA1UECBMCQ0ExFjAUBgNVBAcTDU1vdW50YWluIFZpZXcxEDAOBgNVBAoT +B2VsYXN0aWMxDTALBgNVBAsTBHRlc3QxEjAQBgNVBAMTCXRlc3Qgbm9kZTAeFw0x +NzA3MTcxNjEyNTZaFw0yNzA3MTUxNjEyNTZaMGcxCzAJBgNVBAYTAlVTMQswCQYD +VQQIEwJDQTEWMBQGA1UEBxMNTW91bnRhaW4gVmlldzEQMA4GA1UEChMHZWxhc3Rp +YzENMAsGA1UECxMEdGVzdDESMBAGA1UEAxMJdGVzdCBub2RlMIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEAnXtuGIgAq6vWzUD34HXkYF+0u103hb8d1h35 +kjeuNApkUhS6x/VbuNp7TpWmprfDgG5w9TourHvyiqcQMDEWrBunS6rmKo1jK1Wm +le3qA3F2l9VIZSNeeYQgezmzuElEPPmBjN8XBByIWKYjZcGd5u7DiquPUh9QLIev +itgB2jfi9D8ewyvaSbVAQuQwyIaDN9L74wKyMC8EuzzAWNSDjgIhhwcR5qg17msa +ItyM44/3hik+ObIGpMlLSxQu2V1U9bOaq48JjQBLHVg1vzC9VzGuNdEb8haFnhJN +UrdESdHymbtBSUvy30iB+kHq5R8wQ4pC+WxChQnbA2GskuFrMQIDAQABo4G0MIGx +MIGPBgNVHREEgYcwgYSHBH8AAAGHEAAAAAAAAAAAAAAAAAAAAAGCCWxvY2FsaG9z +dIIVbG9jYWxob3N0LmxvY2FsZG9tYWluggpsb2NhbGhvc3Q0ghdsb2NhbGhvc3Q0 +LmxvY2FsZG9tYWluNIIKbG9jYWxob3N0NoIXbG9jYWxob3N0Ni5sb2NhbGRvbWFp +bjYwHQYDVR0OBBYEFFwNcqIKfGBCBGo9faQJ3TsHmp0SMA0GCSqGSIb3DQEBCwUA +A4IBAQBvUJTRjSOf/+vtyS3OokwRilg1ZGF3psg0DWhjH2ehIRfNibU1Y8FVQo3I +VU8LjcIUK1cN85z+AsYqLXo/C4qmJPydQ1tGpQL7uIrPD4h+Xh3tY6A2DKRJRQFO +w2LjswPidGufMztpPbXxLREqvkvn80VkDnc44UPxYfHvZFqYwYyxZccA5mm+BhYu +IerjfvgX+8zMWIQZOd+jRq8EaVTmVK2Azwwhc5ImWfc0DA3pmGPdECzE4N0VVoIJ +N8PCVltXXP3F7K3LoT6CLSiJ3c/IDVNoVS4pRV6R6Y4oIKD9T/T1kAgAvOrUGRWY +ejWQ41GdUmkmxrqCaMbVCO4s72BC +-----END CERTIFICATE----- diff --git a/client/rest/src/test/resources/test.der b/client/rest/src/test/resources/test.der new file mode 100644 index 0000000000000000000000000000000000000000..454bfd286bd97937020cb8d1535c7cb3eec43557 GIT binary patch literal 1218 zcmV;z1U>sOf&{(-0RS)!1_>&LNQUrs4#*Aqyhl|0)hbn0G)eo7>EF? 
ztJcjx_uzHpU|+PmT{nfl9o8NBk~gk23S?3gy2tffxY~P8m8Pb*!+>sZ^*S!Bd-95> z5HK+otQ)6Gs^%(XizNwq*i zg`N^kQny4&(ejzQK}k#U-$;S_LF(lnFhhz$`D{Xk3EKlajxI^3zY*DMf42|U+NcycGN5Ii(&fd7Vo1LaD{Jj zk^&xUtM1`nB%&;_e1ejavA5F)$JNV-kx=7#3=1 zNR_IxrXGQvev4;>7K$A&AgJXrpFZ9tBu-XU3Uf%X^y(KqMsL0Z-C#fGDTJnM2BQ?+ zKE>d@a-CM0qU0BWzEYKKBGDEJ^dka+fdITy>*NbhtzpqD5?=qK)<@mW-;493$$ORU z8{^b}t5lA~o?5U70h1-GV?nHhZXY@>$0xT2_4l9h5S-b#TRao!cqSaPrDKxeqJOjo zml{5yQNEYHnnJMOUHUBmo4Q=cM1qpnio#8Uxx~|?($N?p><9S#EFe*7KU@iKo|^)J zfdGPA6Co{I{$ZaM#M7<2E=U`i@W9cZVz3Rl(n@%p zC!K#o3d*d6FqrbI!Bq4gl1aW28R!`EhPQJRp7x7Sbl81yUf$@cT zR|0{70Hk{CRiE_&-^6Tw?fk}J`Eyu temp) { System.setProperty("java.io.tmpdir", temp.apply("tmpdir").toString()); } + @BeforeClass + public static void testIfFipsMode() { + assumeFalse("Can't run in a FIPS JVM because this depends on BouncyCastle (non-fips)", inFipsJvm()); + } + @Override @Before public void setUp() throws Exception { diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateProcessorTests.java index 43a5f9245b185..23aac797859e7 100644 --- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateProcessorTests.java +++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateProcessorTests.java @@ -114,6 +114,8 @@ public void testInvalidJodaPattern() { } public void testJodaPatternLocale() { + //TODO investigate if this is a bug in Joda + assumeFalse("Can't run in a FIPS JVM, Joda parse date error", inFipsJvm()); DateProcessor dateProcessor = new DateProcessor(randomAlphaOfLength(10), templatize(ZoneId.of("Europe/Amsterdam")), templatize(Locale.ITALIAN), "date_as_string", Collections.singletonList("yyyy dd MMM"), "date_as_date"); diff --git a/plugins/discovery-azure-classic/src/test/java/org/elasticsearch/discovery/azure/classic/AzureDiscoveryClusterFormationTests.java b/plugins/discovery-azure-classic/src/test/java/org/elasticsearch/discovery/azure/classic/AzureDiscoveryClusterFormationTests.java index 1b8ca38aec442..c5a5f1df98ee2 100644 --- a/plugins/discovery-azure-classic/src/test/java/org/elasticsearch/discovery/azure/classic/AzureDiscoveryClusterFormationTests.java +++ b/plugins/discovery-azure-classic/src/test/java/org/elasticsearch/discovery/azure/classic/AzureDiscoveryClusterFormationTests.java @@ -40,6 +40,8 @@ import org.elasticsearch.transport.TcpTransport; import org.junit.AfterClass; import org.junit.BeforeClass; +import org.junit.ClassRule; +import org.junit.rules.ExternalResource; import javax.net.ssl.KeyManagerFactory; import javax.net.ssl.SSLContext; @@ -87,6 +89,14 @@ protected Collection> nodePlugins() { private static Path keyStoreFile; + @ClassRule + public static final ExternalResource MUTE_IN_FIPS_JVM = new ExternalResource() { + @Override + protected void before() { + assumeFalse("Can't run in a FIPS JVM because none of the supported Keystore types can be used", inFipsJvm()); + } + }; + @BeforeClass public static void setupKeyStore() throws IOException { Path tempDir = createTempDir(); diff --git a/server/src/test/java/org/elasticsearch/common/settings/KeyStoreWrapperTests.java b/server/src/test/java/org/elasticsearch/common/settings/KeyStoreWrapperTests.java index 849841943ecc6..fe7b02d63ecce 100644 --- a/server/src/test/java/org/elasticsearch/common/settings/KeyStoreWrapperTests.java +++ b/server/src/test/java/org/elasticsearch/common/settings/KeyStoreWrapperTests.java @@ -290,6 +290,7 @@ public void testIllegalSettingName() 
throws Exception { } public void testBackcompatV1() throws Exception { + assumeFalse("Can't run in a FIPS JVM as PBE is not available", inFipsJvm()); Path configDir = env.configFile(); SimpleFSDirectory directory = new SimpleFSDirectory(configDir); try (IndexOutput output = directory.createOutput("elasticsearch.keystore", IOContext.DEFAULT)) { @@ -320,6 +321,7 @@ public void testBackcompatV1() throws Exception { } public void testBackcompatV2() throws Exception { + assumeFalse("Can't run in a FIPS JVM as PBE is not available", inFipsJvm()); Path configDir = env.configFile(); SimpleFSDirectory directory = new SimpleFSDirectory(configDir); byte[] fileBytes = new byte[20]; diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java index 9cdfc6776f883..4c00527a93267 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java @@ -125,6 +125,7 @@ import java.nio.file.Files; import java.nio.file.Path; import java.time.ZoneId; +import java.security.Security; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; @@ -132,6 +133,7 @@ import java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; +import java.util.Locale; import java.util.Map; import java.util.Objects; import java.util.Random; @@ -1363,4 +1365,8 @@ public TestAnalysis(IndexAnalyzers indexAnalyzers, } } + public static boolean inFipsJvm() { + return Security.getProviders()[0].getName().toLowerCase(Locale.ROOT).contains("fips"); + } + } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/CertParsingUtils.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/CertParsingUtils.java index 8f66af14dfcad..1617a92b550ac 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/CertParsingUtils.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ssl/CertParsingUtils.java @@ -158,7 +158,7 @@ public static X509ExtendedKeyManager keyManager(Certificate[] certificateChain, private static KeyStore getKeyStore(Certificate[] certificateChain, PrivateKey privateKey, char[] password) throws KeyStoreException, IOException, NoSuchAlgorithmException, CertificateException { - KeyStore keyStore = KeyStore.getInstance("jks"); + KeyStore keyStore = KeyStore.getInstance(KeyStore.getDefaultType()); keyStore.load(null, null); // password must be non-null for keystore... 
keyStore.setKeyEntry("key", privateKey, password, certificateChain); @@ -242,7 +242,7 @@ public static X509ExtendedTrustManager trustManager(Certificate[] certificates) static KeyStore trustStore(Certificate[] certificates) throws KeyStoreException, IOException, NoSuchAlgorithmException, CertificateException { assert certificates != null : "Cannot create trust store with null certificates"; - KeyStore store = KeyStore.getInstance("jks"); + KeyStore store = KeyStore.getInstance(KeyStore.getDefaultType()); store.load(null, null); int counter = 0; for (Certificate certificate : certificates) { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicenseServiceClusterTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicenseServiceClusterTests.java index a58491dea3bc4..00d1c47cdedaa 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicenseServiceClusterTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/LicenseServiceClusterTests.java @@ -153,6 +153,7 @@ public void testClusterRestartWhileExpired() throws Exception { } public void testClusterRestartWithOldSignature() throws Exception { + assumeFalse("Can't run in a FIPS JVM. We can't generate old licenses since PBEWithSHA1AndDESede is not available", inFipsJvm()); wipeAllLicenses(); internalCluster().startNode(); ensureGreen(); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/SelfGeneratedLicenseTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/SelfGeneratedLicenseTests.java index 4e061623ccd94..0b61604ed7cc9 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/SelfGeneratedLicenseTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/SelfGeneratedLicenseTests.java @@ -76,6 +76,7 @@ public void testFrom1x() throws Exception { } public void testTrialLicenseVerifyWithOlderVersion() throws Exception { + assumeFalse("Can't run in a FIPS JVM. 
We can't generate old licenses since PBEWithSHA1AndDESede is not available", inFipsJvm()); long issueDate = System.currentTimeMillis(); License.Builder specBuilder = License.builder() .issuedTo("customer") diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/CertGenUtilsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/CertGenUtilsTests.java index cf6ab53b9f57b..20259144b420b 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/CertGenUtilsTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/CertGenUtilsTests.java @@ -12,6 +12,7 @@ import org.elasticsearch.common.network.InetAddresses; import org.elasticsearch.common.network.NetworkAddress; import org.elasticsearch.test.ESTestCase; +import org.junit.BeforeClass; import java.math.BigInteger; import java.net.InetAddress; @@ -32,6 +33,11 @@ */ public class CertGenUtilsTests extends ESTestCase { + @BeforeClass + public static void muteInFips(){ + assumeFalse("Can't run in a FIPS JVM", inFipsJvm()); + } + public void testSerialNotRepeated() { int iterations = scaledRandomIntBetween(10, 100); List list = new ArrayList<>(iterations); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/PemUtilsTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/PemUtilsTests.java index 880cb03a64fc5..b82275a883311 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/PemUtilsTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/PemUtilsTests.java @@ -53,6 +53,7 @@ public void testReadPKCS8EcKey() throws Exception { } public void testReadEncryptedPKCS8Key() throws Exception { + assumeFalse("Can't run in a FIPS JVM, PBE KeySpec is not available", inFipsJvm()); Key key = getKeyFromKeystore("RSA"); assertThat(key, notNullValue()); assertThat(key, instanceOf(PrivateKey.class)); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLConfigurationReloaderTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLConfigurationReloaderTests.java index 72cd13471df1f..d8e0b693f7008 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLConfigurationReloaderTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLConfigurationReloaderTests.java @@ -42,7 +42,10 @@ import java.security.PrivilegedActionException; import java.security.PrivilegedExceptionAction; import java.security.UnrecoverableKeyException; +import java.security.cert.Certificate; import java.security.cert.CertificateException; +import java.util.Collections; +import java.util.List; import java.util.concurrent.CountDownLatch; import java.util.function.Consumer; @@ -76,6 +79,7 @@ public void cleanup() throws Exception { * Tests reloading a keystore that is used in the KeyManager of SSLContext */ public void testReloadingKeyStore() throws Exception { + assumeFalse("Can't run in a FIPS JVM", inFipsJvm()); final Path tempDir = createTempDir(); final Path keystorePath = tempDir.resolve("testnode.jks"); final Path updatedKeystorePath = tempDir.resolve("testnode_updated.jks"); @@ -133,12 +137,10 @@ public void testPEMKeyConfigReloading() throws Exception { Path updatedKeyPath = tempDir.resolve("testnode_updated.pem"); Path certPath = tempDir.resolve("testnode.crt"); Path updatedCertPath = tempDir.resolve("testnode_updated.crt"); - final Path clientTruststorePath = tempDir.resolve("testnode.jks"); 
Files.copy(getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem"), keyPath); Files.copy(getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_updated.pem"), updatedKeyPath); Files.copy(getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_updated.crt"), updatedCertPath); Files.copy(getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt"), certPath); - Files.copy(getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks"), clientTruststorePath); MockSecureSettings secureSettings = new MockSecureSettings(); secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode"); final Settings settings = Settings.builder() @@ -150,7 +152,7 @@ public void testPEMKeyConfigReloading() throws Exception { final Environment env = randomBoolean() ? null : TestEnvironment.newEnvironment(Settings.builder().put("path.home", createTempDir()).build()); // Load HTTPClient once. Client uses a keystore containing testnode key/cert as a truststore - try (CloseableHttpClient client = getSSLClient(clientTruststorePath, "testnode")) { + try (CloseableHttpClient client = getSSLClient(Collections.singletonList(certPath))) { final Consumer keyMaterialPreChecks = (context) -> { try (MockWebServer server = new MockWebServer(context, false)) { server.enqueue(new MockResponse().setResponseCode(200).setBody("body")); @@ -190,6 +192,7 @@ public void testPEMKeyConfigReloading() throws Exception { * reloadable SSLContext used in the HTTPClient) and as a KeyStore for the MockWebServer */ public void testReloadingTrustStore() throws Exception { + assumeFalse("Can't run in a FIPS JVM", inFipsJvm()); Path tempDir = createTempDir(); Path trustStorePath = tempDir.resolve("testnode.jks"); Path updatedTruststorePath = tempDir.resolve("testnode_updated.jks"); @@ -240,19 +243,21 @@ public void testReloadingTrustStore() throws Exception { */ public void testReloadingPEMTrustConfig() throws Exception { Path tempDir = createTempDir(); - Path clientCertPath = tempDir.resolve("testnode.crt"); - Path keyStorePath = tempDir.resolve("testnode.jks"); - Files.copy(getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks"), keyStorePath); + Path serverCertPath = tempDir.resolve("testnode.crt"); + Path serverKeyPath = tempDir.resolve("testnode.pem"); + Path updatedCert = tempDir.resolve("updated.crt"); //Our keystore contains two Certificates it can present. One build from the RSA keypair and one build from the EC keypair. EC is // used since it keyManager presents the first one in alias alphabetical order (and testnode_ec comes before testnode_rsa) - Files.copy(getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_ec.crt"), clientCertPath); + Files.copy(getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt"), serverCertPath); + Files.copy(getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem"), serverKeyPath); + Files.copy(getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_updated.crt"), updatedCert); Settings settings = Settings.builder() - .putList("xpack.ssl.certificate_authorities", clientCertPath.toString()) - .put("path.home", createTempDir()) - .build(); + .put("xpack.ssl.certificate_authorities", serverCertPath) + .put("path.home", createTempDir()) + .build(); Environment env = randomBoolean() ? 
null : TestEnvironment.newEnvironment(settings); // Create the MockWebServer once for both pre and post checks - try (MockWebServer server = getSslServer(keyStorePath, "testnode")) { + try (MockWebServer server = getSslServer(serverKeyPath, serverCertPath, "testnode")) { final Consumer trustMaterialPreChecks = (context) -> { try (CloseableHttpClient client = HttpClients.custom().setSSLContext(context).build()) { privilegedConnect(() -> client.execute(new HttpGet("https://localhost:" + server.getPort())).close()); @@ -263,10 +268,7 @@ public void testReloadingPEMTrustConfig() throws Exception { final Runnable modifier = () -> { try { - Path updatedCert = tempDir.resolve("updated.crt"); - Files.copy(getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_updated.crt"), - updatedCert, StandardCopyOption.REPLACE_EXISTING); - atomicMoveIfPossible(updatedCert, clientCertPath); + atomicMoveIfPossible(updatedCert, serverCertPath); } catch (Exception e) { throw new RuntimeException("failed to modify file", e); } @@ -277,7 +279,7 @@ public void testReloadingPEMTrustConfig() throws Exception { try (CloseableHttpClient client = HttpClients.custom().setSSLContext(updatedContext).build()) { SSLHandshakeException sslException = expectThrows(SSLHandshakeException.class, () -> privilegedConnect(() -> client.execute(new HttpGet("https://localhost:" + server.getPort())).close())); - assertThat(sslException.getCause().getMessage(), containsString("PKIX path building failed")); + assertThat(sslException.getCause().getMessage(), containsString("PKIX path validation failed")); } catch (Exception e) { throw new RuntimeException("Error closing CloseableHttpClient", e); } @@ -291,16 +293,17 @@ public void testReloadingPEMTrustConfig() throws Exception { * that is being monitored */ public void testReloadingKeyStoreException() throws Exception { + assumeFalse("Can't run in a FIPS JVM", inFipsJvm()); Path tempDir = createTempDir(); Path keystorePath = tempDir.resolve("testnode.jks"); Files.copy(getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks"), keystorePath); MockSecureSettings secureSettings = new MockSecureSettings(); secureSettings.setString("xpack.ssl.keystore.secure_password", "testnode"); Settings settings = Settings.builder() - .put("xpack.ssl.keystore.path", keystorePath) - .setSecureSettings(secureSettings) - .put("path.home", createTempDir()) - .build(); + .put("xpack.ssl.keystore.path", keystorePath) + .setSecureSettings(secureSettings) + .put("path.home", createTempDir()) + .build(); Environment env = randomBoolean() ? 
null : TestEnvironment.newEnvironment(settings); final SSLService sslService = new SSLService(settings, env); final SSLConfiguration config = sslService.getSSLConfiguration("xpack.ssl"); @@ -336,12 +339,12 @@ public void testReloadingPEMKeyConfigException() throws Exception { MockSecureSettings secureSettings = new MockSecureSettings(); secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode"); Settings settings = Settings.builder() - .put("xpack.ssl.key", keyPath) - .put("xpack.ssl.certificate", certPath) - .putList("xpack.ssl.certificate_authorities", certPath.toString(), clientCertPath.toString()) - .put("path.home", createTempDir()) - .setSecureSettings(secureSettings) - .build(); + .put("xpack.ssl.key", keyPath) + .put("xpack.ssl.certificate", certPath) + .putList("xpack.ssl.certificate_authorities", certPath.toString(), clientCertPath.toString()) + .put("path.home", createTempDir()) + .setSecureSettings(secureSettings) + .build(); Environment env = randomBoolean() ? null : TestEnvironment.newEnvironment(settings); final SSLService sslService = new SSLService(settings, env); final SSLConfiguration config = sslService.getSSLConfiguration("xpack.ssl"); @@ -373,10 +376,10 @@ public void testTrustStoreReloadException() throws Exception { MockSecureSettings secureSettings = new MockSecureSettings(); secureSettings.setString("xpack.ssl.truststore.secure_password", "testnode"); Settings settings = Settings.builder() - .put("xpack.ssl.truststore.path", trustStorePath) - .put("path.home", createTempDir()) - .setSecureSettings(secureSettings) - .build(); + .put("xpack.ssl.truststore.path", trustStorePath) + .put("path.home", createTempDir()) + .setSecureSettings(secureSettings) + .build(); Environment env = randomBoolean() ? null : TestEnvironment.newEnvironment(settings); final SSLService sslService = new SSLService(settings, env); final SSLConfiguration config = sslService.getSSLConfiguration("xpack.ssl"); @@ -482,6 +485,20 @@ private static MockWebServer getSslServer(Path keyStorePath, String keyStorePass return server; } + private static MockWebServer getSslServer(Path keyPath, Path certPath, String password) throws KeyStoreException, CertificateException, + NoSuchAlgorithmException, IOException, KeyManagementException, UnrecoverableKeyException { + KeyStore keyStore = KeyStore.getInstance(KeyStore.getDefaultType()); + keyStore.load(null, password.toCharArray()); + keyStore.setKeyEntry("testnode_ec", PemUtils.readPrivateKey(keyPath, password::toCharArray), password.toCharArray(), + CertParsingUtils.readCertificates(Collections.singletonList(certPath))); + final SSLContext sslContext = new SSLContextBuilder().loadKeyMaterial(keyStore, password.toCharArray()) + .build(); + MockWebServer server = new MockWebServer(sslContext, false); + server.enqueue(new MockResponse().setResponseCode(200).setBody("body")); + server.start(); + return server; + } + private static CloseableHttpClient getSSLClient(Path trustStorePath, String trustStorePass) throws KeyStoreException, NoSuchAlgorithmException, KeyManagementException, IOException, CertificateException { @@ -493,6 +510,23 @@ private static CloseableHttpClient getSSLClient(Path trustStorePath, String trus return HttpClients.custom().setSSLContext(sslContext).build(); } + /** + * Creates a {@link CloseableHttpClient} that only trusts the given certificate(s) + * + * @param trustedCertificatePaths The certificates this client trusts + **/ + private static CloseableHttpClient getSSLClient(List trustedCertificatePaths) throws 
KeyStoreException, + NoSuchAlgorithmException, + KeyManagementException, IOException, CertificateException { + KeyStore trustStore = KeyStore.getInstance(KeyStore.getDefaultType()); + trustStore.load(null, null); + for (Certificate cert : CertParsingUtils.readCertificates(trustedCertificatePaths)) { + trustStore.setCertificateEntry(cert.toString(), cert); + } + final SSLContext sslContext = new SSLContextBuilder().loadTrustMaterial(trustStore, null).build(); + return HttpClients.custom().setSSLContext(sslContext).build(); + } + private static void privilegedConnect(CheckedRunnable runnable) throws Exception { try { AccessController.doPrivileged((PrivilegedExceptionAction) () -> { diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLServiceTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLServiceTests.java index df764bb3f4772..048ad2e8e3692 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLServiceTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/SSLServiceTests.java @@ -72,6 +72,8 @@ public class SSLServiceTests extends ESTestCase { private Path testnodeStore; private String testnodeStoreType; private Path testclientStore; + private Path testnodeCert; + private Path testnodeKey; private Environment env; @Before @@ -80,17 +82,20 @@ public void setup() throws Exception { if (randomBoolean()) { testnodeStore = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks"); // The default is to use JKS. Randomly test with explicit and with the default value. - testnodeStoreType = randomBoolean() ? "jks" : null; + testnodeStoreType = "jks"; } else { testnodeStore = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.p12"); testnodeStoreType = randomBoolean() ? 
"PKCS12" : null; } logger.info("Using [{}] key/truststore [{}]", testnodeStoreType, testnodeStore); + testnodeCert = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt"); + testnodeKey = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem"); testclientStore = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.jks"); env = TestEnvironment.newEnvironment(Settings.builder().put("path.home", createTempDir()).build()); } public void testThatCustomTruststoreCanBeSpecified() throws Exception { + assumeFalse("Can't run in a FIPS JVM", inFipsJvm()); Path testClientStore = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.jks"); MockSecureSettings secureSettings = new MockSecureSettings(); secureSettings.setString("xpack.ssl.truststore.secure_password", "testnode"); @@ -110,7 +115,7 @@ public void testThatCustomTruststoreCanBeSpecified() throws Exception { .setSecureSettings(secureCustomSettings) .build(); - SSLConfiguration configuration = new SSLConfiguration(customTruststoreSettings, globalConfiguration(sslService)); + SSLConfiguration configuration = new SSLConfiguration(customTruststoreSettings, globalConfiguration(sslService)); SSLEngine sslEngineWithTruststore = sslService.createSSLEngine(configuration, null, -1); assertThat(sslEngineWithTruststore, is(not(nullValue()))); @@ -126,10 +131,10 @@ public void testThatCustomTruststoreCanBeSpecified() throws Exception { public void testThatSslContextCachingWorks() throws Exception { MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("xpack.ssl.keystore.secure_password", "testnode"); + secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode"); Settings settings = Settings.builder() - .put("xpack.ssl.keystore.path", testnodeStore) - .put("xpack.ssl.keystore.type", testnodeStoreType) + .put("xpack.ssl.certificate", testnodeCert) + .put("xpack.ssl.key", testnodeKey) .setSecureSettings(secureSettings) .build(); SSLService sslService = new SSLService(settings, env); @@ -145,6 +150,7 @@ public void testThatSslContextCachingWorks() throws Exception { } public void testThatKeyStoreAndKeyCanHaveDifferentPasswords() throws Exception { + assumeFalse("Can't run in a FIPS JVM", inFipsJvm()); Path differentPasswordsStore = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-different-passwords.jks"); MockSecureSettings secureSettings = new MockSecureSettings(); @@ -160,6 +166,7 @@ public void testThatKeyStoreAndKeyCanHaveDifferentPasswords() throws Exception { } public void testIncorrectKeyPasswordThrowsException() throws Exception { + assumeFalse("Can't run in a FIPS JVM", inFipsJvm()); Path differentPasswordsStore = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-different-passwords.jks"); try { @@ -180,12 +187,12 @@ public void testIncorrectKeyPasswordThrowsException() throws Exception { public void testThatSSLv3IsNotEnabled() throws Exception { MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("xpack.ssl.keystore.secure_password", "testnode"); + secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode"); Settings settings = Settings.builder() - .put("xpack.ssl.keystore.path", testnodeStore) - .put("xpack.ssl.keystore.type", testnodeStoreType) - .setSecureSettings(secureSettings) - .build(); + .put("xpack.ssl.certificate", testnodeCert) + 
.put("xpack.ssl.key", testnodeKey) + .setSecureSettings(secureSettings) + .build(); SSLService sslService = new SSLService(settings, env); SSLConfiguration configuration = globalConfiguration(sslService); SSLEngine engine = sslService.createSSLEngine(configuration, null, -1); @@ -214,6 +221,7 @@ public void testThatCreateSSLEngineWithOnlyTruststoreWorks() throws Exception { public void testCreateWithKeystoreIsValidForServer() throws Exception { + assumeFalse("Can't run in a FIPS JVM, JKS keystores can't be used", inFipsJvm()); MockSecureSettings secureSettings = new MockSecureSettings(); secureSettings.setString("xpack.ssl.keystore.secure_password", "testnode"); Settings settings = Settings.builder() @@ -227,6 +235,7 @@ public void testCreateWithKeystoreIsValidForServer() throws Exception { } public void testValidForServerWithFallback() throws Exception { + assumeFalse("Can't run in a FIPS JVM, JKS keystores can't be used", inFipsJvm()); MockSecureSettings secureSettings = new MockSecureSettings(); secureSettings.setString("xpack.ssl.truststore.secure_password", "testnode"); Settings settings = Settings.builder() @@ -251,6 +260,7 @@ public void testValidForServerWithFallback() throws Exception { } public void testGetVerificationMode() throws Exception { + assumeFalse("Can't run in a FIPS JVM, TrustAllConfig is not a SunJSSE TrustManagers", inFipsJvm()); SSLService sslService = new SSLService(Settings.EMPTY, env); assertThat(globalConfiguration(sslService).verificationMode(), is(XPackSettings.VERIFICATION_MODE_DEFAULT)); @@ -273,7 +283,7 @@ public void testIsSSLClientAuthEnabled() throws Exception { Settings settings = Settings.builder() .put("xpack.ssl.client_authentication", "none") .put("xpack.security.transport.ssl.client_authentication", "optional") - .put("transport.profiles.foo.port", "9400-9410") + .put("transport.profiles.foo.port", "9400-9410") .build(); sslService = new SSLService(settings, env); assertFalse(sslService.isSSLClientAuthEnabled(globalConfiguration(sslService))); @@ -325,13 +335,12 @@ public void testCiphersAndInvalidCiphersWork() throws Exception { ciphers.add("foo"); ciphers.add("bar"); MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("xpack.ssl.keystore.secure_password", "testnode"); + secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode"); Settings settings = Settings.builder() - .put("xpack.ssl.keystore.path", testnodeStore) - .put("xpack.ssl.keystore.type", testnodeStoreType) - .setSecureSettings(secureSettings) - .putList("xpack.ssl.ciphers", ciphers.toArray(new String[ciphers.size()])) - .build(); + .put("xpack.ssl.certificate", testnodeCert) + .put("xpack.ssl.key", testnodeKey) + .setSecureSettings(secureSettings) + .build(); SSLService sslService = new SSLService(settings, env); SSLConfiguration configuration = globalConfiguration(sslService); SSLEngine engine = sslService.createSSLEngine(configuration, null, -1); @@ -342,14 +351,14 @@ public void testCiphersAndInvalidCiphersWork() throws Exception { public void testInvalidCiphersOnlyThrowsException() throws Exception { MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("xpack.ssl.keystore.secure_password", "testnode"); - + secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode"); Settings settings = Settings.builder() - .put("xpack.ssl.keystore.path", testnodeStore) - .put("xpack.ssl.keystore.type", testnodeStoreType) - .setSecureSettings(secureSettings) - .putList("xpack.ssl.cipher_suites", new 
String[] { "foo", "bar" }) - .build(); + .put("xpack.ssl.certificate", testnodeCert) + .put("xpack.ssl.key", testnodeKey) + .putList("xpack.ssl.cipher_suites", new String[]{"foo", "bar"}) + .setSecureSettings(secureSettings) + .build(); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> new SSLService(settings, env)); assertThat(e.getMessage(), is("none of the ciphers [foo, bar] are supported by this JVM")); @@ -357,12 +366,12 @@ public void testInvalidCiphersOnlyThrowsException() throws Exception { public void testThatSSLEngineHasCipherSuitesOrderSet() throws Exception { MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("xpack.ssl.keystore.secure_password", "testnode"); + secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode"); Settings settings = Settings.builder() - .put("xpack.ssl.keystore.path", testnodeStore) - .put("xpack.ssl.keystore.type", testnodeStoreType) - .setSecureSettings(secureSettings) - .build(); + .put("xpack.ssl.certificate", testnodeCert) + .put("xpack.ssl.key", testnodeKey) + .setSecureSettings(secureSettings) + .build(); SSLService sslService = new SSLService(settings, env); SSLConfiguration configuration = globalConfiguration(sslService); SSLEngine engine = sslService.createSSLEngine(configuration, null, -1); @@ -372,12 +381,12 @@ public void testThatSSLEngineHasCipherSuitesOrderSet() throws Exception { public void testThatSSLSocketFactoryHasProperCiphersAndProtocols() throws Exception { MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("xpack.ssl.keystore.secure_password", "testnode"); + secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode"); Settings settings = Settings.builder() - .put("xpack.ssl.keystore.path", testnodeStore) - .put("xpack.ssl.keystore.type", testnodeStoreType) - .setSecureSettings(secureSettings) - .build(); + .put("xpack.ssl.certificate", testnodeCert) + .put("xpack.ssl.key", testnodeKey) + .setSecureSettings(secureSettings) + .build(); SSLService sslService = new SSLService(settings, env); SSLConfiguration config = globalConfiguration(sslService); final SSLSocketFactory factory = sslService.sslSocketFactory(config); @@ -397,12 +406,12 @@ public void testThatSSLSocketFactoryHasProperCiphersAndProtocols() throws Except public void testThatSSLEngineHasProperCiphersAndProtocols() throws Exception { MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("xpack.ssl.keystore.secure_password", "testnode"); + secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode"); Settings settings = Settings.builder() - .put("xpack.ssl.keystore.path", testnodeStore) - .put("xpack.ssl.keystore.type", testnodeStoreType) - .setSecureSettings(secureSettings) - .build(); + .put("xpack.ssl.certificate", testnodeCert) + .put("xpack.ssl.key", testnodeKey) + .setSecureSettings(secureSettings) + .build(); SSLService sslService = new SSLService(settings, env); SSLConfiguration configuration = globalConfiguration(sslService); SSLEngine engine = sslService.createSSLEngine(configuration, null, -1); @@ -462,8 +471,8 @@ public void testEmptyTrustManager() throws Exception { assertThat(trustManager.getAcceptedIssuers(), emptyArray()); } - public void testGetConfigurationByContextName() throws Exception { + assumeFalse("Can't run in a FIPS JVM, JKS keystores can't be used", inFipsJvm()); final SSLContext sslContext = SSLContext.getInstance("TLSv1.2"); sslContext.init(null, null, null); final 
String[] cipherSuites = sslContext.getSupportedSSLParameters().getCipherSuites(); @@ -525,7 +534,8 @@ public void testGetConfigurationByContextName() throws Exception { assertThat(mon3Config, sameInstance(global)); } - public void testReadCertificateInformation () throws Exception { + public void testReadCertificateInformation() throws Exception { + assumeFalse("Can't run in a FIPS JVM, JKS keystores can't be used", inFipsJvm()); final Path jksPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks"); final Path p12Path = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.p12"); final Path pemPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/active-directory-ca.crt"); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/StoreKeyConfigTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/StoreKeyConfigTests.java index 6dd9bb2b46eb0..511fd77811365 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/StoreKeyConfigTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ssl/StoreKeyConfigTests.java @@ -22,10 +22,12 @@ public class StoreKeyConfigTests extends ESTestCase { public void testCreateKeyManagerUsingJKS() throws Exception { + assumeFalse("Can't run in a FIPS JVM", inFipsJvm()); tryReadPrivateKeyFromKeyStore("jks", ".jks"); } public void testCreateKeyManagerUsingPKCS12() throws Exception { + assumeFalse("Can't run in a FIPS JVM", inFipsJvm()); tryReadPrivateKeyFromKeyStore("PKCS12", ".p12"); } diff --git a/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/samba4.crt b/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/samba4.crt new file mode 100644 index 0000000000000..59ecbd22e8b23 --- /dev/null +++ b/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/samba4.crt @@ -0,0 +1,22 @@ +-----BEGIN CERTIFICATE----- +MIIDoDCCAoigAwIBAgIUMVGoHuyNTjTFaoRmqFELz75jzDEwDQYJKoZIhvcNAQEL +BQAwNDEyMDAGA1UEAxMpRWxhc3RpYyBDZXJ0aWZpY2F0ZSBUb29sIEF1dG9nZW5l +cmF0ZWQgQ0EwHhcNMTgwMjE1MTc0OTExWhcNMjEwMjE0MTc0OTExWjARMQ8wDQYD +VQQDEwZzYW1iYTQwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtGBwa +n+7JN2vweSUsYh4zPmh8RPIE+nEVjK1lx/rADUBY7UVjfTYC+MVKKiezZe7gYCNT +7JNKazPpgVI9e3ZFKw/UxomLqRuuvn5bTh+1tMs3afY5+GGzi7oPmEbBO3ceg0Hi +rNSTDa1rfroZnRYK8uIeSZacQnAW90plITI7rBBt9jq+W9albFbDybfDgNv+yS/C +rzIsofm4rbFC3SMRYfrT6HvwDhjOmmYKZci5x7tsn0T+3tSiR44Bw5/DgiN5kX3m +/kl9qg1eoYWbCUy1dKmQlb4Nb4uNcxrIugLB3zjBkfhMZ0OHoveKh/lJASTWik9k +xQ9rEYbpsRbuXpsHAgMBAAGjgcwwgckwHQYDVR0OBBYEFJOLa7UXKtLPibgKeFh7 +Kq1+rS0/MG8GA1UdIwRoMGaAFGaNmN5mi9jaMW25MEWYgt+5OkDBoTikNjA0MTIw +MAYDVQQDEylFbGFzdGljIENlcnRpZmljYXRlIFRvb2wgQXV0b2dlbmVyYXRlZCBD +QYIUdwsnIxjgSneHNVKT6JNCCsrQ3T0wLAYDVR0RBCUwI4IJbG9jYWxob3N0hwR/ +AAABhxAAAAAAAAAAAAAAAAAAAAABMAkGA1UdEwQCMAAwDQYJKoZIhvcNAQELBQAD +ggEBAEHqT1WHkcF8DuOgyIBx7wKcUVQ5H1qYYlJ1xgMGrKFFZLUzouLcON7oadEu +HLIJ4Z3AKD3bqWpcls5XJ9MTECGR48tou67x9cXqTV7jR3Rh0H/VGwzwhR85vbpu +o8ielOPL8XAQOfnAFESJii5sfCU4ZwLg+3evmGZdKfhU6rqQtLimgG/Gm96vOJne +y0a/TZTWrfAarithkOHHXSSAhEI5SdW5SlZAytF4AmYqFvafwxe1+NyFwfCRy0Xl +H40WgVsq+z84psU+WyORb3THX5rgB4au9nuMXOqFKAtrJSI/uApncYraaqU28rqB +gYd8XrtjhKOLw+6viqAKu8l7/cs= +-----END CERTIFICATE----- diff --git a/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-ip-only.pem 
b/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-ip-only.pem new file mode 100644 index 0000000000000..63dca9651ed7e --- /dev/null +++ b/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-ip-only.pem @@ -0,0 +1,30 @@ +-----BEGIN RSA PRIVATE KEY----- +Proc-Type: 4,ENCRYPTED +DEK-Info: AES-192-CBC,D097C279AD03E97C34B81B834539C0BB + +rQkO27X8phcP9ydaFArZy4SFDcoCGFsr2TcR0M6Vn8g0ZT9MIrnj2xHCmeMExlKT +MViBQzmupBs0IW7y/ovpEUBgRd4TnSigKNF2XZ3+C6F4rkziTheWOlaUq7PIqOnW +dTmf/WZDLlgms6hzrc7B447lO2FcNUDB6uXkxS1dAhh9T6DFcq9KuW7KJOWt9Dre +75z6rh5nHboUw2i01VqJK+86aL5V3vNK3bjYcbIHKHrgM/+V+hFUNJ6aEepVf+G0 +jzicgEBJwtzOg4MTSqR2z3niNbz9LLBJkH+uR+tN941+JPFmEwsLtkq3OBH7nraG +Srqz/5YcdA45jdZNme+KqorT8mNX/Hx2BsmQYnw+A0ZAqnSWeMcNZgFaAn8OcVxy +d5VS0KFlQD7DPnZi89cyTRScKhcj451NhwOPWIE7w1UpNPVGRj5FBMAtDVaE2Wkc +wuQ0nSwsC3EOvVDMe/fmv2VcoWceh1V9esA2H0n9NWQApGSqz17PPebwQadPX3Y0 +atrbbXT7fYTD3Ij38bmYZiDOluHiXxFchWxVUReq6nHJD3yo8ch2CKpx9XzjElLv +6leUZhlIlq026QxGNVf+OQLTlLXjF8jGbRC31Y4yiHj1I12P0+dic0W8YvUkC5Pp +w7NHMtYG6VHLGkPbCQxzTMS+JU24j/wMIokZWlmaRNDf2DZZAS0asQ/EOG/I1afc +SXyHDQUhi0/8N7CJWE/i1xLrazFKiIkxucxY+WDqGrk5sZnP+cH0cM7Zja253NTy +2B8LZJX360peCD15grkMt1oRtdMvZxF1rS/3BDPuANC59yg5i4eC+P39wkeMf8hu +o8I2Hp5021mT9AWE1Dxf8gSZIJZxIeEgioRvoY1LOEfghe/OXQHR0SXJq9k5GNiA +z4Tz3cfCT3WrHbReY585o2qtbpWu2OeSdxrv8p9kYi8GkevbR+nLq8NaH5WPRD4E +b9RLUz1NfiK8DOW7tk8+gwsNun3hmil7xubw1uhc6a0OGKb9ijMS0rslYW9zeowu +dkROuVwhUhXHOx0ZGWUGauJifERzICWR8Jx48/8DWD4xW3DkIRt5gh3CvzHcdSbv +4VEFSyndWeyNk2Yc6+sX0H35Rngc7gjedCAn8hUBnUq9srfhGKaZ6bahlUt0xsmK +Y3cxcd1or/2S2hONcN4NE2MfB/RRUVeHxdp34RPeW6L8qH/YZFxqt8dUm19QXr0c +CdgSEmVyKOpCPebGJwPwdJEmbxPS/98AjiqOpt83JLUhMeUGhjawXvLzl0YEBJV9 ++6waTV4Xl94aJszyvDeW/+cKIgeso9SeQSN6fLsXgdAVABCZ5yJ+liw6I84G0f2n +D6e51P0JQAL8v28bBACdoB0Qxr9UTV/X8smGTwWobv/KW1BPdvWETsc7TrtWLZ6F +qiZj7mI0h1oeyrC1h1+1oVuUTpy7JICCBloL4ir56lcSWmNZm1GRfar8UhXA7aNw +klAkS6rYHH4gDxhvwd1k/pN1HlCtbmwLyYC/f11+89RnPr0FFW48qMqmwBls63dp +4aAqneUiEow/evE4fBTLhFrgkvfZnjhd41IpzXfMWB5x9SHjrrS4/rjsHXcHUrAh +-----END RSA PRIVATE KEY----- diff --git a/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.cert b/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.crt similarity index 100% rename from x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.cert rename to x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.crt diff --git a/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.pem b/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.pem new file mode 100644 index 0000000000000..b0f7a585d7f9b --- /dev/null +++ b/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.pem @@ -0,0 +1,30 @@ +-----BEGIN RSA PRIVATE KEY----- +Proc-Type: 4,ENCRYPTED +DEK-Info: AES-192-CBC,2F36F79E75ACA7803AF1BC1B70C2360C + +d4/f7dnpPW9DfhGXuGDx7r56BjQ64iNcsQdrC1CTZB363iAvBBeHaoJFaWpQOUmT +WCBdM6YmRuMi6z4sMtm8Vre3i+Mu2Jti0LTDXqH8Y6FMpptxAYoFb9hkM0OvUGDJ +ASLGTa1XKyCzZWOe2IGLtWIS8zkGv/fSXHqDJV8ddq8uLHGZXBxAye5oJUD0654T 
+DasIllbiTlBEYlEJ10OyIAspS47n4LWVE5ggyGdiyIgh5s3lAMPO3+yenDU3/dzj +YF2zHKnfe0Nx2lUQVJRYknQvFCx0WTGH9gNl40FovoOMHyRjEuOHD9PFNvL/TCh6 +9h0jJPWu/SsIiYaIqR0pDgqWdHXHA5Ea3s2+w0YHbv7DqkGXWZdceLUdZtcXCBJk +P8QL9IWa7VE50SS6wV5uXX9tV5lHzMFsNGkwtGcR1UMU5cXYqckFXgoBqDN0fyWe +V5iEknSJ4Na+MHv75rvRat0kv6upu9i5NSpYTc5jLHdWZWsYMZ/ZMiMoLBP+KAPT +DQ3eyph/84BU3DePaQF3Rsp0ZvPxqQ361Zwc4zC5CKbHur1QX8WAY5XlBMfuBpkf +CKr5wgwF+ZpS7zsfUpMPPe9Y1E8TWnhx/DtCVLEslBpr2u/rMaxPp6ev9/Wry7N+ +UFBOwodipBhlfSvLqjc511L+bXRzqXiINuW0eSKUQv0J/G0Ur894kJJ6feDYlskj +JiZjOgOyyKhB+K9AXmkfRdvWUJeweL8pgDuYSyQdQ0zoUCZALEoYK2cBWzti/wep +QPFD5oz8076aXNHKMHLsRmSFuEZb6IN0PtUNVf958EbrtABNIuoufKlKtJsEnUyK +VHIEUxExEgyECiozKnxvhr7RQ9nTQXhNdgtec6jJblYnla/+OWAfHdxtHhBjp5AX +WvLyUhmgrmLNdKd1KSzcXynBHgh0hi0HJXYx31FilwbxsdhwN1LwN/Do4T4qGkUr +InrQC3ZHRuh0yAOPrwRFEWbW5m/PzVP/xYVgFtVWP7w38ftZbaBy5xPmtswn+PH+ +cIMt1Y9PaAlhLNpW/Vfn503T9M+05vu73vbU1xgu/B1kePOqE/WO0cOZl0KdaMmT +wAQBKuI7qTACH+/8g3Uir1YSChLthH+1Gs6h686EP6ZydwXq9GYXXkNmJNJJsnmU +RDjoT0F4XBKvcQdX3EeQYs3Af2yZWFDC59c1Ews2dqMK7vy2tYITbx2yn30DBDAl +xvjp2izzmAgQJEG/RqCYsUHCCEv7wz3tpsSOkFem9IHZpR2h8Rqy88GH9qYOkgwo ++fKSmIgC4RLQXsHuh7RRuyNc2FaWDgRgSxs5V4f9xOSU/ZbUftYWnwEyCwbu3RJp +CIXQFZhzU2t5l1Eh+x40rwpEJDXBEwmOIUO3x1oOqGZPPEQ674uMal5TRjvdOVGD +h665Fpo5Xu9EQwQZHYddeRl/7yw8F6LCxBLgHlngKRHHGDUHlTscLfYRqNh+x3jT +3S8dfaGzlnwdQEx32gyLAV0/nsFnzh1AknFMT8jesIYF7PLiAi67PNyNwRCc7TFp +jpKvzkDRVP72bivTmCyP5aKR0Q2oIrAw51MMinT6R2VaoR7COjoVbqYsRLwkxu+p +-----END RSA PRIVATE KEY----- diff --git a/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_updated.crt b/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_updated.crt index 75aeecd0c3b36..5500e5db4c57f 100644 --- a/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_updated.crt +++ b/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_updated.crt @@ -1,23 +1,34 @@ -----BEGIN CERTIFICATE----- -MIID0zCCArugAwIBAgIJAPqdcmPxQKraMA0GCSqGSIb3DQEBCwUAMEgxDDAKBgNV +MIIF0zCCA7ugAwIBAgIJAJZQBp49qNzmMA0GCSqGSIb3DQEBCwUAMEgxDDAKBgNV BAoTA29yZzEWMBQGA1UECxMNZWxhc3RpY3NlYXJjaDEgMB4GA1UEAxMXRWxhc3Rp -Y3NlYXJjaCBUZXN0IE5vZGUwHhcNMTgwNDMwMTUzODM1WhcNMjIwNDI5MTUzODM1 +Y3NlYXJjaCBUZXN0IE5vZGUwHhcNMTgwNzEyMTUyMjU0WhcNNDMwMzAzMTUyMjU0 WjBIMQwwCgYDVQQKEwNvcmcxFjAUBgNVBAsTDWVsYXN0aWNzZWFyY2gxIDAeBgNV -BAMTF0VsYXN0aWNzZWFyY2ggVGVzdCBOb2RlMIIBIjANBgkqhkiG9w0BAQEFAAOC -AQ8AMIIBCgKCAQEA0wNzfQ9K9WIs9Le6pfiEHdCI97zGZRqOREKb+zFoDllXEEWW -Y5mfXRlTYXbxBiCIV5DDW0yaXlleq62j7/O/6prNUBiYo5sK4Wfj+1UlXLmMikkv -bShm9XzBAXHK20coLJTKOH8QOnFyuVYUvHPCLsoEhQtCHU4zoGaaPmk8w1JU/bfR -+kWmU+x0Ak4rGuRWvMMqg/bu/W/1LmESO5Jsm2CnIyB/22vB08kFn1pO0IIrPQhr -dXqPxddzxc7DuAyyMyzsrLi5msugynEwm1CTBNL9cG45ujNhWzd1rqQe1HF94mEw -RinFe2Ui+vLFpNbh8EesLsy0p18J3QkGQ/0xjQIDAQABo4G/MIG8MAkGA1UdEwQC -MAAwHQYDVR0OBBYEFLUR8gs3uCNdLIwJlkp7SwngpjfhMIGPBgNVHREEgYcwgYSC -CWxvY2FsaG9zdIIVbG9jYWxob3N0LmxvY2FsZG9tYWluggpsb2NhbGhvc3Q0ghds -b2NhbGhvc3Q0LmxvY2FsZG9tYWluNIIKbG9jYWxob3N0NoIXbG9jYWxob3N0Ni5s -b2NhbGRvbWFpbjaHBH8AAAGHEAAAAAAAAAAAAAAAAAAAAAEwDQYJKoZIhvcNAQEL -BQADggEBAB73kmak89jgW1q3wIUMfxisokCsk7XuYqPBpc8D06UZBaArCvW2cuLP -5PLI2bR7fSIhgZmlbawa3adOLZ9n9lOJrWUHTh43kKyiSX8EobE0T/MSGVMfTEFu -c92erLS2gSDk4wLahzm5htd1h1KwzJ5j5kdzlLGaQsnxDDjbu9BiX/otEyCl8n69 -AZKOXoYscl2NxBgr2V6C2frthJFfQ9Gxzg5q6V/P3aIUyV0xsC3Ve1wdfXqNeRfO -nrnFbKRDsxJAJ/JtO3GTtqBrFjods0sepKNxFg13r/QLJnYjYW6t7o91JZj2AFOs 
-1INZnCOAMV3vR/FOwwOT86HDgrwSy2o= +BAMTF0VsYXN0aWNzZWFyY2ggVGVzdCBOb2RlMIICIjANBgkqhkiG9w0BAQEFAAOC +Ag8AMIICCgKCAgEAqLiqAPYBBEBvSJCiQOzV/NPgr0kLQkZGaxQ29PPoKbsKXVp+ +0Uyv9DUitPw+G04h/eOC2wiu2u5sn2rD4Ew09L41qiaViQRY6dHazAkPVq8Chet/ +GWrc+umLJUKLU7MTyC8ozmKjgkyvIuvQ6ndEP31z3JG/j9DsBAX8NWIIJSm9Jaz5 +XUS4fIXwSce141k8zb39+oStyA1qIhv6n59+oNIuuXu1XIJzjQnZCnyAO8/9i7LI +uoL93zu7xNT+ns6Tt7zhweUQEI8FeRdj/k/Of8prbaulFH9oM1g/lnGKLV7E5vh/ +foP1+SRW+MWRjAUA8MExTCtvFhRAb3x6FYzCPX3VERKn8M3m6Rewz/LQ7XG2VzdM +/dw/JSZilAWBZItkY9H1InTeIz9Sw9Z53um7tO5nzq1QQxZijh6n9vzSLoFn6qA5 +SDQl2YycjBE35i/9JBUl0KmVMOfFzpoWLiKzTJMRzNJIeRxJl3MvscbRl8fY0Kco +XQ+w84QMTo+Tn+8Ztfme4uGtHHCTRzrLSo+Hyh8BTm9jJKCaUbMnNW1p2LoxJlq5 +ah+W8QRLaFbWt4xl9TQR0kVnoOyfSGx+3SmTBODa+6Wg038eim8Tw8K+EBvxynWU +sKF1ReL0aOZLi2zqPiQTUtR2y5ue/xjsFWx+tMMGTz3Ci6UoFs9yKqQ++qkCAwEA +AaOBvzCBvDAJBgNVHRMEAjAAMB0GA1UdDgQWBBQLFB6mVsZpiHNMkxOgriN53HZv +ODCBjwYDVR0RBIGHMIGEgglsb2NhbGhvc3SCFWxvY2FsaG9zdC5sb2NhbGRvbWFp +boIKbG9jYWxob3N0NIIXbG9jYWxob3N0NC5sb2NhbGRvbWFpbjSCCmxvY2FsaG9z +dDaCF2xvY2FsaG9zdDYubG9jYWxkb21haW42hwR/AAABhxAAAAAAAAAAAAAAAAAA +AAABMA0GCSqGSIb3DQEBCwUAA4ICAQAQtteI+//qUZ7pNQ5tBuwc24UlSH4dbHFr +79pA0YN/Y7Y/PfU60YeGpPf/QzZWLnRk/8mELWy2Ok7KzhlNjr/nAksiF97LUUnl +6dP15a4ATzheYAtv/47In8ShOrPZ7YG+VAmNWRB8Rj62RuE5khcoL8BpWzCHL8Gx +Kif7lN5SOV06ExGHrBr4Y20QtZsTgkWhAP1UC5EwXxKcchCAIrb60Rx1OShzQrxz +I0RF4mfJ06Jad6qUePkPoUm1lPcFfNvAnJ+hBA210J87NSMFTSsgXT197upnCdhl +9QmKHyk12oVoMtTtf2kc1yTZQ1fnm/V4/PZ7ypyhy+jMsIQSTwEKQr7JYEQoYpdt +yTMHc9L4gPkLTwAbxUx/M1lSuQ4yD3SclBt77CxCM8I9Z76qCVakalb4VhplRbsU +sYs3dgvslFOAJKoOIPeTqm+2L6psxiV9WxA6tvEAk6P/AwjOK6Y4hclnoQDgNI2Q +U+T+ZPi5WRmibs0WxfQv4Vv4MQG/LRT3pwsKWQ76TzgtHKNHgtAKd9Sn0Nylacsh +yhDbvI0jnvwoOlUPUVWerSJLxzazG5mRLi94ZxZMb+7TUUtDBEPtkCquCAA2b4Kh +ykHbmBqhC8Pn9qc9iP0uHuuQETUl/zd7E2ZD3RIj4uYYspovihE5D1Svi5m+3NuS +sCsfHRjmMw== -----END CERTIFICATE----- diff --git a/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_updated.jks b/x-pack/plugin/core/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_updated.jks index 5a5596dae48da6686f4fee825d3c76cdaf0c8b2a..0df3fdba60daf866548fb5f6eb9be1de8ae09659 100644 GIT binary patch delta 3666 zcmV-Y4z2N>5~ClG7=L7s8Wr6D011dNf(e8$4h9M<1_1;CDgqG!0R;dAf(dZsxuawg zT;MDy9y7HOX{8g`m{ODd*LxA?dybEj!?yR!Gd6aI>u_UXnJOOZ;Et+(XKrmUfY`~2 z^U|#G5l(HJA$8OABuzcxy>*jTp41PQ$q^EF!zR1%2z3} z2malaxy|9Fuv1F;;nr!jvFV_(W_ipY>sablBHx_Q6AKM+6z>{y`{hKpMtF8DC-gJO zYN$2{C1S{ThJR&dZ68*PGaFF)xsC_b?qi#~nrpyOM}USTW@gzTs7fJA{r@PRJDIT2 z9%6%+U0+8By{3Kp8Y+nZ^^I{~!q{Tk64Y5R=;`RY-JKmnU%-!-WK&NK%F^XpK<{}- zI|8K6AI!2mq2GJ5;J2j`>kbfbL*1Nn00LgYvf`aoP=7Ed+nOt3!sm!)MqEq1%V0y9 z=z*Jd;0Rew74tgx`An@9b~fF!-D6ijllDb;byrW>{l=g+q8g4tX9YD4QPB!L;~B7R z3v{0V*Z)|Juh)#`CO@Wo_T09%QaP_?XT&?iG{Lpx%G3aoHjW>$pQg^e_@e|-IBt(B zwfbrew|^B4q@5EX4*z@d!ART}cbPiYHQ;{2oa#8+5uzuI{oE`L%SIE38%!X0H~-*H>tA3)c^07K{gc1oBwTWWuX%rt z(S_}ar<{6bG~+VzYnsy+e0$GNYIQ&&_r9AkFs zvc$ost=mEggr-2lz#iIZp@_dB!2WqNV}B0C5^C~p-L9I4C)=|S&i;bG!sU|cK_ZQR zB#6lu$gl3aJ)O}=0B}0ZT3UT&Ds#2acKRO%f#VIeD>aE@M)8MbIyk<2+N4CBLBV-o zGvqZaAPtcdT_7^ncY^;|89BmD8)UAQCa|vv$EO`Ly?WOg98^)yH0AGh(EtvcuYb3y ztQHArF9H@{j)x&cCKeD*6sXV}w}J`#^Vi^Rq`I$v_}*zMuH4IpoT*B1Y$rN9?|2b3Mfeyx_*Ks#eK1(enM zOo(H~$mp=Y!FnEuw0Cglet)lx)4%?3kg^`ReTs(1dPfC0n$QkQi7K0$40T;uL_jtP zoL&c{t)-{N5h2bwY&g6wkZeUEjK7bOtO><8fTl%UTHTn|N)C8cq!Hg_k zYZE;s87kPvm7S!28UO|D$3Rsl&H6K%`GSsB-LQ&fC0<<+M(ESAz~OEzvld|en`v7a zmSAx}i!fgacnQb=e}4iiPdyD?E$-V3ax$`IJ_$ne_1Rddj(~k_pu}f`+flOYP#i=n 
zIWc3vi|Ku`IOE{$yOyCtiThph(-5|zrG>lTC_NlMkJi|m=!wK*=U3vFdse>M6l8)T z+x-_E4~^^BYiH2|zu%mxW|?XS)}%aKZcT8K-CJjpP)ERF4u5IzZXeq4Bwja`GN;K4 z_W-e*SE#jZak_UoOfB6JEG{R%MXoraZOY})tAq!_UwQ zTkYT7%QPB6(aen}+|#NTl%_31+&K8&EgY_@LtM|mA5<~6zvcAY84Tzti_80005~}) zFL?La-5fFC|5{iCfvVf3t6ib8l} zmQ=VVe8`cL=?ZxBLWb`#2fR(~5>=d}g0c-_0A{ZSTufH(%c26YAh_$@9;bp;r)@&0EPLNv9~kKk!< z`n{_RuEn$b_rwH9S?&`u07acEk@T)iF2)UTXqrZeVdY|w*1H!2gsum@30$*qN5?eG zn_$%^<*K2#cikKI;rdxp9F`9rm16Ft_F{#5Mt_~DdoS7`=1$0t(Fw>drhdAO#+^D8 zDARnVyMQ0572y~G@oAd#{)fBs_=XwJ{Ean>4Z`jaO-Sc%fp%11e?*_8L}s*^suSXl zX5tS<5(g1w7GdJaPBd!HFG7$N?vW1DP(d&J(SqUDtqo@a$m%_7O_trfkyR$aZ_~A9 zsY`bmh%d|On3u?6Y&q|Parc9$w06M?1zZU4Jk2dP#U3&n{4xLl00966SS~d%IRF3! z*D!(w(=dVqyPyLC0RjRE0G3b&o;|4C=95YVU>P?tGBGtWGBq?>7Y#HsFf%YSF*PzW zH8fh2Sp`dff&wBi4F(A+hDe6@4FLfG1potr0uKN%f&vNxf&u{msJN;C_5lPyZ%B}$ zKCxKE2@h7U0)^{;8EV}6>5C;oi&L|I6UZ+p&SIm2Os^vA(CT+YKYerD zk-v}7>;wh;HDU-QDZM4E`CUY~e1-5y$F<{GJk7oR`h=~>4Qe7A`k$YEpwce6d$nAG zbBzhv3VeV&&;5(C$hw04-#fd+)c&5%lefI#!Q~JTj|F)bWByOhf68lZtECiwXfs$p zmT`)IEndXt_@+&kB7=10ZwmYB%qp6vnMreY1q0kCR({_AC~b1 zOK4Wsw~S@=G!fE8XQ1q#NNj%HDU$@?+WV!T(|;a{Zxh49z7QMn%5{{mp>;*#^l0WJ zON(skJ|q)T)OO38p8pu^6>NUA!v;@1!i%LS7SD1jq(1tolP(7#6blp{rdGyjh;vMn z6QHgmdE9nyIFn-sqkjSc01&pVK{ZtE)Fkcd!au9$bw))W=bvY<=LcDV$L`C9f$W|wa}kCH6sO0DQuA!{_#=J&64-9_x@1H6{NydlP0u-4Y|><`}Y?FNzU44^*#< knZDfHk{hrq9~~IxGnDbwQWQ%u!*BHQXGMxm(5Wer1$FAwC{R>yf*C`5JzgPUBD~`nWv0t-)Y?Wi2!^(I#vOeMy;98d z727@B@6v*%VB&Wh5jRfuY`19Iy^TTA1KRgMWbNrD`t+{u1njo{a#&`fav-TeV^l1# zX3|hGS$|~G(yVB>LQ3OkEDMxk)Ju+yrq@dnQ9V~>a-q~t6JlR)lFLn2o+)mcTNPGg z(?ws(l&sM(d)1tM@Lx|AJ^?)>8jkn}A#UgA-WsO=4N+sm&hKhn?i%`ZT1q{LoYv5X zX8Cj>l+o>G;cG+1ooK0wA0c%_v@kzt9|tee`dSL773#+Xy>-&qo?g1z*|0%r@{1lp zUfSRy%AmLR{gNd{PRlq5e!ykO|oPQ19u-AK{)#Uxw?h9J>3y_qcW|*0HvViD_ zaJW3^H)4(~-q;!h@-EY~WgoJ9k;GUSm(^45$~DvF{snY?7{8kxMc&auYNB2+j7}WU z2a8O=qWII&fELe)3t<$uXHjRj1-&@=a~^;1N;hjR<}de)`Z9GE5ozfXwezJGiJ z!V@>8T1l4&lCe7+;*ZyE5JqZmcb^x1!+#y>#3J*a<+va` z(`&#{J&AuwN8E65W91Xa;!!h$lYhLtzO4LQZuHJ%*0cTu-%HTR)VXH396we1ys>_zWI;+jY1Hn0coz%aBPI!(WX8Li#ec|c@TGZ z5vb_^ZBBk&g1vG1U^Ni{9M;gCabawlKlWQLGx2`7xbNObWA-?~?9rkQk350VjUMfX zx{n51PVx))F~e=&DjApn000311z0XMFgXAK1J^Ku1Jf{q0=u9C0s#U72>|+?a%1s8 zs@jv#31AsCGcYkVGdMFfS{Ds6GB7kUIWaXeI5Ra`livwTe}VxbFbxI?Duzgg_YDC7 z0R;d9f&mWzFoFRJ0)hbn0Mi3=eGf|YVl4Exx~2Gp9ngsPyvAi3jzmJ6`!Q$^SyvE6 zmSdTpT^UnhcJT%vh*yxqTTGf>SzfEHqwn*->YB|^7?`7*3gKtt`&A`exr~ZQFKsAh z^?bnramw3Ae<&=J$~b=zI&pHjRusH*!Y;}Lg$qI*PBWlpnm%bf!%|fJx6%4Vrc>;6 z0!}L$#m>XH z46-vU?5w!Cn#-WdaWI=ulLXTJaBexeGhtgdb*`iyBGhqx;$bjGDaCtbBKq>hq}Ji^ zN31T)w5MMQ-3bOm{V|P`vwGr|QH@G8REQ|@3N_$HQ;HEd>lj#hje*pmi9`}-F zr1RL|TUxikg$#cftfD}yleO+*qrs)m1Jk7$1)wVm^|o^2&*bvR*|d9oA|Zj9rERPl z-KS11pJ(<{iLGS^P98UqtfEPO1fj7sPxBHPQy)x0Zgbt9thBa)Amrl$+J`y0hSzn6 zQn1XPW9CP5l(CvZ$?*(0f7`pzVqf|#6CkDXe!T&bj$Vc=a$SwY7%SOcg4z1*gpprE z(Xq}BYUy9k-J%r9T`{mN)q7kWeR_>~7tWr!#cZTQvl2ij@@+eDleVC17CIfXOCF@7 zaTX1Cuk;Hhb|YbKt?rL?C7AXAQ>@g3S)3z)F;6$;*9Do?=k4RF&WGd zaFOhAJt9X68VocpIAAqvCZz}Py_P!~frq@5l-YsI!A*(e4bO&> zzC*GP9QIY9>VaPcoS|_msv$M})wFTtXQBR2Nr=K7>e`d$!^xP2Bu=|6cF~=M7l_eg zuY#)9EfFH?#myV>6Z%kBD*s3|?V7!Ja56|iPO;U{iF!K;&{m5Fi!F9LDrggi^tA<= z#Zy+o2uULNO{yWS=`!&Yl%kiKe20H~YZ1Zm?oyQx)-hM(Bq&XMm--W#aR;s(tTt)b zrimFOPvrNgYy=cy6dzg3p<5Yx9)gB!ld!D7R#5$=)lNQtUb|0rLJCuD;Z$hPMM}ga zdwjQgL9(*1Ig70{Di!FC4gAa@f{T_dR^^?rrW;BHf{Uj?bs$lDa(C*b6Dxm{_je-j z6YP7mTl6AEf9(n`WImQ*sgoM;CVuW~cgU?qsPm(P-vJq?!&!OLStbey#niEU*~^4% zIk!M#Dx0d4H=#pc((HKgE{48p3f}^(n7N+KT>5n9TP*DI1J5Kl)? 
zJsG6oRA`8I@ne9Lw?s$>+_QfrED&R8L6!r$Rk<}$K^l3p6Pd=(JRn%_8WmUCv!Bbp0!7w|hPGOJrf2Vj5PuLh&q6?YjiSf)N0$3$=-err52f1^pSDDAK!wOJ^F%7UMlsxs9|+RwAe@_RDeN7` zh|^^=;yF)?4j%`xh(3%~k6|yut&oAqU8j8syJ8$tD6dM(OJt-V7jQZlHqHVCufBEB zAHk@-ZPNctNn$W<#{z%+{hPny{ET%Xv^S;Y`nFk&`ijWE@o}l z>r{nd-7iv`x4$N!d4JfR0ru)_u0m8aUvhNfkWrrRT=qSOVvZ*Rqa*yv4S6(LrNFsN zf@H+aFO{=w97nHM=gb{}a8ykz%kLSb!NpP0-iH~$#yLtg0C@uwcg*N`-*94ayG_9+(e-WCZt#d{nN0%k2_Z#tCT(6|E7 z=F&Byhjdzj>UTyl&AWOG4JYlxX1T`@%A7jtp^kZAN<4oT-?T9djtiyoef!bQxyb5y z@P11!U2Y0;GqD*=wG9JOrOSXhDe>atgv04tG})xCzCH+#RC*T`*7ypymOY)^q7n`t zhOTp<*3bip7y0Yf!-rpeVeuc7ngeQIpW2x$!d5BSrAzUqy7(u#~p2uB$KxfB^bcnEASq3-EV{$$8_SC$=V zMpUUo`rEM5+*6I?bW8BeG|C3!sF&is7 znD~DVJS*g37aIetC*t93C`1rj_WsV^lqzqNF%K)*m9<#Ld$x~=RU0>f7vx0<&@8mt z1VJpw3yVwFdgK{B)lcc_6+b(_ZdB{yFbHcFC-54ISHgD?o|tpdVxTT55_KvEF;ajq zf(hUT2`Yw2hW8Bt2LYgh3DE?C3C%Eq3CVvjf(gY23o3?4hW8Bt3)al{q;l|(MG*llr~lY}!J^hk4E@j%(?B~h=^!z4BFi^ap}|B1BdE`opj zy}W7Qnr2HuFii;aXTwC6vy73GKkQEhYEATr@X%^r$N>+^$8m*?H zN%a*TRc#^=4^dxBp*h9<^zCFfWE+s*7IoN3WkiU2`Tv~fmlx3Ou&aXZ8C{1?n6m0& zEGZ#g0f>7#r{eI?)g%-C+;x90jG~~@M8aCFa={~HIM%Or6zO{MD=F+idoO>KBKM8( z)8W37vDxqZ5hDPE-fJ1$inQmy>H!S>1c32z?T=zFa`Fm9kz(sZ$lAzWno?<{hZ^FEV6vcAy~$b`rd*=*;`);y zQr@S63=Qh!J86KzIoqkBg&=e1uGZ4a@@Digt!7)`bsj*1-n)73f2UwvQ(pg zx)tulxh}es>isucSg+ZLgY3QknFY6mWAXmCvqW)N`oYyE&L=~HwT6Goo=cCrOi;k% z+ECD84K`RTNhSa2Ab)Ybr2;p_9sRQ&xv{;jWE6TNCw4AepD6QPkCLKC9?ouO4;c#O z)sjC)M#{=8MX0s^h6a5TV^tyx-xp5Tg!ySAZfq7A2uhpUS7d~f+;%GT;XihA=s;jy ztJA`RY&3)3g0tOeo|=D;WPu3~{{e#J&~1ABYd;#nEjj+C;U{K}QGpy3X!Lcf9S=0Z zlh?zjE|bk&h$)n1luZ5!pO{gDlsGAyAw1HQyljz^OHbVw+$n0A8I>t&3F54TO{E1Z zBs=fhtPS2H` z;<22am^%kA56<|&l&Qs=g5wj#+d{DXzLk+@;kE!KPN8~KRtNt03mHML+$2QX;?2Lv zS*vy@CSac?RYf6VtCCc4Dg zt9@!OHkN5Xn2qR4RlqTEco84x-JNs8nK?lSbRR(BSRkHA?uOX4doz6B#6OQDx~&D3c9s~`GO6&S#ZD(X+?0g!`GJVP?V z{=FsoCgnHA7RYH&mX+@dSpP~zAX-xxZdJJVC4;y*B z`?=QO2E)$xq+_q}9tEv)Ft_866r%*SL*A5O-Uxr^MR-bxavh`@s}X2#qNA#m>Sg6} zk?`RFVuba&+FlP4If!xV9OykP<9-cGuu3U+dJ&sg>bLCI`?loZi%x2IpCDa>5|?2i z3Z0}+{xHdoPu~dWGNj7Q8$dNF(mF#4AmN1W7KL5>E>IyL1=@DTVctHiF*oucAdQog zT8V%1hJ*(r<6?ZlVdc%}=ifR1Q#LSjF6&M+^05Pn^<@crp{Y=4;cVhE>FV3paBXJ# z#|qn&!kT`Hw^xxGItq*w2VxNtj%{H6CUu@@mDWf*FG`TmiZ9SYSKu7YY(Nf zc*J5uCG8DA1Rk2H@0n)-_n3m+dcmcRV{7sh)m)kL>_3|eXax529MT{HJ<_XFtat*G zr9l1fy=FZU!GL}#Jl@ZF^YJkq%I_zqj+Iz2ZKqmacBc|jV|Io7#Ip*mO>LVVdE}$a zvPn=?8G>k$IlSTOvnq6HdR+0Gr3c-2}x-yV<$ zpv~ldu(RC;4~TwuD~(Mwn&Mb@6p~3W+>YZ8+|Ttu4is%x|HlLHNWtO8?mNn6g+DqD zxN+$3BlfI}BZMu3-R!n{Oi$HawlMr}cX?_foI7}}g&K}A^n9C?9j?TpW;uU{iY}Bl zE=3$Fkr=qK@5lJ$wMJp#y$2F{^wq6maq+O+CZNsmQv;^>LTF2jw}QCD`j+Z^O6W)!`^pKc%J_2 zd8`u!AEzaFoSyG&=`MnXOVZ+?S-X5 zdq(G!buUn+ix2+6II$SrC&HrIAcfaxc#gT#PNyWH4P;>M55)r>sJ1I02+)Y76R{@e z^hb0%Rt?G3*#0%+;88cVWcO`0QAB@f_KRO`qIbYs)vGHp*eU$eT9xF5SjQJ$R!tWT ze_?zb0`&~sk#CD09|j(dH_?~Ax^g&F~*q(?1heY8m6L zoj3pW?8QZWK=_)jALDC_8E1gl_Zm+v_=#X#8mFY5$Nq;N?3;!#&f&HyWriAZm9>A^ zu4X=MvQqHXji(DoH-wAE?;-Kq5*i^ldLTsVH$YB``qnzKsH4FuNuZ-Cc!o-Sm+dWO zwf1Tghpxy^dupjE`EO|56QzW#E6xmGEd%kNF9iTXOWdJUE>!G5lnIY!X910o`N8aC zbImUmW%cA?MqfYMAJxb)bK++d&sKlBuMF%0!8HFcGbD?V)!Q*Ho8VBO`d=9#)Su5P zh29%wDdXo*A9w-d^a+|)idko?9V;xz#A*s(myNvTr@*J}lB0a0a08RjQwT%Yv5nZVS2kZ1N0w#P2>8PI>KD^jvx(43ZHf#07W_O#UsF2;}?oe<9EVcDe)ntXY1 z>$KWxRNRN`M7b!lhWeICsJ8k@OT2HYLU0(m#_flwHB8NEu3hw}@W)h8s#Qhy)`uOa z`!ts4&s&?%VPj!UJA45O4EV!%Py{)Jd0dpthdDBbZZ?1fvFgFqz?FYwvyET_5~@lK zqVaNgILK7FQnnVjxh6x`7nua!WmW!xtT{66?7UVO9gS+i z3*wdRN~YYqCfOW&0mFYa8sr|+?E5W3WxFhumUAZx8E{(RUaes6%uCHmqk_6e++{I^ zi6@SAhMd&mb48(sAhj;5nn4L4k{_FIv)?cvJqyHv5r!xm6}aoNO+}IGDAs-)5eKwI zz)Y9Bdl6%?ltaLp!))(_M-VVOxINC2UPISoQgXk3=@t@`ktu)UvCT)XW_0Egt7A(d 
zxVDRp0_)5nl!W7xc422~?Yf+gtQ9Kek5fjba2^lLV@;qqg0mCd0WG{NO13!|n~%q+ zpYKoPV@&srkY(!OUEKx17G>*wvj1$W6~~@QB%3>yOz|R|$zZu>l`Sfj# zSIX(=Qq}lby~%#Mv9N6WiX!f#}zMd@sRTf)MsCe$80o>*3FZYP|6~)8#CWORO38Uw? zr^QofK{_H!-YKAU?!CGze~6W@TS3uAitUrW(UyNWXK>T-k+`a5NXmm81Z2G;){uyUT&M(n3th(Qka=XDxk&pYxf%Z%3I+`eJ`w(5 zp!9z|*lQJwj`zqH&^}7VSIXi=if(fTZXSt*veIIzLE|;p7~OAJ2qvmL<&?JaPMWn$c1zq?QNTY*qE>$~ z4qf{FUndBaZ4~SrPLEEMhAB&qbLG%kM??jxuhX74+NiRmXx& zqi7b}b#cC4VvIU8Z~?FL;`r_f2j5GmpMaHlVM`q~n&Cj%S(q0>cyzKWwn&XM(g6;V zF54IyDa`q1dSV{!u^Z`D&G>aW5rBW9V$erF!nqwBP({Kt22^$gumsu9(SIu#8`i8` zxT)u5UHvNi=-ZH`_)nO|BU@rgUfyM_A#j+-020pe4$$|-?zjEm`(X~`RoHj|Oe@D+ z65^ZJoxmESb6jG3?^uUsgbhi4qAg9wrv_k#7+uI@`&73inHHx<*y%ZGV+1Bcv_w(oGpK*Kt~6(w!47Nv%$@`iBOYZrk}Zvh$Xkak?XC87v@OB zE9PtE`(%%4QkC8KfnjAK0WfLQ4=(>G6wV1>;Tdm;40eBx!zwvjlM^}Mb{E2_*$vp7 zd8l{m!O5XLr7?O`X!{-oV75NWP!f8bj&XY1WMRfa+U{Kz trustedCertificates) { + addSSLSettingsForPEMFiles(builder, "", keyPath, password, certificatePath, trustedCertificates, true, true, true); + } + private static void addSSLSettingsForPEMFiles(Settings.Builder builder, String prefix, String keyPath, String password, String certificatePath, List trustedCertificates, boolean sslEnabled, boolean hostnameVerificationEnabled, boolean transportClient) { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SettingsFilterTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SettingsFilterTests.java index 6447be7e69cbe..1886dd4249b14 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SettingsFilterTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SettingsFilterTests.java @@ -60,13 +60,16 @@ public void testFiltering() throws Exception { // pki filtering configureUnfilteredSetting("xpack.security.authc.realms.pki1.type", "pki"); configureUnfilteredSetting("xpack.security.authc.realms.pki1.order", "0"); - configureFilteredSetting("xpack.security.authc.realms.pki1.truststore.path", + if (inFipsJvm() == false) { + configureFilteredSetting("xpack.security.authc.realms.pki1.truststore.path", getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/truststore-testnode-only.jks").toString()); + configureFilteredSetting("xpack.ssl.keystore.path", + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks").toString()); + } configureSecureSetting("xpack.security.authc.realms.pki1.truststore.secure_password", "truststore-testnode-only"); configureFilteredSetting("xpack.security.authc.realms.pki1.truststore.algorithm", "SunX509"); - configureFilteredSetting("xpack.ssl.keystore.path", - getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks").toString()); + configureFilteredSetting("xpack.ssl.cipher_suites", Strings.arrayToCommaDelimitedString(XPackSettings.DEFAULT_CIPHERS.toArray())); configureFilteredSetting("xpack.ssl.supported_protocols", randomFrom("TLSv1", "TLSv1.1", "TLSv1.2")); @@ -78,8 +81,10 @@ public void testFiltering() throws Exception { // client profile configureUnfilteredSetting("transport.profiles.client.port", "9500-9600"); - configureFilteredSetting("transport.profiles.client.xpack.security.ssl.keystore.path", + if (inFipsJvm() == false) { + configureFilteredSetting("transport.profiles.client.xpack.security.ssl.keystore.path", getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks").toString()); + } configureFilteredSetting("transport.profiles.client.xpack.security.ssl.cipher_suites", 
Strings.arrayToCommaDelimitedString(XPackSettings.DEFAULT_CIPHERS.toArray())); configureFilteredSetting("transport.profiles.client.xpack.security.ssl.supported_protocols", diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/PkiRealmBootstrapCheckTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/PkiRealmBootstrapCheckTests.java index f9b1be65736e1..6966b7edf67d8 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/PkiRealmBootstrapCheckTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/PkiRealmBootstrapCheckTests.java @@ -100,16 +100,18 @@ public void testBootstrapCheckWithDisabledRealm() throws Exception { public void testBootstrapCheckWithClosedSecuredSetting() throws Exception { final boolean expectFail = randomBoolean(); final MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("xpack.security.http.ssl.keystore.secure_password", "testnode"); + secureSettings.setString("xpack.security.http.ssl.secure_key_passphrase", "testnode"); Settings settings = Settings.builder() - .put("xpack.security.authc.realms.test_pki.type", PkiRealmSettings.TYPE) - .put("xpack.security.http.ssl.enabled", true) - .put("xpack.security.http.ssl.client_authentication", expectFail ? "none" : "optional") - .put("xpack.security.http.ssl.keystore.path", - getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks")) - .put("path.home", createTempDir()) - .setSecureSettings(secureSettings) - .build(); + .put("xpack.security.authc.realms.test_pki.type", PkiRealmSettings.TYPE) + .put("xpack.security.http.ssl.enabled", true) + .put("xpack.security.http.ssl.client_authentication", expectFail ? 
"none" : "optional") + .put("xpack.security.http.ssl.key", + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem")) + .put("xpack.security.http.ssl.certificate", + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")) + .put("path.home", createTempDir()) + .setSecureSettings(secureSettings) + .build(); Environment env = TestEnvironment.newEnvironment(settings); final PkiRealmBootstrapCheck check = new PkiRealmBootstrapCheck(new SSLService(settings, env)); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeMigrateToolTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeMigrateToolTests.java index 14b0a58419a22..212ee7ea499ec 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeMigrateToolTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ESNativeMigrateToolTests.java @@ -20,6 +20,7 @@ import java.nio.charset.StandardCharsets; import java.nio.file.Path; +import java.util.Arrays; import java.util.HashSet; import java.util.Set; @@ -92,8 +93,12 @@ public void testRetrieveUsers() throws Exception { Settings.Builder builder = Settings.builder() .put("path.home", home) .put("path.conf", conf.toString()); - SecuritySettingsSource.addSSLSettingsForStore(builder, - "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks", "testnode"); + SecuritySettingsSource.addSSLSettingsForPEMFiles( + builder, + "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem", + "testnode", + "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt", + Arrays.asList("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")); Settings settings = builder.build(); logger.error("--> retrieving users using URL: {}, home: {}", url, home); @@ -134,8 +139,11 @@ public void testRetrieveRoles() throws Exception { String url = getHttpURL(); ESNativeRealmMigrateTool.MigrateUserOrRoles muor = new ESNativeRealmMigrateTool.MigrateUserOrRoles(); Settings.Builder builder = Settings.builder().put("path.home", home); - SecuritySettingsSource.addSSLSettingsForStore(builder, - "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.jks", "testclient"); + SecuritySettingsSource.addSSLSettingsForPEMFiles(builder, + "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.pem", + "testclient", + "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.crt", + Arrays.asList("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")); Settings settings = builder.build(); logger.error("--> retrieving roles using URL: {}, home: {}", url, home); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/tool/CommandLineHttpClientTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/tool/CommandLineHttpClientTests.java index 0d689adcdf594..9b8c3878a038d 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/tool/CommandLineHttpClientTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/tool/CommandLineHttpClientTests.java @@ -49,20 +49,23 @@ public void shutdown() throws Exception { } public void 
testCommandLineHttpClientCanExecuteAndReturnCorrectResultUsingSSLSettings() throws Exception { - Path resource = getDataPath("/org/elasticsearch/xpack/security/keystore/testnode.jks"); + Path certPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt"); MockSecureSettings secureSettings = new MockSecureSettings(); Settings settings; if (randomBoolean()) { // with http ssl settings secureSettings.setString("xpack.security.http.ssl.truststore.secure_password", "testnode"); - settings = Settings.builder().put("xpack.security.http.ssl.truststore.path", resource.toString()) + settings = Settings.builder().put("xpack.security.http.ssl.certificate_authorities", certPath.toString()) .put("xpack.security.http.ssl.verification_mode", VerificationMode.CERTIFICATE).setSecureSettings(secureSettings) .build(); } else { // with global settings secureSettings.setString("xpack.ssl.truststore.secure_password", "testnode"); - settings = Settings.builder().put("xpack.ssl.truststore.path", resource.toString()) - .put("xpack.ssl.verification_mode", VerificationMode.CERTIFICATE).setSecureSettings(secureSettings).build(); + settings = Settings.builder() + .put("xpack.ssl.certificate_authorities", certPath.toString()) + .put("xpack.ssl.verification_mode", VerificationMode.CERTIFICATE) + .setSecureSettings(secureSettings) + .build(); } CommandLineHttpClient client = new CommandLineHttpClient(settings, environment); HttpResponse httpResponse = client.execute("GET", new URL("https://localhost:" + webServer.getPort() + "/test"), "u1", @@ -74,11 +77,15 @@ public void testCommandLineHttpClientCanExecuteAndReturnCorrectResultUsingSSLSet } private MockWebServer createMockWebServer() { - Path resource = getDataPath("/org/elasticsearch/xpack/security/keystore/testnode.jks"); + Path certPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt"); + Path keyPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem"); MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("xpack.ssl.keystore.secure_password", "testnode"); - Settings settings = - Settings.builder().put("xpack.ssl.keystore.path", resource.toString()).setSecureSettings(secureSettings).build(); + secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode"); + Settings settings = Settings.builder() + .put("xpack.ssl.key", keyPath.toString()) + .put("xpack.ssl.certificate", certPath.toString()) + .setSecureSettings(secureSettings) + .build(); TestsSSLService sslService = new TestsSSLService(settings, environment); return new MockWebServer(sslService.sslContext(), false); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/GroupsResolverTestCase.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/GroupsResolverTestCase.java index 26cd513ec78e5..23010e400a52b 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/GroupsResolverTestCase.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/GroupsResolverTestCase.java @@ -34,8 +34,8 @@ public abstract class GroupsResolverTestCase extends ESTestCase { @Before public void setUpLdapConnection() throws Exception { - Path truststore = getDataPath(trustPath()); - this.ldapConnection = LdapTestUtils.openConnection(ldapUrl(), bindDN(), bindPassword(), truststore); + Path trustPath = getDataPath(trustPath()); + 
this.ldapConnection = LdapTestUtils.openConnection(ldapUrl(), bindDN(), bindPassword(), trustPath); } @After diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapTestUtils.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapTestUtils.java index 8bdfd02d2fcc5..966f2e3f5492d 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapTestUtils.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapTestUtils.java @@ -67,6 +67,6 @@ public static LDAPConnection openConnection(String url, String bindDN, String bi sslConfiguration = sslService.getSSLConfiguration("xpack.security.authc.realms.foo.ssl"); } return LdapUtils.privilegedConnect(() -> new LDAPConnection(sslService.sslSocketFactory(sslConfiguration), options, - ldapurl.getHost(), ldapurl.getPort(), bindDN, bindPassword)); + ldapurl.getHost(), ldapurl.getPort(), bindDN, bindPassword)); } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapUserSearchSessionFactoryTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapUserSearchSessionFactoryTests.java index 9d8fd1544f5a6..19b0d4e71bb8a 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapUserSearchSessionFactoryTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/ldap/LdapUserSearchSessionFactoryTests.java @@ -54,7 +54,7 @@ public class LdapUserSearchSessionFactoryTests extends LdapTestCase { @Before public void init() throws Exception { - Path keystore = getDataPath("support/ADtrust.jks"); + Path certPath = getDataPath("support/smb_ca.crt"); Environment env = TestEnvironment.newEnvironment(Settings.builder().put("path.home", createTempDir()).build()); /* * Prior to each test we reinitialize the socket factory with a new SSLService so that we get a new SSLContext. 
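The hunk below makes the substitution this series applies throughout: trust is configured from a PEM CA certificate via xpack.ssl.certificate_authorities instead of a password-protected JKS truststore, which is what lets the test run on a FIPS JVM. A minimal sketch of the resulting setup, assuming the ESTestCase helpers (getDataPath, createTempDir) and the SSLService constructor used everywhere in these tests; the CA path is the one the diff switches to:

    // Trust-only SSL configuration: a PEM CA bundle, no truststore and no secure password.
    Path caPath = getDataPath("support/smb_ca.crt");
    Settings globalSettings = Settings.builder()
            .put("path.home", createTempDir())
            .put("xpack.ssl.certificate_authorities", caPath)
            .build();
    Environment env = TestEnvironment.newEnvironment(globalSettings);
    SSLService sslService = new SSLService(globalSettings, env); // trusts exactly that CA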
@@ -63,10 +63,9 @@ public void init() throws Exception { */ globalSettings = Settings.builder() - .put("path.home", createTempDir()) - .put("xpack.ssl.truststore.path", keystore) - .setSecureSettings(newSecureSettings("xpack.ssl.truststore.secure_password", "changeit")) - .build(); + .put("path.home", createTempDir()) + .put("xpack.ssl.certificate_authorities", certPath) + .build(); sslService = new SSLService(globalSettings, env); threadPool = new TestThreadPool("LdapUserSearchSessionFactoryTests"); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/pki/PkiAuthenticationTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/pki/PkiAuthenticationTests.java index c458a9c42eabb..931acc1e79dca 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/pki/PkiAuthenticationTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/pki/PkiAuthenticationTests.java @@ -26,22 +26,24 @@ import org.elasticsearch.xpack.core.security.SecurityField; import org.elasticsearch.xpack.core.security.authc.file.FileRealmSettings; import org.elasticsearch.xpack.core.security.authc.pki.PkiRealmSettings; +import org.elasticsearch.xpack.core.ssl.CertParsingUtils; +import org.elasticsearch.xpack.core.ssl.PemUtils; import org.elasticsearch.xpack.core.ssl.SSLClientAuth; import org.elasticsearch.xpack.security.LocalStateSecurity; -import javax.net.ssl.KeyManagerFactory; +import javax.net.ssl.KeyManager; import javax.net.ssl.SSLContext; -import javax.net.ssl.TrustManagerFactory; +import javax.net.ssl.TrustManager; -import java.io.InputStream; import java.net.InetSocketAddress; -import java.nio.file.Files; -import java.nio.file.Path; -import java.security.KeyStore; import java.security.SecureRandom; +import java.util.Arrays; +import java.util.Collections; +import java.util.List; import java.util.Locale; +import java.util.stream.Collectors; -import static org.elasticsearch.test.SecuritySettingsSource.addSSLSettingsForStore; +import static org.elasticsearch.test.SecuritySettingsSource.addSSLSettingsForPEMFiles; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.is; @@ -60,16 +62,16 @@ protected Settings nodeSettings() { SSLClientAuth sslClientAuth = randomBoolean() ? 
SSLClientAuth.REQUIRED : SSLClientAuth.OPTIONAL; Settings.Builder builder = Settings.builder() - .put(super.nodeSettings()) - .put("xpack.security.http.ssl.enabled", true) - .put("xpack.security.http.ssl.client_authentication", sslClientAuth) - .put("xpack.security.authc.realms.file.type", FileRealmSettings.TYPE) - .put("xpack.security.authc.realms.file.order", "0") - .put("xpack.security.authc.realms.pki1.type", PkiRealmSettings.TYPE) - .put("xpack.security.authc.realms.pki1.order", "1") - .put("xpack.security.authc.realms.pki1.truststore.path", - getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/truststore-testnode-only.jks")) - .put("xpack.security.authc.realms.pki1.files.role_mapping", getDataPath("role_mapping.yml")); + .put(super.nodeSettings()) + .put("xpack.security.http.ssl.enabled", true) + .put("xpack.security.http.ssl.client_authentication", sslClientAuth) + .put("xpack.security.authc.realms.file.type", FileRealmSettings.TYPE) + .put("xpack.security.authc.realms.file.order", "0") + .put("xpack.security.authc.realms.pki1.type", PkiRealmSettings.TYPE) + .put("xpack.security.authc.realms.pki1.order", "1") + .put("xpack.security.authc.realms.pki1.certificate_authorities", + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")) + .put("xpack.security.authc.realms.pki1.files.role_mapping", getDataPath("role_mapping.yml")); SecuritySettingsSource.addSecureSettings(builder, secureSettings -> secureSettings.setString("xpack.security.authc.realms.pki1.truststore.secure_password", "truststore-testnode-only")); @@ -90,7 +92,13 @@ protected boolean enableWarningsCheck() { public void testTransportClientCanAuthenticateViaPki() { Settings.Builder builder = Settings.builder(); - addSSLSettingsForStore(builder, "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks", "testnode"); + addSSLSettingsForPEMFiles( + builder, + "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem", + "testnode", + "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt", + Arrays.asList + ("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")); try (TransportClient client = createTransportClient(builder.build())) { client.addTransportAddress(randomFrom(node().injector().getInstance(Transport.class).boundAddress().boundAddresses())); IndexResponse response = client.prepareIndex("foo", "bar").setSource("pki", "auth").get(); @@ -113,7 +121,11 @@ public void testTransportClientAuthenticationFailure() { } public void testRestAuthenticationViaPki() throws Exception { - SSLContext context = getRestSSLContext("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks", "testnode"); + SSLContext context = getRestSSLContext("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem", + "testnode", + "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt", + Arrays.asList("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.crt", + "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")); try (CloseableHttpClient client = HttpClients.custom().setSSLContext(context).build()) { HttpPut put = new HttpPut(getNodeUrl() + "foo"); try (CloseableHttpResponse response = SocketAccess.doPrivileged(() -> client.execute(put))) { @@ -124,7 +136,10 @@ public void testRestAuthenticationViaPki() throws Exception { } public void testRestAuthenticationFailure() throws Exception { - SSLContext context = 
getRestSSLContext("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.jks", "testclient");
+        SSLContext context = getRestSSLContext("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.pem",
+            "testclient", "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.crt",
+            Arrays.asList("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.crt",
+                "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt"));
         try (CloseableHttpClient client = HttpClients.custom().setSSLContext(context).build()) {
             HttpPut put = new HttpPut(getNodeUrl() + "foo");
             try (CloseableHttpResponse response = SocketAccess.doPrivileged(() -> client.execute(put))) {
@@ -135,21 +150,13 @@ public void testRestAuthenticationFailure() throws Exception {
         }
     }
 
-    private SSLContext getRestSSLContext(String keystoreResourcePath, String password) throws Exception {
+    private SSLContext getRestSSLContext(String keyPath, String password, String certPath, List<String> trustedCertPaths) throws Exception {
         SSLContext context = SSLContext.getInstance("TLS");
-        KeyManagerFactory kmf = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm());
-        Path store = getDataPath(keystoreResourcePath);
-        KeyStore ks;
-        try (InputStream in = Files.newInputStream(store)) {
-            ks = KeyStore.getInstance("jks");
-            ks.load(in, password.toCharArray());
-        }
-
-        kmf.init(ks, password.toCharArray());
-        TrustManagerFactory tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
-        tmf.init(ks);
-        context.init(kmf.getKeyManagers(), tmf.getTrustManagers(), new SecureRandom());
-
+        TrustManager tm = CertParsingUtils.trustManager(CertParsingUtils.readCertificates(trustedCertPaths.stream().map(p -> getDataPath
+            (p)).collect(Collectors.toList())));
+        KeyManager km = CertParsingUtils.keyManager(CertParsingUtils.readCertificates(Collections.singletonList(getDataPath
+            (certPath))), PemUtils.readPrivateKey(getDataPath(keyPath), password::toCharArray), password.toCharArray());
+        context.init(new KeyManager[]{km}, new TrustManager[]{tm}, new SecureRandom());
         return context;
     }
 
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticatorTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticatorTests.java
index 7f79ae35adac2..5a7015a4e8dfa 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticatorTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlAuthenticatorTests.java
@@ -128,6 +128,8 @@ public class SamlAuthenticatorTests extends SamlTestCase {
 
     @BeforeClass
     public static void init() throws Exception {
+        assumeFalse("Can't run in a FIPS JVM, there is no DOM XMLSignature Factory so we can't sign XML documents", inFipsJvm());
+        // TODO: Refactor the signing to use org.opensaml.xmlsec.signature.support.Signer so that we can run the tests
         SamlUtils.initialize(Loggers.getLogger(SamlAuthenticatorTests.class));
         // Initialise Apache XML security so that the signDoc methods work correctly.
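Condensed, the getRestSSLContext rewrite above follows one recipe: parse the trust anchors and the key/certificate pair from PEM files with the CertParsingUtils and PemUtils helpers the file now imports, then initialise a plain JSSE context. A sketch under those assumptions; pemContext is an illustrative name, getDataPath is the usual ESTestCase resource helper, and the four parameters are classpath locations:

    private SSLContext pemContext(String keyPath, String password, String certPath, String caPath) throws Exception {
        // Trust anchors parsed straight from a PEM CA certificate.
        TrustManager tm = CertParsingUtils.trustManager(
                CertParsingUtils.readCertificates(Collections.singletonList(getDataPath(caPath))));
        // Key manager built from a PEM private key plus its certificate.
        KeyManager km = CertParsingUtils.keyManager(
                CertParsingUtils.readCertificates(Collections.singletonList(getDataPath(certPath))),
                PemUtils.readPrivateKey(getDataPath(keyPath), password::toCharArray),
                password.toCharArray());
        SSLContext context = SSLContext.getInstance("TLS");
        context.init(new KeyManager[]{km}, new TrustManager[]{tm}, new SecureRandom());
        return context;
    }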
Init.init(); @@ -218,7 +220,7 @@ public void testParseContentWithNoAssertionsIsRejected() throws Exception { "" + IDP_ENTITY_ID + "" + "" + - ""); + ""); final ElasticsearchSecurityException exception = expectSamlException(() -> authenticator.authenticate(token)); assertThat(exception.getMessage(), containsString("No assertions found in SAML response")); assertThat(exception.getCause(), nullValue()); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlMetadataCommandTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlMetadataCommandTests.java index cf41673b86bce..e239c8706b99f 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlMetadataCommandTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlMetadataCommandTests.java @@ -296,6 +296,7 @@ public void testHandleAttributesInBatchMode() throws Exception { } public void testSigningMetadataWithPfx() throws Exception { + assumeFalse("Can't run in a FIPS JVM, PKCS12 keystores are not usable", inFipsJvm()); final Path certPath = getDataPath("saml.crt"); final Path keyPath = getDataPath("saml.key"); final Path p12Path = getDataPath("saml.p12"); @@ -355,6 +356,7 @@ public void testSigningMetadataWithPfx() throws Exception { } public void testSigningMetadataWithPasswordProtectedPfx() throws Exception { + assumeFalse("Can't run in a FIPS JVM, PKCS12 keystores are not usable", inFipsJvm()); final Path certPath = getDataPath("saml.crt"); final Path keyPath = getDataPath("saml.key"); final Path p12Path = getDataPath("saml_with_password.p12"); @@ -393,11 +395,13 @@ public void testSigningMetadataWithPasswordProtectedPfx() throws Exception { public void testErrorSigningMetadataWithWrongPassword() throws Exception { final Path certPath = getDataPath("saml.crt"); final Path keyPath = getDataPath("saml.key"); - final Path p12Path = getDataPath("saml_with_password.p12"); + final Path signingKeyPath = getDataPath("saml_with_password.key"); final SamlMetadataCommand command = new SamlMetadataCommand((e) -> randomFrom(keyStore, null)); final OptionSet options = command.getParser().parse(new String[]{ - "-signing-bundle", p12Path.toString(), - "-signing-key-password", "wrong_password" + "-signing-cert", certPath.toString(), + "-signing-key", signingKeyPath.toString(), + "-signing-key-password", "wrongpassword" + }); final boolean useSigningCredentials = randomBoolean(); @@ -422,7 +426,7 @@ public void testErrorSigningMetadataWithWrongPassword() throws Exception { final UserException userException = expectThrows(UserException.class, () -> command.possiblySignDescriptor(terminal, options, descriptor, env)); assertThat(userException.getMessage(), containsString("Unable to create metadata document")); - assertThat(terminal.getOutput(), containsString("keystore password was incorrect")); + assertThat(terminal.getOutput(), containsString("Error parsing Private Key from")); } public void testSigningMetadataWithPem() throws Exception { @@ -473,7 +477,7 @@ public void testSigningMetadataWithPasswordProtectedPem() throws Exception { final OptionSet options = command.getParser().parse(new String[]{ "-signing-cert", certPath.toString(), "-signing-key", signingKeyPath.toString(), - "-signing-key-password", "saml" + "-signing-key-password", "saml" }); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlRealmTests.java 
b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlRealmTests.java index 6dc9c021fc813..980abc46831c6 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlRealmTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/saml/SamlRealmTests.java @@ -105,13 +105,17 @@ public void testReadIdpMetadataFromHttps() throws Exception { final Path path = getDataPath("idp1.xml"); final String body = new String(Files.readAllBytes(path), StandardCharsets.UTF_8); final MockSecureSettings mockSecureSettings = new MockSecureSettings(); - mockSecureSettings.setString("xpack.ssl.keystore.secure_password", "testnode"); + mockSecureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode"); final Settings settings = Settings.builder() - .put("xpack.ssl.keystore.path", - getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks")) - .put("path.home", createTempDir()) - .setSecureSettings(mockSecureSettings) - .build(); + .put("xpack.ssl.key", + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem")) + .put("xpack.ssl.certificate", + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")) + .put("xpack.ssl.certificate_authorities", + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")) + .put("path.home", createTempDir()) + .setSecureSettings(mockSecureSettings) + .build(); TestsSSLService sslService = new TestsSSLService(settings, TestEnvironment.newEnvironment(settings)); try (MockWebServer proxyServer = new MockWebServer(sslService.sslContext(Settings.EMPTY), false)) { proxyServer.start(); @@ -563,17 +567,21 @@ private Tuple buildConfig(String path) throws Exception private Settings.Builder buildSettings(String idpMetaDataPath) { MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString(REALM_SETTINGS_PREFIX + ".ssl.keystore.secure_password", "testnode"); + secureSettings.setString(REALM_SETTINGS_PREFIX + ".ssl.secure_key_passphrase", "testnode"); return Settings.builder() - .put(REALM_SETTINGS_PREFIX + ".ssl.verification_mode", "certificate") - .put(REALM_SETTINGS_PREFIX + ".ssl.keystore.path", - getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks")) - .put(REALM_SETTINGS_PREFIX + ".type", "saml") - .put(REALM_SETTINGS_PREFIX + "." + SamlRealmSettings.IDP_METADATA_PATH.getKey(), idpMetaDataPath) - .put(REALM_SETTINGS_PREFIX + "." + SamlRealmSettings.IDP_ENTITY_ID.getKey(), TEST_IDP_ENTITY_ID) - .put(REALM_SETTINGS_PREFIX + "." + SamlRealmSettings.IDP_METADATA_HTTP_REFRESH.getKey(), METADATA_REFRESH + "ms") - .put("path.home", createTempDir()) - .setSecureSettings(secureSettings); + .put(REALM_SETTINGS_PREFIX + ".ssl.verification_mode", "certificate") + .put(REALM_SETTINGS_PREFIX + ".ssl.key", + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem")) + .put(REALM_SETTINGS_PREFIX + ".ssl.certificate", + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")) + .put(REALM_SETTINGS_PREFIX + ".ssl.certificate_authorities", + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")) + .put(REALM_SETTINGS_PREFIX + ".type", "saml") + .put(REALM_SETTINGS_PREFIX + "." + SamlRealmSettings.IDP_METADATA_PATH.getKey(), idpMetaDataPath) + .put(REALM_SETTINGS_PREFIX + "." 
+ SamlRealmSettings.IDP_ENTITY_ID.getKey(), TEST_IDP_ENTITY_ID) + .put(REALM_SETTINGS_PREFIX + "." + SamlRealmSettings.IDP_METADATA_HTTP_REFRESH.getKey(), METADATA_REFRESH + "ms") + .put("path.home", createTempDir()) + .setSecureSettings(secureSettings); } private RealmConfig realmConfigFromRealmSettings(Settings realmSettings) { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ServerTransportFilterIntegrationTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ServerTransportFilterIntegrationTests.java index bac5e0b3f5008..abd5768bebec9 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ServerTransportFilterIntegrationTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ServerTransportFilterIntegrationTests.java @@ -43,11 +43,10 @@ import java.util.Collection; import java.util.concurrent.CountDownLatch; -import static org.elasticsearch.test.SecuritySettingsSource.addSSLSettingsForStore; +import static org.elasticsearch.test.SecuritySettingsSource.addSSLSettingsForPEMFiles; import static org.elasticsearch.xpack.security.test.SecurityTestUtils.writeFile; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.instanceOf; -import static org.hamcrest.CoreMatchers.is; public class ServerTransportFilterIntegrationTests extends SecurityIntegTestCase { private static int randomClientPort; @@ -66,25 +65,18 @@ public boolean transportSSLEnabled() { protected Settings nodeSettings(int nodeOrdinal) { Settings.Builder settingsBuilder = Settings.builder(); String randomClientPortRange = randomClientPort + "-" + (randomClientPort+100); - - Path store; - try { - store = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks"); - assertThat(Files.exists(store), is(true)); - } catch (Exception e) { - throw new RuntimeException(e); - } - + Path certPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt"); settingsBuilder.put(super.nodeSettings(nodeOrdinal)) - .put("transport.profiles.client.xpack.security.ssl.truststore.path", store) // settings for client truststore - .put("xpack.ssl.client_authentication", SSLClientAuth.REQUIRED) - .put("transport.profiles.client.xpack.security.type", "client") - .put("transport.profiles.client.port", randomClientPortRange) - // make sure this is "localhost", no matter if ipv4 or ipv6, but be consistent - .put("transport.profiles.client.bind_host", "localhost") - .put("xpack.security.audit.enabled", false) - .put(XPackSettings.WATCHER_ENABLED.getKey(), false) - .put(TestZenDiscovery.USE_MOCK_PINGS.getKey(), false); + .putList("transport.profiles.client.xpack.security.ssl.certificate_authorities", + Arrays.asList(certPath.toString())) // settings for client truststore + .put("xpack.ssl.client_authentication", SSLClientAuth.REQUIRED) + .put("transport.profiles.client.xpack.security.type", "client") + .put("transport.profiles.client.port", randomClientPortRange) + // make sure this is "localhost", no matter if ipv4 or ipv6, but be consistent + .put("transport.profiles.client.bind_host", "localhost") + .put("xpack.security.audit.enabled", false) + .put(XPackSettings.WATCHER_ENABLED.getKey(), false) + .put(TestZenDiscovery.USE_MOCK_PINGS.getKey(), false); if (randomBoolean()) { settingsBuilder.put("transport.profiles.default.xpack.security.type", "node"); // this is default lets set it randomly } @@ -120,7 
+112,12 @@ public void testThatConnectionToServerTypeConnectionWorks() throws IOException,
                 //.put("xpack.ml.autodetect_process", false);
         Collection<Class<? extends Plugin>> mockPlugins = Arrays.asList(
                 LocalStateSecurity.class, TestZenDiscovery.TestPlugin.class, MockHttpTransport.TestPlugin.class);
-        addSSLSettingsForStore(nodeSettings, "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks", "testnode");
+        addSSLSettingsForPEMFiles(
+            nodeSettings,
+            "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem",
+            "testnode",
+            "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt",
+            Arrays.asList("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt"));
         try (Node node = new MockNode(nodeSettings.build(), mockPlugins)) {
             node.start();
             ensureStableCluster(cluster().size() + 1);
@@ -159,7 +156,12 @@ public void testThatConnectionToClientTypeConnectionIsRejected() throws IOExcept
                 //.put("xpack.ml.autodetect_process", false);
         Collection<Class<? extends Plugin>> mockPlugins = Arrays.asList(
                 LocalStateSecurity.class, TestZenDiscovery.TestPlugin.class, MockHttpTransport.TestPlugin.class);
-        addSSLSettingsForStore(nodeSettings, "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks", "testnode");
+        addSSLSettingsForPEMFiles(
+            nodeSettings,
+            "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem",
+            "testnode",
+            "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt",
+            Arrays.asList("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt"));
         try (Node node = new MockNode(nodeSettings.build(), mockPlugins)) {
             node.start();
             TransportService instance = node.injector().getInstance(TransportService.class);
diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/IPHostnameVerificationTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/IPHostnameVerificationTests.java
index f03a4255b7fe7..bc674ae1aa00e 100644
--- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/IPHostnameVerificationTests.java
+++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/IPHostnameVerificationTests.java
@@ -21,7 +21,8 @@
 // TODO delete this test?
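The guard that recurs across all of these suites is always the same single line, sketched here on the assumption that ESTestCase exposes inFipsJvm() exactly as the hunks above use it:

    // Skip when running on a FIPS-140 JVM, where JKS and PKCS12 keystores cannot be opened.
    assumeFalse("Can't run in a FIPS JVM, JKS keystores can't be used", inFipsJvm());

Tests that must still run under FIPS drop the keystore instead and move to the PEM settings xpack.ssl.key, xpack.ssl.certificate and xpack.ssl.certificate_authorities, which is exactly what the hunks below for IPHostnameVerificationTests do.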
public class IPHostnameVerificationTests extends SecurityIntegTestCase { - Path keystore; + private Path certPath; + private Path keyPath; @Override protected boolean transportSSLEnabled() { @@ -46,36 +47,37 @@ protected Settings nodeSettings(int nodeOrdinal) { .putList("discovery.zen.ping.unicast.hosts", newUnicastAddresses); try { - //This keystore uses a cert with a CN of "Elasticsearch Test Node" and IPv4+IPv6 ip addresses as SubjectAlternativeNames - keystore = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-ip-only.jks"); - assertThat(Files.exists(keystore), is(true)); + //Use a cert with a CN of "Elasticsearch Test Node" and IPv4+IPv6 ip addresses as SubjectAlternativeNames + certPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-ip-only.crt"); + keyPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-ip-only.pem"); + assertThat(Files.exists(certPath), is(true)); } catch (Exception e) { throw new RuntimeException(e); } SecuritySettingsSource.addSecureSettings(settingsBuilder, secureSettings -> { - secureSettings.setString("xpack.ssl.keystore.secure_password", "testnode-ip-only"); - secureSettings.setString("xpack.ssl.truststore.secure_password", "testnode-ip-only"); + secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode-ip-only"); }); - return settingsBuilder.put("xpack.ssl.keystore.path", keystore.toAbsolutePath()) // settings for client truststore - .put("xpack.ssl.truststore.path", keystore.toAbsolutePath()) // settings for client truststore - .put(TcpTransport.BIND_HOST.getKey(), "127.0.0.1") - .put("network.host", "127.0.0.1") - .put("xpack.ssl.client_authentication", SSLClientAuth.NONE) - .put("xpack.ssl.verification_mode", "full") - .build(); + return settingsBuilder.put("xpack.ssl.key", keyPath.toAbsolutePath()) + .put("xpack.ssl.certificate", certPath.toAbsolutePath()) + .put("xpack.ssl.certificate_authorities", certPath.toAbsolutePath()) + .put(TcpTransport.BIND_HOST.getKey(), "127.0.0.1") + .put("network.host", "127.0.0.1") + .put("xpack.ssl.client_authentication", SSLClientAuth.NONE) + .put("xpack.ssl.verification_mode", "full") + .build(); } @Override protected Settings transportClientSettings() { Settings clientSettings = super.transportClientSettings(); return Settings.builder().put(clientSettings.filter(k -> k.startsWith("xpack.ssl.") == false)) - .put("xpack.ssl.verification_mode", "certificate") - .put("xpack.ssl.keystore.path", keystore.toAbsolutePath()) - .put("xpack.ssl.keystore.password", "testnode-ip-only") - .put("xpack.ssl.truststore.path", keystore.toAbsolutePath()) - .put("xpack.ssl.truststore.password", "testnode-ip-only") - .build(); + .put("xpack.ssl.verification_mode", "certificate") + .put("xpack.ssl.key", keyPath.toAbsolutePath()) + .put("xpack.ssl.certificate", certPath.toAbsolutePath()) + .put("xpack.ssl.key_passphrase", "testnode-ip-only") + .put("xpack.ssl.certificate_authorities", certPath) + .build(); } public void testTransportClientConnectionWorksWithIPOnlyHostnameVerification() throws Exception { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransportTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransportTests.java index ad64dea79a587..23ca3c1fe9fe4 100644 --- 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransportTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4HttpServerTransportTests.java @@ -39,17 +39,21 @@ public class SecurityNetty4HttpServerTransportTests extends ESTestCase { private SSLService sslService; private Environment env; - + private Path testnodeCert; + private Path testnodeKey; @Before public void createSSLService() throws Exception { - Path testNodeStore = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks"); + testnodeCert = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt"); + testnodeKey = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem"); + MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("xpack.ssl.keystore.secure_password", "testnode"); + secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode"); Settings settings = Settings.builder() - .put("xpack.ssl.keystore.path", testNodeStore) - .put("path.home", createTempDir()) - .setSecureSettings(secureSettings) - .build(); + .put("xpack.ssl.key", testnodeKey) + .put("xpack.ssl.certificate", testnodeCert) + .put("path.home", createTempDir()) + .setSecureSettings(secureSettings) + .build(); env = TestEnvironment.newEnvironment(settings); sslService = new SSLService(settings, env); } @@ -144,15 +148,11 @@ public void testCustomSSLConfiguration() throws Exception { } public void testThatExceptionIsThrownWhenConfiguredWithoutSslKey() throws Exception { - MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("xpack.ssl.truststore.secure_password", "testnode"); Settings settings = Settings.builder() - .put("xpack.ssl.truststore.path", - getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks")) - .setSecureSettings(secureSettings) - .put(XPackSettings.HTTP_SSL_ENABLED.getKey(), true) - .put("path.home", createTempDir()) - .build(); + .put("xpack.ssl.certificate_authorities", testnodeCert) + .put(XPackSettings.HTTP_SSL_ENABLED.getKey(), true) + .put("path.home", createTempDir()) + .build(); env = TestEnvironment.newEnvironment(settings); sslService = new SSLService(settings, env); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, @@ -163,13 +163,13 @@ public void testThatExceptionIsThrownWhenConfiguredWithoutSslKey() throws Except public void testNoExceptionWhenConfiguredWithoutSslKeySSLDisabled() throws Exception { MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("xpack.ssl.truststore.secure_password", "testnode"); + secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode"); Settings settings = Settings.builder() - .put("xpack.ssl.truststore.path", - getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks")) - .setSecureSettings(secureSettings) - .put("path.home", createTempDir()) - .build(); + .put("xpack.ssl.key", testnodeKey) + .put("xpack.ssl.certificate", testnodeCert) + .setSecureSettings(secureSettings) + .put("path.home", createTempDir()) + .build(); env = TestEnvironment.newEnvironment(settings); sslService = new SSLService(settings, env); SecurityNetty4HttpServerTransport transport = new SecurityNetty4HttpServerTransport(settings, diff --git 
a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4ServerTransportTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4ServerTransportTests.java index f87ab36d3d574..e9d91f5bd2d6a 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4ServerTransportTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SecurityNetty4ServerTransportTests.java @@ -41,15 +41,17 @@ public class SecurityNetty4ServerTransportTests extends ESTestCase { @Before public void createSSLService() throws Exception { - Path testnodeStore = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks"); + Path testnodeCert = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt"); + Path testnodeKey = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem"); MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("xpack.ssl.keystore.secure_password", "testnode"); + secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode"); Settings settings = Settings.builder() - .put("xpack.security.transport.ssl.enabled", true) - .put("xpack.ssl.keystore.path", testnodeStore) - .setSecureSettings(secureSettings) - .put("path.home", createTempDir()) - .build(); + .put("xpack.security.transport.ssl.enabled", true) + .put("xpack.ssl.key", testnodeKey) + .put("xpack.ssl.certificate", testnodeCert) + .setSecureSettings(secureSettings) + .put("path.home", createTempDir()) + .build(); env = TestEnvironment.newEnvironment(settings); sslService = new SSLService(settings, env); } @@ -179,17 +181,18 @@ public void testProfileOptionalClientAuth() throws Exception { public void testTransportSSLOverridesGlobalSSL() throws Exception { MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("xpack.security.transport.ssl.keystore.secure_password", "testnode"); - secureSettings.setString("xpack.ssl.truststore.secure_password", "truststore-testnode-only"); + secureSettings.setString("xpack.security.transport.ssl.secure_key_passphrase", "testnode"); Settings.Builder builder = Settings.builder() - .put("xpack.security.transport.ssl.enabled", true) - .put("xpack.security.transport.ssl.keystore.path", - getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks")) - .put("xpack.security.transport.ssl.client_authentication", "none") - .put("xpack.ssl.truststore.path", - getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/truststore-testnode-only.jks")) - .setSecureSettings(secureSettings) - .put("path.home", createTempDir()); + .put("xpack.security.transport.ssl.enabled", true) + .put("xpack.security.transport.ssl.key", + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem")) + .put("xpack.security.transport.ssl.certificate", + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")) + .put("xpack.security.transport.ssl.client_authentication", "none") + .put("xpack.ssl.certificate_authorities", + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")) + .setSecureSettings(secureSettings) + .put("path.home", createTempDir()); Settings settings = builder.build(); env = TestEnvironment.newEnvironment(settings); sslService = new 
SSLService(settings, env); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SslHostnameVerificationTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SslHostnameVerificationTests.java index 148453b5f84b0..c61b5782f75c4 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SslHostnameVerificationTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/netty4/SslHostnameVerificationTests.java @@ -19,6 +19,7 @@ import java.net.InetSocketAddress; import java.nio.file.Files; import java.nio.file.Path; +import java.util.Arrays; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.Matchers.containsString; @@ -35,25 +36,33 @@ protected Settings nodeSettings(int nodeOrdinal) { Settings settings = super.nodeSettings(nodeOrdinal); Settings.Builder settingsBuilder = Settings.builder(); settingsBuilder.put(settings.filter(k -> k.startsWith("xpack.ssl.") == false), false); - Path keystore; + Path keyPath; + Path certPath; + Path nodeCertPath; try { /* * This keystore uses a cert without any subject alternative names and a CN of "Elasticsearch Test Node No SAN" * that will not resolve to a DNS name and will always cause hostname verification failures */ - keystore = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.jks"); - assert keystore != null; - assertThat(Files.exists(keystore), is(true)); + keyPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.pem"); + certPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.crt"); + nodeCertPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt"); + assert keyPath != null; + assert certPath != null; + assert nodeCertPath != null; + assertThat(Files.exists(certPath), is(true)); + assertThat(Files.exists(nodeCertPath), is(true)); + assertThat(Files.exists(keyPath), is(true)); } catch (Exception e) { throw new RuntimeException(e); } SecuritySettingsSource.addSecureSettings(settingsBuilder, secureSettings -> { - secureSettings.setString("xpack.ssl.keystore.secure_password", "testnode-no-subjaltname"); - secureSettings.setString("xpack.ssl.truststore.secure_password", "testnode-no-subjaltname"); + secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode-no-subjaltname"); }); - return settingsBuilder.put("xpack.ssl.keystore.path", keystore.toAbsolutePath()) - .put("xpack.ssl.truststore.path", keystore.toAbsolutePath()) + return settingsBuilder.put("xpack.ssl.key", keyPath.toAbsolutePath()) + .put("xpack.ssl.certificate", certPath.toAbsolutePath()) + .putList("xpack.ssl.certificate_authorities", Arrays.asList(certPath.toString(), nodeCertPath.toString())) // disable hostname verification as this test uses certs without a valid SAN or DNS in the CN .put("xpack.ssl.verification_mode", "certificate") .build(); @@ -61,22 +70,32 @@ protected Settings nodeSettings(int nodeOrdinal) { @Override protected Settings transportClientSettings() { - Path keystore = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.jks"); - assert keystore != null; + Path keyPath; + Path certPath; + Path nodeCertPath; + try { + keyPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.pem"); + certPath = 
getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.crt"); + nodeCertPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt"); + assert keyPath != null; + assert certPath != null; + assert nodeCertPath != null; + assertThat(Files.exists(certPath), is(true)); + assertThat(Files.exists(nodeCertPath), is(true)); + assertThat(Files.exists(keyPath), is(true)); + } catch (Exception e) { + throw new RuntimeException(e); + } Settings settings = super.transportClientSettings(); // remove all ssl settings Settings.Builder builder = Settings.builder(); builder.put(settings.filter( k -> k.startsWith("xpack.ssl.") == false), false); builder.put("xpack.ssl.verification_mode", "certificate") - .put("xpack.ssl.keystore.path", keystore.toAbsolutePath()) // settings for client keystore - .put("xpack.ssl.keystore.password", "testnode-no-subjaltname"); - - if (randomBoolean()) { - // randomly set the truststore, if not set the keystore should be used - builder.put("xpack.ssl.truststore.path", keystore.toAbsolutePath()) - .put("xpack.ssl.truststore.password", "testnode-no-subjaltname"); - } + .put("xpack.ssl.key", keyPath.toAbsolutePath()) + .put("xpack.ssl.key_passphrase", "testnode-no-subjaltname") + .put("xpack.ssl.certificate", certPath.toAbsolutePath()) + .putList("xpack.ssl.certificate_authorities", Arrays.asList(certPath.toString(), nodeCertPath.toString())); return builder.build(); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SSLDriverTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SSLDriverTests.java index b98e4e0ce5735..e1e05032014ba 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SSLDriverTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SSLDriverTests.java @@ -7,21 +7,21 @@ import org.elasticsearch.nio.InboundChannelBuffer; import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.ssl.CertParsingUtils; +import org.elasticsearch.xpack.core.ssl.PemUtils; -import javax.net.ssl.KeyManagerFactory; +import javax.net.ssl.KeyManager; import javax.net.ssl.SSLContext; import javax.net.ssl.SSLEngine; import javax.net.ssl.SSLException; -import javax.net.ssl.TrustManagerFactory; +import javax.net.ssl.TrustManager; import java.io.IOException; -import java.io.InputStream; import java.nio.Buffer; import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; -import java.nio.file.Files; -import java.security.KeyStore; import java.security.SecureRandom; import java.util.Arrays; +import java.util.Collections; import java.util.function.Supplier; public class SSLDriverTests extends ESTestCase { @@ -205,19 +205,16 @@ private void failedCloseAlert(SSLDriver sendDriver, SSLDriver receiveDriver) thr } private SSLContext getSSLContext() throws Exception { - String relativePath = "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.jks"; + String certPath = "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.crt"; + String keyPath = "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.pem"; SSLContext sslContext; - try (InputStream in = Files.newInputStream(getDataPath(relativePath))) { - KeyStore keyStore = KeyStore.getInstance("jks"); - keyStore.load(in, "testclient".toCharArray()); - TrustManagerFactory tmf = 
TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm()); - tmf.init(keyStore); - KeyManagerFactory kmf = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm()); - kmf.init(keyStore, "testclient".toCharArray()); - sslContext = SSLContext.getInstance("TLSv1.2"); - sslContext.init(kmf.getKeyManagers(), tmf.getTrustManagers(), new SecureRandom()); - return sslContext; - } + TrustManager tm = CertParsingUtils.trustManager(CertParsingUtils.readCertificates(Collections.singletonList(getDataPath + (certPath)))); + KeyManager km = CertParsingUtils.keyManager(CertParsingUtils.readCertificates(Collections.singletonList(getDataPath + (certPath))), PemUtils.readPrivateKey(getDataPath(keyPath), "testclient"::toCharArray), "testclient".toCharArray()); + sslContext = SSLContext.getInstance("TLSv1.2"); + sslContext.init(new KeyManager[] { km }, new TrustManager[] { tm }, new SecureRandom()); + return sslContext; } private void normalClose(SSLDriver sendDriver, SSLDriver receiveDriver) throws IOException { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SimpleSecurityNioTransportTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SimpleSecurityNioTransportTests.java index 9f33da7ae88af..feca093e581af 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SimpleSecurityNioTransportTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/nio/SimpleSecurityNioTransportTests.java @@ -55,15 +55,17 @@ public class SimpleSecurityNioTransportTests extends AbstractSimpleTransportTestCase { private SSLService createSSLService() { - Path testnodeStore = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks"); + Path testnodeCert = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt"); + Path testnodeKey = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem"); MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("xpack.ssl.keystore.secure_password", "testnode"); + secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode"); Settings settings = Settings.builder() - .put("xpack.security.transport.ssl.enabled", true) - .put("xpack.ssl.keystore.path", testnodeStore) - .setSecureSettings(secureSettings) - .put("path.home", createTempDir()) - .build(); + .put("xpack.security.transport.ssl.enabled", true) + .put("xpack.ssl.key", testnodeKey) + .put("xpack.ssl.certificate", testnodeCert) + .put("path.home", createTempDir()) + .setSecureSettings(secureSettings) + .build(); try { return new SSLService(settings, TestEnvironment.newEnvironment(settings)); } catch (Exception e) { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/EllipticCurveSSLTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/EllipticCurveSSLTests.java index 42c5cd7c7ab45..df49103a25999 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/EllipticCurveSSLTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/EllipticCurveSSLTests.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.xpack.security.transport.ssl; -import com.unboundid.util.ssl.TrustAllTrustManager; import 
org.elasticsearch.action.admin.cluster.node.info.NodesInfoResponse; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.transport.TransportAddress; @@ -77,7 +76,8 @@ public void testConnection() throws Exception { X509ExtendedKeyManager x509ExtendedKeyManager = CertParsingUtils.keyManager(certs, privateKey, new char[0]); SSLContext sslContext = SSLContext.getInstance("TLS"); sslContext.init(new X509ExtendedKeyManager[] { x509ExtendedKeyManager }, - new TrustManager[] { new TrustAllTrustManager(false) }, new SecureRandom()); + new TrustManager[]{CertParsingUtils.trustManager(CertParsingUtils.readCertificates(Collections.singletonList(certPath)))}, + new SecureRandom()); SSLSocketFactory socketFactory = sslContext.getSocketFactory(); NodesInfoResponse response = client().admin().cluster().prepareNodesInfo().setTransport(true).get(); TransportAddress address = randomFrom(response.getNodes()).getTransport().getAddress().publishAddress(); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/SslIntegrationTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/SslIntegrationTests.java index fa8fd00aeba61..541e660691275 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/SslIntegrationTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/SslIntegrationTests.java @@ -42,12 +42,13 @@ import java.security.KeyStore; import java.security.SecureRandom; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.List; import java.util.Locale; import java.util.Set; -import static org.elasticsearch.test.SecuritySettingsSource.addSSLSettingsForStore; +import static org.elasticsearch.test.SecuritySettingsSource.addSSLSettingsForPEMFiles; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.instanceOf; @@ -97,6 +98,7 @@ public void testThatUnconfiguredCiphersAreRejected() throws Exception { // no SSL exception as this is the exception is returned when connecting public void testThatTransportClientUsingSSLv3ProtocolIsRejected() { + assumeFalse("Can't run in a FIPS JVM as SSLv3 SSLContext not available", inFipsJvm()); try (TransportClient transportClient = new TestXPackTransportClient(Settings.builder() .put(transportClientSettings()) .put("node.name", "programmatic_transport_client") @@ -116,7 +118,11 @@ public void testThatTransportClientUsingSSLv3ProtocolIsRejected() { public void testThatConnectionToHTTPWorks() throws Exception { Settings.Builder builder = Settings.builder(); - addSSLSettingsForStore(builder, "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.jks", "testclient"); + addSSLSettingsForPEMFiles( + builder, "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.pem", + "testclient", + "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.crt", + Arrays.asList("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")); SSLService service = new SSLService(builder.build(), null); CredentialsProvider provider = new BasicCredentialsProvider(); @@ -135,6 +141,7 @@ public void testThatConnectionToHTTPWorks() throws Exception { } public void testThatHttpUsingSSLv3IsRejected() throws Exception { + assumeFalse("Can't run in a FIPS JVM as we can't even get an instance of SSL SSL Context", inFipsJvm()); 
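// Editor's note (not part of the original patch): inFipsJvm() is the guard this
// series adds wherever a test depends on algorithms a FIPS-approved JVM will not
// provide (SSLv3 here; JKS keystores and custom trust managers elsewhere). Its
// implementation is not shown in this diff; a hypothetical equivalent, assuming
// the BouncyCastle FIPS provider is registered whenever FIPS mode is active:
//
//     public static boolean inFipsJvm() {
//         return java.security.Security.getProvider("BCFIPS") != null;
//     }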
SSLContext sslContext = SSLContext.getInstance("SSL"); TrustManagerFactory factory = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm()); factory.init((KeyStore) null); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/SslMultiPortTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/SslMultiPortTests.java index 1d7ec67762ba0..d3ab5d092ab5b 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/SslMultiPortTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/SslMultiPortTests.java @@ -20,10 +20,11 @@ import java.net.InetAddress; import java.nio.file.Files; import java.nio.file.Path; +import java.util.Arrays; import java.util.Collections; import static org.elasticsearch.test.SecuritySettingsSource.TEST_USER_NAME; -import static org.elasticsearch.test.SecuritySettingsSource.addSSLSettingsForStore; +import static org.elasticsearch.test.SecuritySettingsSource.addSSLSettingsForPEMFiles; import static org.elasticsearch.test.SecuritySettingsSourceField.TEST_PASSWORD; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.Matchers.containsString; @@ -42,9 +43,9 @@ public static void getRandomPort() { /** * On each node sets up the following profiles: *
<ul>
-    *   <li>default: testnode keystore. Requires client auth</li>
-    *   <li>client: testnode-client-profile keystore that only trusts the testclient cert. Requires client auth</li>
-    *   <li>no_client_auth: testnode keystore. Does not require client auth</li>
+    *   <li>default: testnode keypair. Requires client auth</li>
+    *   <li>client: testnode-client-profile profile that only trusts the testclient cert. Requires client auth</li>
+    *   <li>no_client_auth: testnode keypair. Does not require client auth</li>
     * </ul>
*/ @Override @@ -52,26 +53,25 @@ protected Settings nodeSettings(int nodeOrdinal) { String randomClientPortRange = randomClientPort + "-" + (randomClientPort+100); String randomNoClientAuthPortRange = randomNoClientAuthPort + "-" + (randomNoClientAuthPort+100); - Path store; + Path trustCert; try { - store = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-client-profile.jks"); - assertThat(Files.exists(store), is(true)); + trustCert = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient-client-profile.crt"); + assertThat(Files.exists(trustCert), is(true)); } catch (Exception e) { throw new RuntimeException(e); } Settings settings = Settings.builder() - .put(super.nodeSettings(nodeOrdinal)) - // client set up here - .put("transport.profiles.client.port", randomClientPortRange) - // make sure this is "localhost", no matter if ipv4 or ipv6, but be consistent - .put("transport.profiles.client.bind_host", "localhost") - .put("transport.profiles.client.xpack.security.ssl.truststore.path", store.toAbsolutePath()) - .put("transport.profiles.client.xpack.security.ssl.truststore.password", "testnode-client-profile") - .put("transport.profiles.no_client_auth.port", randomNoClientAuthPortRange) - .put("transport.profiles.no_client_auth.bind_host", "localhost") - .put("transport.profiles.no_client_auth.xpack.security.ssl.client_authentication", SSLClientAuth.NONE) - .build(); + .put(super.nodeSettings(nodeOrdinal)) + // client set up here + .put("transport.profiles.client.port", randomClientPortRange) + // make sure this is "localhost", no matter if ipv4 or ipv6, but be consistent + .put("transport.profiles.client.bind_host", "localhost") + .put("transport.profiles.client.xpack.security.ssl.certificate_authorities", trustCert.toAbsolutePath()) + .put("transport.profiles.no_client_auth.port", randomNoClientAuthPortRange) + .put("transport.profiles.no_client_auth.bind_host", "localhost") + .put("transport.profiles.no_client_auth.xpack.security.ssl.client_authentication", SSLClientAuth.NONE) + .build(); logger.info("node {} settings:\n{}", nodeOrdinal, settings); return settings; } @@ -140,15 +140,18 @@ public void testThatStandardTransportClientCannotConnectToClientProfile() throws } /** - * Uses a transport client with a custom keystore; this keystore testclient-client-profile.jks trusts the testnode + * Uses a transport client with a custom key pair; TransportClient only trusts the testnode * certificate and had its own self signed certificate. 
This test connects to the client profile, which is only * set to trust the testclient-client-profile certificate so the connection should always succeed */ public void testThatProfileTransportClientCanConnectToClientProfile() throws Exception { Settings.Builder builder = Settings.builder(); - addSSLSettingsForStore(builder, - "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient-client-profile.jks", - "testclient-client-profile"); + addSSLSettingsForPEMFiles( + builder, + "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient-client-profile.pem", + "testclient-client-profile", + "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient-client-profile.crt", + Arrays.asList("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")); try (TransportClient transportClient = createTransportClient(builder.build())) { transportClient.addTransportAddress(new TransportAddress(InetAddress.getLoopbackAddress(), getProfilePort("client"))); assertGreenClusterState(transportClient); @@ -156,16 +159,19 @@ public void testThatProfileTransportClientCanConnectToClientProfile() throws Exc } /** - * Uses a transport client with a custom keystore; this keystore testclient-client-profile.jks trusts the testnode + * Uses a transport client with a custom key pair; TransportClient only trusts the testnode * certificate and had its own self signed certificate. This test connects to the no_client_auth profile, which * uses a truststore that does not trust the testclient-client-profile certificate but does not require client * authentication */ public void testThatProfileTransportClientCanConnectToNoClientAuthProfile() throws Exception { Settings.Builder builder = Settings.builder(); - addSSLSettingsForStore(builder, - "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient-client-profile.jks", - "testclient-client-profile"); + addSSLSettingsForPEMFiles( + builder, + "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient-client-profile.pem", + "testclient-client-profile", + "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient-client-profile.crt", + Arrays.asList("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")); try (TransportClient transportClient = createTransportClient(builder.build())) { transportClient.addTransportAddress(new TransportAddress(InetAddress.getLoopbackAddress(), getProfilePort("no_client_auth"))); @@ -174,16 +180,19 @@ public void testThatProfileTransportClientCanConnectToNoClientAuthProfile() thro } /** - * Uses a transport client with a custom keystore; this keystore testclient-client-profile.jks trusts the testnode + * Uses a transport client with a custom key pair; TransportClient only trusts the testnode * certificate and had its own self signed certificate. 
This test connects to the default profile, which * uses a truststore that does not trust the testclient-client-profile certificate and requires client authentication * so the connection should always fail */ public void testThatProfileTransportClientCannotConnectToDefaultProfile() throws Exception { Settings.Builder builder = Settings.builder(); - addSSLSettingsForStore(builder, - "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient-client-profile.jks", - "testclient-client-profile"); + addSSLSettingsForPEMFiles( + builder, + "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient-client-profile.pem", + "testclient-client-profile", + "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient-client-profile.crt", + Arrays.asList("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")); try (TransportClient transportClient = createTransportClient(builder.build())) { TransportAddress transportAddress = randomFrom(internalCluster().getInstance(Transport.class).boundAddress().boundAddresses()); transportClient.addTransportAddress(transportAddress); @@ -253,19 +262,17 @@ public void testThatTransportClientCannotConnectToNoClientAuthProfile() throws E } /** - * Uses a transport client with a custom truststore; this truststore truststore-testnode-only only trusts the testnode - * certificate and contains no other certification. This test connects to the no_client_auth profile, which uses - * the testnode certificate and does not require to present a certificate, so this connection should always succeed + * Uses a transport client that only trusts the testnode certificate. This test connects to the no_client_auth profile, + * which uses the testnode certificate and does not require to present a certificate, so this connection should always succeed */ public void testThatTransportClientWithOnlyTruststoreCanConnectToNoClientAuthProfile() throws Exception { Settings settings = Settings.builder() - .put(SecurityField.USER_SETTING.getKey(), TEST_USER_NAME + ":" + TEST_PASSWORD) - .put("cluster.name", internalCluster().getClusterName()) - .put("xpack.security.transport.ssl.enabled", true) - .put("xpack.ssl.truststore.path", - getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/truststore-testnode-only.jks")) - .put("xpack.ssl.truststore.password", "truststore-testnode-only") - .build(); + .put(SecurityField.USER_SETTING.getKey(), TEST_USER_NAME + ":" + TEST_PASSWORD) + .put("cluster.name", internalCluster().getClusterName()) + .put("xpack.security.transport.ssl.enabled", true) + .put("xpack.ssl.certificate_authorities", + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")) + .build(); try (TransportClient transportClient = new TestXPackTransportClient(settings, Collections.singletonList(LocalStateSecurity.class))) { transportClient.addTransportAddress(new TransportAddress(InetAddress.getLoopbackAddress(), @@ -274,21 +281,19 @@ public void testThatTransportClientWithOnlyTruststoreCanConnectToNoClientAuthPro } /** - * Uses a transport client with a custom truststore; this truststore truststore-testnode-only only trusts the testnode - * certificate and contains no other certification. This test connects to the client profile, which uses + * Uses a transport client that only trusts the testnode certificate. 
This test connects to the client profile, which uses * the testnode certificate and requires the client to present a certificate, so this connection will never work as * the client has no certificate to present */ public void testThatTransportClientWithOnlyTruststoreCannotConnectToClientProfile() throws Exception { Settings settings = Settings.builder() - .put(SecurityField.USER_SETTING.getKey(), TEST_USER_NAME + ":" + TEST_PASSWORD) - .put("cluster.name", internalCluster().getClusterName()) - .put("xpack.security.transport.ssl.enabled", true) - .put("xpack.ssl.client_authentication", SSLClientAuth.REQUIRED) - .put("xpack.ssl.truststore.path", - getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/truststore-testnode-only.jks")) - .put("xpack.ssl.truststore.password", "truststore-testnode-only") - .build(); + .put(SecurityField.USER_SETTING.getKey(), TEST_USER_NAME + ":" + TEST_PASSWORD) + .put("cluster.name", internalCluster().getClusterName()) + .put("xpack.security.transport.ssl.enabled", true) + .put("xpack.ssl.client_authentication", SSLClientAuth.REQUIRED) + .put("xpack.ssl.certificate_authorities", + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")) + .build(); try (TransportClient transportClient = new TestXPackTransportClient(settings, Collections.singletonList(LocalStateSecurity.class))) { transportClient.addTransportAddress(new TransportAddress(InetAddress.getLoopbackAddress(), getProfilePort("client"))); @@ -300,21 +305,19 @@ public void testThatTransportClientWithOnlyTruststoreCannotConnectToClientProfil } /** - * Uses a transport client with a custom truststore; this truststore truststore-testnode-only only trusts the testnode - * certificate and contains no other certification. This test connects to the default profile, which uses + * Uses a transport client that only trusts the testnode certificate. 
This test connects to the default profile, which uses * the testnode certificate and requires the client to present a certificate, so this connection will never work as * the client has no certificate to present */ public void testThatTransportClientWithOnlyTruststoreCannotConnectToDefaultProfile() throws Exception { Settings settings = Settings.builder() - .put(SecurityField.USER_SETTING.getKey(), TEST_USER_NAME + ":" + TEST_PASSWORD) - .put("cluster.name", internalCluster().getClusterName()) - .put("xpack.security.transport.ssl.enabled", true) - .put("xpack.ssl.client_authentication", SSLClientAuth.REQUIRED) - .put("xpack.ssl.truststore.path", - getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/truststore-testnode-only.jks")) - .put("xpack.ssl.truststore.password", "truststore-testnode-only") - .build(); + .put(SecurityField.USER_SETTING.getKey(), TEST_USER_NAME + ":" + TEST_PASSWORD) + .put("cluster.name", internalCluster().getClusterName()) + .put("xpack.security.transport.ssl.enabled", true) + .put("xpack.ssl.client_authentication", SSLClientAuth.REQUIRED) + .put("xpack.ssl.certificate_authorities", + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt")) + .build(); try (TransportClient transportClient = new TestXPackTransportClient(settings, Collections.singletonList(LocalStateSecurity.class))) { transportClient.addTransportAddress(randomFrom(internalCluster().getInstance(Transport.class).boundAddress().boundAddresses())); @@ -332,11 +335,11 @@ public void testThatTransportClientWithOnlyTruststoreCannotConnectToDefaultProfi */ public void testThatSSLTransportClientWithNoTruststoreCannotConnectToDefaultProfile() throws Exception { Settings settings = Settings.builder() - .put(SecurityField.USER_SETTING.getKey(), TEST_USER_NAME + ":" + TEST_PASSWORD) - .put("cluster.name", internalCluster().getClusterName()) - .put("xpack.ssl.client_authentication", SSLClientAuth.REQUIRED) - .put("xpack.security.transport.ssl.enabled", true) - .build(); + .put(SecurityField.USER_SETTING.getKey(), TEST_USER_NAME + ":" + TEST_PASSWORD) + .put("cluster.name", internalCluster().getClusterName()) + .put("xpack.ssl.client_authentication", SSLClientAuth.REQUIRED) + .put("xpack.security.transport.ssl.enabled", true) + .build(); try (TransportClient transportClient = new TestXPackTransportClient(settings, Collections.singletonList(LocalStateSecurity.class))) { transportClient.addTransportAddress(randomFrom(internalCluster().getInstance(Transport.class).boundAddress().boundAddresses())); @@ -354,11 +357,11 @@ public void testThatSSLTransportClientWithNoTruststoreCannotConnectToDefaultProf */ public void testThatSSLTransportClientWithNoTruststoreCannotConnectToClientProfile() throws Exception { Settings settings = Settings.builder() - .put(SecurityField.USER_SETTING.getKey(), TEST_USER_NAME + ":" + TEST_PASSWORD) - .put("cluster.name", internalCluster().getClusterName()) - .put("xpack.ssl.client_authentication", SSLClientAuth.REQUIRED) - .put("xpack.security.transport.ssl.enabled", true) - .build(); + .put(SecurityField.USER_SETTING.getKey(), TEST_USER_NAME + ":" + TEST_PASSWORD) + .put("cluster.name", internalCluster().getClusterName()) + .put("xpack.ssl.client_authentication", SSLClientAuth.REQUIRED) + .put("xpack.security.transport.ssl.enabled", true) + .build(); try (TransportClient transportClient = new TestXPackTransportClient(settings, Collections.singletonList(LocalStateSecurity.class))) { transportClient.addTransportAddress(new 
TransportAddress(InetAddress.getLoopbackAddress(), getProfilePort("client"))); @@ -376,11 +379,11 @@ public void testThatSSLTransportClientWithNoTruststoreCannotConnectToClientProfi */ public void testThatSSLTransportClientWithNoTruststoreCannotConnectToNoClientAuthProfile() throws Exception { Settings settings = Settings.builder() - .put(SecurityField.USER_SETTING.getKey(), TEST_USER_NAME + ":" + TEST_PASSWORD) - .put("cluster.name", internalCluster().getClusterName()) - .put("xpack.ssl.client_authentication", SSLClientAuth.REQUIRED) - .put("xpack.security.transport.ssl.enabled", true) - .build(); + .put(SecurityField.USER_SETTING.getKey(), TEST_USER_NAME + ":" + TEST_PASSWORD) + .put("cluster.name", internalCluster().getClusterName()) + .put("xpack.ssl.client_authentication", SSLClientAuth.REQUIRED) + .put("xpack.security.transport.ssl.enabled", true) + .build(); try (TransportClient transportClient = new TestXPackTransportClient(settings, Collections.singletonList(LocalStateSecurity.class))) { transportClient.addTransportAddress(new TransportAddress(InetAddress.getLoopbackAddress(), diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/SslNullCipherTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/SslNullCipherTests.java index fb5d567bb3627..7427c5a67e92d 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/SslNullCipherTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ssl/SslNullCipherTests.java @@ -10,12 +10,18 @@ import org.elasticsearch.client.Client; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.test.SecurityIntegTestCase; +import org.junit.BeforeClass; /** * An extremely simple test that shows SSL will work with a cipher that does not perform encryption */ public class SslNullCipherTests extends SecurityIntegTestCase { + @BeforeClass + public static void muteInFips() { + assumeFalse("Can't run in a FIPS JVM", inFipsJvm()); + } + @Override public boolean transportSSLEnabled() { return true; diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLClientAuthTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLClientAuthTests.java index fdb66916884c7..d205c7cd9334e 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLClientAuthTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLClientAuthTests.java @@ -22,20 +22,22 @@ import org.elasticsearch.transport.Transport; import org.elasticsearch.xpack.core.TestXPackTransportClient; import org.elasticsearch.xpack.core.security.SecurityField; +import org.elasticsearch.xpack.core.ssl.CertParsingUtils; +import org.elasticsearch.xpack.core.ssl.PemUtils; import org.elasticsearch.xpack.core.ssl.SSLClientAuth; import org.elasticsearch.xpack.security.LocalStateSecurity; -import javax.net.ssl.KeyManagerFactory; +import javax.net.ssl.KeyManager; import javax.net.ssl.SSLContext; -import javax.net.ssl.TrustManagerFactory; +import javax.net.ssl.TrustManager; import java.io.IOException; -import java.io.InputStream; import java.nio.file.Files; import java.nio.file.Path; -import java.security.KeyStore; import java.security.SecureRandom; import java.security.cert.CertPathBuilderException; +import java.util.Arrays; +import java.util.Collections; import static 
org.elasticsearch.xpack.core.security.authc.support.UsernamePasswordToken.basicAuthHeaderValue; import static org.hamcrest.Matchers.containsString; @@ -74,7 +76,11 @@ public void testThatHttpFailsWithoutSslClientAuth() throws IOException { } catch (IOException e) { Throwable t = ExceptionsHelper.unwrap(e, CertPathBuilderException.class); assertThat(t, instanceOf(CertPathBuilderException.class)); - assertThat(t.getMessage(), containsString("unable to find valid certification path to requested target")); + if (inFipsJvm()) { + assertThat(t.getMessage(), containsString("Unable to find certificate chain")); + } else { + assertThat(t.getMessage(), containsString("unable to find valid certification path to requested target")); + } } } @@ -89,24 +95,27 @@ public void testThatHttpWorksWithSslClientAuth() throws IOException { } public void testThatTransportWorksWithoutSslClientAuth() throws IOException { - // specify an arbitrary keystore, that does not include the certs needed to connect to the transport protocol - Path store = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient-client-profile.jks"); + // specify an arbitrary key and certificate - not the certs needed to connect to the transport protocol + Path keyPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient-client-profile.pem"); + Path certPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient-client-profile.crt"); + Path nodeCertPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt"); - if (Files.notExists(store)) { - throw new ElasticsearchException("store path doesn't exist"); + if (Files.notExists(keyPath) || Files.notExists(certPath)) { + throw new ElasticsearchException("key or certificate path doesn't exist"); } MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("xpack.ssl.keystore.secure_password", "testclient-client-profile"); + secureSettings.setString("xpack.ssl.secure_key_passphrase", "testclient-client-profile"); Settings settings = Settings.builder() - .put("xpack.security.transport.ssl.enabled", true) - .put("xpack.ssl.client_authentication", SSLClientAuth.NONE) - .put("xpack.ssl.keystore.path", store) - .setSecureSettings(secureSettings) - .put("cluster.name", internalCluster().getClusterName()) - .put(SecurityField.USER_SETTING.getKey(), - transportClientUsername() + ":" + new String(transportClientPassword().getChars())) - .build(); + .put("xpack.security.transport.ssl.enabled", true) + .put("xpack.ssl.client_authentication", SSLClientAuth.NONE) + .put("xpack.ssl.key", keyPath) + .put("xpack.ssl.certificate", certPath) + .put("xpack.ssl.certificate_authorities", nodeCertPath) + .setSecureSettings(secureSettings) + .put("cluster.name", internalCluster().getClusterName()) + .put(SecurityField.USER_SETTING.getKey(), transportClientUsername() + ":" + new String(transportClientPassword().getChars())) + .build(); try (TransportClient client = new TestXPackTransportClient(settings, LocalStateSecurity.class)) { Transport transport = internalCluster().getDataNodeInstance(Transport.class); TransportAddress transportAddress = transport.boundAddress().publishAddress(); @@ -117,19 +126,19 @@ public void testThatTransportWorksWithoutSslClientAuth() throws IOException { } private SSLContext getSSLContext() { - try (InputStream in = - Files.newInputStream(getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.jks"))) { - 
KeyStore keyStore = KeyStore.getInstance("jks"); - keyStore.load(in, "testclient".toCharArray()); - TrustManagerFactory tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm()); - tmf.init(keyStore); - KeyManagerFactory kmf = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm()); - kmf.init(keyStore, "testclient".toCharArray()); + try { + String certPath = "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.crt"; + String nodeCertPath = "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt"; + String keyPath = "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.pem"; + TrustManager tm = CertParsingUtils.trustManager(CertParsingUtils.readCertificates(Arrays.asList(getDataPath + (certPath), getDataPath(nodeCertPath)))); + KeyManager km = CertParsingUtils.keyManager(CertParsingUtils.readCertificates(Collections.singletonList(getDataPath + (certPath))), PemUtils.readPrivateKey(getDataPath(keyPath), "testclient"::toCharArray), "testclient".toCharArray()); SSLContext context = SSLContext.getInstance("TLSv1.2"); - context.init(kmf.getKeyManagers(), tmf.getTrustManagers(), new SecureRandom()); + context.init(new KeyManager[] { km }, new TrustManager[] { tm }, new SecureRandom()); return context; } catch (Exception e) { - throw new ElasticsearchException("failed to initialize a TrustManagerFactory", e); + throw new ElasticsearchException("failed to initialize SSLContext", e); } } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLReloadIntegTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLReloadIntegTests.java index 4269d8a78eb7a..03f963cc59ca6 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLReloadIntegTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLReloadIntegTests.java @@ -12,9 +12,7 @@ import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.env.TestEnvironment; import org.elasticsearch.test.SecurityIntegTestCase; -import org.elasticsearch.test.SecuritySettingsSource; import org.elasticsearch.transport.Transport; -import org.elasticsearch.xpack.core.ssl.CertParsingUtils; import org.elasticsearch.xpack.core.ssl.SSLConfiguration; import org.elasticsearch.xpack.core.ssl.SSLService; @@ -23,16 +21,12 @@ import javax.net.ssl.SSLSocketFactory; import java.io.IOException; -import java.io.InputStream; -import java.io.OutputStream; import java.net.SocketException; import java.nio.file.AtomicMoveNotSupportedException; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.StandardCopyOption; -import java.security.KeyStore; -import java.security.cert.X509Certificate; -import java.util.Collections; +import java.util.Arrays; import java.util.concurrent.CountDownLatch; import static org.hamcrest.Matchers.containsString; @@ -43,34 +37,51 @@ */ public class SSLReloadIntegTests extends SecurityIntegTestCase { - private Path nodeStorePath; + private Path nodeKeyPath; + private Path nodeCertPath; + private Path clientCertPath; + private Path updateableCertPath; @Override public Settings nodeSettings(int nodeOrdinal) { - //Node starts with testnode.jks - if (nodeStorePath == null) { - Path origPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks"); - Path tempDir = createTempDir(); - nodeStorePath = tempDir.resolve("testnode.jks"); - try { - Files.copy(origPath, nodeStorePath); - } catch 
(IOException e) { - throw new ElasticsearchException("failed to copy keystore"); + // Nodes start trusting testnode.crt and testclient.crt + Path origKeyPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem"); + Path origCertPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt"); + Path origClientCertPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.crt"); + Path tempDir = createTempDir(); + try { + if (nodeKeyPath == null) { + nodeKeyPath = tempDir.resolve("testnode.pem"); + Files.copy(origKeyPath, nodeKeyPath); + } + if (nodeCertPath == null) { + nodeCertPath = tempDir.resolve("testnode.crt"); + Files.copy(origCertPath, nodeCertPath); + } + if (clientCertPath == null) { + clientCertPath = tempDir.resolve("testclient.crt"); + Files.copy(origClientCertPath, clientCertPath); } + // Placeholder trusted certificate that will be updated later on + if (updateableCertPath == null) { + updateableCertPath = tempDir.resolve("updateable.crt"); + Files.copy(origCertPath, updateableCertPath); + } + } catch (IOException e) { + throw new ElasticsearchException("failed to copy key or certificate", e); } + Settings settings = super.nodeSettings(nodeOrdinal); Settings.Builder builder = Settings.builder() .put(settings.filter((s) -> s.startsWith("xpack.ssl.") == false)); - - SecuritySettingsSource.addSSLSettingsForStore(builder, - "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks", "testnode"); - builder.put("resource.reload.interval.high", "1s") - .put("xpack.ssl.keystore.path", nodeStorePath); - - if (builder.get("xpack.ssl.truststore.path") != null) { - builder.put("xpack.ssl.truststore.path", nodeStorePath); - } + builder.put("path.home", createTempDir()) + .put("xpack.ssl.key", nodeKeyPath) + .put("xpack.ssl.key_passphrase", "testnode") + .put("xpack.ssl.certificate", nodeCertPath) + .putList("xpack.ssl.certificate_authorities", Arrays.asList(nodeCertPath.toString(), clientCertPath.toString(), + updateableCertPath.toString())) + .put("resource.reload.interval.high", "1s"); return builder.build(); } @@ -81,25 +92,27 @@ protected boolean transportSSLEnabled() { } public void testThatSSLConfigurationReloadsOnModification() throws Exception { - Path keystorePath = createTempDir().resolve("testnode_updated.jks"); - Files.copy(getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_updated.jks"), keystorePath); - X509Certificate certificate = CertParsingUtils.readX509Certificates(Collections.singletonList(getDataPath - ("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_updated.crt")))[0]; + Path keyPath = createTempDir().resolve("testnode_updated.pem"); + Path certPath = createTempDir().resolve("testnode_updated.crt"); + Files.copy(getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_updated.pem"), keyPath); + Files.copy(getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode_updated.crt"), certPath); MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("xpack.ssl.keystore.secure_password", "testnode"); - secureSettings.setString("xpack.ssl.truststore.secure_password", "testnode"); + secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode"); Settings settings = Settings.builder() - .put("path.home", createTempDir()) - .put("xpack.ssl.keystore.path", keystorePath) - .put("xpack.ssl.truststore.path", 
nodeStorePath) - .setSecureSettings(secureSettings) - .build(); + .put("path.home", createTempDir()) + .put("xpack.ssl.key", keyPath) + .put("xpack.ssl.certificate", certPath) + .putList("xpack.ssl.certificate_authorities", Arrays.asList(nodeCertPath.toString(), clientCertPath.toString(), + updateableCertPath.toString())) + .setSecureSettings(secureSettings) + .build(); String node = randomFrom(internalCluster().getNodeNames()); SSLService sslService = new SSLService(settings, TestEnvironment.newEnvironment(settings)); SSLConfiguration sslConfiguration = sslService.getSSLConfiguration("xpack.ssl"); SSLSocketFactory sslSocketFactory = sslService.sslSocketFactory(sslConfiguration); TransportAddress address = internalCluster() - .getInstance(Transport.class, node).boundAddress().publishAddress(); + .getInstance(Transport.class, node).boundAddress().publishAddress(); + // Fails as our nodes do not trust testnode_updated.crt try (SSLSocket socket = (SSLSocket) sslSocketFactory.createSocket(address.getAddress(), address.getPort())) { assertThat(socket.isConnected(), is(true)); socket.startHandshake(); @@ -107,19 +120,11 @@ public void testThatSSLConfigurationReloadsOnModification() throws Exception { } catch (SSLHandshakeException | SocketException expected) { logger.trace("expected exception", expected); } - KeyStore nodeStore = KeyStore.getInstance("jks"); - try (InputStream in = Files.newInputStream(nodeStorePath)) { - nodeStore.load(in, "testnode".toCharArray()); - } - nodeStore.setCertificateEntry("newcert", certificate); - Path path = nodeStorePath.getParent().resolve("updated.jks"); - try (OutputStream out = Files.newOutputStream(path)) { - nodeStore.store(out, "testnode".toCharArray()); - } + // Copy testnode_updated.crt to the placeholder updateable.crt so that the nodes will start trusting it now try { - Files.move(path, nodeStorePath, StandardCopyOption.REPLACE_EXISTING, StandardCopyOption.ATOMIC_MOVE); + Files.move(certPath, updateableCertPath, StandardCopyOption.REPLACE_EXISTING, StandardCopyOption.ATOMIC_MOVE); } catch (AtomicMoveNotSupportedException e) { - Files.move(path, nodeStorePath, StandardCopyOption.REPLACE_EXISTING); + Files.move(certPath, updateableCertPath, StandardCopyOption.REPLACE_EXISTING); } CountDownLatch latch = new CountDownLatch(1); assertBusy(() -> { diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLTrustRestrictionsTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLTrustRestrictionsTests.java index beebf928fcf27..cf77ca975a4df 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLTrustRestrictionsTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/ssl/SSLTrustRestrictionsTests.java @@ -72,6 +72,7 @@ protected int maxNumberOfNodes() { @BeforeClass public static void setupCertificates() throws Exception { + assumeFalse("Can't run in a FIPS JVM, custom TrustManager implementations cannot be used.", inFipsJvm()); configPath = createTempDir(); Path caCertPath = PathUtils.get(SSLTrustRestrictionsTests.class.getResource ("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/nodes/ca.crt").toURI()); diff --git a/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/ldap/support/ad.crt b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/ldap/support/ad.crt new file mode 100644 index 0000000000000..2dbb06c49e6a6 --- /dev/null +++ 
b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/ldap/support/ad.crt @@ -0,0 +1,23 @@ +-----BEGIN CERTIFICATE----- +MIID1zCCAr+gAwIBAgIQWA24rVK7FopAgOHfEio/VjANBgkqhkiG9w0BAQsFADB+ +MRMwEQYKCZImiZPyLGQBGRYDY29tMR0wGwYKCZImiZPyLGQBGRYNZWxhc3RpY3Nl +YXJjaDEUMBIGCgmSJomT8ixkARkWBHRlc3QxEjAQBgoJkiaJk/IsZAEZFgJhZDEe +MBwGA1UEAxMVYWQtRUxBU1RJQ1NFQVJDSEFELUNBMB4XDTE0MDgyNzE2MjI0MloX +DTI5MDgyNzE2MzI0MlowfjETMBEGCgmSJomT8ixkARkWA2NvbTEdMBsGCgmSJomT +8ixkARkWDWVsYXN0aWNzZWFyY2gxFDASBgoJkiaJk/IsZAEZFgR0ZXN0MRIwEAYK +CZImiZPyLGQBGRYCYWQxHjAcBgNVBAMTFWFkLUVMQVNUSUNTRUFSQ0hBRC1DQTCC +ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALNNZsDJ+lhsE/pCIkNlq6/F +xwv3PU2M+E1/SbWrLEtfbb1ATnn98DwxjpCj00wS0bt26/7zrhHKyX5LaxyS27ER +8bKpLSO4qcVWzDIQnVNk2XfBrYS/Og+6Pi/Lw/ylt/vE++kHWIJBc4O6i+pPByOM +oypM6bh71kTkpK8OTPqf+HiPp0qKhRah6XVtqTc+kOCOku2+wkELbCz8RNzF9ca6 +Uu3YxLi73pNdk0wDTmg6JVaUyVRpSkjJH4BAp9SVma6Rxy6tbh4e5P+8K8lY9ptM +TBzTsDS1EhNK/92xULfQbGT814Z294pF3ARMEJ89N+aegS++kz7CqjciZ1+bA6EC +AwEAAaNRME8wCwYDVR0PBAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYE +FIEKG0KdSVNknKcMZkbTlKo7N8MjMBAGCSsGAQQBgjcVAQQDAgEAMA0GCSqGSIb3 +DQEBCwUAA4IBAQBgbWBXPbEMTEsiVWzoxmTw1wJASBdPahx6CggutjGq3ASjby4p +nVCTwE4xdDEVyFGmeslSp9+23XjBuaiqVPtYw8P8hnG269J0q4cOF/VXOccRLeOw +HVDBv2a7xzgBSwc1KB50TLv07stcBmBYNu8anN6EwGksdgjb8IjRV6U3U+IvFNrI +rGifuIc/iRZD4Clhnpxw8tCsgcrcmz9CU7CN5RxKVEpZ6ou6ZjHO8l8H0t9zWrSI +PL+33iBGHNWlyU63N93XgJtxV1em1hHryLtTTtaVZJJ3R0OrLrUpG8SQ7zCUy62f +YtImFPClUMXY03yH+4DAhflueRvY/D1AKL12 +-----END CERTIFICATE----- diff --git a/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/ldap/support/smb_ca.crt b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/ldap/support/smb_ca.crt new file mode 100644 index 0000000000000..be5a6d0264579 --- /dev/null +++ b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/ldap/support/smb_ca.crt @@ -0,0 +1,22 @@ +-----BEGIN CERTIFICATE----- +MIIDmzCCAoOgAwIBAgIUdwsnIxjgSneHNVKT6JNCCsrQ3T0wDQYJKoZIhvcNAQEL +BQAwNDEyMDAGA1UEAxMpRWxhc3RpYyBDZXJ0aWZpY2F0ZSBUb29sIEF1dG9nZW5l +cmF0ZWQgQ0EwHhcNMTgwMjE1MTc0ODQ2WhcNMjEwMjE0MTc0ODQ2WjA0MTIwMAYD +VQQDEylFbGFzdGljIENlcnRpZmljYXRlIFRvb2wgQXV0b2dlbmVyYXRlZCBDQTCC +ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKQAP/hdsWdu3Ck/Zteosity +nmXJTCnkecBYSLtjgYh9rPDFppj9KdsZ7+5P9FvxLv/t4Yw81YI24TrHk0CnMrD/ +QBaXDiPGeT9b6T/gWWVm1zQj2/567gH2UaIkIffy7q09BI9ICXSKDBRXRMLgVR19 +iiJkwWb3b5TVvaQI4M8sEmJIHXei2/cfEKVR5hBprtzeKkvg6o9DXx+nDv2ZEUZ7 +it5pEN5AjD5t0S3ymtlUU5lqnr8er6/Qcrua2EXxE1HyPEkpN/Cwl7tF1ICMdguf +vght5ql1/Pk43VmBMulI/6z5e+7GZ1+x79YA17gabtGJ+onB0zJxgDBj0tto7H8C +AwEAAaOBpDCBoTAdBgNVHQ4EFgQUZo2Y3maL2NoxbbkwRZiC37k6QMEwbwYDVR0j +BGgwZoAUZo2Y3maL2NoxbbkwRZiC37k6QMGhOKQ2MDQxMjAwBgNVBAMTKUVsYXN0 +aWMgQ2VydGlmaWNhdGUgVG9vbCBBdXRvZ2VuZXJhdGVkIENBghR3CycjGOBKd4c1 +UpPok0IKytDdPTAPBgNVHRMBAf8EBTADAQH/MA0GCSqGSIb3DQEBCwUAA4IBAQBf +mkc4bvUR5+We/2rRqCmP4LFnl/LxfbZ9/pUPRdcxuowuK7YfxN8i44VXGpJvLtec +izhA8gvlj6GbYB/GNlHMogqEORbrMlu2o5Cev4HE/pcWpoqtVaDJqI5Hq4763EmJ +p2dXGMmU04H4LtkcCEt3xQfLQ+QIP4Dl2yEsNd248BKSsscCGm9V3vgzFzbdgndo +zUWv9hQCaEsKNtqvnkTqDy2uFjnf+xNoXFr/bI94gvD9HlZHnIC+g0TL5jjtSfCH +gjeXhC2bBKFtlSt4ClIdZTXWievYs6YDRREfaOi4F0757A/gf+hT0fjZ+9WWnUeM +UuvUnl71CNRnJ5JlNKBA +-----END CERTIFICATE----- diff --git a/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/ldap/support/smb_cert.crt b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/ldap/support/smb_cert.crt new file mode 100644 index 
0000000000000..59ecbd22e8b23 --- /dev/null +++ b/x-pack/plugin/security/src/test/resources/org/elasticsearch/xpack/security/authc/ldap/support/smb_cert.crt @@ -0,0 +1,22 @@ +-----BEGIN CERTIFICATE----- +MIIDoDCCAoigAwIBAgIUMVGoHuyNTjTFaoRmqFELz75jzDEwDQYJKoZIhvcNAQEL +BQAwNDEyMDAGA1UEAxMpRWxhc3RpYyBDZXJ0aWZpY2F0ZSBUb29sIEF1dG9nZW5l +cmF0ZWQgQ0EwHhcNMTgwMjE1MTc0OTExWhcNMjEwMjE0MTc0OTExWjARMQ8wDQYD +VQQDEwZzYW1iYTQwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtGBwa +n+7JN2vweSUsYh4zPmh8RPIE+nEVjK1lx/rADUBY7UVjfTYC+MVKKiezZe7gYCNT +7JNKazPpgVI9e3ZFKw/UxomLqRuuvn5bTh+1tMs3afY5+GGzi7oPmEbBO3ceg0Hi +rNSTDa1rfroZnRYK8uIeSZacQnAW90plITI7rBBt9jq+W9albFbDybfDgNv+yS/C +rzIsofm4rbFC3SMRYfrT6HvwDhjOmmYKZci5x7tsn0T+3tSiR44Bw5/DgiN5kX3m +/kl9qg1eoYWbCUy1dKmQlb4Nb4uNcxrIugLB3zjBkfhMZ0OHoveKh/lJASTWik9k +xQ9rEYbpsRbuXpsHAgMBAAGjgcwwgckwHQYDVR0OBBYEFJOLa7UXKtLPibgKeFh7 +Kq1+rS0/MG8GA1UdIwRoMGaAFGaNmN5mi9jaMW25MEWYgt+5OkDBoTikNjA0MTIw +MAYDVQQDEylFbGFzdGljIENlcnRpZmljYXRlIFRvb2wgQXV0b2dlbmVyYXRlZCBD +QYIUdwsnIxjgSneHNVKT6JNCCsrQ3T0wLAYDVR0RBCUwI4IJbG9jYWxob3N0hwR/ +AAABhxAAAAAAAAAAAAAAAAAAAAABMAkGA1UdEwQCMAAwDQYJKoZIhvcNAQELBQAD +ggEBAEHqT1WHkcF8DuOgyIBx7wKcUVQ5H1qYYlJ1xgMGrKFFZLUzouLcON7oadEu +HLIJ4Z3AKD3bqWpcls5XJ9MTECGR48tou67x9cXqTV7jR3Rh0H/VGwzwhR85vbpu +o8ielOPL8XAQOfnAFESJii5sfCU4ZwLg+3evmGZdKfhU6rqQtLimgG/Gm96vOJne +y0a/TZTWrfAarithkOHHXSSAhEI5SdW5SlZAytF4AmYqFvafwxe1+NyFwfCRy0Xl +H40WgVsq+z84psU+WyORb3THX5rgB4au9nuMXOqFKAtrJSI/uApncYraaqU28rqB +gYd8XrtjhKOLw+6viqAKu8l7/cs= +-----END CERTIFICATE----- diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookHttpsIntegrationTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookHttpsIntegrationTests.java index 9858a5cd11851..a69445386d027 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookHttpsIntegrationTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/actions/webhook/WebhookHttpsIntegrationTests.java @@ -44,12 +44,14 @@ public class WebhookHttpsIntegrationTests extends AbstractWatcherIntegrationTest @Override protected Settings nodeSettings(int nodeOrdinal) { - Path resource = getDataPath("/org/elasticsearch/xpack/security/keystore/testnode.jks"); + Path keyPath = getDataPath("/org/elasticsearch/xpack/security/keystore/testnode.pem"); + Path certPath = getDataPath("/org/elasticsearch/xpack/security/keystore/testnode.crt"); return Settings.builder() - .put(super.nodeSettings(nodeOrdinal)) - .put("xpack.http.ssl.keystore.path", resource.toString()) - .put("xpack.http.ssl.keystore.password", "testnode") - .build(); + .put(super.nodeSettings(nodeOrdinal)) + .put("xpack.http.ssl.key", keyPath) + .put("xpack.http.ssl.certificate", certPath) + .put("xpack.http.ssl.keystore.password", "testnode") + .build(); } @Before diff --git a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/common/http/HttpClientTests.java b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/common/http/HttpClientTests.java index 10618b36e8ae9..03dcd7947155e 100644 --- a/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/common/http/HttpClientTests.java +++ b/x-pack/plugin/watcher/src/test/java/org/elasticsearch/xpack/watcher/common/http/HttpClientTests.java @@ -169,30 +169,31 @@ public void testNoPathSpecified() throws Exception { } public void testHttps() throws Exception { - Path resource = 
getDataPath("/org/elasticsearch/xpack/security/keystore/truststore-testnode-only.jks"); + Path trustedCertPath = getDataPath("/org/elasticsearch/xpack/security/keystore/truststore-testnode-only.crt"); + Path certPath = getDataPath("/org/elasticsearch/xpack/security/keystore/testnode.crt"); + Path keyPath = getDataPath("/org/elasticsearch/xpack/security/keystore/testnode.pem"); MockSecureSettings secureSettings = new MockSecureSettings(); Settings settings; if (randomBoolean()) { - secureSettings.setString("xpack.http.ssl.truststore.secure_password", "truststore-testnode-only"); settings = Settings.builder() - .put("xpack.http.ssl.truststore.path", resource.toString()) - .setSecureSettings(secureSettings) - .build(); + .put("xpack.http.ssl.certificate_authorities", trustedCertPath) + .setSecureSettings(secureSettings) + .build(); } else { - secureSettings.setString("xpack.ssl.truststore.secure_password", "truststore-testnode-only"); settings = Settings.builder() - .put("xpack.ssl.truststore.path", resource.toString()) - .setSecureSettings(secureSettings) - .build(); + .put("xpack.ssl.certificate_authorities", trustedCertPath) + .setSecureSettings(secureSettings) + .build(); } try (HttpClient client = new HttpClient(settings, authRegistry, new SSLService(settings, environment))) { secureSettings = new MockSecureSettings(); // We can't use the client created above for the server since it is only a truststore - secureSettings.setString("xpack.ssl.keystore.secure_password", "testnode"); + secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode"); Settings settings2 = Settings.builder() - .put("xpack.ssl.keystore.path", getDataPath("/org/elasticsearch/xpack/security/keystore/testnode.jks")) - .setSecureSettings(secureSettings) - .build(); + .put("xpack.ssl.key", keyPath) + .put("xpack.ssl.certificate", certPath) + .setSecureSettings(secureSettings) + .build(); TestsSSLService sslService = new TestsSSLService(settings2, environment); testSslMockWebserver(client, sslService.sslContext(), false); @@ -200,34 +201,40 @@ public void testHttps() throws Exception { } public void testHttpsDisableHostnameVerification() throws Exception { - Path resource = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.jks"); + Path certPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.crt"); + Path keyPath = getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.pem"); Settings settings; if (randomBoolean()) { MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("xpack.http.ssl.truststore.secure_password", "testnode-no-subjaltname"); - settings = Settings.builder() - .put("xpack.http.ssl.truststore.path", resource.toString()) - .put("xpack.http.ssl.verification_mode", randomFrom(VerificationMode.NONE, VerificationMode.CERTIFICATE)) - .setSecureSettings(secureSettings) - .build(); + Settings.Builder builder = Settings.builder() + .put("xpack.http.ssl.certificate_authorities", certPath); + if (inFipsJvm()) { + //Can't use TrustAllConfig in FIPS mode + builder.put("xpack.http.ssl.verification_mode", VerificationMode.CERTIFICATE); + } else { + builder.put("xpack.http.ssl.verification_mode", randomFrom(VerificationMode.NONE, VerificationMode.CERTIFICATE)); + } + settings = builder.build(); } else { - MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("xpack.ssl.truststore.secure_password", 
"testnode-no-subjaltname"); - settings = Settings.builder() - .put("xpack.ssl.truststore.path", resource.toString()) - .put("xpack.ssl.verification_mode", randomFrom(VerificationMode.NONE, VerificationMode.CERTIFICATE)) - .setSecureSettings(secureSettings) - .build(); + Settings.Builder builder = Settings.builder() + .put("xpack.ssl.certificate_authorities", certPath); + if (inFipsJvm()) { + //Can't use TrustAllConfig in FIPS mode + builder.put("xpack.ssl.verification_mode", VerificationMode.CERTIFICATE); + } else { + builder.put("xpack.ssl.verification_mode", randomFrom(VerificationMode.NONE, VerificationMode.CERTIFICATE)); + } + settings = builder.build(); } try (HttpClient client = new HttpClient(settings, authRegistry, new SSLService(settings, environment))) { MockSecureSettings secureSettings = new MockSecureSettings(); // We can't use the client created above for the server since it only defines a truststore - secureSettings.setString("xpack.ssl.keystore.secure_password", "testnode-no-subjaltname"); + secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode-no-subjaltname"); Settings settings2 = Settings.builder() - .put("xpack.ssl.keystore.path", - getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.jks")) - .setSecureSettings(secureSettings) - .build(); + .put("xpack.ssl.key", keyPath) + .put("xpack.ssl.certificate", certPath) + .setSecureSettings(secureSettings) + .build(); TestsSSLService sslService = new TestsSSLService(settings2, environment); testSslMockWebserver(client, sslService.sslContext(), false); @@ -235,13 +242,15 @@ public void testHttpsDisableHostnameVerification() throws Exception { } public void testHttpsClientAuth() throws Exception { - Path resource = getDataPath("/org/elasticsearch/xpack/security/keystore/testnode.jks"); + Path certPath = getDataPath("/org/elasticsearch/xpack/security/keystore/testnode.crt"); + Path keyPath = getDataPath("/org/elasticsearch/xpack/security/keystore/testnode.pem"); MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("xpack.ssl.keystore.secure_password", "testnode"); + secureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode"); Settings settings = Settings.builder() - .put("xpack.ssl.keystore.path", resource.toString()) - .setSecureSettings(secureSettings) - .build(); + .put("xpack.ssl.key", keyPath) + .put("xpack.ssl.certificate", certPath) + .setSecureSettings(secureSettings) + .build(); TestsSSLService sslService = new TestsSSLService(settings, environment); try (HttpClient client = new HttpClient(settings, authRegistry, sslService)) { @@ -365,30 +374,31 @@ public void testSetProxy() throws Exception { } public void testProxyCanHaveDifferentSchemeThanRequest() throws Exception { + Path trustedCertPath = getDataPath("/org/elasticsearch/xpack/security/keystore/truststore-testnode-only.crt"); + Path certPath = getDataPath("/org/elasticsearch/xpack/security/keystore/testnode.crt"); + Path keyPath = getDataPath("/org/elasticsearch/xpack/security/keystore/testnode.pem"); // this test fakes a proxy server that sends a response instead of forwarding it to the mock web server // on top of that the proxy request is HTTPS but the real request is HTTP only MockSecureSettings serverSecureSettings = new MockSecureSettings(); // We can't use the client created above for the server since it is only a truststore - serverSecureSettings.setString("xpack.ssl.keystore.secure_password", "testnode"); + 
serverSecureSettings.setString("xpack.ssl.secure_key_passphrase", "testnode"); Settings serverSettings = Settings.builder() - .put("xpack.ssl.keystore.path", getDataPath("/org/elasticsearch/xpack/security/keystore/testnode.jks")) - .setSecureSettings(serverSecureSettings) - .build(); + .put("xpack.ssl.key", keyPath) + .put("xpack.ssl.certificate", certPath) + .setSecureSettings(serverSecureSettings) + .build(); TestsSSLService sslService = new TestsSSLService(serverSettings, environment); try (MockWebServer proxyServer = new MockWebServer(sslService.sslContext(), false)) { proxyServer.enqueue(new MockResponse().setResponseCode(200).setBody("fullProxiedContent")); proxyServer.start(); - Path resource = getDataPath("/org/elasticsearch/xpack/security/keystore/truststore-testnode-only.jks"); MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.setString("xpack.http.ssl.truststore.secure_password", "truststore-testnode-only"); Settings settings = Settings.builder() .put(HttpSettings.PROXY_HOST.getKey(), "localhost") .put(HttpSettings.PROXY_PORT.getKey(), proxyServer.getPort()) .put(HttpSettings.PROXY_SCHEME.getKey(), "https") - .put("xpack.http.ssl.truststore.path", resource.toString()) - .setSecureSettings(secureSettings) + .put("xpack.http.ssl.certificate_authorities", trustedCertPath) .build(); HttpRequest.Builder requestBuilder = HttpRequest.builder("localhost", webServer.getPort()) diff --git a/x-pack/plugin/watcher/src/test/resources/org/elasticsearch/xpack/security/keystore/testnode.crt b/x-pack/plugin/watcher/src/test/resources/org/elasticsearch/xpack/security/keystore/testnode.crt new file mode 100644 index 0000000000000..37e142afb23fe --- /dev/null +++ b/x-pack/plugin/watcher/src/test/resources/org/elasticsearch/xpack/security/keystore/testnode.crt @@ -0,0 +1,21 @@ +-----BEGIN CERTIFICATE----- +MIIDbTCCAlWgAwIBAgIJAJ+K5mGS3n/AMA0GCSqGSIb3DQEBCwUAMEgxDDAKBgNV +BAoTA29yZzEWMBQGA1UECxMNZWxhc3RpY3NlYXJjaDEgMB4GA1UEAxMXRWxhc3Rp +Y3NlYXJjaCBUZXN0IE5vZGUwHhcNMTQxMjE2MTcwNDQ1WhcNMTgxMjE1MTcwNDQ1 +WjBIMQwwCgYDVQQKEwNvcmcxFjAUBgNVBAsTDWVsYXN0aWNzZWFyY2gxIDAeBgNV +BAMTF0VsYXN0aWNzZWFyY2ggVGVzdCBOb2RlMIIBIjANBgkqhkiG9w0BAQEFAAOC +AQ8AMIIBCgKCAQEAzhpW7iwkm+Og+HP7U00nbmh0Hy9Z2Ldp5i8tJSlSQwTxCCvO +rse6jwJQN98Dk1ApaSzimZrlKOotFyPV1L3fnOzJbTp1Yq/VsYP4zJkjWtID0qUf +8Rg8bLhjKAG+ZlLuai5XZqnLkdmqvQeR61VhpXWFm0Om153tWmAiHL18ywY71gXN +EnkeFo9OW4fDqkz6h7NJziYvU6URSKErZDEixk5GIPv9K9hiIfi0KQM6xaHp0d2w +VCyFVC0OUdugz6untURzJVx4U3X1bQcv/o2BoUotWh/5h8o5eeiiv2OGZ1XlO+33 +1tweYI4wFjDwnAyHHRr/rk2ZIBiBYGaSzHnuhQIDAQABo1owWDAJBgNVHRMEAjAA +MB0GA1UdDgQWBBTwGg2LF8+mzsvBBWxJKv6VXv3dMTAsBgNVHREEJTAjgglsb2Nh +bGhvc3SHBH8AAAGHEAAAAAAAAAAAAAAAAAAAAAIwDQYJKoZIhvcNAQELBQADggEB +ABP4ufLToJhcUselVxV9LPD5VGPEHGLdIFqsUEix7DMsiNpR76X6a8qNQbZpdbd6 ++qPKqoaMgC7znX7qZtCqRbIXTWbudZPxFkcHdiWx3SiALMQYabeUGetClX3sCndU +SUoV8f34i8dJxfNcqhLcsh4zpgxtmwsvs5OLMTBvm0Xo2zUFUjlmrt41pBrWEuq9 +nkObc/cr6Syiz3sy4pYVJO1/YwHaZgE/URqjVlari70DR3ES4YnIUnLQajKx2Q0/ +gXVgzjbe68KPOUGCz6GYiWq+d4tcWdHzLv1GsaqQ1MD9P21ArfrX4DpzgPDrO6MP +9Ppq5DQGa2q4mz3kipd5RIs= +-----END CERTIFICATE----- diff --git a/x-pack/plugin/watcher/src/test/resources/org/elasticsearch/xpack/security/keystore/testnode.pem b/x-pack/plugin/watcher/src/test/resources/org/elasticsearch/xpack/security/keystore/testnode.pem new file mode 100644 index 0000000000000..5de9c813fa815 --- /dev/null +++ b/x-pack/plugin/watcher/src/test/resources/org/elasticsearch/xpack/security/keystore/testnode.pem @@ -0,0 +1,27 @@ +-----BEGIN RSA PRIVATE KEY----- 
+MIIEpQIBAAKCAQEAzhpW7iwkm+Og+HP7U00nbmh0Hy9Z2Ldp5i8tJSlSQwTxCCvO +rse6jwJQN98Dk1ApaSzimZrlKOotFyPV1L3fnOzJbTp1Yq/VsYP4zJkjWtID0qUf +8Rg8bLhjKAG+ZlLuai5XZqnLkdmqvQeR61VhpXWFm0Om153tWmAiHL18ywY71gXN +EnkeFo9OW4fDqkz6h7NJziYvU6URSKErZDEixk5GIPv9K9hiIfi0KQM6xaHp0d2w +VCyFVC0OUdugz6untURzJVx4U3X1bQcv/o2BoUotWh/5h8o5eeiiv2OGZ1XlO+33 +1tweYI4wFjDwnAyHHRr/rk2ZIBiBYGaSzHnuhQIDAQABAoIBAQCWgv3A6VPC1DUV +u/1qFAobwwQqUfYXIbqgcwtQ/CAq+UzcXsGNOiavkUzrwF1oEz5qpHDHJCr9+iX7 +pBvgRNksTG+86NgYvbgc7vee0qbTCFPqXNQ6ySw3aWBgMwXMy/t4Z2dEffNAC+l4 +KjMR3UR2BKERhhItnBNd0J6Yxnh/+jg1Uf5fVMEh1/WOGLYCJnnn5oEHNKeon6XR +dobIn2QjD/PB8ZX7UubrSXmyezU0e9h3ARoI3oPMV6f8XQSa5K/KRrk8FUkVQ4vI +5+YAMjtY/K2I8xAEoPyprD/ILAVN+3E47J0K14EfKNTajSzQFVJhaaCvs7btxScA +Sx/zRsvxAoGBAP5KMH6vamdnBlZTPT2jtrsmzjyC0Z+9lbNokzRmVribps+DFdAW +YsGCbfApcbOYmpdLSeccFTA+uT5IbQ8hwBbWn/HKm+y8EDAPklf5tL0+w7pCZ4kU +50pKk6cjSTv/CDjO+hy4KIz2H/zXivXEV+4FtFKOZ3qUVg7m+1c/u5lDAoGBAM99 +L8/S9jwCkOjv+TKhmK+2/S5tVy1fdjlurTu9nI46CYa9MaOndZKY6EJ9ekBLIHUQ +h1QAsdPRHgkObuKDUHmpLr7qmoTRE7vtWC3sHK382j5CBEK00p+09wFHA03Bf40f +Jdjlzqe9F9jO6LH2RL/TECQDe7RJaTOQJrNlVtiXAoGBAOUUsNtv68t7ZJogIuuE +sPmo2+Jnd7EQeexGKVbrWvS0RHJtBRmRESaC+ceBjozczWe+y7UH946e8wLI/HbF +UOdCMpUAkbeTNIIXhR78NXbHNEx3xg4YZsTmql3HzBHgjueejnOQ8/cJQ4fkJauC +VjR3rxswbshfGagTLhpLsBVBAoGBAMBf5mN+ynRLQMXoMRlDgIhyVf2kvO5BkyCe +wBkirTOlFc4KPirpCXZ5NObo5d8UiKxhPcehuT6VpY5qBl8XtxaFyOSUKd24594W +qeox/0lFpaeRl9etRZdztoxFpgeCv1s9pN6b+2XESYboGBFgLs/XxiBN5nT6l4KK +RYeRDttTAoGAMoAreVa/i1l5YChhyskBTt+nePHGomsXC9tv7mZFLOrPQ+CLy5Xd +4PQqqYjRaJT/aP3N/q0NcauSKxYKmgnhInXpaasSVzGrM60DQLVw+SXfTiwXN0dH +V/bq2ybdSxEh2xQoyrfpiFDkCEecY0nYCL1Ff7UYY6g8P/Qj8DBiZGI= +-----END RSA PRIVATE KEY----- diff --git a/x-pack/plugin/watcher/src/test/resources/org/elasticsearch/xpack/security/keystore/truststore-testnode-only.crt b/x-pack/plugin/watcher/src/test/resources/org/elasticsearch/xpack/security/keystore/truststore-testnode-only.crt new file mode 100644 index 0000000000000..37e142afb23fe --- /dev/null +++ b/x-pack/plugin/watcher/src/test/resources/org/elasticsearch/xpack/security/keystore/truststore-testnode-only.crt @@ -0,0 +1,21 @@ +-----BEGIN CERTIFICATE----- +MIIDbTCCAlWgAwIBAgIJAJ+K5mGS3n/AMA0GCSqGSIb3DQEBCwUAMEgxDDAKBgNV +BAoTA29yZzEWMBQGA1UECxMNZWxhc3RpY3NlYXJjaDEgMB4GA1UEAxMXRWxhc3Rp +Y3NlYXJjaCBUZXN0IE5vZGUwHhcNMTQxMjE2MTcwNDQ1WhcNMTgxMjE1MTcwNDQ1 +WjBIMQwwCgYDVQQKEwNvcmcxFjAUBgNVBAsTDWVsYXN0aWNzZWFyY2gxIDAeBgNV +BAMTF0VsYXN0aWNzZWFyY2ggVGVzdCBOb2RlMIIBIjANBgkqhkiG9w0BAQEFAAOC +AQ8AMIIBCgKCAQEAzhpW7iwkm+Og+HP7U00nbmh0Hy9Z2Ldp5i8tJSlSQwTxCCvO +rse6jwJQN98Dk1ApaSzimZrlKOotFyPV1L3fnOzJbTp1Yq/VsYP4zJkjWtID0qUf +8Rg8bLhjKAG+ZlLuai5XZqnLkdmqvQeR61VhpXWFm0Om153tWmAiHL18ywY71gXN +EnkeFo9OW4fDqkz6h7NJziYvU6URSKErZDEixk5GIPv9K9hiIfi0KQM6xaHp0d2w +VCyFVC0OUdugz6untURzJVx4U3X1bQcv/o2BoUotWh/5h8o5eeiiv2OGZ1XlO+33 +1tweYI4wFjDwnAyHHRr/rk2ZIBiBYGaSzHnuhQIDAQABo1owWDAJBgNVHRMEAjAA +MB0GA1UdDgQWBBTwGg2LF8+mzsvBBWxJKv6VXv3dMTAsBgNVHREEJTAjgglsb2Nh +bGhvc3SHBH8AAAGHEAAAAAAAAAAAAAAAAAAAAAIwDQYJKoZIhvcNAQELBQADggEB +ABP4ufLToJhcUselVxV9LPD5VGPEHGLdIFqsUEix7DMsiNpR76X6a8qNQbZpdbd6 ++qPKqoaMgC7znX7qZtCqRbIXTWbudZPxFkcHdiWx3SiALMQYabeUGetClX3sCndU +SUoV8f34i8dJxfNcqhLcsh4zpgxtmwsvs5OLMTBvm0Xo2zUFUjlmrt41pBrWEuq9 +nkObc/cr6Syiz3sy4pYVJO1/YwHaZgE/URqjVlari70DR3ES4YnIUnLQajKx2Q0/ +gXVgzjbe68KPOUGCz6GYiWq+d4tcWdHzLv1GsaqQ1MD9P21ArfrX4DpzgPDrO6MP +9Ppq5DQGa2q4mz3kipd5RIs= +-----END CERTIFICATE----- diff --git a/x-pack/plugin/watcher/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.crt 
b/x-pack/plugin/watcher/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.crt new file mode 100644 index 0000000000000..ced9d81d96fa6 --- /dev/null +++ b/x-pack/plugin/watcher/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.crt @@ -0,0 +1,20 @@ +-----BEGIN CERTIFICATE----- +MIIDTTCCAjWgAwIBAgIJALL7dwEsWamvMA0GCSqGSIb3DQEBCwUAME8xDDAKBgNV +BAoTA29yZzEWMBQGA1UECxMNZWxhc3RpY3NlYXJjaDEnMCUGA1UEAxMeRWxhc3Rp +Y3NlYXJjaCBUZXN0IE5vZGUgTm8gU0FOMB4XDTE0MTIxNjE5NTcyNloXDTE4MTIx +NTE5NTcyNlowTzEMMAoGA1UEChMDb3JnMRYwFAYDVQQLEw1lbGFzdGljc2VhcmNo +MScwJQYDVQQDEx5FbGFzdGljc2VhcmNoIFRlc3QgTm9kZSBObyBTQU4wggEiMA0G +CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCkIGS7A/V6TesR34ajMyNYL3tB1OjW +Raq4KtF8FfW1H6nHGrWa/qXjZWPirczy1k2n6ZL7YOCcv/YeY8xAqC9mGQxvEuqo +EaqXq2cjRdAs/7zqzRkdPPi3Jw/p/RHrDfOAzOsMnBGc0G2Hrsj//aP44vp85pek +fM3t2kNAYZWYCzXUqWAIUoxBDK4DcQdsN8H4KTMIwQEEiRtcKnL/b8QGKsyGLfLq +36ZABHZ4kY2SmcP3bWxZtbFN4hamdwoAtYe+lS0/ee8/fOTLyZ3Ey+X6EEmGO1lk +WR4XLli15k1L2HBzWGG7zwxVEC5r2h3Sx1njYh/Jq3khIdSvDbiMmM+VAgMBAAGj +LDAqMAkGA1UdEwQCMAAwHQYDVR0OBBYEFGm8wrYF9mJweJ1vloDw19e0PUuIMA0G +CSqGSIb3DQEBCwUAA4IBAQBbEZ73weDphNIcmvN25v6NIfjBebqgm0/2grDFwmZe +Z1DibzRoVfoQ7WeUqbPS7SHUQ+KzIN1GdfHXhW9r6mmLbtzPv90Q/8zBcNv5HNZZ +YK+T2r9hoAWEY6nB1fiOJ4udkFMYfAi6LiSxave4IPWp/WIqd0IWtPtkPl+MmG41 +TfRom8TnO+o+VsjgDkY5Q1JDsNQKy1BrtxzIZyz7d1zYKTQ+HXZ4yeYJoVoc3k4y +6w9eX2zAUZ6Z3d4an6CLr6Hew9Dj2VX1vqCj1a5/VvHZVyVxyh4hg8sHYm7tZOJX +wN3B5GcKwbbFjaMVBLaMlP62OdGg7tCh61evWm+l06S0 +-----END CERTIFICATE----- diff --git a/x-pack/plugin/watcher/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.pem b/x-pack/plugin/watcher/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.pem new file mode 100644 index 0000000000000..b0f7a585d7f9b --- /dev/null +++ b/x-pack/plugin/watcher/src/test/resources/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-no-subjaltname.pem @@ -0,0 +1,30 @@ +-----BEGIN RSA PRIVATE KEY----- +Proc-Type: 4,ENCRYPTED +DEK-Info: AES-192-CBC,2F36F79E75ACA7803AF1BC1B70C2360C + +d4/f7dnpPW9DfhGXuGDx7r56BjQ64iNcsQdrC1CTZB363iAvBBeHaoJFaWpQOUmT +WCBdM6YmRuMi6z4sMtm8Vre3i+Mu2Jti0LTDXqH8Y6FMpptxAYoFb9hkM0OvUGDJ +ASLGTa1XKyCzZWOe2IGLtWIS8zkGv/fSXHqDJV8ddq8uLHGZXBxAye5oJUD0654T +DasIllbiTlBEYlEJ10OyIAspS47n4LWVE5ggyGdiyIgh5s3lAMPO3+yenDU3/dzj +YF2zHKnfe0Nx2lUQVJRYknQvFCx0WTGH9gNl40FovoOMHyRjEuOHD9PFNvL/TCh6 +9h0jJPWu/SsIiYaIqR0pDgqWdHXHA5Ea3s2+w0YHbv7DqkGXWZdceLUdZtcXCBJk +P8QL9IWa7VE50SS6wV5uXX9tV5lHzMFsNGkwtGcR1UMU5cXYqckFXgoBqDN0fyWe +V5iEknSJ4Na+MHv75rvRat0kv6upu9i5NSpYTc5jLHdWZWsYMZ/ZMiMoLBP+KAPT +DQ3eyph/84BU3DePaQF3Rsp0ZvPxqQ361Zwc4zC5CKbHur1QX8WAY5XlBMfuBpkf +CKr5wgwF+ZpS7zsfUpMPPe9Y1E8TWnhx/DtCVLEslBpr2u/rMaxPp6ev9/Wry7N+ +UFBOwodipBhlfSvLqjc511L+bXRzqXiINuW0eSKUQv0J/G0Ur894kJJ6feDYlskj +JiZjOgOyyKhB+K9AXmkfRdvWUJeweL8pgDuYSyQdQ0zoUCZALEoYK2cBWzti/wep +QPFD5oz8076aXNHKMHLsRmSFuEZb6IN0PtUNVf958EbrtABNIuoufKlKtJsEnUyK +VHIEUxExEgyECiozKnxvhr7RQ9nTQXhNdgtec6jJblYnla/+OWAfHdxtHhBjp5AX +WvLyUhmgrmLNdKd1KSzcXynBHgh0hi0HJXYx31FilwbxsdhwN1LwN/Do4T4qGkUr +InrQC3ZHRuh0yAOPrwRFEWbW5m/PzVP/xYVgFtVWP7w38ftZbaBy5xPmtswn+PH+ +cIMt1Y9PaAlhLNpW/Vfn503T9M+05vu73vbU1xgu/B1kePOqE/WO0cOZl0KdaMmT +wAQBKuI7qTACH+/8g3Uir1YSChLthH+1Gs6h686EP6ZydwXq9GYXXkNmJNJJsnmU +RDjoT0F4XBKvcQdX3EeQYs3Af2yZWFDC59c1Ews2dqMK7vy2tYITbx2yn30DBDAl +xvjp2izzmAgQJEG/RqCYsUHCCEv7wz3tpsSOkFem9IHZpR2h8Rqy88GH9qYOkgwo 
++fKSmIgC4RLQXsHuh7RRuyNc2FaWDgRgSxs5V4f9xOSU/ZbUftYWnwEyCwbu3RJp +CIXQFZhzU2t5l1Eh+x40rwpEJDXBEwmOIUO3x1oOqGZPPEQ674uMal5TRjvdOVGD +h665Fpo5Xu9EQwQZHYddeRl/7yw8F6LCxBLgHlngKRHHGDUHlTscLfYRqNh+x3jT +3S8dfaGzlnwdQEx32gyLAV0/nsFnzh1AknFMT8jesIYF7PLiAi67PNyNwRCc7TFp +jpKvzkDRVP72bivTmCyP5aKR0Q2oIrAw51MMinT6R2VaoR7COjoVbqYsRLwkxu+p +-----END RSA PRIVATE KEY----- diff --git a/x-pack/qa/openldap-tests/build.gradle b/x-pack/qa/openldap-tests/build.gradle index 24976ab6113ab..5c0399a1d9fa2 100644 --- a/x-pack/qa/openldap-tests/build.gradle +++ b/x-pack/qa/openldap-tests/build.gradle @@ -17,6 +17,7 @@ task openLdapFixture { String outputDir = "${project.buildDir}/generated-resources/${project.name}" task copyIdpTrust(type: Copy) { from idpFixtureProject.file('src/main/resources/certs/idptrust.jks'); + from idpFixtureProject.file('src/main/resources/certs/ca.crt'); into outputDir } if (project.rootProject.vagrantSupported) { diff --git a/x-pack/qa/openldap-tests/src/test/java/org/elasticsearch/xpack/security/authc/ldap/OpenLdapUserSearchSessionFactoryTests.java b/x-pack/qa/openldap-tests/src/test/java/org/elasticsearch/xpack/security/authc/ldap/OpenLdapUserSearchSessionFactoryTests.java index 1c9d93873a493..c7a92dccab82c 100644 --- a/x-pack/qa/openldap-tests/src/test/java/org/elasticsearch/xpack/security/authc/ldap/OpenLdapUserSearchSessionFactoryTests.java +++ b/x-pack/qa/openldap-tests/src/test/java/org/elasticsearch/xpack/security/authc/ldap/OpenLdapUserSearchSessionFactoryTests.java @@ -34,7 +34,6 @@ import java.util.Locale; import java.util.Objects; -import static org.elasticsearch.test.OpenLdapTests.LDAPTRUST_PATH; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasItem; @@ -45,22 +44,20 @@ public class OpenLdapUserSearchSessionFactoryTests extends ESTestCase { private Settings globalSettings; private ThreadPool threadPool; - private MockSecureSettings globalSecureSettings; + private static final String LDAPCACERT_PATH = "/ca.crt"; @Before public void init() throws Exception { - Path keystore = getDataPath(LDAPTRUST_PATH); + Path caPath = getDataPath(LDAPCACERT_PATH); /* * Prior to each test we reinitialize the socket factory with a new SSLService so that we get a new SSLContext. * If we re-use a SSLContext, previously connected sessions can get re-established which breaks hostname * verification tests since a re-established connection does not perform hostname verification. 
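* (A fresh SSLService builds a fresh SSLContext with an empty session cache, so every test connection has to complete a full handshake.)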
*/ - globalSecureSettings = newSecureSettings("xpack.ssl.truststore.secure_password", "changeit"); globalSettings = Settings.builder() - .put("path.home", createTempDir()) - .put("xpack.ssl.truststore.path", keystore) - .setSecureSettings(globalSecureSettings) - .build(); + .put("path.home", createTempDir()) + .put("xpack.ssl.certificate_authorities", caPath) + .build(); threadPool = new TestThreadPool("LdapUserSearchSessionFactoryTests"); } @@ -94,7 +91,6 @@ public void testUserSearchWithBindUserOpenLDAP() throws Exception { .put(globalSettings, false); builder.put(Settings.builder().put(config.settings(), false).normalizePrefix("xpack.security.authc.realms.oldap-test.").build()); final MockSecureSettings secureSettings = new MockSecureSettings(); - secureSettings.merge(globalSecureSettings); if (useSecureBindPassword) { secureSettings.setString("xpack.security.authc.realms.oldap-test.secure_bind_password", OpenLdapTests.PASSWORD); } diff --git a/x-pack/qa/openldap-tests/src/test/java/org/elasticsearch/xpack/security/authc/ldap/SearchGroupsResolverTests.java b/x-pack/qa/openldap-tests/src/test/java/org/elasticsearch/xpack/security/authc/ldap/SearchGroupsResolverTests.java index f277f5d84b371..b55431dee1b00 100644 --- a/x-pack/qa/openldap-tests/src/test/java/org/elasticsearch/xpack/security/authc/ldap/SearchGroupsResolverTests.java +++ b/x-pack/qa/openldap-tests/src/test/java/org/elasticsearch/xpack/security/authc/ldap/SearchGroupsResolverTests.java @@ -177,4 +177,4 @@ protected String bindPassword() { protected String trustPath() { return "/idptrust.jks"; } -} \ No newline at end of file +} diff --git a/x-pack/qa/security-tools-tests/src/test/java/org/elasticsearch/xpack/core/ssl/CertificateGenerateToolTests.java b/x-pack/qa/security-tools-tests/src/test/java/org/elasticsearch/xpack/core/ssl/CertificateGenerateToolTests.java index e11b62642eb57..dde0b7645df13 100644 --- a/x-pack/qa/security-tools-tests/src/test/java/org/elasticsearch/xpack/core/ssl/CertificateGenerateToolTests.java +++ b/x-pack/qa/security-tools-tests/src/test/java/org/elasticsearch/xpack/core/ssl/CertificateGenerateToolTests.java @@ -38,6 +38,7 @@ import org.elasticsearch.xpack.core.ssl.CertificateGenerateTool.Name; import org.hamcrest.Matchers; import org.junit.After; +import org.junit.BeforeClass; import javax.security.auth.x500.X500Principal; @@ -96,6 +97,11 @@ private Path initTempDir() throws Exception { return tempDir; } + @BeforeClass + public static void checkFipsJvm() { + assumeFalse("Can't run in a FIPS JVM, depends on Non FIPS BouncyCastle", inFipsJvm()); + } + @After public void tearDown() throws Exception { IOUtils.close(jimfs); diff --git a/x-pack/qa/security-tools-tests/src/test/java/org/elasticsearch/xpack/core/ssl/CertificateToolTests.java b/x-pack/qa/security-tools-tests/src/test/java/org/elasticsearch/xpack/core/ssl/CertificateToolTests.java index 795dd074a80d7..706d5dbab5f5a 100644 --- a/x-pack/qa/security-tools-tests/src/test/java/org/elasticsearch/xpack/core/ssl/CertificateToolTests.java +++ b/x-pack/qa/security-tools-tests/src/test/java/org/elasticsearch/xpack/core/ssl/CertificateToolTests.java @@ -47,6 +47,7 @@ import org.elasticsearch.xpack.core.ssl.CertificateTool.Name; import org.hamcrest.Matchers; import org.junit.After; +import org.junit.BeforeClass; import javax.net.ssl.KeyManagerFactory; import javax.net.ssl.TrustManagerFactory; @@ -108,6 +109,11 @@ private Path initTempDir() throws Exception { return tempDir; } + @BeforeClass + public static void checkFipsJvm() { + assumeFalse("Can't run
in a FIPS JVM, depends on Non FIPS BouncyCastle", inFipsJvm()); + } + @After public void tearDown() throws Exception { IOUtils.close(jimfs); diff --git a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ADLdapUserSearchSessionFactoryTests.java b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ADLdapUserSearchSessionFactoryTests.java index 98594917129f2..32452a609e245 100644 --- a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ADLdapUserSearchSessionFactoryTests.java +++ b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ADLdapUserSearchSessionFactoryTests.java @@ -40,7 +40,7 @@ public class ADLdapUserSearchSessionFactoryTests extends AbstractActiveDirectory @Before public void init() throws Exception { - Path keystore = getDataPath("support/ADtrust.jks"); + Path certPath = getDataPath("support/smb_ca.crt"); Environment env = TestEnvironment.newEnvironment(Settings.builder().put("path.home", createTempDir()).build()); /* * Prior to each test we reinitialize the socket factory with a new SSLService so that we get a new SSLContext. @@ -49,10 +49,9 @@ public void init() throws Exception { */ globalSettings = Settings.builder() - .put("path.home", createTempDir()) - .put("xpack.ssl.truststore.path", keystore) - .setSecureSettings(newSecureSettings("xpack.ssl.truststore.secure_password", "changeit")) - .build(); + .put("path.home", createTempDir()) + .put("xpack.ssl.certificate_authorities", certPath) + .build(); sslService = new SSLService(globalSettings, env); threadPool = new TestThreadPool("ADLdapUserSearchSessionFactoryTests"); } diff --git a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractActiveDirectoryTestCase.java b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractActiveDirectoryTestCase.java index 7ef1bd674a32b..829e87c849df6 100644 --- a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractActiveDirectoryTestCase.java +++ b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractActiveDirectoryTestCase.java @@ -23,9 +23,16 @@ import org.elasticsearch.xpack.core.ssl.VerificationMode; import org.junit.Before; +import java.io.IOException; +import java.nio.file.FileVisitResult; +import java.nio.file.Files; import java.nio.file.Path; +import java.nio.file.SimpleFileVisitor; +import java.nio.file.attribute.BasicFileAttributes; import java.security.AccessController; import java.security.PrivilegedAction; +import java.util.ArrayList; +import java.util.List; public abstract class AbstractActiveDirectoryTestCase extends ESTestCase { @@ -48,11 +55,25 @@ public abstract class AbstractActiveDirectoryTestCase extends ESTestCase { protected SSLService sslService; protected Settings globalSettings; protected boolean useGlobalSSL; + protected List certificatePaths; @Before public void initializeSslSocketFactory() throws Exception { useGlobalSSL = randomBoolean(); - Path truststore = getDataPath("../ldap/support/ADtrust.jks"); + // We use certificates in PEM format and `ssl.certificate_authorities` instead of ssl.truststore + // so that these tests can also run in a FIPS JVM where JKS keystores can't be used.
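+ // Collect every PEM certificate (*.crt) under ../ldap/support; the collected paths are fed to ssl.certificate_authorities below.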
+ certificatePaths = new ArrayList<>(); + Files.walkFileTree(getDataPath + ("../ldap/support"), new SimpleFileVisitor() { + @Override + public FileVisitResult visitFile(Path file, BasicFileAttributes attrs) throws IOException { + String fileName = file.getFileName().toString(); + if (fileName.endsWith(".crt")) { + certificatePaths.add(getDataPath("../ldap/support/" + fileName).toString()); + } + return FileVisitResult.CONTINUE; + } + }); /* * Prior to each test we reinitialize the socket factory with a new SSLService so that we get a new SSLContext. * If we re-use a SSLContext, previously connected sessions can get re-established which breaks hostname @@ -60,20 +81,16 @@ public void initializeSslSocketFactory() throws Exception { */ Settings.Builder builder = Settings.builder().put("path.home", createTempDir()); if (useGlobalSSL) { - builder.put("xpack.ssl.truststore.path", truststore) - .put("xpack.ssl.truststore.password", "changeit"); + builder.putList("xpack.ssl.certificate_authorities", certificatePaths); // fake realm to load config with certificate verification mode - builder.put("xpack.security.authc.realms.bar.ssl.truststore.path", truststore); - builder.put("xpack.security.authc.realms.bar.ssl.truststore.password", "changeit"); + builder.putList("xpack.security.authc.realms.bar.ssl.certificate_authorities", certificatePaths); builder.put("xpack.security.authc.realms.bar.ssl.verification_mode", VerificationMode.CERTIFICATE); } else { // fake realms so ssl will get loaded - builder.put("xpack.security.authc.realms.foo.ssl.truststore.path", truststore); - builder.put("xpack.security.authc.realms.foo.ssl.truststore.password", "changeit"); + builder.putList("xpack.security.authc.realms.foo.ssl.certificate_authorities", certificatePaths); builder.put("xpack.security.authc.realms.foo.ssl.verification_mode", VerificationMode.FULL); - builder.put("xpack.security.authc.realms.bar.ssl.truststore.path", truststore); - builder.put("xpack.security.authc.realms.bar.ssl.truststore.password", "changeit"); + builder.putList("xpack.security.authc.realms.bar.ssl.certificate_authorities", certificatePaths); builder.put("xpack.security.authc.realms.bar.ssl.verification_mode", VerificationMode.CERTIFICATE); } globalSettings = builder.build(); @@ -99,8 +116,7 @@ Settings buildAdSettings(String ldapUrl, String adDomainName, String userSearchD builder.put(SessionFactorySettings.HOSTNAME_VERIFICATION_SETTING, hostnameVerification); } if (useGlobalSSL == false) { - builder.put("ssl.truststore.path", getDataPath("../ldap/support/ADtrust.jks")) - .put("ssl.truststore.password", "changeit"); + builder.putList("ssl.certificate_authorities", certificatePaths); } return builder.build(); } diff --git a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractAdLdapRealmTestCase.java b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractAdLdapRealmTestCase.java index 11da59e44d6fe..d35e7ab0b0c08 100644 --- a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractAdLdapRealmTestCase.java +++ b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/AbstractAdLdapRealmTestCase.java @@ -21,7 +21,6 @@ import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.SecurityIntegTestCase; -import org.elasticsearch.test.SecuritySettingsSource; import 
org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingRequestBuilder; import org.elasticsearch.xpack.core.security.action.rolemapping.PutRoleMappingResponse; import org.elasticsearch.xpack.core.security.authc.ldap.ActiveDirectorySessionFactorySettings; @@ -100,7 +99,8 @@ public abstract class AbstractAdLdapRealmTestCase extends SecurityIntegTestCase ) }; - protected static final String TESTNODE_KEYSTORE = "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.jks"; + protected static final String TESTNODE_KEY = "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.pem"; + protected static final String TESTNODE_CERT = "/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt"; protected static RealmConfig realmConfig; protected static List roleMappings; protected static boolean useGlobalSSL; @@ -122,7 +122,8 @@ public static void cleanupRealm() { @Override protected Settings nodeSettings(int nodeOrdinal) { final RealmConfig realm = AbstractAdLdapRealmTestCase.realmConfig; - Path store = getDataPath(TESTNODE_KEYSTORE); + final Path nodeCert = getDataPath(TESTNODE_CERT); + final Path nodeKey = getDataPath(TESTNODE_KEY); Settings.Builder builder = Settings.builder(); // don't use filter since it returns a prefixed secure setting instead of mock! Settings settingsToAdd = super.nodeSettings(nodeOrdinal); @@ -156,14 +157,15 @@ protected Settings nodeSettings(int nodeOrdinal) { } } } - addSslSettingsForStore(builder, store, "testnode"); - builder.put(buildRealmSettings(realm, roleMappings, store)); + addSslSettingsForKeyPair(builder, nodeKey, "testnode", nodeCert, getNodeTrustedCertificates()); + builder.put(buildRealmSettings(realm, roleMappings, getNodeTrustedCertificates())); return builder.build(); } - protected Settings buildRealmSettings(RealmConfig realm, List roleMappingEntries, Path store) { + protected Settings buildRealmSettings(RealmConfig realm, List roleMappingEntries, List + certificateAuthorities) { Settings.Builder builder = Settings.builder(); - builder.put(realm.buildSettings(store, "testnode")); + builder.put(realm.buildSettings(certificateAuthorities)); configureFileRoleMappings(builder, roleMappingEntries); return builder.build(); } @@ -216,10 +218,11 @@ private List getRoleMappingContent(Function co @Override protected Settings transportClientSettings() { if (useGlobalSSL) { - Path store = getDataPath(TESTNODE_KEYSTORE); + Path key = getDataPath(TESTNODE_KEY); + Path cert = getDataPath(TESTNODE_CERT); Settings.Builder builder = Settings.builder() .put(super.transportClientSettings().filter((s) -> s.startsWith("xpack.ssl.") == false)); - addSslSettingsForStore(builder, store, "testnode"); + addSslSettingsForKeyPair(builder, key, "testnode", cert, getNodeTrustedCertificates()); return builder.build(); } else { return super.transportClientSettings(); @@ -304,14 +307,35 @@ protected static String userHeader(String username, String password) { return UsernamePasswordToken.basicAuthHeaderValue(username, new SecureString(password.toCharArray())); } - private void addSslSettingsForStore(Settings.Builder builder, Path store, String password) { - SecuritySettingsSource.addSecureSettings(builder, secureSettings -> { - secureSettings.setString("xpack.ssl.keystore.secure_password", password); - secureSettings.setString("xpack.ssl.truststore.secure_password", password); - }); - builder.put("xpack.ssl.keystore.path", store) - .put("xpack.ssl.verification_mode", "certificate") - .put("xpack.ssl.truststore.path", store); + 
private void addSslSettingsForKeyPair(Settings.Builder builder, Path key, String keyPassphrase, Path cert, + List certificateAuthorities) { + builder.put("xpack.ssl.key", key) + .put("xpack.ssl.key_passphrase", keyPassphrase) + .put("xpack.ssl.verification_mode", "certificate") + .put("xpack.ssl.certificate", cert) + .putList("xpack.ssl.certificate_authorities", certificateAuthorities); + } + + /** + * Collects all the certificates that are normally trusted by the node (contained in testnode.jks) + * + * @return the paths of the trusted certificates, as strings + */ + List getNodeTrustedCertificates() { + Path testnodeCert = + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode.crt"); + Path testnodeClientProfileCert = + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testnode-client-profile.crt"); + Path activedirCert = + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/active-directory-ca.crt"); + Path testclientCert = + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/testclient.crt"); + Path openldapCert = + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/openldap.crt"); + Path samba4Cert = + getDataPath("/org/elasticsearch/xpack/security/transport/ssl/certs/simple/samba4.crt"); + return Arrays.asList(testnodeCert.toString(), testnodeClientProfileCert.toString(), activedirCert.toString(), testclientCert + .toString(), openldapCert.toString(), samba4Cert.toString()); } static class RoleMappingEntry { @@ -429,19 +453,19 @@ enum RealmConfig { this.mapGroupsAsRoles = randomBoolean(); } - public Settings buildSettings(Path store, String password) { - return buildSettings(store, password, 1); + public Settings buildSettings(List certificateAuthorities) { + return buildSettings(certificateAuthorities, 1); } - protected Settings buildSettings(Path store, String password, int order) { + + protected Settings buildSettings(List certificateAuthorities, int order) { Settings.Builder builder = Settings.builder() - .put(XPACK_SECURITY_AUTHC_REALMS_EXTERNAL + ".order", order) - .put(XPACK_SECURITY_AUTHC_REALMS_EXTERNAL + ".hostname_verification", false) - .put(XPACK_SECURITY_AUTHC_REALMS_EXTERNAL + ".unmapped_groups_as_roles", mapGroupsAsRoles) - .put(this.settings); + .put(XPACK_SECURITY_AUTHC_REALMS_EXTERNAL + ".order", order) + .put(XPACK_SECURITY_AUTHC_REALMS_EXTERNAL + ".hostname_verification", false) + .put(XPACK_SECURITY_AUTHC_REALMS_EXTERNAL + ".unmapped_groups_as_roles", mapGroupsAsRoles) + .put(this.settings); if (useGlobalSSL == false) { - builder.put(XPACK_SECURITY_AUTHC_REALMS_EXTERNAL + ".ssl.truststore.path", store) - .put(XPACK_SECURITY_AUTHC_REALMS_EXTERNAL + ".ssl.truststore.password", password); + builder.putList(XPACK_SECURITY_AUTHC_REALMS_EXTERNAL + ".ssl.certificate_authorities", certificateAuthorities); } return builder.build(); diff --git a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectoryGroupsResolverTests.java b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectoryGroupsResolverTests.java index fb7ea6c5dd754..330ec6b9a758c 100644 --- a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectoryGroupsResolverTests.java +++ b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectoryGroupsResolverTests.java @@ -132,4 +132,4 @@ protected String bindPassword() {
protected String trustPath() { return "/org/elasticsearch/xpack/security/authc/ldap/support/ADtrust.jks"; } -} \ No newline at end of file +} diff --git a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactoryTests.java b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactoryTests.java index 8f85c250f481b..614d6659f2d80 100644 --- a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactoryTests.java +++ b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/ActiveDirectorySessionFactoryTests.java @@ -280,10 +280,9 @@ public void testStandardLdapConnection() throws Exception { .build(); if (useGlobalSSL == false) { settings = Settings.builder() - .put(settings) - .put("ssl.truststore.path", getDataPath("../ldap/support/ADtrust.jks")) - .put("ssl.truststore.password", "changeit") - .build(); + .put(settings) + .putList("ssl.certificate_authorities", certificatePaths) + .build(); } RealmConfig config = new RealmConfig("ad-as-ldap-test", settings, globalSettings, TestEnvironment.newEnvironment(globalSettings), new ThreadContext(globalSettings)); @@ -317,10 +316,9 @@ public void testHandlingLdapReferralErrors() throws Exception { ignoreReferralErrors); if (useGlobalSSL == false) { settings = Settings.builder() - .put(settings) - .put("ssl.truststore.path", getDataPath("../ldap/support/ADtrust.jks")) - .put("ssl.truststore.password", "changeit") - .build(); + .put(settings) + .putList("ssl.certificate_authorities", certificatePaths) + .build(); } RealmConfig config = new RealmConfig("ad-as-ldap-test", settings, globalSettings, TestEnvironment.newEnvironment(globalSettings), new ThreadContext(globalSettings)); @@ -346,10 +344,9 @@ public void testStandardLdapWithAttributeGroups() throws Exception { Settings settings = LdapTestCase.buildLdapSettings(new String[] { AD_LDAP_URL }, userTemplate, false); if (useGlobalSSL == false) { settings = Settings.builder() - .put(settings) - .put("ssl.truststore.path", getDataPath("../ldap/support/ADtrust.jks")) - .put("ssl.truststore.password", "changeit") - .build(); + .put(settings) + .putList("ssl.certificate_authorities", certificatePaths) + .build(); } RealmConfig config = new RealmConfig("ad-as-ldap-test", settings, globalSettings, TestEnvironment.newEnvironment(globalSettings), new ThreadContext(globalSettings)); @@ -408,8 +405,7 @@ private Settings buildAdSettings(String ldapUrl, String adDomainName, boolean ho } if (useGlobalSSL == false) { - builder.put("ssl.truststore.path", getDataPath("../ldap/support/ADtrust.jks")) - .put("ssl.truststore.password", "changeit"); + builder.putList("ssl.certificate_authorities", certificatePaths); } if (useBindUser) { diff --git a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/MultipleAdRealmIT.java b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/MultipleAdRealmIT.java index c4e07a846fd56..3d1cdb202d19d 100644 --- a/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/MultipleAdRealmIT.java +++ b/x-pack/qa/third-party/active-directory/src/test/java/org/elasticsearch/xpack/security/authc/ldap/MultipleAdRealmIT.java @@ -10,7 +10,6 @@ import org.junit.BeforeClass; import java.io.IOException; -import java.nio.file.Path; import 
java.util.Arrays; import java.util.List; import java.util.stream.Collectors; @@ -46,9 +45,9 @@ protected Settings nodeSettings(int nodeOrdinal) { Settings.Builder builder = Settings.builder(); builder.put(super.nodeSettings(nodeOrdinal)); - Path store = getDataPath(TESTNODE_KEYSTORE); final List secondaryRoleMappings = secondaryRealmConfig.selectRoleMappings(() -> true); - final Settings secondarySettings = super.buildRealmSettings(secondaryRealmConfig, secondaryRoleMappings, store); + final Settings secondarySettings = super.buildRealmSettings(secondaryRealmConfig, secondaryRoleMappings, + getNodeTrustedCertificates()); secondarySettings.keySet().forEach(name -> { String newName = name.replace(XPACK_SECURITY_AUTHC_REALMS_EXTERNAL, XPACK_SECURITY_AUTHC_REALMS_EXTERNAL + "2"); builder.copy(newName, name, secondarySettings); From 24547e820c44be2b13563e4c66c95bdfa1f2b247 Mon Sep 17 00:00:00 2001 From: Luca Cavanna Date: Tue, 17 Jul 2018 11:26:28 +0200 Subject: [PATCH 069/107] Check that client methods match API defined in the REST spec (#31825) We have been encountering name mismatches between the API defined in our REST spec and the method names that have been added to the high-level REST client. We should check this automatically to prevent further mismatches, and correct all the current ones. This commit adds a test for this and corrects the issues found by it. --- .../gradle/test/RestIntegTestTask.groovy | 3 +- client/rest-high-level/build.gradle | 10 +- .../elasticsearch/client/IndicesClient.java | 35 +++- .../elasticsearch/client/IngestClient.java | 8 +- .../client/RestHighLevelClient.java | 102 ++++++++++- .../elasticsearch/client/SnapshotClient.java | 6 +- .../elasticsearch/client/BulkProcessorIT.java | 10 +- .../client/BulkProcessorRetryIT.java | 2 +- .../java/org/elasticsearch/client/CrudIT.java | 4 +- .../CustomRestHighLevelClientTests.java | 2 +- .../elasticsearch/client/IndicesClientIT.java | 6 +- .../elasticsearch/client/IngestClientIT.java | 2 +- .../client/RestHighLevelClientTests.java | 161 +++++++++++++++++- .../org/elasticsearch/client/SearchIT.java | 24 +-- .../org/elasticsearch/client/SnapshotIT.java | 14 +- .../documentation/CRUDDocumentationIT.java | 14 +- .../IndicesClientDocumentationIT.java | 10 +- .../IngestClientDocumentationIT.java | 4 +- .../documentation/SearchDocumentationIT.java | 20 +-- .../SnapshotClientDocumentationIT.java | 4 +- ...rossClusterSearchUnavailableClusterIT.java | 4 +- 21 files changed, 359 insertions(+), 86 deletions(-) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy index f2e6dc8e56186..d2101c48aabdc 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/RestIntegTestTask.groovy @@ -24,7 +24,6 @@ import org.elasticsearch.gradle.VersionProperties import org.gradle.api.DefaultTask import org.gradle.api.Project import org.gradle.api.Task -import org.gradle.api.Transformer import org.gradle.api.execution.TaskExecutionAdapter import org.gradle.api.internal.tasks.options.Option import org.gradle.api.provider.Property @@ -217,7 +216,7 @@ public class RestIntegTestTask extends DefaultTask { * @param project The project to add the copy task to * @param includePackagedTests true if the packaged tests should be copied, false otherwise */ - private static Task createCopyRestSpecTask(Project project, Provider includePackagedTests) { + static
Task createCopyRestSpecTask(Project project, Provider includePackagedTests) { project.configurations { restSpec } diff --git a/client/rest-high-level/build.gradle b/client/rest-high-level/build.gradle index 451452759f507..2fed806e98c57 100644 --- a/client/rest-high-level/build.gradle +++ b/client/rest-high-level/build.gradle @@ -18,8 +18,8 @@ */ import org.elasticsearch.gradle.precommit.PrecommitTasks -import org.gradle.api.XmlProvider -import org.gradle.api.publish.maven.MavenPublication +import org.elasticsearch.gradle.test.RestIntegTestTask +import org.gradle.api.internal.provider.Providers buildscript { repositories { @@ -41,6 +41,10 @@ apply plugin: 'com.github.johnrengelman.shadow' group = 'org.elasticsearch.client' archivesBaseName = 'elasticsearch-rest-high-level-client' +//we need to copy the yaml spec so we can check naming (see RestHighLevelClientTests#testApiNamingConventions) +Task copyRestSpec = RestIntegTestTask.createCopyRestSpecTask(project, Providers.FALSE) +test.dependsOn(copyRestSpec) + publishing { publications { nebula(MavenPublication) { @@ -102,6 +106,8 @@ dependencies { testCompile "com.carrotsearch.randomizedtesting:randomizedtesting-runner:${versions.randomizedrunner}" testCompile "junit:junit:${versions.junit}" testCompile "org.hamcrest:hamcrest-all:${versions.hamcrest}" + //this is needed to make RestHighLevelClientTests#testApiNamingConventions work from IDEs + testCompile "org.elasticsearch:rest-api-spec:${version}" } dependencyLicenses { diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java index 2944b49bf18b0..250bbd520dad7 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/IndicesClient.java @@ -174,7 +174,7 @@ public void putMappingAsync(PutMappingRequest putMappingRequest, RequestOptions * @return the response * @throws IOException in case there is a problem sending the request or parsing back the response */ - public GetMappingsResponse getMappings(GetMappingsRequest getMappingsRequest, RequestOptions options) throws IOException { + public GetMappingsResponse getMapping(GetMappingsRequest getMappingsRequest, RequestOptions options) throws IOException { return restHighLevelClient.performRequestAndParseEntity(getMappingsRequest, RequestConverters::getMappings, options, GetMappingsResponse::fromXContent, emptySet()); } @@ -187,8 +187,8 @@ public GetMappingsResponse getMappings(GetMappingsRequest getMappingsRequest, Re * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @param listener the listener to be notified upon request completion */ - public void getMappingsAsync(GetMappingsRequest getMappingsRequest, RequestOptions options, - ActionListener listener) { + public void getMappingAsync(GetMappingsRequest getMappingsRequest, RequestOptions options, + ActionListener listener) { restHighLevelClient.performRequestAsyncAndParseEntity(getMappingsRequest, RequestConverters::getMappings, options, GetMappingsResponse::fromXContent, listener, emptySet()); } @@ -474,8 +474,23 @@ public void getAsync(GetIndexRequest getIndexRequest, RequestOptions options, * @param options the request options (e.g.
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @return the response * @throws IOException in case there is a problem sending the request or parsing back the response + * @deprecated use {@link #forcemerge(ForceMergeRequest, RequestOptions)} instead */ + @Deprecated public ForceMergeResponse forceMerge(ForceMergeRequest forceMergeRequest, RequestOptions options) throws IOException { + return forcemerge(forceMergeRequest, options); + } + + /** + * Force merge one or more indices using the Force Merge API. + * See + * Force Merge API on elastic.co + * @param forceMergeRequest the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return the response + * @throws IOException in case there is a problem sending the request or parsing back the response + */ + public ForceMergeResponse forcemerge(ForceMergeRequest forceMergeRequest, RequestOptions options) throws IOException { return restHighLevelClient.performRequestAndParseEntity(forceMergeRequest, RequestConverters::forceMerge, options, ForceMergeResponse::fromXContent, emptySet()); } @@ -487,8 +502,22 @@ public ForceMergeResponse forceMerge(ForceMergeRequest forceMergeRequest, Reques * @param forceMergeRequest the request * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @param listener the listener to be notified upon request completion + * @deprecated use {@link #forcemergeAsync(ForceMergeRequest, RequestOptions, ActionListener)} instead */ + @Deprecated public void forceMergeAsync(ForceMergeRequest forceMergeRequest, RequestOptions options, ActionListener listener) { + forcemergeAsync(forceMergeRequest, options, listener); + } + + /** + * Asynchronously force merge one or more indices using the Force Merge API. + * See + * Force Merge API on elastic.co + * @param forceMergeRequest the request + * @param options the request options (e.g. 
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener the listener to be notified upon request completion + */ + public void forcemergeAsync(ForceMergeRequest forceMergeRequest, RequestOptions options, ActionListener listener) { restHighLevelClient.performRequestAsyncAndParseEntity(forceMergeRequest, RequestConverters::forceMerge, options, ForceMergeResponse::fromXContent, listener, emptySet()); } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/IngestClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/IngestClient.java index 340e14653971b..e889ec5beba80 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/IngestClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/IngestClient.java @@ -139,7 +139,7 @@ public void deletePipelineAsync(DeletePipelineRequest request, RequestOptions op * @return the response * @throws IOException in case there is a problem sending the request or parsing back the response */ - public SimulatePipelineResponse simulatePipeline(SimulatePipelineRequest request, RequestOptions options) throws IOException { + public SimulatePipelineResponse simulate(SimulatePipelineRequest request, RequestOptions options) throws IOException { return restHighLevelClient.performRequestAndParseEntity( request, RequestConverters::simulatePipeline, options, SimulatePipelineResponse::fromXContent, emptySet()); } @@ -154,9 +154,9 @@ public SimulatePipelineResponse simulatePipeline(SimulatePipelineRequest request * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @param listener the listener to be notified upon request completion */ - public void simulatePipelineAsync(SimulatePipelineRequest request, - RequestOptions options, - ActionListener listener) { + public void simulateAsync(SimulatePipelineRequest request, + RequestOptions options, + ActionListener listener) { restHighLevelClient.performRequestAsyncAndParseEntity( request, RequestConverters::simulatePipeline, options, SimulatePipelineResponse::fromXContent, listener, emptySet()); } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java index b9e41b879328f..c71bebf6903ca 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/RestHighLevelClient.java @@ -384,8 +384,23 @@ public final void getAsync(GetRequest getRequest, RequestOptions options, Action * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @return the response * @throws IOException in case there is a problem sending the request or parsing back the response + * @deprecated use {@link #mget(MultiGetRequest, RequestOptions)} instead */ + @Deprecated public final MultiGetResponse multiGet(MultiGetRequest multiGetRequest, RequestOptions options) throws IOException { + return mget(multiGetRequest, options); + } + + + /** + * Retrieves multiple documents by id using the Multi Get API. + * See Multi Get API on elastic.co + * @param multiGetRequest the request + * @param options the request options (e.g. 
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return the response + * @throws IOException in case there is a problem sending the request or parsing back the response + */ + public final MultiGetResponse mget(MultiGetRequest multiGetRequest, RequestOptions options) throws IOException { return performRequestAndParseEntity(multiGetRequest, RequestConverters::multiGet, options, MultiGetResponse::fromXContent, singleton(404)); } @@ -396,8 +411,21 @@ public final MultiGetResponse multiGet(MultiGetRequest multiGetRequest, RequestO * @param multiGetRequest the request * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @param listener the listener to be notified upon request completion + * @deprecated use {@link #mgetAsync(MultiGetRequest, RequestOptions, ActionListener)} instead */ + @Deprecated public final void multiGetAsync(MultiGetRequest multiGetRequest, RequestOptions options, ActionListener listener) { + mgetAsync(multiGetRequest, options, listener); + } + + /** + * Asynchronously retrieves multiple documents by id using the Multi Get API. + * See Multi Get API on elastic.co + * @param multiGetRequest the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener the listener to be notified upon request completion + */ + public final void mgetAsync(MultiGetRequest multiGetRequest, RequestOptions options, ActionListener listener) { performRequestAsyncAndParseEntity(multiGetRequest, RequestConverters::multiGet, options, MultiGetResponse::fromXContent, listener, singleton(404)); } @@ -531,8 +559,23 @@ public final void searchAsync(SearchRequest searchRequest, RequestOptions option * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @return the response * @throws IOException in case there is a problem sending the request or parsing back the response + * @deprecated use {@link #msearch(MultiSearchRequest, RequestOptions)} instead */ + @Deprecated public final MultiSearchResponse multiSearch(MultiSearchRequest multiSearchRequest, RequestOptions options) throws IOException { + return msearch(multiSearchRequest, options); + } + + /** + * Executes a multi search using the msearch API. + * See Multi search API on + * elastic.co + * @param multiSearchRequest the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return the response + * @throws IOException in case there is a problem sending the request or parsing back the response + */ + public final MultiSearchResponse msearch(MultiSearchRequest multiSearchRequest, RequestOptions options) throws IOException { return performRequestAndParseEntity(multiSearchRequest, RequestConverters::multiSearch, options, MultiSearchResponse::fromXContext, emptySet()); } @@ -544,9 +587,24 @@ public final MultiSearchResponse multiSearch(MultiSearchRequest multiSearchReque * @param searchRequest the request * @param options the request options (e.g. 
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @param listener the listener to be notified upon request completion + * @deprecated use {@link #msearchAsync(MultiSearchRequest, RequestOptions, ActionListener)} instead */ + @Deprecated public final void multiSearchAsync(MultiSearchRequest searchRequest, RequestOptions options, - ActionListener listener) { + ActionListener listener) { + msearchAsync(searchRequest, options, listener); + } + + /** + * Asynchronously executes a multi search using the msearch API. + * See Multi search API on + * elastic.co + * @param searchRequest the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener the listener to be notified upon request completion + */ + public final void msearchAsync(MultiSearchRequest searchRequest, RequestOptions options, + ActionListener listener) { performRequestAsyncAndParseEntity(searchRequest, RequestConverters::multiSearch, options, MultiSearchResponse::fromXContext, listener, emptySet()); } @@ -559,8 +617,23 @@ public final void multiSearchAsync(MultiSearchRequest searchRequest, RequestOpti * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @return the response * @throws IOException in case there is a problem sending the request or parsing back the response + * @deprecated use {@link #scroll(SearchScrollRequest, RequestOptions)} instead */ + @Deprecated public final SearchResponse searchScroll(SearchScrollRequest searchScrollRequest, RequestOptions options) throws IOException { + return scroll(searchScrollRequest, options); + } + + /** + * Executes a search using the Search Scroll API. + * See Search Scroll + * API on elastic.co + * @param searchScrollRequest the request + * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @return the response + * @throws IOException in case there is a problem sending the request or parsing back the response + */ + public final SearchResponse scroll(SearchScrollRequest searchScrollRequest, RequestOptions options) throws IOException { return performRequestAndParseEntity(searchScrollRequest, RequestConverters::searchScroll, options, SearchResponse::fromXContent, emptySet()); } @@ -572,9 +645,24 @@ public final SearchResponse searchScroll(SearchScrollRequest searchScrollRequest * @param searchScrollRequest the request * @param options the request options (e.g. headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @param listener the listener to be notified upon request completion + * @deprecated use {@link #scrollAsync(SearchScrollRequest, RequestOptions, ActionListener)} instead */ + @Deprecated public final void searchScrollAsync(SearchScrollRequest searchScrollRequest, RequestOptions options, - ActionListener listener) { + ActionListener listener) { + scrollAsync(searchScrollRequest, options, listener); + } + + /** + * Asynchronously executes a search using the Search Scroll API. + * See Search Scroll + * API on elastic.co + * @param searchScrollRequest the request + * @param options the request options (e.g. 
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized + * @param listener the listener to be notified upon request completion + */ + public final void scrollAsync(SearchScrollRequest searchScrollRequest, RequestOptions options, + ActionListener listener) { performRequestAsyncAndParseEntity(searchScrollRequest, RequestConverters::searchScroll, options, SearchResponse::fromXContent, listener, emptySet()); } @@ -691,8 +779,8 @@ public final RankEvalResponse rankEval(RankEvalRequest rankEvalRequest, RequestO * See Multi Search Template API * on elastic.co. */ - public final MultiSearchTemplateResponse multiSearchTemplate(MultiSearchTemplateRequest multiSearchTemplateRequest, - RequestOptions options) throws IOException { + public final MultiSearchTemplateResponse msearchTemplate(MultiSearchTemplateRequest multiSearchTemplateRequest, + RequestOptions options) throws IOException { return performRequestAndParseEntity(multiSearchTemplateRequest, RequestConverters::multiSearchTemplate, options, MultiSearchTemplateResponse::fromXContext, emptySet()); } @@ -703,9 +791,9 @@ public final MultiSearchTemplateResponse multiSearchTemplate(MultiSearchTemplate * See Multi Search Template API * on elastic.co. */ - public final void multiSearchTemplateAsync(MultiSearchTemplateRequest multiSearchTemplateRequest, - RequestOptions options, - ActionListener listener) { + public final void msearchTemplateAsync(MultiSearchTemplateRequest multiSearchTemplateRequest, + RequestOptions options, + ActionListener listener) { performRequestAsyncAndParseEntity(multiSearchTemplateRequest, RequestConverters::multiSearchTemplate, options, MultiSearchTemplateResponse::fromXContext, listener, emptySet()); } diff --git a/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotClient.java b/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotClient.java index f75f6cdef2405..ae115839baeaf 100644 --- a/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotClient.java +++ b/client/rest-high-level/src/main/java/org/elasticsearch/client/SnapshotClient.java @@ -63,7 +63,7 @@ public final class SnapshotClient { * @return the response * @throws IOException in case there is a problem sending the request or parsing back the response */ - public GetRepositoriesResponse getRepositories(GetRepositoriesRequest getRepositoriesRequest, RequestOptions options) + public GetRepositoriesResponse getRepository(GetRepositoriesRequest getRepositoriesRequest, RequestOptions options) throws IOException { return restHighLevelClient.performRequestAndParseEntity(getRepositoriesRequest, RequestConverters::getRepositories, options, GetRepositoriesResponse::fromXContent, emptySet()); @@ -78,8 +78,8 @@ public GetRepositoriesResponse getRepositories(GetRepositoriesRequest getReposit * @param options the request options (e.g. 
headers), use {@link RequestOptions#DEFAULT} if nothing needs to be customized * @param listener the listener to be notified upon request completion */ - public void getRepositoriesAsync(GetRepositoriesRequest getRepositoriesRequest, RequestOptions options, - ActionListener listener) { + public void getRepositoryAsync(GetRepositoriesRequest getRepositoriesRequest, RequestOptions options, + ActionListener listener) { restHighLevelClient.performRequestAsyncAndParseEntity(getRepositoriesRequest, RequestConverters::getRepositories, options, GetRepositoriesResponse::fromXContent, listener, emptySet()); } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/BulkProcessorIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/BulkProcessorIT.java index 7605b1c715c74..fdd5634ddd6bd 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/BulkProcessorIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/BulkProcessorIT.java @@ -79,7 +79,7 @@ public void testThatBulkProcessorCountIsCorrect() throws Exception { assertThat(listener.afterCounts.get(), equalTo(1)); assertThat(listener.bulkFailures.size(), equalTo(0)); assertResponseItems(listener.bulkItems, numDocs); - assertMultiGetResponse(highLevelClient().multiGet(multiGetRequest, RequestOptions.DEFAULT), numDocs); + assertMultiGetResponse(highLevelClient().mget(multiGetRequest, RequestOptions.DEFAULT), numDocs); } } @@ -105,7 +105,7 @@ public void testBulkProcessorFlush() throws Exception { assertThat(listener.afterCounts.get(), equalTo(1)); assertThat(listener.bulkFailures.size(), equalTo(0)); assertResponseItems(listener.bulkItems, numDocs); - assertMultiGetResponse(highLevelClient().multiGet(multiGetRequest, RequestOptions.DEFAULT), numDocs); + assertMultiGetResponse(highLevelClient().mget(multiGetRequest, RequestOptions.DEFAULT), numDocs); } } @@ -157,7 +157,7 @@ public void testBulkProcessorConcurrentRequests() throws Exception { assertThat(ids.add(bulkItemResponse.getId()), equalTo(true)); } - assertMultiGetResponse(highLevelClient().multiGet(multiGetRequest, RequestOptions.DEFAULT), numDocs); + assertMultiGetResponse(highLevelClient().mget(multiGetRequest, RequestOptions.DEFAULT), numDocs); } public void testBulkProcessorWaitOnClose() throws Exception { @@ -188,7 +188,7 @@ public void testBulkProcessorWaitOnClose() throws Exception { } assertThat(listener.bulkFailures.size(), equalTo(0)); assertResponseItems(listener.bulkItems, numDocs); - assertMultiGetResponse(highLevelClient().multiGet(multiGetRequest, RequestOptions.DEFAULT), numDocs); + assertMultiGetResponse(highLevelClient().mget(multiGetRequest, RequestOptions.DEFAULT), numDocs); } public void testBulkProcessorConcurrentRequestsReadOnlyIndex() throws Exception { @@ -265,7 +265,7 @@ public void testBulkProcessorConcurrentRequestsReadOnlyIndex() throws Exception } } - assertMultiGetResponse(highLevelClient().multiGet(multiGetRequest, RequestOptions.DEFAULT), testDocs); + assertMultiGetResponse(highLevelClient().mget(multiGetRequest, RequestOptions.DEFAULT), testDocs); } private static MultiGetRequest indexDocs(BulkProcessor processor, int numDocs) throws Exception { diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/BulkProcessorRetryIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/BulkProcessorRetryIT.java index c20998eeb5826..5fd9fcb661c2b 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/BulkProcessorRetryIT.java +++ 
b/client/rest-high-level/src/test/java/org/elasticsearch/client/BulkProcessorRetryIT.java @@ -129,7 +129,7 @@ public void afterBulk(long executionId, BulkRequest request, Throwable failure) } highLevelClient().indices().refresh(new RefreshRequest(), RequestOptions.DEFAULT); - int multiGetResponsesCount = highLevelClient().multiGet(multiGetRequest, RequestOptions.DEFAULT).getResponses().length; + int multiGetResponsesCount = highLevelClient().mget(multiGetRequest, RequestOptions.DEFAULT).getResponses().length; if (rejectedExecutionExpected) { assertThat(multiGetResponsesCount, lessThanOrEqualTo(numberOfAsyncOps)); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java index 9de4c22611c3b..89f357477fa06 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/CrudIT.java @@ -253,7 +253,7 @@ public void testMultiGet() throws IOException { MultiGetRequest multiGetRequest = new MultiGetRequest(); multiGetRequest.add("index", "type", "id1"); multiGetRequest.add("index", "type", "id2"); - MultiGetResponse response = execute(multiGetRequest, highLevelClient()::multiGet, highLevelClient()::multiGetAsync); + MultiGetResponse response = execute(multiGetRequest, highLevelClient()::mget, highLevelClient()::mgetAsync); assertEquals(2, response.getResponses().length); assertTrue(response.getResponses()[0].isFailed()); @@ -285,7 +285,7 @@ public void testMultiGet() throws IOException { MultiGetRequest multiGetRequest = new MultiGetRequest(); multiGetRequest.add("index", "type", "id1"); multiGetRequest.add("index", "type", "id2"); - MultiGetResponse response = execute(multiGetRequest, highLevelClient()::multiGet, highLevelClient()::multiGetAsync); + MultiGetResponse response = execute(multiGetRequest, highLevelClient()::mget, highLevelClient()::mgetAsync); assertEquals(2, response.getResponses().length); assertFalse(response.getResponses()[0].isFailed()); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/CustomRestHighLevelClientTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/CustomRestHighLevelClientTests.java index 3d1db23da16b6..ff27fe21c27e6 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/CustomRestHighLevelClientTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/CustomRestHighLevelClientTests.java @@ -121,7 +121,7 @@ private static RequestOptions optionsForNodeName(String nodeName) { * so that they can be used by subclasses to implement custom logic. 
*/ @SuppressForbidden(reason = "We're forced to uses Class#getDeclaredMethods() here because this test checks protected methods") - public void testMethodsVisibility() throws ClassNotFoundException { + public void testMethodsVisibility() { final String[] methodNames = new String[]{"parseEntity", "parseResponseException", "performRequest", diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java index 88cf445d436fe..36a45999b51ee 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/IndicesClientIT.java @@ -443,7 +443,7 @@ public void testGetMapping() throws IOException { .types("_doc"); GetMappingsResponse getMappingsResponse = - execute(request, highLevelClient().indices()::getMappings, highLevelClient().indices()::getMappingsAsync); + execute(request, highLevelClient().indices()::getMapping, highLevelClient().indices()::getMappingAsync); Map mappings = getMappingsResponse.getMappings().get(indexName).get("_doc").sourceAsMap(); Map type = new HashMap<>(); @@ -796,7 +796,7 @@ public void testForceMerge() throws IOException { createIndex(index, settings); ForceMergeRequest forceMergeRequest = new ForceMergeRequest(index); ForceMergeResponse forceMergeResponse = - execute(forceMergeRequest, highLevelClient().indices()::forceMerge, highLevelClient().indices()::forceMergeAsync); + execute(forceMergeRequest, highLevelClient().indices()::forcemerge, highLevelClient().indices()::forcemergeAsync); assertThat(forceMergeResponse.getTotalShards(), equalTo(1)); assertThat(forceMergeResponse.getSuccessfulShards(), equalTo(1)); assertThat(forceMergeResponse.getFailedShards(), equalTo(0)); @@ -807,7 +807,7 @@ public void testForceMerge() throws IOException { assertFalse(indexExists(nonExistentIndex)); ForceMergeRequest forceMergeRequest = new ForceMergeRequest(nonExistentIndex); ElasticsearchException exception = expectThrows(ElasticsearchException.class, - () -> execute(forceMergeRequest, highLevelClient().indices()::forceMerge, highLevelClient().indices()::forceMergeAsync)); + () -> execute(forceMergeRequest, highLevelClient().indices()::forcemerge, highLevelClient().indices()::forcemergeAsync)); assertEquals(RestStatus.NOT_FOUND, exception.status()); } } diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/IngestClientIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/IngestClientIT.java index 6fd6f95059577..1f5914f392cf4 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/IngestClientIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/IngestClientIT.java @@ -135,7 +135,7 @@ private void testSimulatePipeline(boolean isVerbose, ); request.setVerbose(isVerbose); SimulatePipelineResponse response = - execute(request, highLevelClient().ingest()::simulatePipeline, highLevelClient().ingest()::simulatePipelineAsync); + execute(request, highLevelClient().ingest()::simulate, highLevelClient().ingest()::simulateAsync); List results = response.getResults(); assertEquals(1, results.size()); if (isVerbose) { diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java index 2925062e0e75b..47870125aa299 100644 --- 
a/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/RestHighLevelClientTests.java @@ -20,8 +20,6 @@ package org.elasticsearch.client; import com.fasterxml.jackson.core.JsonParseException; - -import org.apache.http.Header; import org.apache.http.HttpEntity; import org.apache.http.HttpHost; import org.apache.http.HttpResponse; @@ -53,6 +51,7 @@ import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.common.CheckedFunction; import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -73,20 +72,30 @@ import org.elasticsearch.search.suggest.Suggest; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.InternalAggregationTestCase; +import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestApi; +import org.elasticsearch.test.rest.yaml.restspec.ClientYamlSuiteRestSpec; import org.junit.Before; import java.io.IOException; +import java.lang.reflect.Method; +import java.lang.reflect.Modifier; import java.net.SocketTimeoutException; import java.util.ArrayList; +import java.util.Arrays; import java.util.Collections; import java.util.HashMap; +import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Set; import java.util.concurrent.atomic.AtomicInteger; import java.util.concurrent.atomic.AtomicReference; +import java.util.stream.Collectors; +import java.util.stream.Stream; -import static org.elasticsearch.client.RestClientTestUtil.randomHeaders; import static org.elasticsearch.common.xcontent.XContentHelper.toXContent; +import static org.hamcrest.CoreMatchers.endsWith; +import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.CoreMatchers.instanceOf; import static org.mockito.Matchers.any; import static org.mockito.Mockito.mock; @@ -137,7 +146,6 @@ public void testPingSocketTimeout() throws IOException { } public void testInfo() throws IOException { - Header[] headers = randomHeaders(random(), "Header"); MainResponse testInfo = new MainResponse("nodeName", Version.CURRENT, new ClusterName("clusterName"), "clusterUuid", Build.CURRENT); mockResponse(testInfo); @@ -150,7 +158,7 @@ public void testSearchScroll() throws IOException { null, false, false, null, 1), randomAlphaOfLengthBetween(5, 10), 5, 5, 0, 100, ShardSearchFailure.EMPTY_ARRAY, SearchResponse.Clusters.EMPTY); mockResponse(mockSearchResponse); - SearchResponse searchResponse = restHighLevelClient.searchScroll( + SearchResponse searchResponse = restHighLevelClient.scroll( new SearchScrollRequest(randomAlphaOfLengthBetween(5, 10)), RequestOptions.DEFAULT); assertEquals(mockSearchResponse.getScrollId(), searchResponse.getScrollId()); assertEquals(0, searchResponse.getHits().totalHits); @@ -632,6 +640,149 @@ public void testProvidedNamedXContents() { assertTrue(names.contains(DiscountedCumulativeGain.NAME)); } + public void testApiNamingConventions() throws Exception { + //this list should be empty once the high-level client is feature complete + String[] notYetSupportedApi = new String[]{ + "cluster.remote_info", + "count", + "create", + "delete_by_query", + "exists_source", + "get_source", + "indices.delete_alias", + "indices.delete_template", + "indices.exists_template", + "indices.exists_type", + "indices.get_upgrade", + 
"indices.put_alias", + "mtermvectors", + "put_script", + "reindex", + "reindex_rethrottle", + "render_search_template", + "scripts_painless_execute", + "snapshot.restore", + "tasks.get", + "termvectors", + "update_by_query" + }; + //These API are not required for high-level client feature completeness + String[] notRequiredApi = new String[] { + "cluster.allocation_explain", + "cluster.pending_tasks", + "cluster.reroute", + "cluster.state", + "cluster.stats", + "indices.shard_stores", + "indices.upgrade", + "indices.recovery", + "indices.segments", + "indices.stats", + "ingest.processor_grok", + "nodes.info", + "nodes.stats", + "nodes.hot_threads", + "nodes.usage", + "search_shards", + }; + Set deprecatedMethods = new HashSet<>(); + deprecatedMethods.add("indices.force_merge"); + deprecatedMethods.add("multi_get"); + deprecatedMethods.add("multi_search"); + deprecatedMethods.add("search_scroll"); + + ClientYamlSuiteRestSpec restSpec = ClientYamlSuiteRestSpec.load("/rest-api-spec/api"); + Set apiSpec = restSpec.getApis().stream().map(ClientYamlSuiteRestApi::getName).collect(Collectors.toSet()); + + Set topLevelMethodsExclusions = new HashSet<>(); + topLevelMethodsExclusions.add("getLowLevelClient"); + topLevelMethodsExclusions.add("close"); + + Map methods = Arrays.stream(RestHighLevelClient.class.getMethods()) + .filter(method -> method.getDeclaringClass().equals(RestHighLevelClient.class) + && topLevelMethodsExclusions.contains(method.getName()) == false) + .map(method -> Tuple.tuple(toSnakeCase(method.getName()), method)) + .flatMap(tuple -> tuple.v2().getReturnType().getName().endsWith("Client") + ? getSubClientMethods(tuple.v1(), tuple.v2().getReturnType()) : Stream.of(tuple)) + .collect(Collectors.toMap(Tuple::v1, Tuple::v2)); + + Set apiNotFound = new HashSet<>(); + + for (Map.Entry entry : methods.entrySet()) { + Method method = entry.getValue(); + String apiName = entry.getKey(); + + assertTrue("method [" + apiName + "] is not final", + Modifier.isFinal(method.getClass().getModifiers()) || Modifier.isFinal(method.getModifiers())); + assertTrue(Modifier.isPublic(method.getModifiers())); + + //we convert all the method names to snake case, hence we need to look for the '_async' suffix rather than 'Async' + if (apiName.endsWith("_async")) { + assertTrue("async method [" + method.getName() + "] doesn't have corresponding sync method", + methods.containsKey(apiName.substring(0, apiName.length() - 6))); + assertThat(method.getReturnType(), equalTo(Void.TYPE)); + assertEquals(0, method.getExceptionTypes().length); + assertEquals(3, method.getParameterTypes().length); + assertThat(method.getParameterTypes()[0].getSimpleName(), endsWith("Request")); + assertThat(method.getParameterTypes()[1].getName(), equalTo(RequestOptions.class.getName())); + assertThat(method.getParameterTypes()[2].getName(), equalTo(ActionListener.class.getName())); + } else { + //A few methods return a boolean rather than a response object + if (apiName.equals("ping") || apiName.contains("exist")) { + assertThat(method.getReturnType().getSimpleName(), equalTo("boolean")); + } else { + assertThat(method.getReturnType().getSimpleName(), endsWith("Response")); + } + + assertEquals(1, method.getExceptionTypes().length); + //a few methods don't accept a request object as argument + if (apiName.equals("ping") || apiName.equals("info")) { + assertEquals(1, method.getParameterTypes().length); + assertThat(method.getParameterTypes()[0].getName(), equalTo(RequestOptions.class.getName())); + } else { + assertEquals(apiName, 2, 
method.getParameterTypes().length); + assertThat(method.getParameterTypes()[0].getSimpleName(), endsWith("Request")); + assertThat(method.getParameterTypes()[1].getName(), equalTo(RequestOptions.class.getName())); + } + + boolean remove = apiSpec.remove(apiName); + if (remove == false && deprecatedMethods.contains(apiName) == false) { + //TODO xpack api are currently ignored, we need to load xpack yaml spec too + if (apiName.startsWith("xpack.") == false) { + apiNotFound.add(apiName); + } + } + } + } + assertThat("Some client method doesn't match a corresponding API defined in the REST spec: " + apiNotFound, + apiNotFound.size(), equalTo(0)); + + //we decided not to support cat API in the high-level REST client, they are supposed to be used from a low-level client + apiSpec.removeIf(api -> api.startsWith("cat.")); + Stream.concat(Arrays.stream(notYetSupportedApi), Arrays.stream(notRequiredApi)).forEach( + api -> assertTrue(api + " API is either not defined in the spec or already supported by the high-level client", + apiSpec.remove(api))); + assertThat("Some API are not supported but they should be: " + apiSpec, apiSpec.size(), equalTo(0)); + } + + private static Stream> getSubClientMethods(String namespace, Class clientClass) { + return Arrays.stream(clientClass.getMethods()).filter(method -> method.getDeclaringClass().equals(clientClass)) + .map(method -> Tuple.tuple(namespace + "." + toSnakeCase(method.getName()), method)); + } + + private static String toSnakeCase(String camelCase) { + StringBuilder snakeCaseString = new StringBuilder(); + for (Character aChar : camelCase.toCharArray()) { + if (Character.isUpperCase(aChar)) { + snakeCaseString.append('_'); + snakeCaseString.append(Character.toLowerCase(aChar)); + } else { + snakeCaseString.append(aChar); + } + } + return snakeCaseString.toString(); + } + private static class TrackingActionListener implements ActionListener { private final AtomicInteger statusCode = new AtomicInteger(-1); private final AtomicReference exception = new AtomicReference<>(); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java index ce9091a91ff8b..9c9c5425f0006 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/SearchIT.java @@ -597,7 +597,7 @@ public void testSearchScroll() throws Exception { } searchResponse = execute(new SearchScrollRequest(searchResponse.getScrollId()).scroll(TimeValue.timeValueMinutes(2)), - highLevelClient()::searchScroll, highLevelClient()::searchScrollAsync); + highLevelClient()::scroll, highLevelClient()::scrollAsync); assertThat(searchResponse.getHits().getTotalHits(), equalTo(100L)); assertThat(searchResponse.getHits().getHits().length, equalTo(35)); @@ -606,7 +606,7 @@ public void testSearchScroll() throws Exception { } searchResponse = execute(new SearchScrollRequest(searchResponse.getScrollId()).scroll(TimeValue.timeValueMinutes(2)), - highLevelClient()::searchScroll, highLevelClient()::searchScrollAsync); + highLevelClient()::scroll, highLevelClient()::scrollAsync); assertThat(searchResponse.getHits().getTotalHits(), equalTo(100L)); assertThat(searchResponse.getHits().getHits().length, equalTo(30)); @@ -623,7 +623,7 @@ public void testSearchScroll() throws Exception { SearchScrollRequest scrollRequest = new SearchScrollRequest(searchResponse.getScrollId()).scroll(TimeValue.timeValueMinutes(2)); 
ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class, () -> execute(scrollRequest, - highLevelClient()::searchScroll, highLevelClient()::searchScrollAsync)); + highLevelClient()::scroll, highLevelClient()::scrollAsync)); assertEquals(RestStatus.NOT_FOUND, exception.status()); assertThat(exception.getRootCause(), instanceOf(ElasticsearchException.class)); ElasticsearchException rootCause = (ElasticsearchException) exception.getRootCause(); @@ -644,7 +644,7 @@ public void testMultiSearch() throws Exception { multiSearchRequest.add(searchRequest3); MultiSearchResponse multiSearchResponse = - execute(multiSearchRequest, highLevelClient()::multiSearch, highLevelClient()::multiSearchAsync); + execute(multiSearchRequest, highLevelClient()::msearch, highLevelClient()::msearchAsync); assertThat(multiSearchResponse.getTook().millis(), Matchers.greaterThanOrEqualTo(0L)); assertThat(multiSearchResponse.getResponses().length, Matchers.equalTo(3)); @@ -686,7 +686,7 @@ public void testMultiSearch_withAgg() throws Exception { multiSearchRequest.add(searchRequest3); MultiSearchResponse multiSearchResponse = - execute(multiSearchRequest, highLevelClient()::multiSearch, highLevelClient()::multiSearchAsync); + execute(multiSearchRequest, highLevelClient()::msearch, highLevelClient()::msearchAsync); assertThat(multiSearchResponse.getTook().millis(), Matchers.greaterThanOrEqualTo(0L)); assertThat(multiSearchResponse.getResponses().length, Matchers.equalTo(3)); @@ -734,7 +734,7 @@ public void testMultiSearch_withQuery() throws Exception { multiSearchRequest.add(searchRequest3); MultiSearchResponse multiSearchResponse = - execute(multiSearchRequest, highLevelClient()::multiSearch, highLevelClient()::multiSearchAsync); + execute(multiSearchRequest, highLevelClient()::msearch, highLevelClient()::msearchAsync); assertThat(multiSearchResponse.getTook().millis(), Matchers.greaterThanOrEqualTo(0L)); assertThat(multiSearchResponse.getResponses().length, Matchers.equalTo(3)); @@ -759,7 +759,7 @@ public void testMultiSearch_withQuery() throws Exception { searchRequest1.source().highlighter(new HighlightBuilder().field("field")); searchRequest2.source().highlighter(new HighlightBuilder().field("field")); searchRequest3.source().highlighter(new HighlightBuilder().field("field")); - multiSearchResponse = execute(multiSearchRequest, highLevelClient()::multiSearch, highLevelClient()::multiSearchAsync); + multiSearchResponse = execute(multiSearchRequest, highLevelClient()::msearch, highLevelClient()::msearchAsync); assertThat(multiSearchResponse.getTook().millis(), Matchers.greaterThanOrEqualTo(0L)); assertThat(multiSearchResponse.getResponses().length, Matchers.equalTo(3)); @@ -797,7 +797,7 @@ public void testMultiSearch_failure() throws Exception { multiSearchRequest.add(searchRequest2); MultiSearchResponse multiSearchResponse = - execute(multiSearchRequest, highLevelClient()::multiSearch, highLevelClient()::multiSearchAsync); + execute(multiSearchRequest, highLevelClient()::msearch, highLevelClient()::msearchAsync); assertThat(multiSearchResponse.getTook().millis(), Matchers.greaterThanOrEqualTo(0L)); assertThat(multiSearchResponse.getResponses().length, Matchers.equalTo(2)); @@ -941,8 +941,8 @@ public void testMultiSearchTemplate() throws Exception { multiSearchTemplateRequest.add(badRequest); MultiSearchTemplateResponse multiSearchTemplateResponse = - execute(multiSearchTemplateRequest, highLevelClient()::multiSearchTemplate, - highLevelClient()::multiSearchTemplateAsync); + 
execute(multiSearchTemplateRequest, highLevelClient()::msearchTemplate, + highLevelClient()::msearchTemplateAsync); Item[] responses = multiSearchTemplateResponse.getResponses(); @@ -999,8 +999,8 @@ public void testMultiSearchTemplateAllBad() throws Exception { // The whole HTTP request should fail if no nested search requests are valid ElasticsearchStatusException exception = expectThrows(ElasticsearchStatusException.class, - () -> execute(multiSearchTemplateRequest, highLevelClient()::multiSearchTemplate, - highLevelClient()::multiSearchTemplateAsync)); + () -> execute(multiSearchTemplateRequest, highLevelClient()::msearchTemplate, + highLevelClient()::msearchTemplateAsync)); assertEquals(RestStatus.BAD_REQUEST, exception.status()); assertThat(exception.getMessage(), containsString("no requests added")); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java index 6d035f5db654a..5483f055c2c12 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/SnapshotIT.java @@ -77,8 +77,8 @@ public void testSnapshotGetRepositoriesUsingParams() throws IOException { GetRepositoriesRequest request = new GetRepositoriesRequest(); request.repositories(new String[]{testRepository}); - GetRepositoriesResponse response = execute(request, highLevelClient().snapshot()::getRepositories, - highLevelClient().snapshot()::getRepositoriesAsync); + GetRepositoriesResponse response = execute(request, highLevelClient().snapshot()::getRepository, + highLevelClient().snapshot()::getRepositoryAsync); assertThat(1, equalTo(response.repositories().size())); } @@ -86,8 +86,8 @@ public void testSnapshotGetDefaultRepositories() throws IOException { assertTrue(createTestRepository("other", FsRepository.TYPE, "{\"location\": \".\"}").isAcknowledged()); assertTrue(createTestRepository("test", FsRepository.TYPE, "{\"location\": \".\"}").isAcknowledged()); - GetRepositoriesResponse response = execute(new GetRepositoriesRequest(), highLevelClient().snapshot()::getRepositories, - highLevelClient().snapshot()::getRepositoriesAsync); + GetRepositoriesResponse response = execute(new GetRepositoriesRequest(), highLevelClient().snapshot()::getRepository, + highLevelClient().snapshot()::getRepositoryAsync); assertThat(2, equalTo(response.repositories().size())); } @@ -95,7 +95,7 @@ public void testSnapshotGetRepositoriesNonExistent() { String repository = "doesnotexist"; GetRepositoriesRequest request = new GetRepositoriesRequest(new String[]{repository}); ElasticsearchException exception = expectThrows(ElasticsearchException.class, () -> execute(request, - highLevelClient().snapshot()::getRepositories, highLevelClient().snapshot()::getRepositoriesAsync)); + highLevelClient().snapshot()::getRepository, highLevelClient().snapshot()::getRepositoryAsync)); assertThat(exception.status(), equalTo(RestStatus.NOT_FOUND)); assertThat(exception.getMessage(), equalTo( @@ -107,8 +107,8 @@ public void testSnapshotDeleteRepository() throws IOException { assertTrue(createTestRepository(repository, FsRepository.TYPE, "{\"location\": \".\"}").isAcknowledged()); GetRepositoriesRequest request = new GetRepositoriesRequest(); - GetRepositoriesResponse response = execute(request, highLevelClient().snapshot()::getRepositories, - highLevelClient().snapshot()::getRepositoriesAsync); + GetRepositoriesResponse response = execute(request, 
highLevelClient().snapshot()::getRepository, + highLevelClient().snapshot()::getRepositoryAsync); assertThat(1, equalTo(response.repositories().size())); DeleteRepositoryRequest deleteRequest = new DeleteRepositoryRequest(repository); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java index 9dad115643cbf..ad41c139ddc37 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/CRUDDocumentationIT.java @@ -1121,7 +1121,7 @@ public void testMultiGet() throws Exception { // end::multi-get-request-top-level-extras // tag::multi-get-execute - MultiGetResponse response = client.multiGet(request, RequestOptions.DEFAULT); + MultiGetResponse response = client.mget(request, RequestOptions.DEFAULT); // end::multi-get-execute // tag::multi-get-response @@ -1174,7 +1174,7 @@ public void onFailure(Exception e) { listener = new LatchedActionListener<>(listener, latch); // tag::multi-get-execute-async - client.multiGetAsync(request, RequestOptions.DEFAULT, listener); // <1> + client.mgetAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::multi-get-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); @@ -1185,7 +1185,7 @@ public void onFailure(Exception e) { request.add(new MultiGetRequest.Item("index", "type", "example_id") .fetchSourceContext(FetchSourceContext.DO_NOT_FETCH_SOURCE)); // <1> // end::multi-get-request-no-source - MultiGetItemResponse item = unwrapAndAssertExample(client.multiGet(request, RequestOptions.DEFAULT)); + MultiGetItemResponse item = unwrapAndAssertExample(client.mget(request, RequestOptions.DEFAULT)); assertNull(item.getResponse().getSource()); } { @@ -1198,7 +1198,7 @@ public void onFailure(Exception e) { request.add(new MultiGetRequest.Item("index", "type", "example_id") .fetchSourceContext(fetchSourceContext)); // <1> // end::multi-get-request-source-include - MultiGetItemResponse item = unwrapAndAssertExample(client.multiGet(request, RequestOptions.DEFAULT)); + MultiGetItemResponse item = unwrapAndAssertExample(client.mget(request, RequestOptions.DEFAULT)); assertThat(item.getResponse().getSource(), hasEntry("foo", "val1")); assertThat(item.getResponse().getSource(), hasEntry("bar", "val2")); assertThat(item.getResponse().getSource(), not(hasKey("baz"))); @@ -1213,7 +1213,7 @@ public void onFailure(Exception e) { request.add(new MultiGetRequest.Item("index", "type", "example_id") .fetchSourceContext(fetchSourceContext)); // <1> // end::multi-get-request-source-exclude - MultiGetItemResponse item = unwrapAndAssertExample(client.multiGet(request, RequestOptions.DEFAULT)); + MultiGetItemResponse item = unwrapAndAssertExample(client.mget(request, RequestOptions.DEFAULT)); assertThat(item.getResponse().getSource(), not(hasKey("foo"))); assertThat(item.getResponse().getSource(), not(hasKey("bar"))); assertThat(item.getResponse().getSource(), hasEntry("baz", "val3")); @@ -1223,7 +1223,7 @@ public void onFailure(Exception e) { // tag::multi-get-request-stored request.add(new MultiGetRequest.Item("index", "type", "example_id") .storedFields("foo")); // <1> - MultiGetResponse response = client.multiGet(request, RequestOptions.DEFAULT); + MultiGetResponse response = client.mget(request, RequestOptions.DEFAULT); MultiGetItemResponse item = response.getResponses()[0]; 
String value = item.getResponse().getField("foo").getValue(); // <2> // end::multi-get-request-stored @@ -1235,7 +1235,7 @@ public void onFailure(Exception e) { MultiGetRequest request = new MultiGetRequest(); request.add(new MultiGetRequest.Item("index", "type", "example_id") .version(1000L)); - MultiGetResponse response = client.multiGet(request, RequestOptions.DEFAULT); + MultiGetResponse response = client.mget(request, RequestOptions.DEFAULT); MultiGetItemResponse item = response.getResponses()[0]; assertNull(item.getResponse()); // <1> Exception e = item.getFailure().getFailure(); // <2> diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java index 23dab5b21e2ab..36d562c501f47 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IndicesClientDocumentationIT.java @@ -622,7 +622,7 @@ public void testGetMapping() throws IOException { // end::get-mapping-request-indicesOptions // tag::get-mapping-execute - GetMappingsResponse getMappingResponse = client.indices().getMappings(request, RequestOptions.DEFAULT); + GetMappingsResponse getMappingResponse = client.indices().getMapping(request, RequestOptions.DEFAULT); // end::get-mapping-execute // tag::get-mapping-response @@ -704,7 +704,7 @@ public void onFailure(Exception e) { }); // tag::get-mapping-execute-async - client.indices().getMappingsAsync(request, RequestOptions.DEFAULT, listener); // <1> + client.indices().getMappingAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::get-mapping-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); @@ -1344,7 +1344,7 @@ public void testForceMergeIndex() throws Exception { // end::force-merge-request-flush // tag::force-merge-execute - ForceMergeResponse forceMergeResponse = client.indices().forceMerge(request, RequestOptions.DEFAULT); + ForceMergeResponse forceMergeResponse = client.indices().forcemerge(request, RequestOptions.DEFAULT); // end::force-merge-execute // tag::force-merge-response @@ -1369,14 +1369,14 @@ public void onFailure(Exception e) { // end::force-merge-execute-listener // tag::force-merge-execute-async - client.indices().forceMergeAsync(request, RequestOptions.DEFAULT, listener); // <1> + client.indices().forcemergeAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::force-merge-execute-async } { // tag::force-merge-notfound try { ForceMergeRequest request = new ForceMergeRequest("does_not_exist"); - client.indices().forceMerge(request, RequestOptions.DEFAULT); + client.indices().forcemerge(request, RequestOptions.DEFAULT); } catch (ElasticsearchException exception) { if (exception.status() == RestStatus.NOT_FOUND) { // <1> diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IngestClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IngestClientDocumentationIT.java index c53ec2b5d7cc7..98502e3668af1 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IngestClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/IngestClientDocumentationIT.java @@ -317,7 +317,7 @@ public void testSimulatePipeline() throws IOException { // 
end::simulate-pipeline-request-verbose // tag::simulate-pipeline-execute - SimulatePipelineResponse response = client.ingest().simulatePipeline(request, RequestOptions.DEFAULT); // <1> + SimulatePipelineResponse response = client.ingest().simulate(request, RequestOptions.DEFAULT); // <1> // end::simulate-pipeline-execute // tag::simulate-pipeline-response @@ -381,7 +381,7 @@ public void onFailure(Exception e) { listener = new LatchedActionListener<>(listener, latch); // tag::simulate-pipeline-execute-async - client.ingest().simulatePipelineAsync(request, RequestOptions.DEFAULT, listener); // <1> + client.ingest().simulateAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::simulate-pipeline-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java index 26bb4682fd9db..c60f2d4c92b87 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SearchDocumentationIT.java @@ -583,7 +583,7 @@ public void testScroll() throws Exception { // tag::search-scroll2 SearchScrollRequest scrollRequest = new SearchScrollRequest(scrollId); // <1> scrollRequest.scroll(TimeValue.timeValueSeconds(30)); - SearchResponse searchScrollResponse = client.searchScroll(scrollRequest, RequestOptions.DEFAULT); + SearchResponse searchScrollResponse = client.scroll(scrollRequest, RequestOptions.DEFAULT); scrollId = searchScrollResponse.getScrollId(); // <2> hits = searchScrollResponse.getHits(); // <3> assertEquals(3, hits.getTotalHits()); @@ -612,7 +612,7 @@ public void testScroll() throws Exception { // end::scroll-request-arguments // tag::search-scroll-execute-sync - SearchResponse searchResponse = client.searchScroll(scrollRequest, RequestOptions.DEFAULT); + SearchResponse searchResponse = client.scroll(scrollRequest, RequestOptions.DEFAULT); // end::search-scroll-execute-sync assertEquals(0, searchResponse.getFailedShards()); @@ -638,7 +638,7 @@ public void onFailure(Exception e) { scrollListener = new LatchedActionListener<>(scrollListener, latch); // tag::search-scroll-execute-async - client.searchScrollAsync(scrollRequest, RequestOptions.DEFAULT, scrollListener); // <1> + client.scrollAsync(scrollRequest, RequestOptions.DEFAULT, scrollListener); // <1> // end::search-scroll-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); @@ -710,7 +710,7 @@ public void onFailure(Exception e) { while (searchHits != null && searchHits.length > 0) { // <2> SearchScrollRequest scrollRequest = new SearchScrollRequest(scrollId); // <3> scrollRequest.scroll(scroll); - searchResponse = client.searchScroll(scrollRequest, RequestOptions.DEFAULT); + searchResponse = client.scroll(scrollRequest, RequestOptions.DEFAULT); scrollId = searchResponse.getScrollId(); searchHits = searchResponse.getHits().getHits(); // <4> @@ -861,7 +861,7 @@ public void testMultiSearchTemplateWithInlineScript() throws Exception { // end::multi-search-template-request-inline // tag::multi-search-template-request-sync - MultiSearchTemplateResponse multiResponse = client.multiSearchTemplate(multiRequest, RequestOptions.DEFAULT); + MultiSearchTemplateResponse multiResponse = client.msearchTemplate(multiRequest, RequestOptions.DEFAULT); // end::multi-search-template-request-sync // 
tag::multi-search-template-response @@ -916,7 +916,7 @@ public void testMultiSearchTemplateWithStoredScript() throws Exception { // tag::multi-search-template-execute - MultiSearchTemplateResponse multiResponse = client.multiSearchTemplate(multiRequest, RequestOptions.DEFAULT); + MultiSearchTemplateResponse multiResponse = client.msearchTemplate(multiRequest, RequestOptions.DEFAULT); // end::multi-search-template-execute assertNotNull(multiResponse); @@ -944,7 +944,7 @@ public void onFailure(Exception e) { listener = new LatchedActionListener<>(listener, latch); // tag::multi-search-template-execute-async - client.multiSearchTemplateAsync(multiRequest, RequestOptions.DEFAULT, listener); + client.msearchTemplateAsync(multiRequest, RequestOptions.DEFAULT, listener); // end::multi-search-template-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); @@ -1201,7 +1201,7 @@ public void testMultiSearch() throws Exception { request.add(secondSearchRequest); // end::multi-search-request-basic // tag::multi-search-execute - MultiSearchResponse response = client.multiSearch(request, RequestOptions.DEFAULT); + MultiSearchResponse response = client.msearch(request, RequestOptions.DEFAULT); // end::multi-search-execute // tag::multi-search-response MultiSearchResponse.Item firstResponse = response.getResponses()[0]; // <1> @@ -1233,7 +1233,7 @@ public void onFailure(Exception e) { listener = new LatchedActionListener<>(listener, latch); // tag::multi-search-execute-async - client.multiSearchAsync(request, RequestOptions.DEFAULT, listener); // <1> + client.msearchAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::multi-search-execute-async assertTrue(latch.await(30L, TimeUnit.SECONDS)); @@ -1244,7 +1244,7 @@ public void onFailure(Exception e) { request.add(new SearchRequest("posts") // <1> .types("doc")); // <2> // end::multi-search-request-index - MultiSearchResponse response = client.multiSearch(request, RequestOptions.DEFAULT); + MultiSearchResponse response = client.msearch(request, RequestOptions.DEFAULT); MultiSearchResponse.Item firstResponse = response.getResponses()[0]; assertNull(firstResponse.getFailure()); SearchResponse searchResponse = firstResponse.getResponse(); diff --git a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java index 68a8113af6d38..fff3e7ece7066 100644 --- a/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java +++ b/client/rest-high-level/src/test/java/org/elasticsearch/client/documentation/SnapshotClientDocumentationIT.java @@ -221,7 +221,7 @@ public void testSnapshotGetRepository() throws IOException { // end::get-repository-request-masterTimeout // tag::get-repository-execute - GetRepositoriesResponse response = client.snapshot().getRepositories(request, RequestOptions.DEFAULT); + GetRepositoriesResponse response = client.snapshot().getRepository(request, RequestOptions.DEFAULT); // end::get-repository-execute // tag::get-repository-response @@ -256,7 +256,7 @@ public void onFailure(Exception e) { listener = new LatchedActionListener<>(listener, latch); // tag::get-repository-execute-async - client.snapshot().getRepositoriesAsync(request, RequestOptions.DEFAULT, listener); // <1> + client.snapshot().getRepositoryAsync(request, RequestOptions.DEFAULT, listener); // <1> // end::get-repository-execute-async 
assertTrue(latch.await(30L, TimeUnit.SECONDS)); diff --git a/qa/ccs-unavailable-clusters/src/test/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java b/qa/ccs-unavailable-clusters/src/test/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java index 29aec900cefa9..6bfa4de8d4adf 100644 --- a/qa/ccs-unavailable-clusters/src/test/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java +++ b/qa/ccs-unavailable-clusters/src/test/java/org/elasticsearch/search/CrossClusterSearchUnavailableClusterIT.java @@ -171,7 +171,7 @@ public void testSearchSkipUnavailable() throws IOException { assertEquals(10, response.getHits().totalHits); assertEquals(10, response.getHits().getHits().length); String scrollId = response.getScrollId(); - SearchResponse scrollResponse = restHighLevelClient.searchScroll(new SearchScrollRequest(scrollId), RequestOptions.DEFAULT); + SearchResponse scrollResponse = restHighLevelClient.scroll(new SearchScrollRequest(scrollId), RequestOptions.DEFAULT); assertSame(SearchResponse.Clusters.EMPTY, scrollResponse.getClusters()); assertEquals(10, scrollResponse.getHits().totalHits); assertEquals(0, scrollResponse.getHits().getHits().length); @@ -206,7 +206,7 @@ public void testSearchSkipUnavailable() throws IOException { assertEquals(10, response.getHits().totalHits); assertEquals(10, response.getHits().getHits().length); String scrollId = response.getScrollId(); - SearchResponse scrollResponse = restHighLevelClient.searchScroll(new SearchScrollRequest(scrollId), RequestOptions.DEFAULT); + SearchResponse scrollResponse = restHighLevelClient.scroll(new SearchScrollRequest(scrollId), RequestOptions.DEFAULT); assertSame(SearchResponse.Clusters.EMPTY, scrollResponse.getClusters()); assertEquals(10, scrollResponse.getHits().totalHits); assertEquals(0, scrollResponse.getHits().getHits().length); From 5a383c2de8d3d61e8a254132d62d360d884d9e1e Mon Sep 17 00:00:00 2001 From: David Kyle Date: Tue, 17 Jul 2018 10:34:45 +0100 Subject: [PATCH 070/107] Mute :qa:mixed-cluster indices.stats/10_index/Index - all’ MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- qa/mixed-cluster/build.gradle | 1 + 1 file changed, 1 insertion(+) diff --git a/qa/mixed-cluster/build.gradle b/qa/mixed-cluster/build.gradle index ac57d51def7c6..db743cb12b19d 100644 --- a/qa/mixed-cluster/build.gradle +++ b/qa/mixed-cluster/build.gradle @@ -57,6 +57,7 @@ for (Version version : bwcVersions.wireCompatible) { tasks.getByName("${baseName}#mixedClusterTestRunner").configure { /* To support taking index snapshots, we have to set path.repo setting */ systemProperty 'tests.path.repo', new File(buildDir, "cluster/shared/repo") + systemProperty 'tests.rest.blacklist', ['indices.stats/10_index/Index - all'].join(',') } } From 5bad3a834d2cd90400ef8d63cb194ca6196b6bec Mon Sep 17 00:00:00 2001 From: Alpar Torok Date: Tue, 17 Jul 2018 11:41:31 +0000 Subject: [PATCH 071/107] Updates the build to gradle 4.9 (#32087) There are fixes to the dependency report, most importantly for us, it still works even if `failOnVersionConflict` would fail the build.
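The `failOnVersionConflict` mentioned above is Gradle's strict dependency-resolution mode, enabled through a configuration's resolution strategy. As a minimal sketch of what such a build opts into (the `configurations.all` wrapper is illustrative, not taken from this patch):

configurations.all {
    resolutionStrategy {
        // Fail resolution whenever transitive dependencies disagree on a version;
        // per the message above, the Gradle 4.9 dependency report still renders
        // for builds configured this way.
        failOnVersionConflict()
    }
}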
--- build.gradle | 2 +- .../carrotsearch/gradle/junit4/RandomizedTestingPlugin.groovy | 2 +- gradle/wrapper/gradle-wrapper.properties | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/build.gradle b/build.gradle index a75d093664fe6..187e247705277 100644 --- a/build.gradle +++ b/build.gradle @@ -596,7 +596,7 @@ if (System.properties.get("build.compare") != null) { } } sourceBuild { - gradleVersion = "4.8.1" // does not default to gradle weapper of project dir, but current version + gradleVersion = gradle.getGradleVersion() projectDir = referenceProject tasks = ["clean", "assemble"] arguments = ["-Dbuild.compare_friendly=true"] diff --git a/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/RandomizedTestingPlugin.groovy b/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/RandomizedTestingPlugin.groovy index 24b66efbcef2c..d4c8f89bf50cf 100644 --- a/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/RandomizedTestingPlugin.groovy +++ b/buildSrc/src/main/groovy/com/carrotsearch/gradle/junit4/RandomizedTestingPlugin.groovy @@ -74,7 +74,7 @@ class RandomizedTestingPlugin implements Plugin { // since we can't be sure if the task was ever realized, we remove both the provider and the task TaskProvider oldTestProvider try { - oldTestProvider = tasks.getByNameLater(Test, 'test') + oldTestProvider = tasks.named('test') } catch (UnknownTaskException unused) { // no test task, ok, user will use testing task on their own return diff --git a/gradle/wrapper/gradle-wrapper.properties b/gradle/wrapper/gradle-wrapper.properties index 37e3d3699fafc..94161917d1878 100644 --- a/gradle/wrapper/gradle-wrapper.properties +++ b/gradle/wrapper/gradle-wrapper.properties @@ -1,6 +1,6 @@ distributionBase=GRADLE_USER_HOME distributionPath=wrapper/dists -distributionUrl=https\://services.gradle.org/distributions/gradle-4.8.1-all.zip +distributionUrl=https\://services.gradle.org/distributions/gradle-4.9-all.zip zipStoreBase=GRADLE_USER_HOME zipStorePath=wrapper/dists -distributionSha256Sum=ce1645ff129d11aad62dab70d63426fdce6cfd646fa309dc5dc5255dd03c7c11 +distributionSha256Sum=39e2d5803bbd5eaf6c8efe07067b0e5a00235e8c71318642b2ed262920b27721 From 94330d85f8905eee88ab7926a5f427d5d3b6b907 Mon Sep 17 00:00:00 2001 From: markharwood Date: Tue, 17 Jul 2018 13:11:10 +0100 Subject: [PATCH 072/107] Relax TermVectors API to work with textual fields other than TextFieldType (#31915) This changes the field-eligibility test to check one level up in the class hierarchy to allow any subclasses of StringFieldType. 
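The relaxation works because KeywordFieldType and TextFieldType both extend StringFieldType, so testing one level up in the hierarchy automatically admits every other string-based mapper as well. A minimal sketch of the relaxed eligibility test from the diff below, assuming the 6.x mapper hierarchy; the wrapper class is illustrative, and the indexed check follows the comment in the original method:

    import org.apache.lucene.index.IndexOptions;
    import org.elasticsearch.index.mapper.MappedFieldType;
    import org.elasticsearch.index.mapper.StringFieldType;

    public final class TextualFieldCheck {

        // Instead of enumerating concrete types (KeywordFieldType, TextFieldType),
        // accept any textual field type by testing against their shared parent.
        public static boolean isValidField(MappedFieldType fieldType) {
            // must be a string
            if (fieldType instanceof StringFieldType == false) {
                return false;
            }
            // ... and must be indexed for term vectors to be retrievable
            return fieldType.indexOptions() != IndexOptions.NONE;
        }
    }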
Closes #31902 --- .../index/termvectors/TermVectorsService.java | 5 +- .../termvectors/TermVectorsServiceTests.java | 54 ++++++++++++++++++- 2 files changed, 54 insertions(+), 5 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java b/server/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java index c13c56beb5a20..bc77626b94277 100644 --- a/server/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java +++ b/server/src/main/java/org/elasticsearch/index/termvectors/TermVectorsService.java @@ -48,7 +48,7 @@ import org.elasticsearch.index.mapper.ParseContext; import org.elasticsearch.index.mapper.ParsedDocument; import org.elasticsearch.index.mapper.SourceFieldMapper; -import org.elasticsearch.index.mapper.TextFieldMapper; +import org.elasticsearch.index.mapper.StringFieldType; import org.elasticsearch.index.shard.IndexShard; import org.elasticsearch.search.dfs.AggregatedDfs; @@ -162,8 +162,7 @@ private static void handleFieldWildcards(IndexShard indexShard, TermVectorsReque private static boolean isValidField(MappedFieldType fieldType) { // must be a string - if (fieldType instanceof KeywordFieldMapper.KeywordFieldType == false - && fieldType instanceof TextFieldMapper.TextFieldType == false) { + if (fieldType instanceof StringFieldType == false) { return false; } // and must be indexed diff --git a/server/src/test/java/org/elasticsearch/index/termvectors/TermVectorsServiceTests.java b/server/src/test/java/org/elasticsearch/index/termvectors/TermVectorsServiceTests.java index e5adbde71eb31..034e7daaf7f99 100644 --- a/server/src/test/java/org/elasticsearch/index/termvectors/TermVectorsServiceTests.java +++ b/server/src/test/java/org/elasticsearch/index/termvectors/TermVectorsServiceTests.java @@ -109,12 +109,62 @@ public void testDocFreqs() throws IOException { IndexService test = indicesService.indexService(resolveIndex("test")); IndexShard shard = test.getShardOrNull(0); assertThat(shard, notNullValue()); - TermVectorsResponse response = TermVectorsService.getTermVectors(shard, request); + TermVectorsResponse response = TermVectorsService.getTermVectors(shard, request); + assertEquals(1, response.getFields().size()); Terms terms = response.getFields().terms("text"); TermsEnum iterator = terms.iterator(); while (iterator.next() != null) { assertEquals(max, iterator.docFreq()); } - } + } + + public void testWithIndexedPhrases() throws IOException { + XContentBuilder mapping = jsonBuilder() + .startObject() + .startObject("_doc") + .startObject("properties") + .startObject("text") + .field("type", "text") + .field("index_phrases", true) + .field("term_vector", "with_positions_offsets_payloads") + .endObject() + .endObject() + .endObject() + .endObject(); + Settings settings = Settings.builder() + .put("number_of_shards", 1) + .build(); + createIndex("test", settings, "_doc", mapping); + ensureGreen(); + + int max = between(3, 10); + BulkRequestBuilder bulk = client().prepareBulk(); + for (int i = 0; i < max; i++) { + bulk.add(client().prepareIndex("test", "_doc", Integer.toString(i)) + .setSource("text", "the quick brown fox jumped over the lazy dog")); + } + bulk.get(); + + TermVectorsRequest request = new TermVectorsRequest("test", "_doc", "0").termStatistics(true); + + IndicesService indicesService = getInstanceFromNode(IndicesService.class); + IndexService test = indicesService.indexService(resolveIndex("test")); + IndexShard shard = test.getShardOrNull(0); + assertThat(shard, notNullValue()); + 
TermVectorsResponse response = TermVectorsService.getTermVectors(shard, request); + assertEquals(2, response.getFields().size()); + + Terms terms = response.getFields().terms("text"); + TermsEnum iterator = terms.iterator(); + while (iterator.next() != null) { + assertEquals(max, iterator.docFreq()); + } + + Terms phrases = response.getFields().terms("text._index_phrase"); + TermsEnum phraseIterator = phrases.iterator(); + while (phraseIterator.next() != null) { + assertEquals(max, phraseIterator.docFreq()); + } + } } From 3d0854de6f317984d4c32c90d365c0d472838580 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Tue, 17 Jul 2018 14:14:02 +0200 Subject: [PATCH 073/107] Handle TokenizerFactory TODOs (#32063) * Don't replace TokenizerFactory with Supplier; this approach was rejected in #32063 * Remove unused parameter from constructor --- .../analysis/common/CharGroupTokenizerFactory.java | 2 +- .../elasticsearch/analysis/common/ClassicTokenizerFactory.java | 2 +- .../analysis/common/EdgeNGramTokenizerFactory.java | 2 +- .../elasticsearch/analysis/common/KeywordTokenizerFactory.java | 2 +- .../elasticsearch/analysis/common/LetterTokenizerFactory.java | 2 +- .../analysis/common/LowerCaseTokenizerFactory.java | 2 +- .../elasticsearch/analysis/common/NGramTokenizerFactory.java | 2 +- .../analysis/common/PathHierarchyTokenizerFactory.java | 2 +- .../elasticsearch/analysis/common/PatternTokenizerFactory.java | 2 +- .../analysis/common/SimplePatternSplitTokenizerFactory.java | 2 +- .../analysis/common/SimplePatternTokenizerFactory.java | 2 +- .../elasticsearch/analysis/common/ThaiTokenizerFactory.java | 2 +- .../analysis/common/UAX29URLEmailTokenizerFactory.java | 2 +- .../analysis/common/WhitespaceTokenizerFactory.java | 2 +- .../org/elasticsearch/index/analysis/IcuTokenizerFactory.java | 2 +- .../elasticsearch/index/analysis/KuromojiTokenizerFactory.java | 2 +- .../org/elasticsearch/index/analysis/NoriTokenizerFactory.java | 2 +- .../index/analysis/SmartChineseTokenizerTokenizerFactory.java | 2 +- .../elasticsearch/index/analysis/AbstractTokenizerFactory.java | 3 +-- .../elasticsearch/index/analysis/StandardTokenizerFactory.java | 2 +- .../org/elasticsearch/index/analysis/TokenizerFactory.java | 2 +- .../xpack/ml/job/categorization/MlClassicTokenizerFactory.java | 2 +- 22 files changed, 22 insertions(+), 23 deletions(-) diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CharGroupTokenizerFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CharGroupTokenizerFactory.java index d4e1e794a309b..4920b7daae852 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CharGroupTokenizerFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CharGroupTokenizerFactory.java @@ -39,7 +39,7 @@ public class CharGroupTokenizerFactory extends AbstractTokenizerFactory{ private boolean tokenizeOnSymbol = false; public CharGroupTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(indexSettings, name, settings); + super(indexSettings, settings); for (final String c : settings.getAsList("tokenize_on_chars")) { if (c == null || c.length() == 0) { diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ClassicTokenizerFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ClassicTokenizerFactory.java index e81f6b88d248c..27316f4cde5e7 100644 ---
a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ClassicTokenizerFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ClassicTokenizerFactory.java @@ -35,7 +35,7 @@ public class ClassicTokenizerFactory extends AbstractTokenizerFactory { private final int maxTokenLength; ClassicTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(indexSettings, name, settings); + super(indexSettings, settings); maxTokenLength = settings.getAsInt("max_token_length", StandardAnalyzer.DEFAULT_MAX_TOKEN_LENGTH); } diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/EdgeNGramTokenizerFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/EdgeNGramTokenizerFactory.java index 55a527cc792c8..9bb17abf0cd02 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/EdgeNGramTokenizerFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/EdgeNGramTokenizerFactory.java @@ -36,7 +36,7 @@ public class EdgeNGramTokenizerFactory extends AbstractTokenizerFactory { private final CharMatcher matcher; EdgeNGramTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(indexSettings, name, settings); + super(indexSettings, settings); this.minGram = settings.getAsInt("min_gram", NGramTokenizer.DEFAULT_MIN_NGRAM_SIZE); this.maxGram = settings.getAsInt("max_gram", NGramTokenizer.DEFAULT_MAX_NGRAM_SIZE); this.matcher = parseTokenChars(settings.getAsList("token_chars")); diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/KeywordTokenizerFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/KeywordTokenizerFactory.java index abe88462cb996..e4bf2c8c4ad4e 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/KeywordTokenizerFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/KeywordTokenizerFactory.java @@ -31,7 +31,7 @@ public class KeywordTokenizerFactory extends AbstractTokenizerFactory { private final int bufferSize; KeywordTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(indexSettings, name, settings); + super(indexSettings, settings); bufferSize = settings.getAsInt("buffer_size", 256); } diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LetterTokenizerFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LetterTokenizerFactory.java index be98eb73a9cad..cba30cb63c36b 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LetterTokenizerFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LetterTokenizerFactory.java @@ -29,7 +29,7 @@ public class LetterTokenizerFactory extends AbstractTokenizerFactory { LetterTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(indexSettings, name, settings); + super(indexSettings, settings); } @Override diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LowerCaseTokenizerFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LowerCaseTokenizerFactory.java index 8f0c5f759aa64..8c913a33cfe4c 100644 --- 
a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LowerCaseTokenizerFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LowerCaseTokenizerFactory.java @@ -30,7 +30,7 @@ public class LowerCaseTokenizerFactory extends AbstractTokenizerFactory implements MultiTermAwareComponent { LowerCaseTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(indexSettings, name, settings); + super(indexSettings, settings); } @Override diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/NGramTokenizerFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/NGramTokenizerFactory.java index b67f67cb2fa75..b00797428b79a 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/NGramTokenizerFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/NGramTokenizerFactory.java @@ -85,7 +85,7 @@ static CharMatcher parseTokenChars(List characterClasses) { } NGramTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(indexSettings, name, settings); + super(indexSettings, settings); int maxAllowedNgramDiff = indexSettings.getMaxNgramDiff(); this.minGram = settings.getAsInt("min_gram", NGramTokenizer.DEFAULT_MIN_NGRAM_SIZE); this.maxGram = settings.getAsInt("max_gram", NGramTokenizer.DEFAULT_MAX_NGRAM_SIZE); diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PathHierarchyTokenizerFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PathHierarchyTokenizerFactory.java index c877fe6944e5b..5b966c1c3b8df 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PathHierarchyTokenizerFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PathHierarchyTokenizerFactory.java @@ -37,7 +37,7 @@ public class PathHierarchyTokenizerFactory extends AbstractTokenizerFactory { private final boolean reverse; PathHierarchyTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(indexSettings, name, settings); + super(indexSettings, settings); bufferSize = settings.getAsInt("buffer_size", 1024); String delimiter = settings.get("delimiter"); if (delimiter == null) { diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PatternTokenizerFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PatternTokenizerFactory.java index f850b68ac9829..11ba7e44db0e4 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PatternTokenizerFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PatternTokenizerFactory.java @@ -35,7 +35,7 @@ public class PatternTokenizerFactory extends AbstractTokenizerFactory { private final int group; PatternTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(indexSettings, name, settings); + super(indexSettings, settings); String sPattern = settings.get("pattern", "\\W+" /*PatternAnalyzer.NON_WORD_PATTERN*/); if (sPattern == null) { diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SimplePatternSplitTokenizerFactory.java 
b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SimplePatternSplitTokenizerFactory.java index f861ec3792f5e..0faf407829577 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SimplePatternSplitTokenizerFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SimplePatternSplitTokenizerFactory.java @@ -31,7 +31,7 @@ public class SimplePatternSplitTokenizerFactory extends AbstractTokenizerFactory private final String pattern; public SimplePatternSplitTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(indexSettings, name, settings); + super(indexSettings, settings); pattern = settings.get("pattern", ""); } diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SimplePatternTokenizerFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SimplePatternTokenizerFactory.java index 6db3cfa67a318..67aee333d0ffd 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SimplePatternTokenizerFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/SimplePatternTokenizerFactory.java @@ -31,7 +31,7 @@ public class SimplePatternTokenizerFactory extends AbstractTokenizerFactory { private final String pattern; public SimplePatternTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(indexSettings, name, settings); + super(indexSettings, settings); pattern = settings.get("pattern", ""); } diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ThaiTokenizerFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ThaiTokenizerFactory.java index b76aca42d36ee..861ade079a08e 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ThaiTokenizerFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ThaiTokenizerFactory.java @@ -32,7 +32,7 @@ public class ThaiTokenizerFactory extends AbstractTokenizerFactory { ThaiTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(indexSettings, name, settings); + super(indexSettings, settings); } @Override diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/UAX29URLEmailTokenizerFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/UAX29URLEmailTokenizerFactory.java index 8040c88ea7fa5..cd02eec24b42c 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/UAX29URLEmailTokenizerFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/UAX29URLEmailTokenizerFactory.java @@ -32,7 +32,7 @@ public class UAX29URLEmailTokenizerFactory extends AbstractTokenizerFactory { private final int maxTokenLength; UAX29URLEmailTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(indexSettings, name, settings); + super(indexSettings, settings); maxTokenLength = settings.getAsInt("max_token_length", StandardAnalyzer.DEFAULT_MAX_TOKEN_LENGTH); } diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/WhitespaceTokenizerFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/WhitespaceTokenizerFactory.java index 1f89d4688136f..7ce6a361cbad2 
100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/WhitespaceTokenizerFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/WhitespaceTokenizerFactory.java @@ -34,7 +34,7 @@ public class WhitespaceTokenizerFactory extends AbstractTokenizerFactory { private Integer maxTokenLength; WhitespaceTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(indexSettings, name, settings); + super(indexSettings, settings); maxTokenLength = settings.getAsInt(MAX_TOKEN_LENGTH, StandardAnalyzer.DEFAULT_MAX_TOKEN_LENGTH); } diff --git a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuTokenizerFactory.java b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuTokenizerFactory.java index 84c611c0f8132..3f8b9296aa02c 100644 --- a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuTokenizerFactory.java +++ b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuTokenizerFactory.java @@ -47,7 +47,7 @@ public class IcuTokenizerFactory extends AbstractTokenizerFactory { private static final String RULE_FILES = "rule_files"; public IcuTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(indexSettings, name, settings); + super(indexSettings, settings); config = getIcuConfig(environment, settings); } diff --git a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/index/analysis/KuromojiTokenizerFactory.java b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/index/analysis/KuromojiTokenizerFactory.java index 2f00e68a75ebc..e9268f7306512 100644 --- a/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/index/analysis/KuromojiTokenizerFactory.java +++ b/plugins/analysis-kuromoji/src/main/java/org/elasticsearch/index/analysis/KuromojiTokenizerFactory.java @@ -45,7 +45,7 @@ public class KuromojiTokenizerFactory extends AbstractTokenizerFactory { private boolean discartPunctuation; public KuromojiTokenizerFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) { - super(indexSettings, name, settings); + super(indexSettings, settings); mode = getMode(settings); userDictionary = getUserDictionary(env, settings); discartPunctuation = settings.getAsBoolean("discard_punctuation", true); diff --git a/plugins/analysis-nori/src/main/java/org/elasticsearch/index/analysis/NoriTokenizerFactory.java b/plugins/analysis-nori/src/main/java/org/elasticsearch/index/analysis/NoriTokenizerFactory.java index 346cc84e5e6b4..9295ed95c3fb8 100644 --- a/plugins/analysis-nori/src/main/java/org/elasticsearch/index/analysis/NoriTokenizerFactory.java +++ b/plugins/analysis-nori/src/main/java/org/elasticsearch/index/analysis/NoriTokenizerFactory.java @@ -38,7 +38,7 @@ public class NoriTokenizerFactory extends AbstractTokenizerFactory { private final KoreanTokenizer.DecompoundMode decompoundMode; public NoriTokenizerFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) { - super(indexSettings, name, settings); + super(indexSettings, settings); decompoundMode = getMode(settings); userDictionary = getUserDictionary(env, settings); } diff --git a/plugins/analysis-smartcn/src/main/java/org/elasticsearch/index/analysis/SmartChineseTokenizerTokenizerFactory.java b/plugins/analysis-smartcn/src/main/java/org/elasticsearch/index/analysis/SmartChineseTokenizerTokenizerFactory.java index 9d38729615205..560bce9db2701 
100644 --- a/plugins/analysis-smartcn/src/main/java/org/elasticsearch/index/analysis/SmartChineseTokenizerTokenizerFactory.java +++ b/plugins/analysis-smartcn/src/main/java/org/elasticsearch/index/analysis/SmartChineseTokenizerTokenizerFactory.java @@ -28,7 +28,7 @@ public class SmartChineseTokenizerTokenizerFactory extends AbstractTokenizerFactory { public SmartChineseTokenizerTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(indexSettings, name, settings); + super(indexSettings, settings); } @Override diff --git a/server/src/main/java/org/elasticsearch/index/analysis/AbstractTokenizerFactory.java b/server/src/main/java/org/elasticsearch/index/analysis/AbstractTokenizerFactory.java index bf6b2fd7c5b47..4df0375f31cab 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/AbstractTokenizerFactory.java +++ b/server/src/main/java/org/elasticsearch/index/analysis/AbstractTokenizerFactory.java @@ -27,8 +27,7 @@ public abstract class AbstractTokenizerFactory extends AbstractIndexComponent implements TokenizerFactory { protected final Version version; - // TODO drop `String ignored` in a followup - public AbstractTokenizerFactory(IndexSettings indexSettings, String ignored, Settings settings) { + public AbstractTokenizerFactory(IndexSettings indexSettings, Settings settings) { super(indexSettings); this.version = Analysis.parseAnalysisVersion(this.indexSettings.getSettings(), settings, logger); } diff --git a/server/src/main/java/org/elasticsearch/index/analysis/StandardTokenizerFactory.java b/server/src/main/java/org/elasticsearch/index/analysis/StandardTokenizerFactory.java index ed8d2b452c2d5..2e4473f3b0e6d 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/StandardTokenizerFactory.java +++ b/server/src/main/java/org/elasticsearch/index/analysis/StandardTokenizerFactory.java @@ -31,7 +31,7 @@ public class StandardTokenizerFactory extends AbstractTokenizerFactory { private final int maxTokenLength; public StandardTokenizerFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) { - super(indexSettings, name, settings); + super(indexSettings, settings); maxTokenLength = settings.getAsInt("max_token_length", StandardAnalyzer.DEFAULT_MAX_TOKEN_LENGTH); } diff --git a/server/src/main/java/org/elasticsearch/index/analysis/TokenizerFactory.java b/server/src/main/java/org/elasticsearch/index/analysis/TokenizerFactory.java index be96dbd65602b..4abed5a62ce71 100644 --- a/server/src/main/java/org/elasticsearch/index/analysis/TokenizerFactory.java +++ b/server/src/main/java/org/elasticsearch/index/analysis/TokenizerFactory.java @@ -21,6 +21,6 @@ import org.apache.lucene.analysis.Tokenizer; -public interface TokenizerFactory { // TODO replace with Supplier +public interface TokenizerFactory { Tokenizer create(); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/categorization/MlClassicTokenizerFactory.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/categorization/MlClassicTokenizerFactory.java index 40fee1f40f138..95cba4f2dccb5 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/categorization/MlClassicTokenizerFactory.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/categorization/MlClassicTokenizerFactory.java @@ -20,7 +20,7 @@ public class MlClassicTokenizerFactory extends AbstractTokenizerFactory { public MlClassicTokenizerFactory(IndexSettings indexSettings, Environment environment, 
String name, Settings settings) { - super(indexSettings, name, settings); + super(indexSettings, settings); } @Override From 70d2db352fa5e2cde094ed671782e035846fd3ab Mon Sep 17 00:00:00 2001 From: Nhat Nguyen Date: Tue, 17 Jul 2018 09:41:34 -0400 Subject: [PATCH 074/107] Ensure to release translog snapshot in primary-replica resync (#32045) Previously we created a translog snapshot inside the resync method, and that snapshot was closed by the resync listener. However, if the resync method threw an exception before the resync listener was initialized, the translog snapshot was never released. Closes #32030 --- .../index/shard/PrimaryReplicaSyncer.java | 71 ++++++++++--------- .../shard/PrimaryReplicaSyncerTests.java | 28 +++++--- .../index/translog/TranslogTests.java | 18 +++++ 3 files changed, 75 insertions(+), 42 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/shard/PrimaryReplicaSyncer.java b/server/src/main/java/org/elasticsearch/index/shard/PrimaryReplicaSyncer.java index b39ebd51f2bc8..e66d78f2e1a05 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/PrimaryReplicaSyncer.java +++ b/server/src/main/java/org/elasticsearch/index/shard/PrimaryReplicaSyncer.java @@ -35,6 +35,7 @@ import org.elasticsearch.common.unit.ByteSizeValue; import org.elasticsearch.common.util.concurrent.AbstractRunnable; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.index.seqno.SequenceNumbers; import org.elasticsearch.index.translog.Translog; import org.elasticsearch.tasks.Task; @@ -80,48 +81,25 @@ void setChunkSize(ByteSizeValue chunkSize) { // only settable for tests } public void resync(final IndexShard indexShard, final ActionListener listener) { - ActionListener resyncListener = null; + Translog.Snapshot snapshot = null; try { final long startingSeqNo = indexShard.getGlobalCheckpoint() + 1; - Translog.Snapshot snapshot = indexShard.newTranslogSnapshotFromMinSeqNo(startingSeqNo); final long maxSeqNo = indexShard.seqNoStats().getMaxSeqNo(); - resyncListener = new ActionListener() { - @Override - public void onResponse(final ResyncTask resyncTask) { - try { - snapshot.close(); - listener.onResponse(resyncTask); - } catch (final Exception e) { - onFailure(e); - } - } - - @Override - public void onFailure(final Exception e) { - try { - snapshot.close(); - } catch (final Exception inner) { - e.addSuppressed(inner); - } finally { - listener.onFailure(e); - } - } - }; - ShardId shardId = indexShard.shardId(); - + final ShardId shardId = indexShard.shardId(); // Wrap translog snapshot to make it synchronized as it is accessed by different threads through SnapshotSender.
// Even though those calls are not concurrent, snapshot.next() uses non-synchronized state and is not multi-thread-compatible // Also fail the resync early if the shard is shutting down - Translog.Snapshot wrappedSnapshot = new Translog.Snapshot() { - + snapshot = indexShard.newTranslogSnapshotFromMinSeqNo(startingSeqNo); + final Translog.Snapshot originalSnapshot = snapshot; + final Translog.Snapshot wrappedSnapshot = new Translog.Snapshot() { @Override public synchronized void close() throws IOException { - snapshot.close(); + originalSnapshot.close(); } @Override public synchronized int totalOperations() { - return snapshot.totalOperations(); + return originalSnapshot.totalOperations(); } @Override @@ -132,15 +110,40 @@ public synchronized Translog.Operation next() throws IOException { } else { assert state == IndexShardState.STARTED : "resync should only happen on a started shard, but state was: " + state; } - return snapshot.next(); + return originalSnapshot.next(); } }; + final ActionListener resyncListener = new ActionListener() { + @Override + public void onResponse(final ResyncTask resyncTask) { + try { + wrappedSnapshot.close(); + listener.onResponse(resyncTask); + } catch (final Exception e) { + onFailure(e); + } + } + + @Override + public void onFailure(final Exception e) { + try { + wrappedSnapshot.close(); + } catch (final Exception inner) { + e.addSuppressed(inner); + } finally { + listener.onFailure(e); + } + } + }; + resync(shardId, indexShard.routingEntry().allocationId().getId(), indexShard.getPrimaryTerm(), wrappedSnapshot, startingSeqNo, maxSeqNo, resyncListener); } catch (Exception e) { - if (resyncListener != null) { - resyncListener.onFailure(e); - } else { + try { + IOUtils.close(snapshot); + } catch (IOException inner) { + e.addSuppressed(inner); + } finally { listener.onFailure(e); } } diff --git a/server/src/test/java/org/elasticsearch/index/shard/PrimaryReplicaSyncerTests.java b/server/src/test/java/org/elasticsearch/index/shard/PrimaryReplicaSyncerTests.java index b290f4d45597b..4444f475329b3 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/PrimaryReplicaSyncerTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/PrimaryReplicaSyncerTests.java @@ -123,12 +123,10 @@ public void testSyncerSendsOffCorrectDocuments() throws Exception { public void testSyncerOnClosingShard() throws Exception { IndexShard shard = newStartedShard(true); AtomicBoolean syncActionCalled = new AtomicBoolean(); - CountDownLatch syncCalledLatch = new CountDownLatch(1); PrimaryReplicaSyncer.SyncAction syncAction = (request, parentTask, allocationId, primaryTerm, listener) -> { logger.info("Sending off {} operations", request.getOperations().length); syncActionCalled.set(true); - syncCalledLatch.countDown(); threadPool.generic().execute(() -> listener.onResponse(new ResyncReplicationResponse())); }; PrimaryReplicaSyncer syncer = new PrimaryReplicaSyncer(Settings.EMPTY, @@ -147,13 +145,27 @@ public void testSyncerOnClosingShard() throws Exception { shard.updateShardState(shard.routingEntry(), shard.getPrimaryTerm(), null, 1000L, Collections.singleton(allocationId), new IndexShardRoutingTable.Builder(shard.shardId()).addShard(shard.routingEntry()).build(), Collections.emptySet()); - PlainActionFuture fut = new PlainActionFuture<>(); - threadPool.generic().execute(() -> { - try { - syncer.resync(shard, fut); - } catch (AlreadyClosedException ace) { - fut.onFailure(ace); + CountDownLatch syncCalledLatch = new CountDownLatch(1); + PlainActionFuture fut = new 
PlainActionFuture() { + @Override + public void onFailure(Exception e) { + try { + super.onFailure(e); + } finally { + syncCalledLatch.countDown(); + } + } + @Override + public void onResponse(PrimaryReplicaSyncer.ResyncTask result) { + try { + super.onResponse(result); + } finally { + syncCalledLatch.countDown(); + } } + }; + threadPool.generic().execute(() -> { + syncer.resync(shard, fut); }); if (randomBoolean()) { syncCalledLatch.await(); diff --git a/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java b/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java index cf6e753684676..dbbb38090bc3b 100644 --- a/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java +++ b/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java @@ -2932,6 +2932,24 @@ public void testSnapshotDedupOperations() throws Exception { } } + /** Make sure that it's ok to close a translog snapshot multiple times */ + public void testCloseSnapshotTwice() throws Exception { + int numOps = between(0, 10); + for (int i = 0; i < numOps; i++) { + Translog.Index op = new Translog.Index("doc", randomAlphaOfLength(10), i, primaryTerm.get(), new byte[]{1}); + translog.add(op); + if (randomBoolean()) { + translog.rollGeneration(); + } + } + for (int i = 0; i < 5; i++) { + Translog.Snapshot snapshot = translog.newSnapshot(); + assertThat(snapshot, SnapshotMatchers.size(numOps)); + snapshot.close(); + snapshot.close(); + } + } + static class SortedSnapshot implements Translog.Snapshot { private final Translog.Snapshot snapshot; private List operations = null; From 5afea06850e5bf24346735c2bedffb7afe5cad70 Mon Sep 17 00:00:00 2001 From: David Roberts Date: Tue, 17 Jul 2018 15:01:12 +0100 Subject: [PATCH 075/107] [ML] Move analyzer dependencies out of categorization config (#32123) The ML config classes will shortly be moved to the X-Pack protocol library to allow the ML APIs to be moved to the high level REST client. Dependencies on server functionality should be removed from the config classes before this is done. This change is entirely about moving code between packages. It does not add or remove any functionality or tests. 
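After this move, validation flows through a single server-side entry point: JobManager wraps the config in a CategorizationAnalyzerConfig.Builder and hands it to CategorizationAnalyzer.verifyConfigBuilder, which is the only code that needs an AnalysisRegistry. A condensed sketch of that call path, using the signatures introduced in the diffs below; the wrapper class and method name are illustrative:

    import java.io.IOException;

    import org.elasticsearch.env.Environment;
    import org.elasticsearch.index.analysis.AnalysisRegistry;
    import org.elasticsearch.xpack.core.ml.job.config.CategorizationAnalyzerConfig;
    import org.elasticsearch.xpack.ml.job.categorization.CategorizationAnalyzer;

    public final class CategorizationConfigValidation {

        // The config class no longer touches server analysis classes; callers that
        // do have an AnalysisRegistry resolve and verify the referenced analyzer,
        // tokenizer and filter names through CategorizationAnalyzer instead.
        public static void validate(CategorizationAnalyzerConfig config, AnalysisRegistry analysisRegistry,
                                    Environment environment) throws IOException {
            if (config != null) {
                CategorizationAnalyzer.verifyConfigBuilder(
                        new CategorizationAnalyzerConfig.Builder(config), analysisRegistry, environment);
            }
        }
    }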
--- .../config/CategorizationAnalyzerConfig.java | 204 +---------------- .../xpack/core/ml/job/config/Job.java | 18 +- .../xpack/ml/job/JobManager.java | 20 +- .../CategorizationAnalyzer.java | 210 +++++++++++++++++- .../CategorizationAnalyzerTests.java | 153 ++++++++++++- .../CategorizationAnalyzerConfigTests.java | 158 ------------- 6 files changed, 382 insertions(+), 381 deletions(-) diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/CategorizationAnalyzerConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/CategorizationAnalyzerConfig.java index 1c2808c70ffcf..fd0fde76e6883 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/CategorizationAnalyzerConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/CategorizationAnalyzerConfig.java @@ -5,14 +5,8 @@ */ package org.elasticsearch.xpack.core.ml.job.config; -import org.apache.lucene.analysis.Analyzer; -import org.elasticsearch.Version; -import org.elasticsearch.action.admin.indices.analyze.TransportAnalyzeAction; -import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.UUIDs; -import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -22,15 +16,6 @@ import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.env.Environment; -import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.analysis.AnalysisRegistry; -import org.elasticsearch.index.analysis.CharFilterFactory; -import org.elasticsearch.index.analysis.CustomAnalyzer; -import org.elasticsearch.index.analysis.CustomAnalyzerProvider; -import org.elasticsearch.index.analysis.TokenFilterFactory; -import org.elasticsearch.index.analysis.TokenizerFactory; -import org.elasticsearch.indices.analysis.AnalysisModule; import org.elasticsearch.rest.action.admin.indices.RestAnalyzeAction; import org.elasticsearch.xpack.core.ml.MlParserType; @@ -42,12 +27,11 @@ import java.util.Map; import java.util.Objects; - /** * Configuration for the categorization analyzer. * * The syntax is a subset of what can be supplied to the {@linkplain RestAnalyzeAction _analyze endpoint}. - * To summarise, the first option is to specify the name of an out-of-the-box analyzer: + * To summarize, the first option is to specify the name of an out-of-the-box analyzer: * * "categorization_analyzer" : "standard" * @@ -66,11 +50,6 @@ * { "type" : "pattern_replace", "pattern": "^[0-9].*" } * ] * - * - * Unfortunately there is no easy to to reuse a subset of the _analyze action implementation, so much - * of the code in this file is copied from {@link TransportAnalyzeAction}. Unfortunately the logic required here is - * not quite identical to that of {@link TransportAnalyzeAction}, and the required code is hard to partially reuse. - * TODO: consider refactoring ES core to allow more reuse. */ public class CategorizationAnalyzerConfig implements ToXContentFragment, Writeable { @@ -350,175 +329,6 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws return builder; } - /** - * Convert the config to an {@link Analyzer}. 
This may be a global analyzer or a newly created custom analyzer. - * In the case of a global analyzer the caller must NOT close it when they have finished with it. In the case of - * a newly created custom analyzer the caller is responsible for closing it. - * @return The first tuple member is the {@link Analyzer}; the second indicates whether the caller is responsible - * for closing it. - */ - public Tuple toAnalyzer(AnalysisRegistry analysisRegistry, Environment environment) throws IOException { - if (analyzer != null) { - Analyzer globalAnalyzer = analysisRegistry.getAnalyzer(analyzer); - if (globalAnalyzer == null) { - throw new IllegalArgumentException("Failed to find global analyzer [" + analyzer + "]"); - } - return new Tuple<>(globalAnalyzer, Boolean.FALSE); - } else { - List charFilterFactoryList = - parseCharFilterFactories(analysisRegistry, environment); - - Tuple tokenizerFactory = parseTokenizerFactory(analysisRegistry, - environment); - - List tokenFilterFactoryList = parseTokenFilterFactories(analysisRegistry, - environment, tokenizerFactory, charFilterFactoryList); - - return new Tuple<>(new CustomAnalyzer(tokenizerFactory.v1(), tokenizerFactory.v2(), - charFilterFactoryList.toArray(new CharFilterFactory[charFilterFactoryList.size()]), - tokenFilterFactoryList.toArray(new TokenFilterFactory[tokenFilterFactoryList.size()])), Boolean.TRUE); - } - } - - - /** - * Get char filter factories for each configured char filter. Each configuration - * element can be the name of an out-of-the-box char filter, or a custom definition. - */ - private List parseCharFilterFactories(AnalysisRegistry analysisRegistry, - Environment environment) throws IOException { - final List charFilterFactoryList = new ArrayList<>(); - for (NameOrDefinition charFilter : charFilters) { - final CharFilterFactory charFilterFactory; - if (charFilter.name != null) { - AnalysisModule.AnalysisProvider charFilterFactoryFactory = - analysisRegistry.getCharFilterProvider(charFilter.name); - if (charFilterFactoryFactory == null) { - throw new IllegalArgumentException("Failed to find global char filter under [" + charFilter.name + "]"); - } - charFilterFactory = charFilterFactoryFactory.get(environment, charFilter.name); - } else { - String charFilterTypeName = charFilter.definition.get("type"); - if (charFilterTypeName == null) { - throw new IllegalArgumentException("Missing [type] setting for char filter: " + charFilter.definition); - } - AnalysisModule.AnalysisProvider charFilterFactoryFactory = - analysisRegistry.getCharFilterProvider(charFilterTypeName); - if (charFilterFactoryFactory == null) { - throw new IllegalArgumentException("Failed to find global char filter under [" + charFilterTypeName + "]"); - } - Settings settings = augmentSettings(charFilter.definition); - // Need to set anonymous "name" of char_filter - charFilterFactory = charFilterFactoryFactory.get(buildDummyIndexSettings(settings), environment, - "_anonymous_charfilter", settings); - } - if (charFilterFactory == null) { - throw new IllegalArgumentException("Failed to find char filter [" + charFilter + "]"); - } - charFilterFactoryList.add(charFilterFactory); - } - return charFilterFactoryList; - } - - /** - * Get the tokenizer factory for the configured tokenizer. The configuration - * can be the name of an out-of-the-box tokenizer, or a custom definition. 
- */ - private Tuple parseTokenizerFactory(AnalysisRegistry analysisRegistry, - Environment environment) throws IOException { - final String name; - final TokenizerFactory tokenizerFactory; - if (tokenizer.name != null) { - name = tokenizer.name; - AnalysisModule.AnalysisProvider tokenizerFactoryFactory = analysisRegistry.getTokenizerProvider(name); - if (tokenizerFactoryFactory == null) { - throw new IllegalArgumentException("Failed to find global tokenizer under [" + name + "]"); - } - tokenizerFactory = tokenizerFactoryFactory.get(environment, name); - } else { - String tokenizerTypeName = tokenizer.definition.get("type"); - if (tokenizerTypeName == null) { - throw new IllegalArgumentException("Missing [type] setting for tokenizer: " + tokenizer.definition); - } - AnalysisModule.AnalysisProvider tokenizerFactoryFactory = - analysisRegistry.getTokenizerProvider(tokenizerTypeName); - if (tokenizerFactoryFactory == null) { - throw new IllegalArgumentException("Failed to find global tokenizer under [" + tokenizerTypeName + "]"); - } - Settings settings = augmentSettings(tokenizer.definition); - // Need to set anonymous "name" of tokenizer - name = "_anonymous_tokenizer"; - tokenizerFactory = tokenizerFactoryFactory.get(buildDummyIndexSettings(settings), environment, name, settings); - } - return new Tuple<>(name, tokenizerFactory); - } - - /** - * Get token filter factories for each configured token filter. Each configuration - * element can be the name of an out-of-the-box token filter, or a custom definition. - */ - private List parseTokenFilterFactories(AnalysisRegistry analysisRegistry, Environment environment, - Tuple tokenizerFactory, - List charFilterFactoryList) throws IOException { - final List tokenFilterFactoryList = new ArrayList<>(); - for (NameOrDefinition tokenFilter : tokenFilters) { - TokenFilterFactory tokenFilterFactory; - if (tokenFilter.name != null) { - AnalysisModule.AnalysisProvider tokenFilterFactoryFactory; - tokenFilterFactoryFactory = analysisRegistry.getTokenFilterProvider(tokenFilter.name); - if (tokenFilterFactoryFactory == null) { - throw new IllegalArgumentException("Failed to find global token filter under [" + tokenFilter.name + "]"); - } - tokenFilterFactory = tokenFilterFactoryFactory.get(environment, tokenFilter.name); - } else { - String filterTypeName = tokenFilter.definition.get("type"); - if (filterTypeName == null) { - throw new IllegalArgumentException("Missing [type] setting for token filter: " + tokenFilter.definition); - } - AnalysisModule.AnalysisProvider tokenFilterFactoryFactory = - analysisRegistry.getTokenFilterProvider(filterTypeName); - if (tokenFilterFactoryFactory == null) { - throw new IllegalArgumentException("Failed to find global token filter under [" + filterTypeName + "]"); - } - Settings settings = augmentSettings(tokenFilter.definition); - // Need to set anonymous "name" of token_filter - tokenFilterFactory = tokenFilterFactoryFactory.get(buildDummyIndexSettings(settings), environment, - "_anonymous_tokenfilter", settings); - tokenFilterFactory = CustomAnalyzerProvider.checkAndApplySynonymFilter(tokenFilterFactory, tokenizerFactory.v1(), - tokenizerFactory.v2(), tokenFilterFactoryList, charFilterFactoryList, environment); - } - if (tokenFilterFactory == null) { - throw new IllegalArgumentException("Failed to find or create token filter [" + tokenFilter + "]"); - } - tokenFilterFactoryList.add(tokenFilterFactory); - } - return tokenFilterFactoryList; - } - - /** - * The Elasticsearch analysis functionality is designed to work 
with indices. For - * categorization we have to pretend we've got some index settings. - */ - private IndexSettings buildDummyIndexSettings(Settings settings) { - IndexMetaData metaData = IndexMetaData.builder(IndexMetaData.INDEX_UUID_NA_VALUE).settings(settings).build(); - return new IndexSettings(metaData, Settings.EMPTY); - } - - /** - * The behaviour of Elasticsearch analyzers can vary between versions. - * For categorization we'll always use the latest version of the text analysis. - * The other settings are just to stop classes that expect to be associated with - * an index from complaining. - */ - private Settings augmentSettings(Settings settings) { - return Settings.builder().put(settings) - .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) - .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) - .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetaData.SETTING_INDEX_UUID, UUIDs.randomBase64UUID()) - .build(); - } - @Override public boolean equals(Object o) { if (this == o) return true; @@ -609,17 +419,5 @@ public CategorizationAnalyzerConfig build() { } return new CategorizationAnalyzerConfig(analyzer, charFilters, tokenizer, tokenFilters); } - - /** - * Verify that the builder will build a valid config. This is not done as part of the basic build - * because it verifies that the names of analyzers/tokenizers/filters referenced by the config are - * known, and the validity of these names could change over time. - */ - public void verify(AnalysisRegistry analysisRegistry, Environment environment) throws IOException { - Tuple tuple = build().toAnalyzer(analysisRegistry, environment); - if (tuple.v2()) { - tuple.v1().close(); - } - } } } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Job.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Job.java index c8290521f98e4..560bac895fa69 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Job.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Job.java @@ -21,8 +21,6 @@ import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentParser.Token; -import org.elasticsearch.env.Environment; -import org.elasticsearch.index.analysis.AnalysisRegistry; import org.elasticsearch.xpack.core.ml.MlParserType; import org.elasticsearch.xpack.core.ml.job.messages.Messages; import org.elasticsearch.xpack.core.ml.job.persistence.AnomalyDetectorsIndexFields; @@ -809,8 +807,8 @@ public Builder setAnalysisConfig(AnalysisConfig.Builder configBuilder) { return this; } - public AnalysisLimits getAnalysisLimits() { - return analysisLimits; + public AnalysisConfig getAnalysisConfig() { + return analysisConfig; } public Builder setAnalysisLimits(AnalysisLimits analysisLimits) { @@ -1135,18 +1133,6 @@ public void validateAnalysisLimitsAndSetDefaults(@Nullable ByteSizeValue maxMode AnalysisLimits.DEFAULT_MODEL_MEMORY_LIMIT_MB); } - /** - * Validate the char filter/tokenizer/token filter names used in the categorization analyzer config (if any). - * The overall structure can be validated at parse time, but the exact names need to be checked separately, - * as plugins that provide the functionality can be installed/uninstalled. 
- */ - public void validateCategorizationAnalyzer(AnalysisRegistry analysisRegistry, Environment environment) throws IOException { - CategorizationAnalyzerConfig categorizationAnalyzerConfig = analysisConfig.getCategorizationAnalyzerConfig(); - if (categorizationAnalyzerConfig != null) { - new CategorizationAnalyzerConfig.Builder(categorizationAnalyzerConfig).verify(analysisRegistry, environment); - } - } - private void validateGroups() { for (String group : this.groups) { if (MlStrings.isValidId(group) == false) { diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java index c3d31ae10e925..f7fab029c8803 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/JobManager.java @@ -39,6 +39,7 @@ import org.elasticsearch.xpack.core.ml.action.UpdateJobAction; import org.elasticsearch.xpack.core.ml.action.util.QueryPage; import org.elasticsearch.xpack.core.ml.job.config.AnalysisLimits; +import org.elasticsearch.xpack.core.ml.job.config.CategorizationAnalyzerConfig; import org.elasticsearch.xpack.core.ml.job.config.DataDescription; import org.elasticsearch.xpack.core.ml.job.config.Job; import org.elasticsearch.xpack.core.ml.job.config.JobState; @@ -50,6 +51,7 @@ import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapshot; import org.elasticsearch.xpack.core.ml.utils.ExceptionsHelper; import org.elasticsearch.xpack.ml.MachineLearning; +import org.elasticsearch.xpack.ml.job.categorization.CategorizationAnalyzer; import org.elasticsearch.xpack.ml.job.persistence.JobProvider; import org.elasticsearch.xpack.ml.job.persistence.JobResultsPersister; import org.elasticsearch.xpack.ml.job.process.autodetect.UpdateParams; @@ -170,6 +172,22 @@ public JobState getJobState(String jobId) { return MlMetadata.getJobState(jobId, tasks); } + /** + * Validate the char filter/tokenizer/token filter names used in the categorization analyzer config (if any). + * This validation has to be done server-side; it cannot be done in a client as that won't have loaded the + * appropriate analysis modules/plugins. + * The overall structure can be validated at parse time, but the exact names need to be checked separately, + * as plugins that provide the functionality can be installed/uninstalled. 
+ */ + static void validateCategorizationAnalyzer(Job.Builder jobBuilder, AnalysisRegistry analysisRegistry, Environment environment) + throws IOException { + CategorizationAnalyzerConfig categorizationAnalyzerConfig = jobBuilder.getAnalysisConfig().getCategorizationAnalyzerConfig(); + if (categorizationAnalyzerConfig != null) { + CategorizationAnalyzer.verifyConfigBuilder(new CategorizationAnalyzerConfig.Builder(categorizationAnalyzerConfig), + analysisRegistry, environment); + } + } + /** * Stores a job in the cluster state */ @@ -177,7 +195,7 @@ public void putJob(PutJobAction.Request request, AnalysisRegistry analysisRegist ActionListener actionListener) throws IOException { request.getJobBuilder().validateAnalysisLimitsAndSetDefaults(maxModelMemoryLimit); - request.getJobBuilder().validateCategorizationAnalyzer(analysisRegistry, environment); + validateCategorizationAnalyzer(request.getJobBuilder(), analysisRegistry, environment); Job job = request.getJobBuilder().build(new Date()); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/categorization/CategorizationAnalyzer.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/categorization/CategorizationAnalyzer.java index a0101b999d5dc..6111fa139f97f 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/categorization/CategorizationAnalyzer.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/job/categorization/CategorizationAnalyzer.java @@ -9,9 +9,21 @@ import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; import org.elasticsearch.ElasticsearchException; +import org.elasticsearch.Version; +import org.elasticsearch.action.admin.indices.analyze.TransportAnalyzeAction; +import org.elasticsearch.cluster.metadata.IndexMetaData; +import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.collect.Tuple; +import org.elasticsearch.common.settings.Settings; import org.elasticsearch.env.Environment; +import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.analysis.AnalysisRegistry; +import org.elasticsearch.index.analysis.CharFilterFactory; +import org.elasticsearch.index.analysis.CustomAnalyzer; +import org.elasticsearch.index.analysis.CustomAnalyzerProvider; +import org.elasticsearch.index.analysis.TokenFilterFactory; +import org.elasticsearch.index.analysis.TokenizerFactory; +import org.elasticsearch.indices.analysis.AnalysisModule; import org.elasticsearch.xpack.core.ml.job.config.CategorizationAnalyzerConfig; import java.io.Closeable; @@ -19,11 +31,16 @@ import java.util.ArrayList; import java.util.List; - /** * The categorization analyzer. * * Converts messages to lists of tokens that will be fed to the ML categorization algorithm. + * + * The code in {@link #makeAnalyzer} and the methods it calls is largely copied from {@link TransportAnalyzeAction}. + * Unfortunately there is no easy way to reuse a subset of the _analyze action implementation, as the + * logic required here is not quite identical to that of {@link TransportAnalyzeAction}, and the required code is + * hard to partially reuse. + * TODO: consider refactoring ES core to allow more reuse. 
*/ public class CategorizationAnalyzer implements Closeable { @@ -33,7 +50,7 @@ public class CategorizationAnalyzer implements Closeable { public CategorizationAnalyzer(AnalysisRegistry analysisRegistry, Environment environment, CategorizationAnalyzerConfig categorizationAnalyzerConfig) throws IOException { - Tuple tuple = categorizationAnalyzerConfig.toAnalyzer(analysisRegistry, environment); + Tuple tuple = makeAnalyzer(categorizationAnalyzerConfig, analysisRegistry, environment); analyzer = tuple.v1(); closeAnalyzer = tuple.v2(); } @@ -69,4 +86,193 @@ public List tokenizeField(String fieldName, String fieldValue) { } return tokens; } + + /** + * Verify that the config builder will build a valid config. This is not done as part of the basic build + * because it verifies that the names of analyzers/tokenizers/filters referenced by the config are + * known, and the validity of these names could change over time. Additionally, it has to be done + * server-side rather than client-side, as the client will not have loaded the appropriate analysis + * modules/plugins. + */ + public static void verifyConfigBuilder(CategorizationAnalyzerConfig.Builder configBuilder, AnalysisRegistry analysisRegistry, + Environment environment) throws IOException { + Tuple tuple = makeAnalyzer(configBuilder.build(), analysisRegistry, environment); + if (tuple.v2()) { + tuple.v1().close(); + } + } + + /** + * Convert a config to an {@link Analyzer}. This may be a global analyzer or a newly created custom analyzer. + * In the case of a global analyzer the caller must NOT close it when they have finished with it. In the case of + * a newly created custom analyzer the caller is responsible for closing it. + * @return The first tuple member is the {@link Analyzer}; the second indicates whether the caller is responsible + * for closing it. + */ + private static Tuple makeAnalyzer(CategorizationAnalyzerConfig config, AnalysisRegistry analysisRegistry, + Environment environment) throws IOException { + String analyzer = config.getAnalyzer(); + if (analyzer != null) { + Analyzer globalAnalyzer = analysisRegistry.getAnalyzer(analyzer); + if (globalAnalyzer == null) { + throw new IllegalArgumentException("Failed to find global analyzer [" + analyzer + "]"); + } + return new Tuple<>(globalAnalyzer, Boolean.FALSE); + } else { + List charFilterFactoryList = parseCharFilterFactories(config, analysisRegistry, environment); + + Tuple tokenizerFactory = parseTokenizerFactory(config, analysisRegistry, environment); + + List tokenFilterFactoryList = parseTokenFilterFactories(config, analysisRegistry, environment, + tokenizerFactory, charFilterFactoryList); + + return new Tuple<>(new CustomAnalyzer(tokenizerFactory.v1(), tokenizerFactory.v2(), + charFilterFactoryList.toArray(new CharFilterFactory[charFilterFactoryList.size()]), + tokenFilterFactoryList.toArray(new TokenFilterFactory[tokenFilterFactoryList.size()])), Boolean.TRUE); + } + } + + + /** + * Get char filter factories for each configured char filter. Each configuration + * element can be the name of an out-of-the-box char filter, or a custom definition. 
+ */ + private static List parseCharFilterFactories(CategorizationAnalyzerConfig config, AnalysisRegistry analysisRegistry, + Environment environment) throws IOException { + List charFilters = config.getCharFilters(); + final List charFilterFactoryList = new ArrayList<>(); + for (CategorizationAnalyzerConfig.NameOrDefinition charFilter : charFilters) { + final CharFilterFactory charFilterFactory; + if (charFilter.name != null) { + AnalysisModule.AnalysisProvider charFilterFactoryFactory = + analysisRegistry.getCharFilterProvider(charFilter.name); + if (charFilterFactoryFactory == null) { + throw new IllegalArgumentException("Failed to find global char filter under [" + charFilter.name + "]"); + } + charFilterFactory = charFilterFactoryFactory.get(environment, charFilter.name); + } else { + String charFilterTypeName = charFilter.definition.get("type"); + if (charFilterTypeName == null) { + throw new IllegalArgumentException("Missing [type] setting for char filter: " + charFilter.definition); + } + AnalysisModule.AnalysisProvider charFilterFactoryFactory = + analysisRegistry.getCharFilterProvider(charFilterTypeName); + if (charFilterFactoryFactory == null) { + throw new IllegalArgumentException("Failed to find global char filter under [" + charFilterTypeName + "]"); + } + Settings settings = augmentSettings(charFilter.definition); + // Need to set anonymous "name" of char_filter + charFilterFactory = charFilterFactoryFactory.get(buildDummyIndexSettings(settings), environment, "_anonymous_charfilter", + settings); + } + if (charFilterFactory == null) { + throw new IllegalArgumentException("Failed to find char filter [" + charFilter + "]"); + } + charFilterFactoryList.add(charFilterFactory); + } + return charFilterFactoryList; + } + + /** + * Get the tokenizer factory for the configured tokenizer. The configuration + * can be the name of an out-of-the-box tokenizer, or a custom definition. + */ + private static Tuple parseTokenizerFactory(CategorizationAnalyzerConfig config, + AnalysisRegistry analysisRegistry, Environment environment) + throws IOException { + CategorizationAnalyzerConfig.NameOrDefinition tokenizer = config.getTokenizer(); + final String name; + final TokenizerFactory tokenizerFactory; + if (tokenizer.name != null) { + name = tokenizer.name; + AnalysisModule.AnalysisProvider tokenizerFactoryFactory = analysisRegistry.getTokenizerProvider(name); + if (tokenizerFactoryFactory == null) { + throw new IllegalArgumentException("Failed to find global tokenizer under [" + name + "]"); + } + tokenizerFactory = tokenizerFactoryFactory.get(environment, name); + } else { + String tokenizerTypeName = tokenizer.definition.get("type"); + if (tokenizerTypeName == null) { + throw new IllegalArgumentException("Missing [type] setting for tokenizer: " + tokenizer.definition); + } + AnalysisModule.AnalysisProvider tokenizerFactoryFactory = + analysisRegistry.getTokenizerProvider(tokenizerTypeName); + if (tokenizerFactoryFactory == null) { + throw new IllegalArgumentException("Failed to find global tokenizer under [" + tokenizerTypeName + "]"); + } + Settings settings = augmentSettings(tokenizer.definition); + // Need to set anonymous "name" of tokenizer + name = "_anonymous_tokenizer"; + tokenizerFactory = tokenizerFactoryFactory.get(buildDummyIndexSettings(settings), environment, name, settings); + } + return new Tuple<>(name, tokenizerFactory); + } + + /** + * Get token filter factories for each configured token filter. 
Each configuration + * element can be the name of an out-of-the-box token filter, or a custom definition. + */ + private static List parseTokenFilterFactories(CategorizationAnalyzerConfig config, + AnalysisRegistry analysisRegistry, Environment environment, + Tuple tokenizerFactory, + List charFilterFactoryList) throws IOException { + List tokenFilters = config.getTokenFilters(); + final List tokenFilterFactoryList = new ArrayList<>(); + for (CategorizationAnalyzerConfig.NameOrDefinition tokenFilter : tokenFilters) { + TokenFilterFactory tokenFilterFactory; + if (tokenFilter.name != null) { + AnalysisModule.AnalysisProvider tokenFilterFactoryFactory; + tokenFilterFactoryFactory = analysisRegistry.getTokenFilterProvider(tokenFilter.name); + if (tokenFilterFactoryFactory == null) { + throw new IllegalArgumentException("Failed to find global token filter under [" + tokenFilter.name + "]"); + } + tokenFilterFactory = tokenFilterFactoryFactory.get(environment, tokenFilter.name); + } else { + String filterTypeName = tokenFilter.definition.get("type"); + if (filterTypeName == null) { + throw new IllegalArgumentException("Missing [type] setting for token filter: " + tokenFilter.definition); + } + AnalysisModule.AnalysisProvider tokenFilterFactoryFactory = + analysisRegistry.getTokenFilterProvider(filterTypeName); + if (tokenFilterFactoryFactory == null) { + throw new IllegalArgumentException("Failed to find global token filter under [" + filterTypeName + "]"); + } + Settings settings = augmentSettings(tokenFilter.definition); + // Need to set anonymous "name" of token_filter + tokenFilterFactory = tokenFilterFactoryFactory.get(buildDummyIndexSettings(settings), environment, "_anonymous_tokenfilter", + settings); + tokenFilterFactory = CustomAnalyzerProvider.checkAndApplySynonymFilter(tokenFilterFactory, tokenizerFactory.v1(), + tokenizerFactory.v2(), tokenFilterFactoryList, charFilterFactoryList, environment); + } + if (tokenFilterFactory == null) { + throw new IllegalArgumentException("Failed to find or create token filter [" + tokenFilter + "]"); + } + tokenFilterFactoryList.add(tokenFilterFactory); + } + return tokenFilterFactoryList; + } + + /** + * The Elasticsearch analysis functionality is designed to work with indices. For + * categorization we have to pretend we've got some index settings. + */ + private static IndexSettings buildDummyIndexSettings(Settings settings) { + IndexMetaData metaData = IndexMetaData.builder(IndexMetaData.INDEX_UUID_NA_VALUE).settings(settings).build(); + return new IndexSettings(metaData, Settings.EMPTY); + } + + /** + * The behaviour of Elasticsearch analyzers can vary between versions. + * For categorization we'll always use the latest version of the text analysis. + * The other settings are just to stop classes that expect to be associated with + * an index from complaining. 
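One detail shared by all three parse* helpers above: a component referenced by name resolves through the index-independent AnalysisProvider.get(environment, name) overload, while an inline definition has no backing index and must go through the four-argument overload, which is what the two dummy-settings helpers in this file exist to feed. A condensed, hedged sketch of that split for the tokenizer case (the real code looks up separate providers for the named and inline branches):

    AnalysisModule.AnalysisProvider<TokenizerFactory> provider =
            analysisRegistry.getTokenizerProvider(tokenizer.name != null ? tokenizer.name : tokenizerTypeName);
    TokenizerFactory factory = tokenizer.name != null
            // pre-registered component: no index settings required
            ? provider.get(environment, tokenizer.name)
            // inline definition: instantiated against throwaway index settings under a placeholder name
            : provider.get(buildDummyIndexSettings(settings), environment, "_anonymous_tokenizer", settings);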
+ */ + private static Settings augmentSettings(Settings settings) { + return Settings.builder().put(settings) + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetaData.SETTING_INDEX_UUID, UUIDs.randomBase64UUID()) + .build(); + } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/categorization/CategorizationAnalyzerTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/categorization/CategorizationAnalyzerTests.java index 9f03952165c13..59413f6a61879 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/categorization/CategorizationAnalyzerTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/categorization/CategorizationAnalyzerTests.java @@ -22,7 +22,6 @@ import java.util.HashMap; import java.util.Map; - public class CategorizationAnalyzerTests extends ESTestCase { private AnalysisRegistry analysisRegistry; @@ -41,6 +40,158 @@ public void setup() throws Exception { analysisRegistry = buildTestAnalysisRegistry(environment); } + public void testVerifyConfigBuilder_GivenNoConfig() { + CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder(); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> CategorizationAnalyzer.verifyConfigBuilder(builder, analysisRegistry, environment)); + assertEquals("categorization_analyzer that is not a global analyzer must specify a [tokenizer] field", e.getMessage()); + } + + public void testVerifyConfigBuilder_GivenDefault() throws IOException { + CategorizationAnalyzerConfig defaultConfig = CategorizationAnalyzerConfig.buildDefaultCategorizationAnalyzer(null); + CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder(defaultConfig); + CategorizationAnalyzer.verifyConfigBuilder(builder, analysisRegistry, environment); + } + + public void testVerifyConfigBuilder_GivenValidAnalyzer() throws IOException { + CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder().setAnalyzer("standard"); + CategorizationAnalyzer.verifyConfigBuilder(builder, analysisRegistry, environment); + } + + public void testVerifyConfigBuilder_GivenInvalidAnalyzer() { + CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder().setAnalyzer("does not exist"); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> CategorizationAnalyzer.verifyConfigBuilder(builder, analysisRegistry, environment)); + assertEquals("Failed to find global analyzer [does not exist]", e.getMessage()); + } + + public void testVerifyConfigBuilder_GivenValidCustomConfig() throws IOException { + Map ignoreStuffInSqaureBrackets = new HashMap<>(); + ignoreStuffInSqaureBrackets.put("type", "pattern_replace"); + ignoreStuffInSqaureBrackets.put("pattern", "\\[[^\\]]*\\]"); + Map ignoreStuffThatBeginsWithADigit = new HashMap<>(); + ignoreStuffThatBeginsWithADigit.put("type", "pattern_replace"); + ignoreStuffThatBeginsWithADigit.put("pattern", "^[0-9].*"); + CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder() + .addCharFilter(ignoreStuffInSqaureBrackets) + .setTokenizer("classic") + .addTokenFilter("lowercase") + .addTokenFilter(ignoreStuffThatBeginsWithADigit) + .addTokenFilter("snowball"); + CategorizationAnalyzer.verifyConfigBuilder(builder, analysisRegistry, environment); + } + + 
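In the same style as the tests around it, a hedged sketch of one more positive case, assuming the builder exposes a setTokenizer(Map) overload for inline definitions alongside the String one, mirroring addCharFilter and addTokenFilter:

    public void testVerifyConfigBuilder_GivenInlineTokenizerDefinition() throws IOException {
        // an anonymous pattern tokenizer that splits on single spaces
        Map<String, Object> spaceTokenizer = new HashMap<>();
        spaceTokenizer.put("type", "pattern");
        spaceTokenizer.put("pattern", " ");
        CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder()
                .setTokenizer(spaceTokenizer)
                .addTokenFilter("lowercase");
        CategorizationAnalyzer.verifyConfigBuilder(builder, analysisRegistry, environment);
    }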
public void testVerifyConfigBuilder_GivenCustomConfigWithInvalidCharFilter() { + CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder() + .addCharFilter("wrong!") + .setTokenizer("classic") + .addTokenFilter("lowercase") + .addTokenFilter("snowball"); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> CategorizationAnalyzer.verifyConfigBuilder(builder, analysisRegistry, environment)); + assertEquals("Failed to find global char filter under [wrong!]", e.getMessage()); + } + + public void testVerifyConfigBuilder_GivenCustomConfigWithMisconfiguredCharFilter() { + Map noPattern = new HashMap<>(); + noPattern.put("type", "pattern_replace"); + noPattern.put("attern", "should have been pattern"); + CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder() + .addCharFilter(noPattern) + .setTokenizer("classic") + .addTokenFilter("lowercase") + .addTokenFilter("snowball"); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> CategorizationAnalyzer.verifyConfigBuilder(builder, analysisRegistry, environment)); + assertEquals("pattern is missing for [_anonymous_charfilter] char filter of type 'pattern_replace'", e.getMessage()); + } + + public void testVerifyConfigBuilder_GivenCustomConfigWithInvalidTokenizer() { + Map ignoreStuffInSqaureBrackets = new HashMap<>(); + ignoreStuffInSqaureBrackets.put("type", "pattern_replace"); + ignoreStuffInSqaureBrackets.put("pattern", "\\[[^\\]]*\\]"); + CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder() + .addCharFilter(ignoreStuffInSqaureBrackets) + .setTokenizer("oops!") + .addTokenFilter("lowercase") + .addTokenFilter("snowball"); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> CategorizationAnalyzer.verifyConfigBuilder(builder, analysisRegistry, environment)); + assertEquals("Failed to find global tokenizer under [oops!]", e.getMessage()); + } + + public void testVerifyConfigBuilder_GivenNoTokenizer() { + Map ignoreStuffInSqaureBrackets = new HashMap<>(); + ignoreStuffInSqaureBrackets.put("type", "pattern_replace"); + ignoreStuffInSqaureBrackets.put("pattern", "\\[[^\\]]*\\]"); + Map ignoreStuffThatBeginsWithADigit = new HashMap<>(); + ignoreStuffThatBeginsWithADigit.put("type", "pattern_replace"); + ignoreStuffThatBeginsWithADigit.put("pattern", "^[0-9].*"); + CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder() + .addCharFilter(ignoreStuffInSqaureBrackets) + .addTokenFilter("lowercase") + .addTokenFilter(ignoreStuffThatBeginsWithADigit) + .addTokenFilter("snowball"); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> CategorizationAnalyzer.verifyConfigBuilder(builder, analysisRegistry, environment)); + assertEquals("categorization_analyzer that is not a global analyzer must specify a [tokenizer] field", e.getMessage()); + } + + public void testVerifyConfigBuilder_GivenCustomConfigWithInvalidTokenFilter() { + Map ignoreStuffInSqaureBrackets = new HashMap<>(); + ignoreStuffInSqaureBrackets.put("type", "pattern_replace"); + ignoreStuffInSqaureBrackets.put("pattern", "\\[[^\\]]*\\]"); + CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder() + .addCharFilter(ignoreStuffInSqaureBrackets) + .setTokenizer("classic") + .addTokenFilter("lowercase") + .addTokenFilter("oh dear!"); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> 
CategorizationAnalyzer.verifyConfigBuilder(builder, analysisRegistry, environment)); + assertEquals("Failed to find global token filter under [oh dear!]", e.getMessage()); + } + + public void testVerifyConfigBuilder_GivenCustomConfigWithMisconfiguredTokenFilter() { + Map noPattern = new HashMap<>(); + noPattern.put("type", "pattern_replace"); + noPattern.put("attern", "should have been pattern"); + CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder() + .addCharFilter("html_strip") + .setTokenizer("classic") + .addTokenFilter("lowercase") + .addTokenFilter(noPattern); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> CategorizationAnalyzer.verifyConfigBuilder(builder, analysisRegistry, environment)); + assertEquals("pattern is missing for [_anonymous_tokenfilter] token filter of type 'pattern_replace'", e.getMessage()); + } + + public void testVerifyConfigBuilder_GivenAnalyzerAndCharFilter() { + CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder() + .setAnalyzer("standard") + .addCharFilter("html_strip"); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> CategorizationAnalyzer.verifyConfigBuilder(builder, analysisRegistry, environment)); + assertEquals("categorization_analyzer that is a global analyzer cannot also specify a [char_filter] field", e.getMessage()); + } + + public void testVerifyConfigBuilder_GivenAnalyzerAndTokenizer() { + CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder() + .setAnalyzer("standard") + .setTokenizer("classic"); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> CategorizationAnalyzer.verifyConfigBuilder(builder, analysisRegistry, environment)); + assertEquals("categorization_analyzer that is a global analyzer cannot also specify a [tokenizer] field", e.getMessage()); + } + + public void testVerifyConfigBuilder_GivenAnalyzerAndTokenFilter() { + CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder() + .setAnalyzer("standard") + .addTokenFilter("lowercase"); + IllegalArgumentException e = expectThrows(IllegalArgumentException.class, + () -> CategorizationAnalyzer.verifyConfigBuilder(builder, analysisRegistry, environment)); + assertEquals("categorization_analyzer that is a global analyzer cannot also specify a [filter] field", e.getMessage()); + } + // The default categorization analyzer matches what the analyzer in the ML C++ does public void testDefaultCategorizationAnalyzer() throws IOException { CategorizationAnalyzerConfig defaultConfig = CategorizationAnalyzerConfig.buildDefaultCategorizationAnalyzer(null); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/config/CategorizationAnalyzerConfigTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/config/CategorizationAnalyzerConfigTests.java index 9c725fe76292a..2fe2c0b334c4e 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/config/CategorizationAnalyzerConfigTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/config/CategorizationAnalyzerConfigTests.java @@ -6,175 +6,17 @@ package org.elasticsearch.xpack.ml.job.config; import org.elasticsearch.common.io.stream.Writeable; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.env.Environment; -import org.elasticsearch.env.TestEnvironment; 
-import org.elasticsearch.index.analysis.AnalysisRegistry; import org.elasticsearch.test.AbstractSerializingTestCase; import org.elasticsearch.xpack.core.ml.job.config.CategorizationAnalyzerConfig; import org.elasticsearch.xpack.core.ml.MlParserType; -import org.elasticsearch.xpack.ml.job.categorization.CategorizationAnalyzerTests; -import org.junit.Before; import java.io.IOException; import java.util.HashMap; import java.util.Map; - public class CategorizationAnalyzerConfigTests extends AbstractSerializingTestCase { - private AnalysisRegistry analysisRegistry; - private Environment environment; - - @Before - public void setup() throws Exception { - Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()).build(); - environment = TestEnvironment.newEnvironment(settings); - analysisRegistry = CategorizationAnalyzerTests.buildTestAnalysisRegistry(environment); - } - - public void testVerify_GivenNoConfig() { - CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder(); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder.verify(analysisRegistry, environment)); - assertEquals("categorization_analyzer that is not a global analyzer must specify a [tokenizer] field", e.getMessage()); - } - - public void testVerify_GivenDefault() throws IOException { - CategorizationAnalyzerConfig defaultConfig = CategorizationAnalyzerConfig.buildDefaultCategorizationAnalyzer(null); - CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder(defaultConfig); - builder.verify(analysisRegistry, environment); - } - - public void testVerify_GivenValidAnalyzer() throws IOException { - CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder().setAnalyzer("standard"); - builder.verify(analysisRegistry, environment); - } - - public void testVerify_GivenInvalidAnalyzer() { - CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder().setAnalyzer("does not exist"); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder.verify(analysisRegistry, environment)); - assertEquals("Failed to find global analyzer [does not exist]", e.getMessage()); - } - - public void testVerify_GivenValidCustomConfig() throws IOException { - Map ignoreStuffInSqaureBrackets = new HashMap<>(); - ignoreStuffInSqaureBrackets.put("type", "pattern_replace"); - ignoreStuffInSqaureBrackets.put("pattern", "\\[[^\\]]*\\]"); - Map ignoreStuffThatBeginsWithADigit = new HashMap<>(); - ignoreStuffThatBeginsWithADigit.put("type", "pattern_replace"); - ignoreStuffThatBeginsWithADigit.put("pattern", "^[0-9].*"); - CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder() - .addCharFilter(ignoreStuffInSqaureBrackets) - .setTokenizer("classic") - .addTokenFilter("lowercase") - .addTokenFilter(ignoreStuffThatBeginsWithADigit) - .addTokenFilter("snowball"); - builder.verify(analysisRegistry, environment); - } - - public void testVerify_GivenCustomConfigWithInvalidCharFilter() { - CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder() - .addCharFilter("wrong!") - .setTokenizer("classic") - .addTokenFilter("lowercase") - .addTokenFilter("snowball"); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder.verify(analysisRegistry, environment)); - assertEquals("Failed to find global char filter under [wrong!]", e.getMessage()); - } - - 
public void testVerify_GivenCustomConfigWithMisconfiguredCharFilter() { - Map noPattern = new HashMap<>(); - noPattern.put("type", "pattern_replace"); - noPattern.put("attern", "should have been pattern"); - CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder() - .addCharFilter(noPattern) - .setTokenizer("classic") - .addTokenFilter("lowercase") - .addTokenFilter("snowball"); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder.verify(analysisRegistry, environment)); - assertEquals("pattern is missing for [_anonymous_charfilter] char filter of type 'pattern_replace'", e.getMessage()); - } - - public void testVerify_GivenCustomConfigWithInvalidTokenizer() { - Map ignoreStuffInSqaureBrackets = new HashMap<>(); - ignoreStuffInSqaureBrackets.put("type", "pattern_replace"); - ignoreStuffInSqaureBrackets.put("pattern", "\\[[^\\]]*\\]"); - CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder() - .addCharFilter(ignoreStuffInSqaureBrackets) - .setTokenizer("oops!") - .addTokenFilter("lowercase") - .addTokenFilter("snowball"); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder.verify(analysisRegistry, environment)); - assertEquals("Failed to find global tokenizer under [oops!]", e.getMessage()); - } - - public void testVerify_GivenNoTokenizer() { - Map ignoreStuffInSqaureBrackets = new HashMap<>(); - ignoreStuffInSqaureBrackets.put("type", "pattern_replace"); - ignoreStuffInSqaureBrackets.put("pattern", "\\[[^\\]]*\\]"); - Map ignoreStuffThatBeginsWithADigit = new HashMap<>(); - ignoreStuffThatBeginsWithADigit.put("type", "pattern_replace"); - ignoreStuffThatBeginsWithADigit.put("pattern", "^[0-9].*"); - CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder() - .addCharFilter(ignoreStuffInSqaureBrackets) - .addTokenFilter("lowercase") - .addTokenFilter(ignoreStuffThatBeginsWithADigit) - .addTokenFilter("snowball"); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder.verify(analysisRegistry, environment)); - assertEquals("categorization_analyzer that is not a global analyzer must specify a [tokenizer] field", e.getMessage()); - } - - public void testVerify_GivenCustomConfigWithInvalidTokenFilter() { - Map ignoreStuffInSqaureBrackets = new HashMap<>(); - ignoreStuffInSqaureBrackets.put("type", "pattern_replace"); - ignoreStuffInSqaureBrackets.put("pattern", "\\[[^\\]]*\\]"); - CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder() - .addCharFilter(ignoreStuffInSqaureBrackets) - .setTokenizer("classic") - .addTokenFilter("lowercase") - .addTokenFilter("oh dear!"); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder.verify(analysisRegistry, environment)); - assertEquals("Failed to find global token filter under [oh dear!]", e.getMessage()); - } - - public void testVerify_GivenCustomConfigWithMisconfiguredTokenFilter() { - Map noPattern = new HashMap<>(); - noPattern.put("type", "pattern_replace"); - noPattern.put("attern", "should have been pattern"); - CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder() - .addCharFilter("html_strip") - .setTokenizer("classic") - .addTokenFilter("lowercase") - .addTokenFilter(noPattern); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder.verify(analysisRegistry, environment)); - assertEquals("pattern is 
missing for [_anonymous_tokenfilter] token filter of type 'pattern_replace'", e.getMessage()); - } - - public void testVerify_GivenAnalyzerAndCharFilter() { - CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder() - .setAnalyzer("standard") - .addCharFilter("html_strip"); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder.verify(analysisRegistry, environment)); - assertEquals("categorization_analyzer that is a global analyzer cannot also specify a [char_filter] field", e.getMessage()); - } - - public void testVerify_GivenAnalyzerAndTokenizer() { - CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder() - .setAnalyzer("standard") - .setTokenizer("classic"); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder.verify(analysisRegistry, environment)); - assertEquals("categorization_analyzer that is a global analyzer cannot also specify a [tokenizer] field", e.getMessage()); - } - - public void testVerify_GivenAnalyzerAndTokenFilter() { - CategorizationAnalyzerConfig.Builder builder = new CategorizationAnalyzerConfig.Builder() - .setAnalyzer("standard") - .addTokenFilter("lowercase"); - IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> builder.verify(analysisRegistry, environment)); - assertEquals("categorization_analyzer that is a global analyzer cannot also specify a [filter] field", e.getMessage()); - } - @Override protected CategorizationAnalyzerConfig createTestInstance() { return createRandomized().build(); From bb9fae0c60046be004a234999cfcee22c2e35d79 Mon Sep 17 00:00:00 2001 From: David Kyle Date: Tue, 17 Jul 2018 16:21:53 +0100 Subject: [PATCH 076/107] [ML] Wait for aliases in multi-node tests (#32086) --- .../xpack/ml/integration/MlJobIT.java | 62 ++++++++++++------- .../integration/RestoreModelSnapshotIT.java | 12 ++-- 2 files changed, 48 insertions(+), 26 deletions(-) diff --git a/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java b/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java index 7820cbc06f5a0..07529acdb8815 100644 --- a/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java +++ b/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/MlJobIT.java @@ -7,6 +7,7 @@ import org.apache.http.entity.ContentType; import org.apache.http.entity.StringEntity; +import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.common.settings.Settings; @@ -185,23 +186,32 @@ public void testCreateJobsWithIndexNameOption() throws Exception { + "anomaly_detectors/" + jobId2, Collections.emptyMap(), new StringEntity(jobConfig, ContentType.APPLICATION_JSON)); assertEquals(200, response.getStatusLine().getStatusCode()); - response = client().performRequest("get", "_aliases"); - assertEquals(200, response.getStatusLine().getStatusCode()); - String responseAsString = responseEntityToString(response); + // With security enabled GET _aliases throws an index_not_found_exception + // if no aliases have been created. In multi-node tests the alias may not + // appear immediately so wait here. 
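+            // (assertBusy retries only while the block throws AssertionError, which is why the
+            // ResponseException below is rewrapped: a plain exception would abort the wait rather
+            // than retry. The default retry window is 10 seconds.)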
+ assertBusy(() -> { + try { + Response aliasesResponse = client().performRequest("get", "_aliases"); + assertEquals(200, aliasesResponse.getStatusLine().getStatusCode()); + String responseAsString = responseEntityToString(aliasesResponse); + assertThat(responseAsString, + containsString("\"" + AnomalyDetectorsIndex.jobResultsAliasedName("custom-" + indexName) + "\":{\"aliases\":{")); + assertThat(responseAsString, containsString("\"" + AnomalyDetectorsIndex.jobResultsAliasedName(jobId1) + + "\":{\"filter\":{\"term\":{\"job_id\":{\"value\":\"" + jobId1 + "\",\"boost\":1.0}}}}")); + assertThat(responseAsString, containsString("\"" + AnomalyDetectorsIndex.resultsWriteAlias(jobId1) + "\":{}")); + assertThat(responseAsString, containsString("\"" + AnomalyDetectorsIndex.jobResultsAliasedName(jobId2) + + "\":{\"filter\":{\"term\":{\"job_id\":{\"value\":\"" + jobId2 + "\",\"boost\":1.0}}}}")); + assertThat(responseAsString, containsString("\"" + AnomalyDetectorsIndex.resultsWriteAlias(jobId2) + "\":{}")); + } catch (ResponseException e) { + throw new AssertionError(e); + } + }); + Response indicesResponse = client().performRequest("get", "_cat/indices"); + assertEquals(200, indicesResponse.getStatusLine().getStatusCode()); + String responseAsString = responseEntityToString(indicesResponse); assertThat(responseAsString, - containsString("\"" + AnomalyDetectorsIndex.jobResultsAliasedName("custom-" + indexName) + "\":{\"aliases\":{")); - assertThat(responseAsString, containsString("\"" + AnomalyDetectorsIndex.jobResultsAliasedName(jobId1) - + "\":{\"filter\":{\"term\":{\"job_id\":{\"value\":\"" + jobId1 + "\",\"boost\":1.0}}}}")); - assertThat(responseAsString, containsString("\"" + AnomalyDetectorsIndex.resultsWriteAlias(jobId1) + "\":{}")); - assertThat(responseAsString, containsString("\"" + AnomalyDetectorsIndex.jobResultsAliasedName(jobId2) - + "\":{\"filter\":{\"term\":{\"job_id\":{\"value\":\"" + jobId2 + "\",\"boost\":1.0}}}}")); - assertThat(responseAsString, containsString("\"" + AnomalyDetectorsIndex.resultsWriteAlias(jobId2) + "\":{}")); - - response = client().performRequest("get", "_cat/indices"); - assertEquals(200, response.getStatusLine().getStatusCode()); - responseAsString = responseEntityToString(response); - assertThat(responseAsString, containsString(AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "custom-" + indexName)); + containsString(AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + "custom-" + indexName)); assertThat(responseAsString, not(containsString(AnomalyDetectorsIndex.jobResultsAliasedName(jobId1)))); assertThat(responseAsString, not(containsString(AnomalyDetectorsIndex.jobResultsAliasedName(jobId2)))); @@ -438,7 +448,6 @@ public void testDeleteJobAfterMissingIndex() throws Exception { client().performRequest("get", MachineLearning.BASE_PATH + "anomaly_detectors/" + jobId + "/_stats")); } - @AwaitsFix(bugUrl = "https://github.com/elastic/elasticsearch/issues/32034") public void testDeleteJobAfterMissingAliases() throws Exception { String jobId = "delete-job-after-missing-alias-job"; String readAliasName = AnomalyDetectorsIndex.jobResultsAliasedName(jobId); @@ -446,15 +455,24 @@ public void testDeleteJobAfterMissingAliases() throws Exception { String indexName = AnomalyDetectorsIndexFields.RESULTS_INDEX_PREFIX + AnomalyDetectorsIndexFields.RESULTS_INDEX_DEFAULT; createFarequoteJob(jobId); - Response response = client().performRequest("get", "_cat/aliases"); - assertEquals(200, response.getStatusLine().getStatusCode()); - String responseAsString = 
responseEntityToString(response); - assertThat(responseAsString, containsString(readAliasName)); - assertThat(responseAsString, containsString(writeAliasName)); + // With security enabled cat aliases throws an index_not_found_exception + // if no aliases have been created. In multi-node tests the alias may not + // appear immediately so wait here. + assertBusy(() -> { + try { + Response aliasesResponse = client().performRequest(new Request("get", "_cat/aliases")); + assertEquals(200, aliasesResponse.getStatusLine().getStatusCode()); + String responseAsString = responseEntityToString(aliasesResponse); + assertThat(responseAsString, containsString(readAliasName)); + assertThat(responseAsString, containsString(writeAliasName)); + } catch (ResponseException e) { + throw new AssertionError(e); + } + }); // Manually delete the aliases so that we can test that deletion proceeds // normally anyway - response = client().performRequest("delete", indexName + "/_alias/" + readAliasName); + Response response = client().performRequest("delete", indexName + "/_alias/" + readAliasName); assertEquals(200, response.getStatusLine().getStatusCode()); response = client().performRequest("delete", indexName + "/_alias/" + writeAliasName); assertEquals(200, response.getStatusLine().getStatusCode()); diff --git a/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/RestoreModelSnapshotIT.java b/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/RestoreModelSnapshotIT.java index 9ff80bc739b16..d7a2b857bf359 100644 --- a/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/RestoreModelSnapshotIT.java +++ b/x-pack/qa/ml-native-multi-node-tests/src/test/java/org/elasticsearch/xpack/ml/integration/RestoreModelSnapshotIT.java @@ -72,10 +72,14 @@ public void test() throws Exception { openJob(job.getId()); String forecastId = forecast(job.getId(), TimeValue.timeValueHours(3), null); waitForecastToFinish(job.getId(), forecastId); - ForecastRequestStats forecastStats = getForecastStats(job.getId(), forecastId); - assertThat(forecastStats.getMessages(), anyOf(nullValue(), empty())); - assertThat(forecastStats.getMemoryUsage(), greaterThan(0L)); - assertEquals(forecastStats.getRecordCount(), 3L); + // In a multi-node cluster the replica may not be up to date + // so wait for the change + assertBusy(() -> { + ForecastRequestStats forecastStats = getForecastStats(job.getId(), forecastId); + assertThat(forecastStats.getMessages(), anyOf(nullValue(), empty())); + assertThat(forecastStats.getMemoryUsage(), greaterThan(0L)); + assertThat(forecastStats.getRecordCount(), equalTo(3L)); + }); closeJob(job.getId()); From b31dc36a9c69f9c8cc8e78e681788c54d297822e Mon Sep 17 00:00:00 2001 From: aptxx Date: Tue, 17 Jul 2018 23:42:24 +0800 Subject: [PATCH 077/107] Docs: Fix missing example script quote (#32010) --- .../client/documentation/ReindexDocumentationIT.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/reindex/src/test/java/org/elasticsearch/client/documentation/ReindexDocumentationIT.java b/modules/reindex/src/test/java/org/elasticsearch/client/documentation/ReindexDocumentationIT.java index 0766560a849a6..93c785e754a54 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/client/documentation/ReindexDocumentationIT.java +++ b/modules/reindex/src/test/java/org/elasticsearch/client/documentation/ReindexDocumentationIT.java @@ -96,7 +96,7 @@ public void updateByQuery() { 
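        // This snippet is documentation source, so the inline script must be valid Painless;
        // the one-character fix below restores the closing quote after 'absolutely'.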
updateByQuery.source("source_index") .script(new Script( ScriptType.INLINE, - "if (ctx._source.awesome == 'absolutely) {" + "if (ctx._source.awesome == 'absolutely') {" + " ctx.op='noop'" + "} else if (ctx._source.awesome == 'lame') {" + " ctx.op='delete'" From a7c8e076988593d4576f4b10312882629890968c Mon Sep 17 00:00:00 2001 From: Andy Bristol Date: Tue, 17 Jul 2018 09:15:11 -0700 Subject: [PATCH 078/107] Re-disable packaging tests on suse boxes This reverts commit 14d7e2c7b2d3761a361edd720f98353f856936a4. --- .../gradle/vagrant/VagrantTestPlugin.groovy | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy index d4d1d857e90d4..de3c0dfc3285f 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/vagrant/VagrantTestPlugin.groovy @@ -526,7 +526,11 @@ class VagrantTestPlugin implements Plugin { project.gradle.removeListener(batsPackagingReproListener) } if (project.extensions.esvagrant.boxes.contains(box)) { - packagingTest.dependsOn(batsPackagingTest) + // these tests are temporarily disabled for suse boxes while we debug an issue + // https://github.com/elastic/elasticsearch/issues/30295 + if (box.equals("opensuse-42") == false && box.equals("sles-12") == false) { + packagingTest.dependsOn(batsPackagingTest) + } } } @@ -565,7 +569,11 @@ class VagrantTestPlugin implements Plugin { project.gradle.removeListener(javaPackagingReproListener) } if (project.extensions.esvagrant.boxes.contains(box)) { - packagingTest.dependsOn(javaPackagingTest) + // these tests are temporarily disabled for suse boxes while we debug an issue + // https://github.com/elastic/elasticsearch/issues/30295 + if (box.equals("opensuse-42") == false && box.equals("sles-12") == false) { + packagingTest.dependsOn(javaPackagingTest) + } } /* From 7490ec619c1e5495baf9be400eb1ecc72d990a86 Mon Sep 17 00:00:00 2001 From: Ioannis Kakavas Date: Tue, 17 Jul 2018 20:25:38 +0300 Subject: [PATCH 079/107] Remove empty @param from Javadoc --- .../test/java/org/elasticsearch/test/SecuritySettingsSource.java | 1 - 1 file changed, 1 deletion(-) diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java index 7d329781fad2b..56d5fec3f20d6 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/test/SecuritySettingsSource.java @@ -298,7 +298,6 @@ private static void addSSLSettingsForStore(Settings.Builder builder, String pref * Returns the SSL related configuration settings given the location of a key and certificate and the location * of the PEM certificates to be trusted * - * @param builder * @param keyPath The path to the Private key to be used for SSL * @param password The password with which the private key is protected * @param certificatePath The path to the PEM formatted Certificate encapsulating the public key that corresponds From a481ef63ebc7ac25d9fbbfff447d0ca5986ff76e Mon Sep 17 00:00:00 2001 From: Jack Conradson Date: Tue, 17 Jul 2018 10:33:38 -0700 Subject: [PATCH 080/107] Painless: Fix Bug with Duplicate PainlessClasses (#32110) When building the PainlessMethods and PainlessFields they stored a reference to a 
PainlessClass. This reference was taken prior to "freezing" the PainlessClass, so the data it exposed was both incomplete and mutable. It has been replaced with the target java class, since the PainlessClass is now accessible through its java class and, unlike the PainlessClass, the java class needs no special handling to avoid the chicken-and-egg problem. --- .../java/org/elasticsearch/painless/Def.java | 4 +- .../elasticsearch/painless/FunctionRef.java | 8 ++-- .../painless/lookup/PainlessField.java | 6 +-- .../lookup/PainlessLookupBuilder.java | 14 +++---- .../lookup/PainlessLookupUtility.java | 6 +-- .../painless/lookup/PainlessMethod.java | 15 +++---- .../painless/node/EListInit.java | 3 +- .../elasticsearch/painless/node/EMapInit.java | 3 +- .../elasticsearch/painless/node/ENewObj.java | 3 +- .../painless/node/PSubField.java | 13 +++--- .../painless/PainlessDocGenerator.java | 42 ++++++++++--------- 11 files changed, 62 insertions(+), 55 deletions(-) diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java index 78db712d183d2..fe11ff4814b90 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java @@ -334,8 +334,8 @@ static MethodHandle lookupReference(PainlessLookup painlessLookup, MethodHandles } int arity = interfaceMethod.arguments.size(); PainlessMethod implMethod = lookupMethodInternal(painlessLookup, receiverClass, name, arity); - return lookupReferenceInternal(painlessLookup, methodHandlesLookup, interfaceType, implMethod.owner.name, - implMethod.name, receiverClass); + return lookupReferenceInternal(painlessLookup, methodHandlesLookup, interfaceType, + PainlessLookupUtility.anyTypeToPainlessTypeName(implMethod.target), implMethod.name, receiverClass); } /** Returns a method handle to an implementation of clazz, given method reference signature. 
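To make the commit's chicken-and-egg point concrete, a hedged before/after sketch in comment form (simplified; the real constructors take more arguments):

    // Before: a method built while its owner was still being assembled could only
    // capture the mutable, pre-freeze PainlessClass:
    //     PainlessMethod m = new PainlessMethod(name, ownerPainlessClassUnderConstruction, ...);
    // After: capture the stable java Class token instead, and recover the frozen
    // PainlessClass on demand through the lookup:
    //     PainlessMethod m = new PainlessMethod(name, ownerStruct.clazz, ...);
    //     PainlessClass owner = painlessLookup.getPainlessStructFromJavaClass(m.target);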
*/ diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/FunctionRef.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/FunctionRef.java index 9e72dc2c83576..925359fabc505 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/FunctionRef.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/FunctionRef.java @@ -102,22 +102,22 @@ public FunctionRef(Class expected, PainlessMethod interfaceMethod, PainlessMe interfaceMethodType = interfaceMethod.getMethodType().dropParameterTypes(0, 1); // the Painless$Script class can be inferred if owner is null - if (delegateMethod.owner == null) { + if (delegateMethod.target == null) { delegateClassName = CLASS_NAME; isDelegateInterface = false; } else if (delegateMethod.augmentation != null) { delegateClassName = delegateMethod.augmentation.getName(); isDelegateInterface = delegateMethod.augmentation.isInterface(); } else { - delegateClassName = delegateMethod.owner.clazz.getName(); - isDelegateInterface = delegateMethod.owner.clazz.isInterface(); + delegateClassName = delegateMethod.target.getName(); + isDelegateInterface = delegateMethod.target.isInterface(); } if ("".equals(delegateMethod.name)) { delegateInvokeType = H_NEWINVOKESPECIAL; } else if (Modifier.isStatic(delegateMethod.modifiers)) { delegateInvokeType = H_INVOKESTATIC; - } else if (delegateMethod.owner.clazz.isInterface()) { + } else if (delegateMethod.target.isInterface()) { delegateInvokeType = H_INVOKEINTERFACE; } else { delegateInvokeType = H_INVOKEVIRTUAL; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessField.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessField.java index 7c85bd269b461..f316e1438ecb9 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessField.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessField.java @@ -23,18 +23,18 @@ public final class PainlessField { public final String name; - public final PainlessClass owner; + public final Class target; public final Class clazz; public final String javaName; public final int modifiers; public final MethodHandle getter; public final MethodHandle setter; - PainlessField(String name, String javaName, PainlessClass owner, Class clazz, int modifiers, + PainlessField(String name, String javaName, Class target, Class clazz, int modifiers, MethodHandle getter, MethodHandle setter) { this.name = name; this.javaName = javaName; - this.owner = owner; + this.target = target; this.clazz = clazz; this.modifiers = modifiers; this.getter = getter; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java index 9a5e08d65a754..5641eee1b5d9b 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java @@ -310,7 +310,7 @@ private void addConstructor(String ownerStructName, WhitelistConstructor whiteli } painlessConstructor = methodCache.computeIfAbsent(buildMethodCacheKey(ownerStruct.name, "", painlessParametersTypes), - key -> new PainlessMethod("", ownerStruct, null, void.class, painlessParametersTypes, + key -> new PainlessMethod("", ownerStruct.clazz, null, void.class, painlessParametersTypes, asmConstructor, 
javaConstructor.getModifiers(), javaHandle)); ownerStruct.constructors.put(painlessMethodKey, painlessConstructor); } else if (painlessConstructor.arguments.equals(painlessParametersTypes) == false){ @@ -419,7 +419,7 @@ private void addMethod(ClassLoader whitelistClassLoader, String ownerStructName, painlessMethod = methodCache.computeIfAbsent( buildMethodCacheKey(ownerStruct.name, whitelistMethod.javaMethodName, painlessParametersTypes), - key -> new PainlessMethod(whitelistMethod.javaMethodName, ownerStruct, null, painlessReturnClass, + key -> new PainlessMethod(whitelistMethod.javaMethodName, ownerStruct.clazz, null, painlessReturnClass, painlessParametersTypes, asmMethod, javaMethod.getModifiers(), javaMethodHandle)); ownerStruct.staticMethods.put(painlessMethodKey, painlessMethod); } else if ((painlessMethod.name.equals(whitelistMethod.javaMethodName) && painlessMethod.rtn == painlessReturnClass && @@ -445,7 +445,7 @@ private void addMethod(ClassLoader whitelistClassLoader, String ownerStructName, painlessMethod = methodCache.computeIfAbsent( buildMethodCacheKey(ownerStruct.name, whitelistMethod.javaMethodName, painlessParametersTypes), - key -> new PainlessMethod(whitelistMethod.javaMethodName, ownerStruct, javaAugmentedClass, painlessReturnClass, + key -> new PainlessMethod(whitelistMethod.javaMethodName, ownerStruct.clazz, javaAugmentedClass, painlessReturnClass, painlessParametersTypes, asmMethod, javaMethod.getModifiers(), javaMethodHandle)); ownerStruct.methods.put(painlessMethodKey, painlessMethod); } else if ((painlessMethod.name.equals(whitelistMethod.javaMethodName) && painlessMethod.rtn.equals(painlessReturnClass) && @@ -501,7 +501,7 @@ private void addField(String ownerStructName, WhitelistField whitelistField) { painlessField = fieldCache.computeIfAbsent( buildFieldCacheKey(ownerStruct.name, whitelistField.javaFieldName, painlessFieldClass.getName()), key -> new PainlessField(whitelistField.javaFieldName, javaField.getName(), - ownerStruct, painlessFieldClass, javaField.getModifiers(), null, null)); + ownerStruct.clazz, painlessFieldClass, javaField.getModifiers(), null, null)); ownerStruct.staticMembers.put(whitelistField.javaFieldName, painlessField); } else if (painlessField.clazz != painlessFieldClass) { throw new IllegalArgumentException("illegal duplicate static fields [" + whitelistField.javaFieldName + "] " + @@ -530,7 +530,7 @@ private void addField(String ownerStructName, WhitelistField whitelistField) { painlessField = fieldCache.computeIfAbsent( buildFieldCacheKey(ownerStruct.name, whitelistField.javaFieldName, painlessFieldClass.getName()), key -> new PainlessField(whitelistField.javaFieldName, javaField.getName(), - ownerStruct, painlessFieldClass, javaField.getModifiers(), javaMethodHandleGetter, javaMethodHandleSetter)); + ownerStruct.clazz, painlessFieldClass, javaField.getModifiers(), javaMethodHandleGetter, javaMethodHandleSetter)); ownerStruct.members.put(whitelistField.javaFieldName, painlessField); } else if (painlessField.clazz != painlessFieldClass) { throw new IllegalArgumentException("illegal duplicate member fields [" + whitelistField.javaFieldName + "] " + @@ -615,8 +615,8 @@ private void copyStruct(String struct, List children) { for (PainlessField field : child.members.values()) { if (owner.members.get(field.name) == null) { - owner.members.put(field.name, - new PainlessField(field.name, field.javaName, owner, field.clazz, field.modifiers, field.getter, field.setter)); + owner.members.put(field.name, new PainlessField( + field.name, 
field.javaName, owner.clazz, field.clazz, field.modifiers, field.getter, field.setter)); } } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupUtility.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupUtility.java index d1f3ee4ece3e0..0f7c8fb915cdf 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupUtility.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupUtility.java @@ -158,7 +158,7 @@ public static Class painlessTypeNameToPainlessType(String painlessTypeName, M painlessTypeName.charAt(arrayIndex++) == ']') { ++arrayDimensions; } else { - throw new IllegalArgumentException("invalid painless type [" + painlessTypeName + "]."); + throw new IllegalArgumentException("painless type [" + painlessTypeName + "] not found"); } } @@ -192,7 +192,7 @@ public static Class painlessTypeNameToPainlessType(String painlessTypeName, M try { return Class.forName(javaDescriptor); } catch (ClassNotFoundException cnfe) { - throw new IllegalStateException("painless type [" + painlessTypeName + "] not found", cnfe); + throw new IllegalArgumentException("painless type [" + painlessTypeName + "] not found", cnfe); } } @@ -207,7 +207,7 @@ public static void validatePainlessType(Class painlessType, Collection target; public final Class augmentation; public final Class rtn; public final List> arguments; @@ -38,11 +39,11 @@ public class PainlessMethod { public final int modifiers; public final MethodHandle handle; - public PainlessMethod(String name, PainlessClass owner, Class augmentation, Class rtn, List> arguments, + public PainlessMethod(String name, Class target, Class augmentation, Class rtn, List> arguments, org.objectweb.asm.commons.Method method, int modifiers, MethodHandle handle) { this.name = name; this.augmentation = augmentation; - this.owner = owner; + this.target = target; this.rtn = rtn; this.arguments = Collections.unmodifiableList(arguments); this.method = method; @@ -85,11 +86,11 @@ public MethodType getMethodType() { for (int i = 0; i < arguments.size(); i++) { params[i] = PainlessLookupUtility.painlessDefTypeToJavaObjectType(arguments.get(i)); } - returnValue = owner.clazz; + returnValue = target; } else { // virtual/interface method: add receiver class params = new Class[1 + arguments.size()]; - params[0] = owner.clazz; + params[0] = target; for (int i = 0; i < arguments.size(); i++) { params[i + 1] = PainlessLookupUtility.painlessDefTypeToJavaObjectType(arguments.get(i)); } @@ -106,8 +107,8 @@ public void write(MethodWriter writer) { clazz = augmentation; type = org.objectweb.asm.Type.getType(augmentation); } else { - clazz = owner.clazz; - type = owner.type; + clazz = target; + type = Type.getType(target); } if (Modifier.isStatic(modifiers)) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java index 90475419b3260..7e923e5f90f1e 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java @@ -26,6 +26,7 @@ import org.elasticsearch.painless.lookup.PainlessMethod; import org.elasticsearch.painless.lookup.PainlessMethodKey; import org.elasticsearch.painless.lookup.def; +import org.objectweb.asm.Type; import java.util.ArrayList; import java.util.List; @@ -90,7 +91,7 @@ 
void write(MethodWriter writer, Globals globals) { writer.newInstance(MethodWriter.getType(actual)); writer.dup(); - writer.invokeConstructor(constructor.owner.type, constructor.method); + writer.invokeConstructor(Type.getType(constructor.target), constructor.method); for (AExpression value : values) { writer.dup(); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java index c6474846d4c7a..b350a758944d5 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java @@ -26,6 +26,7 @@ import org.elasticsearch.painless.lookup.PainlessMethod; import org.elasticsearch.painless.lookup.PainlessMethodKey; import org.elasticsearch.painless.lookup.def; +import org.objectweb.asm.Type; import java.util.HashMap; import java.util.List; @@ -109,7 +110,7 @@ void write(MethodWriter writer, Globals globals) { writer.newInstance(MethodWriter.getType(actual)); writer.dup(); - writer.invokeConstructor(constructor.owner.type, constructor.method); + writer.invokeConstructor(Type.getType(constructor.target), constructor.method); for (int index = 0; index < keys.size(); ++index) { AExpression key = keys.get(index); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java index a780ea3e05be8..cf6f040c9753a 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java @@ -26,6 +26,7 @@ import org.elasticsearch.painless.lookup.PainlessClass; import org.elasticsearch.painless.lookup.PainlessMethod; import org.elasticsearch.painless.lookup.PainlessMethodKey; +import org.objectweb.asm.Type; import java.util.List; import java.util.Objects; @@ -104,7 +105,7 @@ void write(MethodWriter writer, Globals globals) { argument.write(writer, globals); } - writer.invokeConstructor(constructor.owner.type, constructor.method); + writer.invokeConstructor(Type.getType(constructor.target), constructor.method); } @Override diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubField.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubField.java index 8eb154e745bf7..a1a0ee1dade36 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubField.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubField.java @@ -25,6 +25,7 @@ import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.lookup.PainlessField; import org.elasticsearch.painless.lookup.PainlessLookupUtility; +import org.objectweb.asm.Type; import java.lang.reflect.Modifier; import java.util.Objects; @@ -63,9 +64,9 @@ void write(MethodWriter writer, Globals globals) { writer.writeDebugInfo(location); if (java.lang.reflect.Modifier.isStatic(field.modifiers)) { - writer.getStatic(field.owner.type, field.javaName, MethodWriter.getType(field.clazz)); + writer.getStatic(Type.getType(field.target), field.javaName, MethodWriter.getType(field.clazz)); } else { - writer.getField(field.owner.type, field.javaName, MethodWriter.getType(field.clazz)); + writer.getField(Type.getType(field.target), field.javaName, MethodWriter.getType(field.clazz)); } } @@ -94,9 +95,9 @@ void 
load(MethodWriter writer, Globals globals) { writer.writeDebugInfo(location); if (java.lang.reflect.Modifier.isStatic(field.modifiers)) { - writer.getStatic(field.owner.type, field.javaName, MethodWriter.getType(field.clazz)); + writer.getStatic(Type.getType(field.target), field.javaName, MethodWriter.getType(field.clazz)); } else { - writer.getField(field.owner.type, field.javaName, MethodWriter.getType(field.clazz)); + writer.getField(Type.getType(field.target), field.javaName, MethodWriter.getType(field.clazz)); } } @@ -105,9 +106,9 @@ void store(MethodWriter writer, Globals globals) { writer.writeDebugInfo(location); if (java.lang.reflect.Modifier.isStatic(field.modifiers)) { - writer.putStatic(field.owner.type, field.javaName, MethodWriter.getType(field.clazz)); + writer.putStatic(Type.getType(field.target), field.javaName, MethodWriter.getType(field.clazz)); } else { - writer.putField(field.owner.type, field.javaName, MethodWriter.getType(field.clazz)); + writer.putField(Type.getType(field.target), field.javaName, MethodWriter.getType(field.clazz)); } } diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java index 5e8e6ad47d813..4486a52ccb1be 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java @@ -27,6 +27,7 @@ import org.elasticsearch.painless.lookup.PainlessField; import org.elasticsearch.painless.lookup.PainlessLookup; import org.elasticsearch.painless.lookup.PainlessLookupBuilder; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; import org.elasticsearch.painless.spi.Whitelist; @@ -67,8 +68,8 @@ public static void main(String[] args) throws IOException { Path indexPath = apiRootPath.resolve("index.asciidoc"); logger.info("Starting to write [index.asciidoc]"); try (PrintStream indexStream = new PrintStream( - Files.newOutputStream(indexPath, StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE), - false, StandardCharsets.UTF_8.name())) { + Files.newOutputStream(indexPath, StandardOpenOption.CREATE_NEW, StandardOpenOption.WRITE), + false, StandardCharsets.UTF_8.name())) { emitGeneratedWarning(indexStream); List structs = PAINLESS_LOOKUP.getStructs().stream().sorted(comparing(t -> t.name)).collect(toList()); for (PainlessClass struct : structs) { @@ -91,7 +92,7 @@ public static void main(String[] args) throws IOException { false, StandardCharsets.UTF_8.name())) { emitGeneratedWarning(typeStream); typeStream.print("[["); - emitAnchor(typeStream, struct); + emitAnchor(typeStream, struct.clazz); typeStream.print("]]++"); typeStream.print(struct.name); typeStream.println("++::"); @@ -104,10 +105,11 @@ public static void main(String[] args) throws IOException { struct.constructors.values().stream().sorted(NUMBER_OF_ARGS).forEach(documentMethod); Map inherited = new TreeMap<>(); struct.methods.values().stream().sorted(METHOD_NAME.thenComparing(NUMBER_OF_ARGS)).forEach(method -> { - if (method.owner == struct) { + if (method.target == struct.clazz) { documentMethod(typeStream, method); } else { - inherited.put(method.owner.name, method.owner); + PainlessClass painlessClass = PAINLESS_LOOKUP.getPainlessStructFromJavaClass(method.target); + inherited.put(painlessClass.name, painlessClass); } }); @@ -206,16 +208,16 @@ private static void 
documentMethod(PrintStream stream, PainlessMethod method) { /** * Anchor text for a {@link PainlessClass}. */ - private static void emitAnchor(PrintStream stream, PainlessClass struct) { + private static void emitAnchor(PrintStream stream, Class<?> clazz) { stream.print("painless-api-reference-"); - stream.print(struct.name.replace('.', '-')); + stream.print(PainlessLookupUtility.anyTypeToPainlessTypeName(clazz).replace('.', '-')); } /** * Anchor text for a {@link PainlessMethod}. */ private static void emitAnchor(PrintStream stream, PainlessMethod method) { - emitAnchor(stream, method.owner); + emitAnchor(stream, method.target); stream.print('-'); stream.print(methodName(method)); stream.print('-'); @@ -226,18 +228,18 @@ private static void emitAnchor(PrintStream stream, PainlessMethod method) { * Anchor text for a {@link PainlessField}. */ private static void emitAnchor(PrintStream stream, PainlessField field) { - emitAnchor(stream, field.owner); + emitAnchor(stream, field.target); stream.print('-'); stream.print(field.name); } private static String methodName(PainlessMethod method) { - return method.name.equals("<init>") ? method.owner.name : method.name; + return method.name.equals("<init>") ? PainlessLookupUtility.anyTypeToPainlessTypeName(method.target) : method.name; } /** * Emit a {@link Class}. If the type is primitive or an array of primitives this just emits the name of the type. Otherwise this emits - an internal link with the text. + an internal link with the text. */ private static void emitType(PrintStream stream, Class<?> clazz) { emitStruct(stream, PAINLESS_LOOKUP.getPainlessStructFromJavaClass(clazz)); @@ -253,7 +255,7 @@ private static void emitType(PrintStream stream, Class<?> clazz) { private static void emitStruct(PrintStream stream, PainlessClass struct) { if (false == struct.clazz.isPrimitive() && false == struct.name.equals("def")) { stream.print("<<"); - emitAnchor(stream, struct); + emitAnchor(stream, struct.clazz); stream.print(','); stream.print(struct.name); stream.print(">>"); @@ -271,14 +273,14 @@ private static void emitJavadocLink(PrintStream stream, String root, PainlessMet stream.print("link:{"); stream.print(root); stream.print("-javadoc}/"); - stream.print(classUrlPath(method.augmentation != null ? method.augmentation : method.owner.clazz)); + stream.print(classUrlPath(method.augmentation != null ? method.augmentation : method.target)); stream.print(".html#"); stream.print(methodName(method)); stream.print("%2D"); boolean first = true; if (method.augmentation != null) { first = false; - stream.print(method.owner.clazz.getName()); + stream.print(method.target.getName()); } for (Class<?> clazz: method.arguments) { if (first) { @@ -303,7 +305,7 @@ private static void emitJavadocLink(PrintStream stream, String root, PainlessFie stream.print("link:{"); stream.print(root); stream.print("-javadoc}/"); - stream.print(classUrlPath(field.owner.clazz)); + stream.print(classUrlPath(field.target)); stream.print(".html#"); stream.print(field.javaName); } @@ -315,21 +317,21 @@ private static String javadocRoot(PainlessMethod method) { if (method.augmentation != null) { return "painless"; } - return javadocRoot(method.owner); + return javadocRoot(method.target); } /** * Pick the javadoc root for a {@link PainlessField}. */ private static String javadocRoot(PainlessField field) { - return javadocRoot(field.owner); + return javadocRoot(field.target); } /** - * Pick the javadoc root for a {@link PainlessClass}. + * Pick the javadoc root for a {@link Class}.
*/ - private static String javadocRoot(PainlessClass struct) { - String classPackage = struct.clazz.getPackage().getName(); + private static String javadocRoot(Class clazz) { + String classPackage = clazz.getPackage().getName(); if (classPackage.startsWith("java")) { return "java8"; } From 346edfab0ac0faa2089b3e05f21b9e569d1a8634 Mon Sep 17 00:00:00 2001 From: Nik Everett Date: Tue, 17 Jul 2018 14:20:41 -0400 Subject: [PATCH 081/107] Build: Move shadow customizations into common code (#32014) Moves the customizations to the build to produce nice shadow jars and javadocs into common build code, mostly BuildPlugin with a little into the root build.gradle file. This means that any project that applies the shadow plugin will automatically be set up just like the high level rest client: * The non-shadow jar will not be built * The shadow jar will not have a "classifier" * Tests will run against the shadow jar * Javadoc will include all of the shadowed classes * Service files in `META-INF/services` will be merged --- benchmarks/build.gradle | 15 --- build.gradle | 57 +++++++++-- buildSrc/build.gradle | 1 + .../elasticsearch/gradle/BuildPlugin.groovy | 92 ++++++++++++++++- .../gradle/plugin/PluginBuildPlugin.groovy | 14 +++ client/benchmark/build.gradle | 12 --- client/rest-high-level/build.gradle | 99 ------------------- x-pack/plugin/sql/jdbc/build.gradle | 36 ------- 8 files changed, 153 insertions(+), 173 deletions(-) diff --git a/benchmarks/build.gradle b/benchmarks/build.gradle index e7ee5a059ab37..80d1982300dd1 100644 --- a/benchmarks/build.gradle +++ b/benchmarks/build.gradle @@ -17,17 +17,6 @@ * under the License. */ -buildscript { - repositories { - maven { - url 'https://plugins.gradle.org/m2/' - } - } - dependencies { - classpath 'com.github.jengelman.gradle.plugins:shadow:2.0.4' - } -} - apply plugin: 'elasticsearch.build' // order of this section matters, see: https://github.com/johnrengelman/shadow/issues/336 @@ -81,10 +70,6 @@ thirdPartyAudit.excludes = [ 'org.openjdk.jmh.util.Utils' ] -shadowJar { - classifier = 'benchmarks' -} - runShadow { executable = new File(project.runtimeJavaHome, 'bin/java') } diff --git a/build.gradle b/build.gradle index 187e247705277..ccbb6898dc413 100644 --- a/build.gradle +++ b/build.gradle @@ -17,7 +17,7 @@ * under the License. */ - +import com.github.jengelman.gradle.plugins.shadow.ShadowPlugin import org.apache.tools.ant.taskdefs.condition.Os import org.apache.tools.ant.filters.ReplaceTokens import org.elasticsearch.gradle.BuildPlugin @@ -303,18 +303,55 @@ subprojects { if (project.plugins.hasPlugin(BuildPlugin)) { String artifactsHost = VersionProperties.elasticsearch.isSnapshot() ? 
"https://snapshots.elastic.co" : "https://artifacts.elastic.co" Closure sortClosure = { a, b -> b.group <=> a.group } - Closure depJavadocClosure = { dep -> - if (dep.group != null && dep.group.startsWith('org.elasticsearch')) { - Project upstreamProject = dependencyToProject(dep) - if (upstreamProject != null) { - project.javadoc.dependsOn "${upstreamProject.path}:javadoc" - String artifactPath = dep.group.replaceAll('\\.', '/') + '/' + dep.name.replaceAll('\\.', '/') + '/' + dep.version - project.javadoc.options.linksOffline artifactsHost + "/javadoc/" + artifactPath, "${upstreamProject.buildDir}/docs/javadoc/" + Closure depJavadocClosure = { shadowed, dep -> + if (dep.group == null || false == dep.group.startsWith('org.elasticsearch')) { + return + } + Project upstreamProject = dependencyToProject(dep) + if (upstreamProject == null) { + return + } + if (shadowed) { + /* + * Include the source of shadowed upstream projects so we don't + * have to publish their javadoc. + */ + project.evaluationDependsOn(upstreamProject.path) + project.javadoc.source += upstreamProject.javadoc.source + /* + * Do not add those projects to the javadoc classpath because + * we are going to resolve them with their source instead. + */ + project.javadoc.classpath = project.javadoc.classpath.filter { f -> + false == upstreamProject.configurations.archives.artifacts.files.files.contains(f) } + /* + * Instead we need the upstream project's javadoc classpath so + * we don't barf on the classes that it references. + */ + project.javadoc.classpath += upstreamProject.javadoc.classpath + } else { + // Link to non-shadowed dependant projects + project.javadoc.dependsOn "${upstreamProject.path}:javadoc" + String artifactPath = dep.group.replaceAll('\\.', '/') + '/' + dep.name.replaceAll('\\.', '/') + '/' + dep.version + project.javadoc.options.linksOffline artifactsHost + "/javadoc/" + artifactPath, "${upstreamProject.buildDir}/docs/javadoc/" } } - project.configurations.compile.dependencies.findAll().toSorted(sortClosure).each(depJavadocClosure) - project.configurations.compileOnly.dependencies.findAll().toSorted(sortClosure).each(depJavadocClosure) + boolean hasShadow = project.plugins.hasPlugin(ShadowPlugin) + project.configurations.compile.dependencies + .findAll() + .toSorted(sortClosure) + .each({ c -> depJavadocClosure(hasShadow, c) }) + project.configurations.compileOnly.dependencies + .findAll() + .toSorted(sortClosure) + .each({ c -> depJavadocClosure(hasShadow, c) }) + if (hasShadow) { + project.configurations.shadow.dependencies + .findAll() + .toSorted(sortClosure) + .each({ c -> depJavadocClosure(false, c) }) + } } } } diff --git a/buildSrc/build.gradle b/buildSrc/build.gradle index 3d100daf7d65f..eb95ff148f63c 100644 --- a/buildSrc/build.gradle +++ b/buildSrc/build.gradle @@ -104,6 +104,7 @@ dependencies { compile 'de.thetaphi:forbiddenapis:2.5' compile 'org.apache.rat:apache-rat:0.11' compile "org.elasticsearch:jna:4.5.1" + compile 'com.github.jengelman.gradle.plugins:shadow:2.0.4' testCompile "junit:junit:${props.getProperty('junit')}" } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy index 89e10c50ff782..b5b5ec95becc9 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy @@ -19,6 +19,7 @@ package org.elasticsearch.gradle import com.carrotsearch.gradle.junit4.RandomizedTestingTask +import 
com.github.jengelman.gradle.plugins.shadow.ShadowPlugin import org.apache.tools.ant.taskdefs.condition.Os import org.eclipse.jgit.lib.Constants import org.eclipse.jgit.lib.RepositoryBuilder @@ -36,12 +37,14 @@ import org.gradle.api.artifacts.ModuleDependency import org.gradle.api.artifacts.ModuleVersionIdentifier import org.gradle.api.artifacts.ProjectDependency import org.gradle.api.artifacts.ResolvedArtifact +import org.gradle.api.artifacts.SelfResolvingDependency import org.gradle.api.artifacts.dsl.RepositoryHandler import org.gradle.api.execution.TaskExecutionGraph import org.gradle.api.plugins.JavaPlugin import org.gradle.api.publish.maven.MavenPublication import org.gradle.api.publish.maven.plugins.MavenPublishPlugin import org.gradle.api.publish.maven.tasks.GenerateMavenPom +import org.gradle.api.tasks.SourceSet import org.gradle.api.tasks.bundling.Jar import org.gradle.api.tasks.compile.GroovyCompile import org.gradle.api.tasks.compile.JavaCompile @@ -498,7 +501,41 @@ class BuildPlugin implements Plugin { } } } + project.plugins.withType(ShadowPlugin).whenPluginAdded { + project.publishing { + publications { + nebula(MavenPublication) { + artifact project.tasks.shadowJar + artifactId = project.archivesBaseName + /* + * Configure the pom to include the "shadow" as compile dependencies + * because that is how we're using them but remove all other dependencies + * because they've been shaded into the jar. + */ + pom.withXml { XmlProvider xml -> + Node root = xml.asNode() + root.remove(root.dependencies) + Node dependenciesNode = root.appendNode('dependencies') + project.configurations.shadow.allDependencies.each { + if (false == it instanceof SelfResolvingDependency) { + Node dependencyNode = dependenciesNode.appendNode('dependency') + dependencyNode.appendNode('groupId', it.group) + dependencyNode.appendNode('artifactId', it.name) + dependencyNode.appendNode('version', it.version) + dependencyNode.appendNode('scope', 'compile') + } + } + // Be tidy and remove the element if it is empty + if (dependenciesNode.children.empty) { + root.remove(dependenciesNode) + } + } + } + } + } + } } + } /** Adds compiler settings to the project */ @@ -660,6 +697,28 @@ class BuildPlugin implements Plugin { } } } + project.plugins.withType(ShadowPlugin).whenPluginAdded { + /* + * When we use the shadow plugin we entirely replace the + * normal jar with the shadow jar so we no longer want to run + * the jar task. + */ + project.tasks.jar.enabled = false + project.tasks.shadowJar { + /* + * Replace the default "shadow" classifier with null + * which will leave the classifier off of the file name. + */ + classifier = null + /* + * Not all cases need service files merged but it is + * better to be safe + */ + mergeServiceFiles() + } + // Make sure we assemble the shadow jar + project.tasks.assemble.dependsOn project.tasks.shadowJar + } } /** Returns a closure of common configuration shared by unit and integration tests. */ @@ -744,6 +803,18 @@ class BuildPlugin implements Plugin { } exclude '**/*$*.class' + + project.plugins.withType(ShadowPlugin).whenPluginAdded { + /* + * If we make a shaded jar we test against it. 
+ */ + classpath -= project.tasks.compileJava.outputs.files + classpath -= project.configurations.compile + classpath -= project.configurations.runtime + classpath += project.configurations.shadow + classpath += project.tasks.shadowJar.outputs.files + dependsOn project.tasks.shadowJar + } } } @@ -766,7 +837,26 @@ class BuildPlugin implements Plugin<Project> { additionalTest.dependsOn(project.tasks.testClasses) test.dependsOn(additionalTest) }); - return test + + project.plugins.withType(ShadowPlugin).whenPluginAdded { + /* + * We need somewhere to configure dependencies that we don't wish + * to shade into the jar. The shadow plugin creates a "shadow" + * configuration which is *almost* exactly that. It is never + * bundled into the shaded jar but is used for main source + * compilation. Unfortunately, by default it is not used for + * *test* source compilation and isn't used in tests at all. This + * change makes it available for test compilation. + * + * Note that this isn't going to work properly with qa projects + * but they have no business applying the shadow plugin in the + * first place. + */ + SourceSet testSourceSet = project.sourceSets.findByName('test') + if (testSourceSet != null) { + testSourceSet.compileClasspath += project.configurations.shadow + } + } } private static configurePrecommit(Project project) { diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy index eb4da8d1f314c..d76084bf22e07 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/plugin/PluginBuildPlugin.groovy @@ -18,11 +18,13 @@ */ package org.elasticsearch.gradle.plugin +import com.github.jengelman.gradle.plugins.shadow.ShadowPlugin import nebula.plugin.info.scm.ScmInfoPlugin import org.elasticsearch.gradle.BuildPlugin import org.elasticsearch.gradle.NoticeTask import org.elasticsearch.gradle.test.RestIntegTestTask import org.elasticsearch.gradle.test.RunTask +import org.gradle.api.InvalidUserDataException import org.gradle.api.JavaVersion import org.gradle.api.Project import org.gradle.api.Task @@ -46,6 +48,18 @@ public class PluginBuildPlugin extends BuildPlugin { @Override public void apply(Project project) { super.apply(project) + project.plugins.withType(ShadowPlugin).whenPluginAdded { + /* + * We've not tested these plugins together and we're fairly sure + * they aren't going to work properly as is *and* we're not really + * sure *why* you'd want to shade stuff in plugins. So we throw an + * exception here to make you come and read this comment. If you + * have a need for shadow while building plugins then know that you + * are probably going to have to fight with gradle for a while.... + */ + throw new InvalidUserDataException('elasticsearch.esplugin is not ' + + 'compatible with com.github.johnrengelman.shadow'); + } configureDependencies(project) // this afterEvaluate must happen before the afterEvaluate added by integTest creation, // so that the file name resolution for installing the plugin will be setup diff --git a/client/benchmark/build.gradle b/client/benchmark/build.gradle index 77867f5e273f2..0c3238d985346 100644 --- a/client/benchmark/build.gradle +++ b/client/benchmark/build.gradle @@ -17,18 +17,6 @@ * under the License.
*/ -buildscript { - repositories { - maven { - url 'https://plugins.gradle.org/m2/' - } - } - dependencies { - classpath 'com.github.jengelman.gradle.plugins:shadow:2.0.4' - } -} - - apply plugin: 'elasticsearch.build' // build an uberjar with all benchmarks apply plugin: 'com.github.johnrengelman.shadow' diff --git a/client/rest-high-level/build.gradle b/client/rest-high-level/build.gradle index 2fed806e98c57..a1260894bf7aa 100644 --- a/client/rest-high-level/build.gradle +++ b/client/rest-high-level/build.gradle @@ -21,17 +21,6 @@ import org.elasticsearch.gradle.precommit.PrecommitTasks import org.elasticsearch.gradle.test.RestIntegTestTask import org.gradle.api.internal.provider.Providers -buildscript { - repositories { - maven { - url 'https://plugins.gradle.org/m2/' - } - } - dependencies { - classpath 'com.github.jengelman.gradle.plugins:shadow:2.0.4' - } -} - apply plugin: 'elasticsearch.build' apply plugin: 'elasticsearch.rest-test' apply plugin: 'nebula.maven-base-publish' @@ -45,49 +34,6 @@ archivesBaseName = 'elasticsearch-rest-high-level-client' Task copyRestSpec = RestIntegTestTask.createCopyRestSpecTask(project, Providers.FALSE) test.dependsOn(copyRestSpec) -publishing { - publications { - nebula(MavenPublication) { - artifact shadowJar - artifactId = archivesBaseName - /* - * Configure the pom to include the "shadow" as compile dependencies - * because that is how we're using them but remove all other dependencies - * because they've been shaded into the jar. - */ - pom.withXml { XmlProvider xml -> - Node root = xml.asNode() - root.remove(root.dependencies) - Node dependenciesNode = root.appendNode('dependencies') - project.configurations.shadow.allDependencies.each { - if (false == it instanceof SelfResolvingDependency) { - Node dependencyNode = dependenciesNode.appendNode('dependency') - dependencyNode.appendNode('groupId', it.group) - dependencyNode.appendNode('artifactId', it.name) - dependencyNode.appendNode('version', it.version) - dependencyNode.appendNode('scope', 'compile') - } - } - } - } - } -} - -/* - * We need somewhere to configure dependencies that we don't wish to shade - * into the high level REST client. The shadow plugin creates a "shadow" - * configuration which is *almost* exactly that. It is never bundled into - * the shaded jar but is used for main source compilation. Unfortunately, - * by default it is not used for *test* source compilation and isn't used - * in tests at all. This change makes it available for test compilation. - * A change below makes it available for testing. - */ -sourceSets { - test { - compileClasspath += configurations.shadow - } -} - dependencies { /* * Everything in the "shadow" configuration is *not* copied into the @@ -124,48 +70,3 @@ forbiddenApisMain { signaturesURLs += [PrecommitTasks.getResource('/forbidden/http-signatures.txt')] signaturesURLs += [file('src/main/resources/forbidden/rest-high-level-signatures.txt').toURI().toURL()] } - -shadowJar { - classifier = null - mergeServiceFiles() -} - -// We don't need normal jar, we use shadow jar instead -jar.enabled = false -assemble.dependsOn shadowJar - -javadoc { - /* - * Bundle all of the javadoc from all of the shaded projects into this one - * so we don't *have* to publish javadoc for all of the "client" jars. 
- */ - configurations.compile.dependencies.all { Dependency dep -> - Project p = dependencyToProject(dep) - if (p != null) { - evaluationDependsOn(p.path) - source += p.sourceSets.main.allJava - } - } -} - -/* - * Use the jar for testing so we have tests of the bundled jar. - * Use the "shadow" configuration for testing because we need things - * in it. - */ -test { - classpath -= compileJava.outputs.files - classpath -= configurations.compile - classpath -= configurations.runtime - classpath += configurations.shadow - classpath += shadowJar.outputs.files - dependsOn shadowJar -} -integTestRunner { - classpath -= compileJava.outputs.files - classpath -= configurations.compile - classpath -= configurations.runtime - classpath += configurations.shadow - classpath += shadowJar.outputs.files - dependsOn shadowJar -} diff --git a/x-pack/plugin/sql/jdbc/build.gradle b/x-pack/plugin/sql/jdbc/build.gradle index 9d27c2030d676..a0d9b24c50729 100644 --- a/x-pack/plugin/sql/jdbc/build.gradle +++ b/x-pack/plugin/sql/jdbc/build.gradle @@ -1,15 +1,3 @@ - -buildscript { - repositories { - maven { - url 'https://plugins.gradle.org/m2/' - } - } - dependencies { - classpath 'com.github.jengelman.gradle.plugins:shadow:2.0.4' - } -} - apply plugin: 'elasticsearch.build' apply plugin: 'nebula.maven-base-publish' apply plugin: 'nebula.maven-scm' @@ -49,7 +37,6 @@ dependencyLicenses { } shadowJar { - classifier = null relocate 'com.fasterxml', 'org.elasticsearch.fasterxml' } @@ -70,26 +57,3 @@ artifacts { nodeps nodepsJar archives shadowJar } - -publishing { - publications { - nebula(MavenPublication) { - artifact shadowJar - pom.withXml { - // Nebula is mistakenly including all dependencies that are already shadowed into the shadow jar - asNode().remove(asNode().dependencies) - } - } - } -} - -assemble.dependsOn shadowJar - -// Use the jar for testing so the tests are more "real" -test { - classpath -= compileJava.outputs.files - classpath -= configurations.compile - classpath -= configurations.runtime - classpath += shadowJar.outputs.files - dependsOn shadowJar -} From cdf5c8aec2bdb80e5a50e5925e95a7bbd26c42c7 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Tue, 17 Jul 2018 15:49:20 -0400 Subject: [PATCH 082/107] Disable C2 from using AVX-512 on JDK 10 (#32138) The C2 compiler in JDK 10 appears to have an issue compiling to AVX-512 instructions (on hardware that supports such). As a workaround, this commit adds a JVM flag on JDK 10+ to disable the use of AVX-512 instructions until a fix is introduced to the JDK. Instead, we use a flag to enable AVX and AVX2 only. Note: Based on my reading of the C2 code, this flag does not appear to have any impact on hardware that does not support AVX2. I have tested this manually on an Intel Atom C2538 processor that supports neither AVX nor AVX2. I have also tested this manually on an Intel i5-3317U processor that supports AVX but not AVX2. 
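For context: jvm.options supports version-conditional entries, so an option can be restricted to particular JDK major versions. A minimal sketch of the syntax (the option values below are illustrative only, not part of this change):

    # applied only when running exactly JDK 8
    8:-Xmx2g
    # applied when running JDK 9 or later
    9-:-Djava.locale.providers=COMPAT
    # applied when running JDK 10 through JDK 11, inclusive
    10-11:-XX:UseAVX=2

The change below uses the open-ended form, 10-:, so the workaround takes effect on every JDK from 10 upward.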
--- distribution/src/config/jvm.options | 3 +++ 1 file changed, 3 insertions(+) diff --git a/distribution/src/config/jvm.options b/distribution/src/config/jvm.options index c5c0f44caeb7a..e486735eb8fb4 100644 --- a/distribution/src/config/jvm.options +++ b/distribution/src/config/jvm.options @@ -100,3 +100,6 @@ ${error.file} # due to internationalization enhancements in JDK 9 Elasticsearch need to set the provider to COMPAT otherwise # time/date parsing will break in an incompatible way for some date patterns and locals 9-:-Djava.locale.providers=COMPAT + +# temporary workaround for C2 bug with JDK 10 on hardware with AVX-512 +10-:-XX:UseAVX=2 From a835503323eec51cf21e3f19a447b9793cac4948 Mon Sep 17 00:00:00 2001 From: Ryan Ernst Date: Tue, 17 Jul 2018 13:14:46 -0700 Subject: [PATCH 083/107] Build: Make additional test deps of check (#32015) This commit moves additional unit test runners from being dependencies of the test task to dependencies of check. Without this change, reproduce lines are incorrect due to the additional test runner not matching any of the reproduce class/method info. closes #31964 --- .../elasticsearch/gradle/BuildPlugin.groovy | 2 +- server/build.gradle | 21 ++++++++++--------- 2 files changed, 12 insertions(+), 11 deletions(-) diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy index b5b5ec95becc9..219d00ba64032 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/BuildPlugin.groovy @@ -835,7 +835,7 @@ class BuildPlugin implements Plugin { additionalTest.configure(commonTestConfig(project)) additionalTest.configure(config) additionalTest.dependsOn(project.tasks.testClasses) - test.dependsOn(additionalTest) + project.check.dependsOn(additionalTest) }); project.plugins.withType(ShadowPlugin).whenPluginAdded { diff --git a/server/build.gradle b/server/build.gradle index da60bca5a3e81..7db073f43a5ca 100644 --- a/server/build.gradle +++ b/server/build.gradle @@ -156,6 +156,16 @@ if (isEclipse) { compileJava.options.compilerArgs << "-Xlint:-cast,-deprecation,-rawtypes,-try,-unchecked" compileTestJava.options.compilerArgs << "-Xlint:-cast,-deprecation,-rawtypes,-try,-unchecked" +// TODO: remove ScriptDocValuesMissingV6BehaviourTests in 7.0 +additionalTest('testScriptDocValuesMissingV6Behaviour'){ + include '**/ScriptDocValuesMissingV6BehaviourTests.class' + systemProperty 'es.scripting.exception_for_missing_value', 'false' +} +test { + // these are tested explicitly in separate test tasks + exclude '**/*ScriptDocValuesMissingV6BehaviourTests.class' +} + forbiddenPatterns { exclude '**/*.json' exclude '**/*.jmx' @@ -329,7 +339,7 @@ if (isEclipse == false || project.path == ":server-tests") { task integTest(type: RandomizedTestingTask, group: JavaBasePlugin.VERIFICATION_GROUP, description: 'Multi-node tests', - dependsOn: test.dependsOn.collect()) { + dependsOn: test.dependsOn) { configure(BuildPlugin.commonTestConfig(project)) classpath = project.test.classpath testClassesDirs = project.test.testClassesDirs @@ -339,12 +349,3 @@ if (isEclipse == false || project.path == ":server-tests") { integTest.mustRunAfter test } -// TODO: remove ScriptDocValuesMissingV6BehaviourTests in 7.0 -additionalTest('testScriptDocValuesMissingV6Behaviour'){ - include '**/ScriptDocValuesMissingV6BehaviourTests.class' - systemProperty 'es.scripting.exception_for_missing_value', 'false' -} -test { - // these are 
tested explicitly in separate test tasks - exclude '**/*ScriptDocValuesMissingV6BehaviourTests.class' -} From ff7ff36911da45790fab45acea12db57105b2812 Mon Sep 17 00:00:00 2001 From: Jack Conradson Date: Tue, 17 Jul 2018 13:54:49 -0700 Subject: [PATCH 084/107] Painless: Add PainlessClassBuilder (#32141) Several pieces of data in PainlessClass cannot be passed in at the time the PainlessClass is created so it must be "frozen" after all the data is collected. This means PainlessClass is currently serving two functions as both a builder and a set of data. This separates the two pieces into clearly distinct values. This change also removes the PainlessMethodKey in favor of a simple String. The goal is to have the painless method key be completely internal to the PainlessLookup eventually and this simplifies the way there. Note that this was added since PainlessClass and PainlessClassBuilder were already being changed instead of a follow up PR. --- .../java/org/elasticsearch/painless/Def.java | 3 +- .../elasticsearch/painless/FunctionRef.java | 8 +- .../org/elasticsearch/painless/Locals.java | 9 +-- .../painless/lookup/PainlessClass.java | 73 +++++------------ .../painless/lookup/PainlessClassBuilder.java | 70 +++++++++++++++++ .../lookup/PainlessLookupBuilder.java | 78 ++++++++++--------- .../painless/lookup/PainlessMethodKey.java | 75 ------------------ .../painless/node/ECallLocal.java | 4 +- .../painless/node/EFunctionRef.java | 4 +- .../painless/node/EListInit.java | 9 ++- .../elasticsearch/painless/node/EMapInit.java | 9 ++- .../elasticsearch/painless/node/ENewObj.java | 4 +- .../painless/node/PCallInvoke.java | 3 +- .../elasticsearch/painless/node/PField.java | 13 ++-- .../painless/node/PSubListShortcut.java | 6 +- .../painless/node/PSubMapShortcut.java | 6 +- .../elasticsearch/painless/node/SSource.java | 6 +- .../painless/node/SSubEachIterable.java | 5 +- .../painless/node/NodeToStringTests.java | 10 +-- 19 files changed, 178 insertions(+), 217 deletions(-) create mode 100644 modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClassBuilder.java delete mode 100644 modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessMethodKey.java diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java index fe11ff4814b90..f3388fc4bb268 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Def.java @@ -23,7 +23,6 @@ import org.elasticsearch.painless.lookup.PainlessLookup; import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.PainlessMethodKey; import java.lang.invoke.CallSite; import java.lang.invoke.MethodHandle; @@ -185,7 +184,7 @@ static MethodHandle arrayLengthGetter(Class arrayType) { * @throws IllegalArgumentException if no matching whitelisted method was found. 
*/ static PainlessMethod lookupMethodInternal(PainlessLookup painlessLookup, Class<?> receiverClass, String name, int arity) { - PainlessMethodKey key = new PainlessMethodKey(name, arity); + String key = PainlessLookupUtility.buildPainlessMethodKey(name, arity); // check whitelist for matching method for (Class<?> clazz = receiverClass; clazz != null; clazz = clazz.getSuperclass()) { PainlessClass struct = painlessLookup.getPainlessStructFromJavaClass(clazz); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/FunctionRef.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/FunctionRef.java index 925359fabc505..d64e833912f59 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/FunctionRef.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/FunctionRef.java @@ -23,7 +23,6 @@ import org.elasticsearch.painless.lookup.PainlessLookup; import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.PainlessMethodKey; import org.objectweb.asm.Type; import java.lang.invoke.MethodType; @@ -177,10 +176,11 @@ private static PainlessMethod lookup(PainlessLookup painlessLookup, Class<?> exp final PainlessMethod impl; // ctor ref if ("new".equals(call)) { - impl = struct.constructors.get(new PainlessMethodKey("<init>", method.arguments.size())); + impl = struct.constructors.get(PainlessLookupUtility.buildPainlessMethodKey("<init>", method.arguments.size())); } else { // look for a static impl first - PainlessMethod staticImpl = struct.staticMethods.get(new PainlessMethodKey(call, method.arguments.size())); + PainlessMethod staticImpl = + struct.staticMethods.get(PainlessLookupUtility.buildPainlessMethodKey(call, method.arguments.size())); if (staticImpl == null) { // otherwise a virtual impl final int arity; @@ -191,7 +191,7 @@ private static PainlessMethod lookup(PainlessLookup painlessLookup, Class<?> exp // receiver passed arity = method.arguments.size() - 1; } - impl = struct.methods.get(new PainlessMethodKey(call, arity)); + impl = struct.methods.get(PainlessLookupUtility.buildPainlessMethodKey(call, arity)); } else { impl = staticImpl; } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Locals.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Locals.java index e797740fed185..6c1010a34505a 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/Locals.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/Locals.java @@ -23,7 +23,6 @@ import org.elasticsearch.painless.lookup.PainlessLookup; import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.PainlessMethodKey; import java.util.Arrays; import java.util.Collection; @@ -144,7 +143,7 @@ public Variable getVariable(Location location, String name) { } /** Looks up a method. Returns null if the method does not exist.
*/ - public PainlessMethod getMethod(PainlessMethodKey key) { + public PainlessMethod getMethod(String key) { PainlessMethod method = lookupMethod(key); if (method != null) { return method; @@ -200,7 +199,7 @@ public PainlessLookup getPainlessLookup() { // variable name -> variable private Map variables; // method name+arity -> methods - private Map methods; + private Map methods; /** * Create a new Locals @@ -238,7 +237,7 @@ private Variable lookupVariable(Location location, String name) { } /** Looks up a method at this scope only. Returns null if the method does not exist. */ - private PainlessMethod lookupMethod(PainlessMethodKey key) { + private PainlessMethod lookupMethod(String key) { if (methods == null) { return null; } @@ -261,7 +260,7 @@ private void addMethod(PainlessMethod method) { if (methods == null) { methods = new HashMap<>(); } - methods.put(new PainlessMethodKey(method.name, method.arguments.size()), method); + methods.put(PainlessLookupUtility.buildPainlessMethodKey(method.name, method.arguments.size()), method); // TODO: check result } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClass.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClass.java index 7d84899b00e58..57b18bc60da44 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClass.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClass.java @@ -19,19 +19,20 @@ package org.elasticsearch.painless.lookup; +import org.objectweb.asm.Type; + import java.lang.invoke.MethodHandle; import java.util.Collections; -import java.util.HashMap; import java.util.Map; public final class PainlessClass { public final String name; public final Class clazz; - public final org.objectweb.asm.Type type; + public final Type type; - public final Map constructors; - public final Map staticMethods; - public final Map methods; + public final Map constructors; + public final Map staticMethods; + public final Map methods; public final Map staticMembers; public final Map members; @@ -41,63 +42,25 @@ public final class PainlessClass { public final PainlessMethod functionalMethod; - PainlessClass(String name, Class clazz, org.objectweb.asm.Type type) { + PainlessClass(String name, Class clazz, Type type, + Map constructors, Map staticMethods, Map methods, + Map staticMembers, Map members, + Map getters, Map setters, + PainlessMethod functionalMethod) { this.name = name; this.clazz = clazz; this.type = type; - constructors = new HashMap<>(); - staticMethods = new HashMap<>(); - methods = new HashMap<>(); - - staticMembers = new HashMap<>(); - members = new HashMap<>(); - - getters = new HashMap<>(); - setters = new HashMap<>(); - - functionalMethod = null; - } - - private PainlessClass(PainlessClass struct, PainlessMethod functionalMethod) { - name = struct.name; - clazz = struct.clazz; - type = struct.type; + this.constructors = Collections.unmodifiableMap(constructors); + this.staticMethods = Collections.unmodifiableMap(staticMethods); + this.methods = Collections.unmodifiableMap(methods); - constructors = Collections.unmodifiableMap(struct.constructors); - staticMethods = Collections.unmodifiableMap(struct.staticMethods); - methods = Collections.unmodifiableMap(struct.methods); + this.staticMembers = Collections.unmodifiableMap(staticMembers); + this.members = Collections.unmodifiableMap(members); - staticMembers = Collections.unmodifiableMap(struct.staticMembers); - members = 
Collections.unmodifiableMap(struct.members); - - getters = Collections.unmodifiableMap(struct.getters); - setters = Collections.unmodifiableMap(struct.setters); + this.getters = Collections.unmodifiableMap(getters); + this.setters = Collections.unmodifiableMap(setters); this.functionalMethod = functionalMethod; } - - public PainlessClass freeze(PainlessMethod functionalMethod) { - return new PainlessClass(this, functionalMethod); - } - - @Override - public boolean equals(Object object) { - if (this == object) { - return true; - } - - if (object == null || getClass() != object.getClass()) { - return false; - } - - PainlessClass struct = (PainlessClass)object; - - return name.equals(struct.name); - } - - @Override - public int hashCode() { - return name.hashCode(); - } } diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClassBuilder.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClassBuilder.java new file mode 100644 index 0000000000000..0eda3660f0b82 --- /dev/null +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessClassBuilder.java @@ -0,0 +1,70 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.elasticsearch.painless.lookup; + +import org.objectweb.asm.Type; + +import java.lang.invoke.MethodHandle; +import java.util.HashMap; +import java.util.Map; + +final class PainlessClassBuilder { + final String name; + final Class clazz; + final Type type; + + final Map constructors; + final Map staticMethods; + final Map methods; + + final Map staticMembers; + final Map members; + + final Map getters; + final Map setters; + + PainlessMethod functionalMethod; + + PainlessClassBuilder(String name, Class clazz, Type type) { + this.name = name; + this.clazz = clazz; + this.type = type; + + constructors = new HashMap<>(); + staticMethods = new HashMap<>(); + methods = new HashMap<>(); + + staticMembers = new HashMap<>(); + members = new HashMap<>(); + + getters = new HashMap<>(); + setters = new HashMap<>(); + + functionalMethod = null; + } + + PainlessClass build() { + return new PainlessClass(name, clazz, type, + constructors, staticMethods, methods, + staticMembers, members, + getters, setters, + functionalMethod); + } +} diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java index 5641eee1b5d9b..2150c0b210a59 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessLookupBuilder.java @@ -37,6 +37,8 @@ import java.util.Stack; import java.util.regex.Pattern; +import static org.elasticsearch.painless.lookup.PainlessLookupUtility.buildPainlessMethodKey; + public class PainlessLookupBuilder { private static final Pattern TYPE_NAME_PATTERN = Pattern.compile("^[_a-zA-Z][._a-zA-Z0-9]*$"); @@ -60,16 +62,16 @@ private static String buildFieldCacheKey(String structName, String fieldName, St } private final Map> painlessTypesToJavaClasses; - private final Map, PainlessClass> javaClassesToPainlessStructs; + private final Map, PainlessClassBuilder> javaClassesToPainlessClassBuilders; public PainlessLookupBuilder(List whitelists) { painlessTypesToJavaClasses = new HashMap<>(); - javaClassesToPainlessStructs = new HashMap<>(); + javaClassesToPainlessClassBuilders = new HashMap<>(); String origin = null; painlessTypesToJavaClasses.put("def", def.class); - javaClassesToPainlessStructs.put(def.class, new PainlessClass("def", Object.class, Type.getType(Object.class))); + javaClassesToPainlessClassBuilders.put(def.class, new PainlessClassBuilder("def", Object.class, Type.getType(Object.class))); try { // first iteration collects all the Painless type names that @@ -77,7 +79,8 @@ public PainlessLookupBuilder(List whitelists) { for (Whitelist whitelist : whitelists) { for (WhitelistClass whitelistStruct : whitelist.whitelistStructs) { String painlessTypeName = whitelistStruct.javaClassName.replace('$', '.'); - PainlessClass painlessStruct = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(painlessTypeName)); + PainlessClassBuilder painlessStruct = + javaClassesToPainlessClassBuilders.get(painlessTypesToJavaClasses.get(painlessTypeName)); if (painlessStruct != null && painlessStruct.clazz.getName().equals(whitelistStruct.javaClassName) == false) { throw new IllegalArgumentException("struct [" + painlessStruct.name + "] cannot represent multiple classes " + @@ -87,8 +90,8 @@ public PainlessLookupBuilder(List whitelists) { origin = whitelistStruct.origin; addStruct(whitelist.javaClassLoader, 
whitelistStruct); - painlessStruct = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(painlessTypeName)); - javaClassesToPainlessStructs.put(painlessStruct.clazz, painlessStruct); + painlessStruct = javaClassesToPainlessClassBuilders.get(painlessTypesToJavaClasses.get(painlessTypeName)); + javaClassesToPainlessClassBuilders.put(painlessStruct.clazz, painlessStruct); } } @@ -121,8 +124,8 @@ public PainlessLookupBuilder(List whitelists) { // goes through each Painless struct and determines the inheritance list, // and then adds all inherited types to the Painless struct's whitelist - for (Class javaClass : javaClassesToPainlessStructs.keySet()) { - PainlessClass painlessStruct = javaClassesToPainlessStructs.get(javaClass); + for (Class javaClass : javaClassesToPainlessClassBuilders.keySet()) { + PainlessClassBuilder painlessStruct = javaClassesToPainlessClassBuilders.get(javaClass); List painlessSuperStructs = new ArrayList<>(); Class javaSuperClass = painlessStruct.clazz.getSuperclass(); @@ -133,7 +136,7 @@ public PainlessLookupBuilder(List whitelists) { // adds super classes to the inheritance list if (javaSuperClass != null && javaSuperClass.isInterface() == false) { while (javaSuperClass != null) { - PainlessClass painlessSuperStruct = javaClassesToPainlessStructs.get(javaSuperClass); + PainlessClassBuilder painlessSuperStruct = javaClassesToPainlessClassBuilders.get(javaSuperClass); if (painlessSuperStruct != null) { painlessSuperStructs.add(painlessSuperStruct.name); @@ -149,7 +152,7 @@ public PainlessLookupBuilder(List whitelists) { Class javaInterfaceLookup = javaInteraceLookups.pop(); for (Class javaSuperInterface : javaInterfaceLookup.getInterfaces()) { - PainlessClass painlessInterfaceStruct = javaClassesToPainlessStructs.get(javaSuperInterface); + PainlessClassBuilder painlessInterfaceStruct = javaClassesToPainlessClassBuilders.get(javaSuperInterface); if (painlessInterfaceStruct != null) { String painlessInterfaceStructName = painlessInterfaceStruct.name; @@ -170,7 +173,7 @@ public PainlessLookupBuilder(List whitelists) { // copies methods and fields from Object into interface types if (painlessStruct.clazz.isInterface() || (def.class.getSimpleName()).equals(painlessStruct.name)) { - PainlessClass painlessObjectStruct = javaClassesToPainlessStructs.get(Object.class); + PainlessClassBuilder painlessObjectStruct = javaClassesToPainlessClassBuilders.get(Object.class); if (painlessObjectStruct != null) { copyStruct(painlessStruct.name, Collections.singletonList(painlessObjectStruct.name)); @@ -179,14 +182,9 @@ public PainlessLookupBuilder(List whitelists) { } // precompute runtime classes - for (PainlessClass painlessStruct : javaClassesToPainlessStructs.values()) { + for (PainlessClassBuilder painlessStruct : javaClassesToPainlessClassBuilders.values()) { addRuntimeClass(painlessStruct); } - - // copy all structs to make them unmodifiable for outside users: - for (Map.Entry,PainlessClass> entry : javaClassesToPainlessStructs.entrySet()) { - entry.setValue(entry.getValue().freeze(computeFunctionalInterfaceMethod(entry.getValue()))); - } } private void addStruct(ClassLoader whitelistClassLoader, WhitelistClass whitelistStruct) { @@ -223,12 +221,12 @@ private void addStruct(ClassLoader whitelistClassLoader, WhitelistClass whitelis } } - PainlessClass existingStruct = javaClassesToPainlessStructs.get(javaClass); + PainlessClassBuilder existingStruct = javaClassesToPainlessClassBuilders.get(javaClass); if (existingStruct == null) { - PainlessClass struct = new 
PainlessClass(painlessTypeName, javaClass, org.objectweb.asm.Type.getType(javaClass)); + PainlessClassBuilder struct = new PainlessClassBuilder(painlessTypeName, javaClass, org.objectweb.asm.Type.getType(javaClass)); painlessTypesToJavaClasses.put(painlessTypeName, javaClass); - javaClassesToPainlessStructs.put(javaClass, struct); + javaClassesToPainlessClassBuilders.put(javaClass, struct); } else if (existingStruct.clazz.equals(javaClass) == false) { throw new IllegalArgumentException("struct [" + painlessTypeName + "] is used to " + "illegally represent multiple java classes [" + whitelistStruct.javaClassName + "] and " + @@ -261,7 +259,7 @@ private void addStruct(ClassLoader whitelistClassLoader, WhitelistClass whitelis } private void addConstructor(String ownerStructName, WhitelistConstructor whitelistConstructor) { - PainlessClass ownerStruct = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(ownerStructName)); + PainlessClassBuilder ownerStruct = javaClassesToPainlessClassBuilders.get(painlessTypesToJavaClasses.get(ownerStructName)); if (ownerStruct == null) { throw new IllegalArgumentException("owner struct [" + ownerStructName + "] not defined for constructor with " + @@ -295,7 +293,7 @@ private void addConstructor(String ownerStructName, WhitelistConstructor whiteli " with constructor parameters " + whitelistConstructor.painlessParameterTypeNames, exception); } - PainlessMethodKey painlessMethodKey = new PainlessMethodKey("", whitelistConstructor.painlessParameterTypeNames.size()); + String painlessMethodKey = buildPainlessMethodKey("", whitelistConstructor.painlessParameterTypeNames.size()); PainlessMethod painlessConstructor = ownerStruct.constructors.get(painlessMethodKey); if (painlessConstructor == null) { @@ -321,7 +319,7 @@ private void addConstructor(String ownerStructName, WhitelistConstructor whiteli } private void addMethod(ClassLoader whitelistClassLoader, String ownerStructName, WhitelistMethod whitelistMethod) { - PainlessClass ownerStruct = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(ownerStructName)); + PainlessClassBuilder ownerStruct = javaClassesToPainlessClassBuilders.get(painlessTypesToJavaClasses.get(ownerStructName)); if (ownerStruct == null) { throw new IllegalArgumentException("owner struct [" + ownerStructName + "] not defined for method with " + @@ -400,8 +398,8 @@ private void addMethod(ClassLoader whitelistClassLoader, String ownerStructName, "and parameters " + whitelistMethod.painlessParameterTypeNames); } - PainlessMethodKey painlessMethodKey = - new PainlessMethodKey(whitelistMethod.javaMethodName, whitelistMethod.painlessParameterTypeNames.size()); + String painlessMethodKey = + buildPainlessMethodKey(whitelistMethod.javaMethodName, whitelistMethod.painlessParameterTypeNames.size()); if (javaAugmentedClass == null && Modifier.isStatic(javaMethod.getModifiers())) { PainlessMethod painlessMethod = ownerStruct.staticMethods.get(painlessMethodKey); @@ -459,7 +457,7 @@ private void addMethod(ClassLoader whitelistClassLoader, String ownerStructName, } private void addField(String ownerStructName, WhitelistField whitelistField) { - PainlessClass ownerStruct = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(ownerStructName)); + PainlessClassBuilder ownerStruct = javaClassesToPainlessClassBuilders.get(painlessTypesToJavaClasses.get(ownerStructName)); if (ownerStruct == null) { throw new IllegalArgumentException("owner struct [" + ownerStructName + "] not defined for method with " + @@ -540,14 +538,14 @@ 
private void addField(String ownerStructName, WhitelistField whitelistField) { } private void copyStruct(String struct, List children) { - final PainlessClass owner = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(struct)); + final PainlessClassBuilder owner = javaClassesToPainlessClassBuilders.get(painlessTypesToJavaClasses.get(struct)); if (owner == null) { throw new IllegalArgumentException("Owner struct [" + struct + "] not defined for copy."); } for (int count = 0; count < children.size(); ++count) { - final PainlessClass child = javaClassesToPainlessStructs.get(painlessTypesToJavaClasses.get(children.get(count))); + final PainlessClassBuilder child = javaClassesToPainlessClassBuilders.get(painlessTypesToJavaClasses.get(children.get(count))); if (child == null) { throw new IllegalArgumentException("Child struct [" + children.get(count) + "]" + @@ -559,8 +557,8 @@ private void copyStruct(String struct, List children) { " is not a super type of owner struct [" + owner.name + "] in copy."); } - for (Map.Entry kvPair : child.methods.entrySet()) { - PainlessMethodKey methodKey = kvPair.getKey(); + for (Map.Entry kvPair : child.methods.entrySet()) { + String methodKey = kvPair.getKey(); PainlessMethod method = kvPair.getValue(); if (owner.methods.get(methodKey) == null) { // TODO: some of these are no longer valid or outright don't work @@ -625,10 +623,10 @@ private void copyStruct(String struct, List children) { /** * Precomputes a more efficient structure for dynamic method/field access. */ - private void addRuntimeClass(final PainlessClass struct) { + private void addRuntimeClass(final PainlessClassBuilder struct) { // add all getters/setters - for (Map.Entry method : struct.methods.entrySet()) { - String name = method.getKey().name; + for (Map.Entry method : struct.methods.entrySet()) { + String name = method.getValue().name; PainlessMethod m = method.getValue(); if (m.arguments.size() == 0 && @@ -668,7 +666,7 @@ private void addRuntimeClass(final PainlessClass struct) { } /** computes the functional interface method for a class, or returns null */ - private PainlessMethod computeFunctionalInterfaceMethod(PainlessClass clazz) { + private PainlessMethod computeFunctionalInterfaceMethod(PainlessClassBuilder clazz) { if (!clazz.clazz.isInterface()) { return null; } @@ -703,7 +701,7 @@ private PainlessMethod computeFunctionalInterfaceMethod(PainlessClass clazz) { } // inspect the one method found from the reflection API, it should match the whitelist! 
java.lang.reflect.Method oneMethod = methods.get(0); - PainlessMethod painless = clazz.methods.get(new PainlessMethodKey(oneMethod.getName(), oneMethod.getParameterCount())); + PainlessMethod painless = clazz.methods.get(buildPainlessMethodKey(oneMethod.getName(), oneMethod.getParameterCount())); if (painless == null || painless.method.equals(org.objectweb.asm.commons.Method.getMethod(oneMethod)) == false) { throw new IllegalArgumentException("Class: " + clazz.name + " is functional but the functional " + "method is not whitelisted!"); @@ -712,7 +710,15 @@ private PainlessMethod computeFunctionalInterfaceMethod(PainlessClass clazz) { } public PainlessLookup build() { - return new PainlessLookup(painlessTypesToJavaClasses, javaClassesToPainlessStructs); + Map, PainlessClass> javaClassesToPainlessClasses = new HashMap<>(); + + // copy all structs to make them unmodifiable for outside users: + for (Map.Entry,PainlessClassBuilder> entry : javaClassesToPainlessClassBuilders.entrySet()) { + entry.getValue().functionalMethod = computeFunctionalInterfaceMethod(entry.getValue()); + javaClassesToPainlessClasses.put(entry.getKey(), entry.getValue().build()); + } + + return new PainlessLookup(painlessTypesToJavaClasses, javaClassesToPainlessClasses); } public Class getJavaClassFromPainlessType(String painlessType) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessMethodKey.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessMethodKey.java deleted file mode 100644 index 49413ab0c5fef..0000000000000 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/lookup/PainlessMethodKey.java +++ /dev/null @@ -1,75 +0,0 @@ -/* - * Licensed to Elasticsearch under one or more contributor - * license agreements. See the NOTICE file distributed with - * this work for additional information regarding copyright - * ownership. Elasticsearch licenses this file to you under - * the Apache License, Version 2.0 (the "License"); you may - * not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, - * software distributed under the License is distributed on an - * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY - * KIND, either express or implied. See the License for the - * specific language governing permissions and limitations - * under the License. - */ - -package org.elasticsearch.painless.lookup; - -import java.util.Objects; - -/** - * Key for looking up a method. - *
- * Methods are keyed on both name and arity, and can be overloaded once per arity. - * This allows signatures such as {@code String.indexOf(String) vs String.indexOf(String, int)}. - *
- * It is less flexible than full signature overloading where types can differ too, but - * better than just the name, and overloading types adds complexity to users, too. - */ -public final class PainlessMethodKey { - public final String name; - public final int arity; - - /** - * Create a new lookup key - * @param name name of the method - * @param arity number of parameters - */ - public PainlessMethodKey(String name, int arity) { - this.name = Objects.requireNonNull(name); - this.arity = arity; - } - - @Override - public int hashCode() { - final int prime = 31; - int result = 1; - result = prime * result + arity; - result = prime * result + name.hashCode(); - return result; - } - - @Override - public boolean equals(Object obj) { - if (this == obj) return true; - if (obj == null) return false; - if (getClass() != obj.getClass()) return false; - PainlessMethodKey other = (PainlessMethodKey) obj; - if (arity != other.arity) return false; - if (!name.equals(other.name)) return false; - return true; - } - - @Override - public String toString() { - StringBuilder sb = new StringBuilder(); - sb.append(name); - sb.append('/'); - sb.append(arity); - return sb.toString(); - } -} diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECallLocal.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECallLocal.java index dfed0ca47b482..098c75386e1a6 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECallLocal.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ECallLocal.java @@ -23,8 +23,8 @@ import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.PainlessMethodKey; import java.util.List; import java.util.Objects; @@ -58,7 +58,7 @@ void extractVariables(Set variables) { @Override void analyze(Locals locals) { - PainlessMethodKey methodKey = new PainlessMethodKey(name, arguments.size()); + String methodKey = PainlessLookupUtility.buildPainlessMethodKey(name, arguments.size()); method = locals.getMethod(methodKey); if (method == null) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java index d4eddb059a847..92b14a885a141 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EFunctionRef.java @@ -27,7 +27,6 @@ import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.PainlessMethodKey; import org.objectweb.asm.Type; import java.util.Objects; @@ -71,7 +70,8 @@ void analyze(Locals locals) { throw new IllegalArgumentException("Cannot convert function reference [" + type + "::" + call + "] " + "to [" + PainlessLookupUtility.anyTypeToPainlessTypeName(expected) + "], not a functional interface"); } - PainlessMethod delegateMethod = locals.getMethod(new PainlessMethodKey(call, interfaceMethod.arguments.size())); + PainlessMethod delegateMethod = + locals.getMethod(PainlessLookupUtility.buildPainlessMethodKey(call, interfaceMethod.arguments.size())); if (delegateMethod 
== null) { throw new IllegalArgumentException("Cannot convert function reference [" + type + "::" + call + "] " + "to [" + PainlessLookupUtility.anyTypeToPainlessTypeName(expected) + "], function not found"); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java index 7e923e5f90f1e..e0af653d2098a 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EListInit.java @@ -23,8 +23,8 @@ import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.PainlessMethodKey; import org.elasticsearch.painless.lookup.def; import org.objectweb.asm.Type; @@ -62,14 +62,15 @@ void analyze(Locals locals) { actual = ArrayList.class; - constructor = - locals.getPainlessLookup().getPainlessStructFromJavaClass(actual).constructors.get(new PainlessMethodKey("", 0)); + constructor = locals.getPainlessLookup().getPainlessStructFromJavaClass(actual).constructors + .get(PainlessLookupUtility.buildPainlessMethodKey("", 0)); if (constructor == null) { throw createError(new IllegalStateException("Illegal tree structure.")); } - method = locals.getPainlessLookup().getPainlessStructFromJavaClass(actual).methods.get(new PainlessMethodKey("add", 1)); + method = locals.getPainlessLookup().getPainlessStructFromJavaClass(actual).methods + .get(PainlessLookupUtility.buildPainlessMethodKey("add", 1)); if (method == null) { throw createError(new IllegalStateException("Illegal tree structure.")); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java index b350a758944d5..d81f08dc3cc54 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/EMapInit.java @@ -23,8 +23,8 @@ import org.elasticsearch.painless.Locals; import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.PainlessMethodKey; import org.elasticsearch.painless.lookup.def; import org.objectweb.asm.Type; @@ -68,14 +68,15 @@ void analyze(Locals locals) { actual = HashMap.class; - constructor = - locals.getPainlessLookup().getPainlessStructFromJavaClass(actual).constructors.get(new PainlessMethodKey("", 0)); + constructor = locals.getPainlessLookup().getPainlessStructFromJavaClass(actual).constructors + .get(PainlessLookupUtility.buildPainlessMethodKey("", 0)); if (constructor == null) { throw createError(new IllegalStateException("Illegal tree structure.")); } - method = locals.getPainlessLookup().getPainlessStructFromJavaClass(actual).methods.get(new PainlessMethodKey("put", 2)); + method = locals.getPainlessLookup().getPainlessStructFromJavaClass(actual).methods + .get(PainlessLookupUtility.buildPainlessMethodKey("put", 2)); if (method == null) { throw createError(new IllegalStateException("Illegal tree structure.")); diff --git 
a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java index cf6f040c9753a..c0d4433f7fb5f 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/ENewObj.java @@ -24,8 +24,8 @@ import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.lookup.PainlessClass; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.PainlessMethodKey; import org.objectweb.asm.Type; import java.util.List; @@ -65,7 +65,7 @@ void analyze(Locals locals) { } PainlessClass struct = locals.getPainlessLookup().getPainlessStructFromJavaClass(actual); - constructor = struct.constructors.get(new PainlessMethodKey("", arguments.size())); + constructor = struct.constructors.get(PainlessLookupUtility.buildPainlessMethodKey("", arguments.size())); if (constructor != null) { Class[] types = new Class[constructor.arguments.size()]; diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java index 445c053347ec3..cd5d648379193 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PCallInvoke.java @@ -26,7 +26,6 @@ import org.elasticsearch.painless.lookup.PainlessClass; import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.PainlessMethodKey; import org.elasticsearch.painless.lookup.def; import java.util.List; @@ -77,7 +76,7 @@ void analyze(Locals locals) { struct = locals.getPainlessLookup().getPainlessStructFromJavaClass(PainlessLookupUtility.getBoxedAnyType(prefix.actual)); } - PainlessMethodKey methodKey = new PainlessMethodKey(name, arguments.size()); + String methodKey = PainlessLookupUtility.buildPainlessMethodKey(name, arguments.size()); PainlessMethod method = prefix instanceof EStatic ? 
struct.staticMethods.get(methodKey) : struct.methods.get(methodKey); if (method != null) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java index 3f2f887956491..b5df74358d3e6 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PField.java @@ -27,7 +27,6 @@ import org.elasticsearch.painless.lookup.PainlessField; import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.PainlessMethodKey; import org.elasticsearch.painless.lookup.def; import java.util.List; @@ -74,16 +73,16 @@ void analyze(Locals locals) { if (field != null) { sub = new PSubField(location, field); } else { - PainlessMethod getter = struct.methods.get( - new PainlessMethodKey("get" + Character.toUpperCase(value.charAt(0)) + value.substring(1), 0)); + PainlessMethod getter = struct.methods.get(PainlessLookupUtility.buildPainlessMethodKey( + "get" + Character.toUpperCase(value.charAt(0)) + value.substring(1), 0)); if (getter == null) { - getter = struct.methods.get( - new PainlessMethodKey("is" + Character.toUpperCase(value.charAt(0)) + value.substring(1), 0)); + getter = struct.methods.get(PainlessLookupUtility.buildPainlessMethodKey( + "is" + Character.toUpperCase(value.charAt(0)) + value.substring(1), 0)); } - PainlessMethod setter = struct.methods.get( - new PainlessMethodKey("set" + Character.toUpperCase(value.charAt(0)) + value.substring(1), 1)); + PainlessMethod setter = struct.methods.get(PainlessLookupUtility.buildPainlessMethodKey( + "set" + Character.toUpperCase(value.charAt(0)) + value.substring(1), 1)); if (getter != null || setter != null) { sub = new PSubShortcut(location, value, PainlessLookupUtility.anyTypeToPainlessTypeName(prefix.actual), getter, setter); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubListShortcut.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubListShortcut.java index 0a3ab142ddc7c..3841b1fece115 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubListShortcut.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubListShortcut.java @@ -25,8 +25,8 @@ import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.WriterConstants; import org.elasticsearch.painless.lookup.PainlessClass; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.PainlessMethodKey; import java.util.Objects; import java.util.Set; @@ -56,8 +56,8 @@ void extractVariables(Set variables) { @Override void analyze(Locals locals) { - getter = struct.methods.get(new PainlessMethodKey("get", 1)); - setter = struct.methods.get(new PainlessMethodKey("set", 2)); + getter = struct.methods.get(PainlessLookupUtility.buildPainlessMethodKey("get", 1)); + setter = struct.methods.get(PainlessLookupUtility.buildPainlessMethodKey("set", 2)); if (getter != null && (getter.rtn == void.class || getter.arguments.size() != 1 || getter.arguments.get(0) != int.class)) { diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubMapShortcut.java 
b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubMapShortcut.java index f71e2ac5d1fa0..13a3b9c9b9429 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubMapShortcut.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/PSubMapShortcut.java @@ -24,8 +24,8 @@ import org.elasticsearch.painless.Location; import org.elasticsearch.painless.MethodWriter; import org.elasticsearch.painless.lookup.PainlessClass; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.PainlessMethodKey; import java.util.Objects; import java.util.Set; @@ -55,8 +55,8 @@ void extractVariables(Set variables) { @Override void analyze(Locals locals) { - getter = struct.methods.get(new PainlessMethodKey("get", 1)); - setter = struct.methods.get(new PainlessMethodKey("put", 2)); + getter = struct.methods.get(PainlessLookupUtility.buildPainlessMethodKey("get", 1)); + setter = struct.methods.get(PainlessLookupUtility.buildPainlessMethodKey("put", 2)); if (getter != null && (getter.rtn == void.class || getter.arguments.size() != 1)) { throw createError(new IllegalArgumentException("Illegal map get shortcut for type [" + struct.name + "].")); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java index cd473e2c84ec7..c354e78a961a3 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSource.java @@ -30,8 +30,8 @@ import org.elasticsearch.painless.SimpleChecksAdapter; import org.elasticsearch.painless.WriterConstants; import org.elasticsearch.painless.lookup.PainlessLookup; +import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.PainlessMethodKey; import org.elasticsearch.painless.node.SFunction.FunctionReserved; import org.objectweb.asm.ClassVisitor; import org.objectweb.asm.ClassWriter; @@ -165,12 +165,12 @@ void extractVariables(Set variables) { } public void analyze(PainlessLookup painlessLookup) { - Map methods = new HashMap<>(); + Map methods = new HashMap<>(); for (SFunction function : functions) { function.generateSignature(painlessLookup); - PainlessMethodKey key = new PainlessMethodKey(function.name, function.parameters.size()); + String key = PainlessLookupUtility.buildPainlessMethodKey(function.name, function.parameters.size()); if (methods.put(key, function.method) != null) { throw createError(new IllegalArgumentException("Duplicate functions with name [" + function.name + "].")); diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachIterable.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachIterable.java index cec1297a4c41c..798b30e2b6d51 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachIterable.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/node/SSubEachIterable.java @@ -29,7 +29,6 @@ import org.elasticsearch.painless.lookup.PainlessCast; import org.elasticsearch.painless.lookup.PainlessLookupUtility; import org.elasticsearch.painless.lookup.PainlessMethod; -import org.elasticsearch.painless.lookup.PainlessMethodKey; import 
org.elasticsearch.painless.lookup.def;
 import org.objectweb.asm.Label;
 import org.objectweb.asm.Opcodes;
@@ -77,8 +76,8 @@ void analyze(Locals locals) {
         if (expression.actual == def.class) {
             method = null;
         } else {
-            method = locals.getPainlessLookup().
-                getPainlessStructFromJavaClass(expression.actual).methods.get(new PainlessMethodKey("iterator", 0));
+            method = locals.getPainlessLookup().getPainlessStructFromJavaClass(expression.actual).methods
+                .get(PainlessLookupUtility.buildPainlessMethodKey("iterator", 0));
 
             if (method == null) {
                 throw createError(new IllegalArgumentException("Unable to create iterator for the type " +
diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/node/NodeToStringTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/node/NodeToStringTests.java
index 86d365e0fcc7a..cd3e4123e1267 100644
--- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/node/NodeToStringTests.java
+++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/node/NodeToStringTests.java
@@ -24,8 +24,8 @@
 import org.elasticsearch.painless.lookup.PainlessCast;
 import org.elasticsearch.painless.lookup.PainlessField;
 import org.elasticsearch.painless.lookup.PainlessLookupBuilder;
+import org.elasticsearch.painless.lookup.PainlessLookupUtility;
 import org.elasticsearch.painless.lookup.PainlessMethod;
-import org.elasticsearch.painless.lookup.PainlessMethodKey;
 import org.elasticsearch.painless.lookup.PainlessClass;
 import org.elasticsearch.painless.FeatureTest;
 import org.elasticsearch.painless.GenericElasticsearchScript;
@@ -405,14 +405,14 @@ public void testPSubBrace() {
     public void testPSubCallInvoke() {
         Location l = new Location(getTestName(), 0);
         PainlessClass c = painlessLookup.getPainlessStructFromJavaClass(Integer.class);
-        PainlessMethod m = c.methods.get(new PainlessMethodKey("toString", 0));
+        PainlessMethod m = c.methods.get(PainlessLookupUtility.buildPainlessMethodKey("toString", 0));
         PSubCallInvoke node = new PSubCallInvoke(l, m, null, emptyList());
         node.prefix = new EVariable(l, "a");
         assertEquals("(PSubCallInvoke (EVariable a) toString)", node.toString());
         assertEquals("(PSubNullSafeCallInvoke (PSubCallInvoke (EVariable a) toString))",
             new PSubNullSafeCallInvoke(l, node).toString());
 
         l = new Location(getTestName(), 1);
-        m = c.methods.get(new PainlessMethodKey("equals", 1));
+        m = c.methods.get(PainlessLookupUtility.buildPainlessMethodKey("equals", 1));
         node = new PSubCallInvoke(l, m, null, singletonList(new EVariable(l, "b")));
         node.prefix = new EVariable(l, "a");
         assertEquals("(PSubCallInvoke (EVariable a) equals (Args (EVariable b)))", node.toString());
@@ -502,8 +502,8 @@ public void testPSubMapShortcut() {
     public void testPSubShortcut() {
         Location l = new Location(getTestName(), 0);
         PainlessClass s = painlessLookup.getPainlessStructFromJavaClass(FeatureTest.class);
-        PainlessMethod getter = s.methods.get(new PainlessMethodKey("getX", 0));
-        PainlessMethod setter = s.methods.get(new PainlessMethodKey("setX", 1));
+        PainlessMethod getter = s.methods.get(PainlessLookupUtility.buildPainlessMethodKey("getX", 0));
+        PainlessMethod setter = s.methods.get(PainlessLookupUtility.buildPainlessMethodKey("setX", 1));
         PSubShortcut node = new PSubShortcut(l, "x", FeatureTest.class.getName(), getter, setter);
         node.prefix = new EVariable(l, "a");
         assertEquals("(PSubShortcut (EVariable a) x)", node.toString());
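The patches above fold the deleted PainlessMethodKey class into plain string keys built by PainlessLookupUtility.buildPainlessMethodKey. A minimal sketch of the replacement scheme, assuming the utility encodes keys exactly as the deleted class's toString() did (name + '/' + arity); the helper class below is illustrative, not the actual PainlessLookupUtility source:

    import java.util.HashMap;
    import java.util.Map;

    class MethodKeySketch {
        // Same "name/arity" shape the removed PainlessMethodKey.toString() produced.
        static String buildPainlessMethodKey(String methodName, int methodArity) {
            return methodName + "/" + methodArity;
        }

        public static void main(String[] args) {
            // A String-keyed map replaces the former PainlessMethodKey-keyed map, so
            // equality and hashing come from String rather than a hand-written key class.
            Map<String, String> methods = new HashMap<>();
            methods.put(buildPainlessMethodKey("toString", 0), "Integer#toString()");
            methods.put(buildPainlessMethodKey("equals", 1), "Integer#equals(Object)");
            System.out.println(methods.get(buildPainlessMethodKey("toString", 0)));
        }
    }

Overloads that differ in arity ("toString/0" vs. "toString/1") stay distinct, which preserves the name-plus-arity disambiguation the deleted key class provided while dropping its hand-rolled equals/hashCode.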
From 101458b9fa982e430cca584695c422be9557e7ac Mon Sep 17 00:00:00 2001
From: Nik Everett
Date: Tue, 17 Jul 2018 18:41:31 -0400
Subject: [PATCH 085/107] Build: Skip jar tests if jar disabled

The shadow plugin disables the jar task but we still attempted to extract
the jar to see if it had the right license and notice file. This skips the
extraction and those tests if the jar task is disabled for any reason, which
fixes projects that use the shadow plugin.
---
 build.gradle | 3 +++
 1 file changed, 3 insertions(+)

diff --git a/build.gradle b/build.gradle
index ccbb6898dc413..90a9d88ac8b5c 100644
--- a/build.gradle
+++ b/build.gradle
@@ -574,6 +574,7 @@ subprojects { project ->
         commandLine "${->new File(rootProject.compilerJavaHome, 'bin/jar')}",
           'xf', "${-> jarTask.outputs.files.singleFile}", 'META-INF/LICENSE.txt', 'META-INF/NOTICE.txt'
         workingDir destination
+        onlyIf {jarTask.enabled}
         doFirst {
           project.delete(destination)
           Files.createDirectories(destination)
@@ -582,6 +583,7 @@ subprojects { project ->
       final Task checkNotice = project.task("verify${jarTask.name.capitalize()}Notice") {
         dependsOn extract
+        onlyIf {jarTask.enabled}
         doLast {
           final List noticeLines = Files.readAllLines(project.noticeFile.toPath())
           final Path noticePath = extract.destination.resolve('META-INF/NOTICE.txt')
@@ -592,6 +594,7 @@ subprojects { project ->
       final Task checkLicense = project.task("verify${jarTask.name.capitalize()}License") {
         dependsOn extract
+        onlyIf {jarTask.enabled}
         doLast {
           final List licenseLines = Files.readAllLines(project.licenseFile.toPath())
           final Path licensePath = extract.destination.resolve('META-INF/LICENSE.txt')

From 88c4f6cdbd42768b3d68d59bd18856ff0d9899f9 Mon Sep 17 00:00:00 2001
From: Nik Everett
Date: Tue, 17 Jul 2018 20:25:27 -0400
Subject: [PATCH 086/107] Switch distribution to new style Requests (#30595)

In #29623 we added `Request` object flavored requests to the low level REST
client and in #30315 we deprecated the old `performRequest`s. This changes
all calls in the `distribution/archives/integ-test-zip` project to use the
new versions.
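Before the per-file diffs, a minimal sketch of the migration pattern this patch applies everywhere. The endpoint, parameter, and JSON body are invented for illustration; Request, addParameter, setJsonEntity, and performRequest are the low-level REST client calls the diffs below rely on:

    import org.elasticsearch.client.Request;
    import org.elasticsearch.client.Response;
    import org.elasticsearch.client.RestClient;

    import java.io.IOException;

    class RequestStyleSketch {
        // Old style (deprecated in #30315): method, endpoint, params, and entity
        // were passed as separate arguments:
        //   client.performRequest("POST", "/test/doc/1", emptyMap(),
        //       new StringEntity("{\"foo\": \"bar\"}", ContentType.APPLICATION_JSON));
        static Response indexExampleDoc(RestClient client) throws IOException {
            Request request = new Request("POST", "/test/doc/1"); // method + endpoint
            request.addParameter("refresh", "true");              // query params move to addParameter
            request.setJsonEntity("{\"foo\": \"bar\"}");          // body with JSON content type in one call
            return client.performRequest(request);                // single Request-flavored overload
        }
    }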
--- .../test/rest/CreatedLocationHeaderIT.java | 24 +++++----- .../test/rest/NodeRestUsageIT.java | 44 +++++++++---------- .../test/rest/RequestsWithoutContentIT.java | 37 ++++++++-------- .../rest/WaitForRefreshAndCloseTests.java | 42 +++++++++--------- 4 files changed, 74 insertions(+), 73 deletions(-) diff --git a/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/CreatedLocationHeaderIT.java b/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/CreatedLocationHeaderIT.java index 9c9b6af705a8e..71a41db80a253 100644 --- a/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/CreatedLocationHeaderIT.java +++ b/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/CreatedLocationHeaderIT.java @@ -19,14 +19,11 @@ package org.elasticsearch.test.rest; -import org.apache.http.entity.ContentType; -import org.apache.http.entity.StringEntity; import org.elasticsearch.client.Request; import org.elasticsearch.client.Response; import java.io.IOException; -import static java.util.Collections.emptyMap; import static java.util.Collections.singletonMap; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.startsWith; @@ -49,26 +46,31 @@ public void testIndexWithoutId() throws IOException { } public void testUpsert() throws IOException { - locationTestCase(client().performRequest("POST", "test/test/1/_update", emptyMap(), new StringEntity("{" - + "\"doc\": {\"test\": \"test\"}," - + "\"doc_as_upsert\": true}", ContentType.APPLICATION_JSON))); + Request request = new Request("POST", "test/test/1/_update"); + request.setJsonEntity("{" + + "\"doc\": {\"test\": \"test\"}," + + "\"doc_as_upsert\": true}"); + locationTestCase(client().performRequest(request)); } private void locationTestCase(String method, String url) throws IOException { - locationTestCase(client().performRequest(method, url, emptyMap(), - new StringEntity("{\"test\": \"test\"}", ContentType.APPLICATION_JSON))); + final Request request = new Request(method, url); + request.setJsonEntity("{\"test\": \"test\"}"); + locationTestCase(client().performRequest(request)); // we have to delete the index otherwise the second indexing request will route to the single shard and not produce a 201 final Response response = client().performRequest(new Request("DELETE", "test")); assertThat(response.getStatusLine().getStatusCode(), equalTo(200)); - locationTestCase(client().performRequest(method, url + "?routing=cat", emptyMap(), - new StringEntity("{\"test\": \"test\"}", ContentType.APPLICATION_JSON))); + final Request withRouting = new Request(method, url); + withRouting.addParameter("routing", "cat"); + withRouting.setJsonEntity("{\"test\": \"test\"}"); + locationTestCase(client().performRequest(withRouting)); } private void locationTestCase(Response response) throws IOException { assertEquals(201, response.getStatusLine().getStatusCode()); String location = response.getHeader("Location"); assertThat(location, startsWith("/test/test/")); - Response getResponse = client().performRequest("GET", location); + Response getResponse = client().performRequest(new Request("GET", location)); assertEquals(singletonMap("test", "test"), entityAsMap(getResponse).get("_source")); } diff --git a/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/NodeRestUsageIT.java b/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/NodeRestUsageIT.java index b94aa71b04029..818037f68a111 100644 --- 
a/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/NodeRestUsageIT.java +++ b/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/NodeRestUsageIT.java @@ -19,13 +19,11 @@ package org.elasticsearch.test.rest; -import org.apache.http.entity.ContentType; -import org.apache.http.entity.StringEntity; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; +import org.elasticsearch.client.Request; import java.io.IOException; -import java.util.Collections; import java.util.HashMap; import java.util.Map; @@ -39,8 +37,8 @@ public class NodeRestUsageIT extends ESRestTestCase { @SuppressWarnings("unchecked") public void testWithRestUsage() throws IOException { // First get the current usage figures - Response beforeResponse = client().performRequest("GET", - randomFrom("_nodes/usage", "_nodes/usage/rest_actions", "_nodes/usage/_all")); + String path = randomFrom("_nodes/usage", "_nodes/usage/rest_actions", "_nodes/usage/_all"); + Response beforeResponse = client().performRequest(new Request("GET", path)); Map beforeResponseBodyMap = entityAsMap(beforeResponse); assertThat(beforeResponseBodyMap, notNullValue()); Map before_nodesMap = (Map) beforeResponseBodyMap.get("_nodes"); @@ -80,24 +78,24 @@ public void testWithRestUsage() throws IOException { } // Do some requests to get some rest usage stats - client().performRequest("PUT", "/test"); - client().performRequest("POST", "/test/doc/1", Collections.emptyMap(), - new StringEntity("{ \"foo\": \"bar\"}", ContentType.APPLICATION_JSON)); - client().performRequest("POST", "/test/doc/2", Collections.emptyMap(), - new StringEntity("{ \"foo\": \"bar\"}", ContentType.APPLICATION_JSON)); - client().performRequest("POST", "/test/doc/3", Collections.emptyMap(), - new StringEntity("{ \"foo\": \"bar\"}", ContentType.APPLICATION_JSON)); - client().performRequest("GET", "/test/_search"); - client().performRequest("POST", "/test/doc/4", Collections.emptyMap(), - new StringEntity("{ \"foo\": \"bar\"}", ContentType.APPLICATION_JSON)); - client().performRequest("POST", "/test/_refresh"); - client().performRequest("GET", "/_cat/indices"); - client().performRequest("GET", "/_nodes"); - client().performRequest("GET", "/test/_search"); - client().performRequest("GET", "/_nodes/stats"); - client().performRequest("DELETE", "/test"); + client().performRequest(new Request("PUT", "/test")); + for (int i = 0; i < 3; i++) { + final Request index = new Request("POST", "/test/doc/1"); + index.setJsonEntity("{\"foo\": \"bar\"}"); + client().performRequest(index); + } + client().performRequest(new Request("GET", "/test/_search")); + final Request index4 = new Request("POST", "/test/doc/4"); + index4.setJsonEntity("{\"foo\": \"bar\"}"); + client().performRequest(index4); + client().performRequest(new Request("POST", "/test/_refresh")); + client().performRequest(new Request("GET", "/_cat/indices")); + client().performRequest(new Request("GET", "/_nodes")); + client().performRequest(new Request("GET", "/test/_search")); + client().performRequest(new Request("GET", "/_nodes/stats")); + client().performRequest(new Request("DELETE", "/test")); - Response response = client().performRequest("GET", "_nodes/usage"); + Response response = client().performRequest(new Request("GET", "_nodes/usage")); Map responseBodyMap = entityAsMap(response); assertThat(responseBodyMap, notNullValue()); Map _nodesMap = (Map) responseBodyMap.get("_nodes"); @@ -139,7 +137,7 @@ public void testWithRestUsage() throws 
IOException { public void testMetricsWithAll() throws IOException { ResponseException exception = expectThrows(ResponseException.class, - () -> client().performRequest("GET", "_nodes/usage/_all,rest_actions")); + () -> client().performRequest(new Request("GET", "_nodes/usage/_all,rest_actions"))); assertNotNull(exception); assertThat(exception.getMessage(), containsString("\"type\":\"illegal_argument_exception\"," + "\"reason\":\"request [_nodes/usage/_all,rest_actions] contains _all and individual metrics [_all,rest_actions]\"")); diff --git a/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/RequestsWithoutContentIT.java b/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/RequestsWithoutContentIT.java index ce72af26628a1..a6fc7b9cce18e 100644 --- a/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/RequestsWithoutContentIT.java +++ b/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/RequestsWithoutContentIT.java @@ -20,6 +20,7 @@ package org.elasticsearch.test.rest; import org.elasticsearch.client.ResponseException; +import org.elasticsearch.client.Request; import java.io.IOException; @@ -28,56 +29,56 @@ public class RequestsWithoutContentIT extends ESRestTestCase { public void testIndexMissingBody() throws IOException { - ResponseException responseException = expectThrows(ResponseException.class, () -> client().performRequest( - randomBoolean() ? "POST" : "PUT", "/idx/type/123")); + ResponseException responseException = expectThrows(ResponseException.class, () -> + client().performRequest(new Request(randomBoolean() ? "POST" : "PUT", "/idx/type/123"))); assertResponseException(responseException, "request body is required"); } public void testBulkMissingBody() throws IOException { - ResponseException responseException = expectThrows(ResponseException.class, () -> client().performRequest( - randomBoolean() ? "POST" : "PUT", "/_bulk")); + ResponseException responseException = expectThrows(ResponseException.class, () -> + client().performRequest(new Request(randomBoolean() ? "POST" : "PUT", "/_bulk"))); assertResponseException(responseException, "request body is required"); } public void testPutSettingsMissingBody() throws IOException { - ResponseException responseException = expectThrows(ResponseException.class, () -> client().performRequest( - "PUT", "/_settings")); + ResponseException responseException = expectThrows(ResponseException.class, () -> + client().performRequest(new Request("PUT", "/_settings"))); assertResponseException(responseException, "request body is required"); } public void testPutMappingsMissingBody() throws IOException { - ResponseException responseException = expectThrows(ResponseException.class, () -> client().performRequest( - randomBoolean() ? "POST" : "PUT", "/test_index/test_type/_mapping")); + ResponseException responseException = expectThrows(ResponseException.class, () -> + client().performRequest(new Request(randomBoolean() ? "POST" : "PUT", "/test_index/test_type/_mapping"))); assertResponseException(responseException, "request body is required"); } public void testPutIndexTemplateMissingBody() throws IOException { - ResponseException responseException = expectThrows(ResponseException.class, () -> client().performRequest( - randomBoolean() ? "PUT" : "POST", "/_template/my_template")); + ResponseException responseException = expectThrows(ResponseException.class, () -> + client().performRequest(new Request(randomBoolean() ? 
"PUT" : "POST", "/_template/my_template"))); assertResponseException(responseException, "request body is required"); } public void testMultiSearchMissingBody() throws IOException { - ResponseException responseException = expectThrows(ResponseException.class, () -> client().performRequest( - randomBoolean() ? "POST" : "GET", "/_msearch")); + ResponseException responseException = expectThrows(ResponseException.class, () -> + client().performRequest(new Request(randomBoolean() ? "POST" : "GET", "/_msearch"))); assertResponseException(responseException, "request body or source parameter is required"); } public void testPutPipelineMissingBody() throws IOException { - ResponseException responseException = expectThrows(ResponseException.class, () -> client().performRequest( - "PUT", "/_ingest/pipeline/my_pipeline")); + ResponseException responseException = expectThrows(ResponseException.class, () -> + client().performRequest(new Request("PUT", "/_ingest/pipeline/my_pipeline"))); assertResponseException(responseException, "request body or source parameter is required"); } public void testSimulatePipelineMissingBody() throws IOException { - ResponseException responseException = expectThrows(ResponseException.class, () -> client().performRequest( - randomBoolean() ? "POST" : "GET", "/_ingest/pipeline/my_pipeline/_simulate")); + ResponseException responseException = expectThrows(ResponseException.class, () -> + client().performRequest(new Request(randomBoolean() ? "POST" : "GET", "/_ingest/pipeline/my_pipeline/_simulate"))); assertResponseException(responseException, "request body or source parameter is required"); } public void testPutScriptMissingBody() throws IOException { - ResponseException responseException = expectThrows(ResponseException.class, () -> client().performRequest( - randomBoolean() ? "POST" : "PUT", "/_scripts/lang")); + ResponseException responseException = expectThrows(ResponseException.class, () -> + client().performRequest(new Request(randomBoolean() ? "POST" : "PUT", "/_scripts/lang"))); assertResponseException(responseException, "request body is required"); } diff --git a/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/WaitForRefreshAndCloseTests.java b/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/WaitForRefreshAndCloseTests.java index 0b1ad2a6dd9ab..fab809a51bcc2 100644 --- a/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/WaitForRefreshAndCloseTests.java +++ b/distribution/archives/integ-test-zip/src/test/java/org/elasticsearch/test/rest/WaitForRefreshAndCloseTests.java @@ -19,26 +19,21 @@ package org.elasticsearch.test.rest; -import org.apache.http.HttpEntity; -import org.apache.http.entity.ContentType; -import org.apache.http.entity.StringEntity; import org.apache.http.util.EntityUtils; import org.elasticsearch.action.ActionFuture; import org.elasticsearch.action.support.PlainActionFuture; import org.elasticsearch.client.Response; import org.elasticsearch.client.ResponseException; import org.elasticsearch.client.ResponseListener; +import org.elasticsearch.client.Request; import org.junit.After; import org.junit.Before; import java.io.IOException; import java.nio.charset.StandardCharsets; -import java.util.HashMap; import java.util.Locale; import java.util.Map; -import static java.util.Collections.emptyMap; - /** * Tests that wait for refresh is fired if the index is closed. 
*/ @@ -46,13 +41,14 @@ public class WaitForRefreshAndCloseTests extends ESRestTestCase { @Before public void setupIndex() throws IOException { try { - client().performRequest("DELETE", indexName()); + client().performRequest(new Request("DELETE", indexName())); } catch (ResponseException e) { // If we get an error, it should be because the index doesn't exist assertEquals(404, e.getResponse().getStatusLine().getStatusCode()); } - client().performRequest("PUT", indexName(), emptyMap(), - new StringEntity("{\"settings\":{\"refresh_interval\":-1}}", ContentType.APPLICATION_JSON)); + Request request = new Request("PUT", indexName()); + request.setJsonEntity("{\"settings\":{\"refresh_interval\":-1}}"); + client().performRequest(request); } @After @@ -69,17 +65,20 @@ private String docPath() { } public void testIndexAndThenClose() throws Exception { - closeWhileListenerEngaged(start("PUT", "", new StringEntity("{\"test\":\"test\"}", ContentType.APPLICATION_JSON))); + closeWhileListenerEngaged(start("PUT", "", "{\"test\":\"test\"}")); } public void testUpdateAndThenClose() throws Exception { - client().performRequest("PUT", docPath(), emptyMap(), new StringEntity("{\"test\":\"test\"}", ContentType.APPLICATION_JSON)); - closeWhileListenerEngaged(start("POST", "/_update", - new StringEntity("{\"doc\":{\"name\":\"test\"}}", ContentType.APPLICATION_JSON))); + Request request = new Request("PUT", docPath()); + request.setJsonEntity("{\"test\":\"test\"}"); + client().performRequest(request); + closeWhileListenerEngaged(start("POST", "/_update", "{\"doc\":{\"name\":\"test\"}}")); } public void testDeleteAndThenClose() throws Exception { - client().performRequest("PUT", docPath(), emptyMap(), new StringEntity("{\"test\":\"test\"}", ContentType.APPLICATION_JSON)); + Request request = new Request("PUT", docPath()); + request.setJsonEntity("{\"test\":\"test\"}"); + client().performRequest(request); closeWhileListenerEngaged(start("DELETE", "", null)); } @@ -88,7 +87,7 @@ private void closeWhileListenerEngaged(ActionFuture future) throws Excep assertBusy(() -> { Map stats; try { - stats = entityAsMap(client().performRequest("GET", indexName() + "/_stats/refresh")); + stats = entityAsMap(client().performRequest(new Request("GET", indexName() + "/_stats/refresh"))); } catch (IOException e) { throw new RuntimeException(e); } @@ -105,18 +104,19 @@ private void closeWhileListenerEngaged(ActionFuture future) throws Excep }); // Close the index. That should flush the listener. - client().performRequest("POST", indexName() + "/_close"); + client().performRequest(new Request("POST", indexName() + "/_close")); // The request shouldn't fail. It certainly shouldn't hang. 
future.get();
     }
 
-    private ActionFuture start(String method, String path, HttpEntity body) {
+    private ActionFuture start(String method, String path, String body) {
         PlainActionFuture future = new PlainActionFuture<>();
-        Map params = new HashMap<>();
-        params.put("refresh", "wait_for");
-        params.put("error_trace", "");
-        client().performRequestAsync(method, docPath() + path, params, body, new ResponseListener() {
+        Request request = new Request(method, docPath() + path);
+        request.addParameter("refresh", "wait_for");
+        request.addParameter("error_trace", "");
+        request.setJsonEntity(body);
+        client().performRequestAsync(request, new ResponseListener() {
             @Override
             public void onSuccess(Response response) {
                 try {

From 8486f24ed97bc786949c562e511071c063598099 Mon Sep 17 00:00:00 2001
From: Nhat Nguyen
Date: Tue, 17 Jul 2018 21:59:48 -0400
Subject: [PATCH 087/107] Remove versionType from translog (#31945)

With the introduction of sequence numbers, we no longer use versionType to
resolve out-of-order collisions in replication and recovery requests. This
PR removes the versionType from the translog. We can only remove it in 7.0
because it is still required in a mixed cluster between 6.x and 5.x.
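To make the wire-compatibility constraint concrete, here is a minimal sketch of the format-gated serialization that the Translog.Index and Translog.Delete changes below implement. The class name, stream types, and constant values are illustrative stand-ins rather than the actual Translog code; only the gating pattern mirrors the diff:

    import java.io.DataInputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;

    class FormatGatedCodecSketch {
        // The newer format simply stops carrying the version-type byte.
        static final int FORMAT_OLD = 8;
        static final int FORMAT_NO_VERSION_TYPE = FORMAT_OLD + 1;
        static final byte EXTERNAL_VERSION_TYPE = 1; // legacy byte old readers expect

        // Writers pick the format from the destination's version: a pre-7.0 node
        // still receives the old layout, including the now-unused version type.
        static void writeOp(DataOutputStream out, boolean targetBefore70, long version) throws IOException {
            int format = targetBefore70 ? FORMAT_OLD : FORMAT_NO_VERSION_TYPE;
            out.writeInt(format);
            out.writeLong(version);
            if (format < FORMAT_NO_VERSION_TYPE) {
                out.writeByte(EXTERNAL_VERSION_TYPE); // written only for old-format readers
            }
        }

        // Readers consume (and discard) the byte only when the recorded format
        // includes it, so both layouts stay readable during a rolling upgrade.
        static long readOp(DataInputStream in) throws IOException {
            int format = in.readInt();
            long version = in.readLong();
            if (format < FORMAT_NO_VERSION_TYPE) {
                in.readByte(); // skip the obsolete version-type byte
            }
            return version;
        }
    }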
---
 .../action/bulk/TransportShardBulkAction.java |  5 +-
 .../org/elasticsearch/index/VersionType.java  | 15 -----
 .../elasticsearch/index/engine/Engine.java    |  2 +
 .../index/engine/InternalEngine.java          | 25 +-------
 .../elasticsearch/index/shard/IndexShard.java | 23 ++++---
 .../index/translog/Translog.java              | 61 ++++++++-----------
 .../index/translog/TranslogWriter.java        |  8 ++-
 .../resync/ResyncReplicationRequestTests.java |  4 +-
 .../index/engine/InternalEngineTests.java     | 30 ++++-----
 .../RecoveryDuringReplicationTests.java       |  1 -
 .../index/shard/IndexShardTests.java          | 27 ++++----
 .../index/translog/TranslogTests.java         | 25 ++++----
 .../indices/recovery/RecoveryTests.java       | 10 +--
 .../index/engine/EngineTestCase.java          |  9 +--
 .../index/engine/TranslogHandler.java         |  6 +-
 .../index/shard/IndexShardTestCase.java       |  4 +-
 16 files changed, 99 insertions(+), 156 deletions(-)

diff --git a/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java b/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java
index a78421a2328cb..15a98077eac4a 100644
--- a/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java
+++ b/server/src/main/java/org/elasticsearch/action/bulk/TransportShardBulkAction.java
@@ -523,13 +523,12 @@ private static Engine.Result performOpOnReplica(DocWriteResponse primaryResponse
                     indexRequest.type(), indexRequest.id(), indexRequest.source(), indexRequest.getContentType())
                     .routing(indexRequest.routing());
                 result = replica.applyIndexOperationOnReplica(primaryResponse.getSeqNo(), primaryResponse.getVersion(),
-                    indexRequest.versionType().versionTypeForReplicationAndRecovery(), indexRequest.getAutoGeneratedTimestamp(),
-                    indexRequest.isRetry(), sourceToParse);
+                    indexRequest.getAutoGeneratedTimestamp(), indexRequest.isRetry(), sourceToParse);
                 break;
             case DELETE:
                 DeleteRequest deleteRequest = (DeleteRequest) docWriteRequest;
                 result = replica.applyDeleteOperationOnReplica(primaryResponse.getSeqNo(), primaryResponse.getVersion(),
-                    deleteRequest.type(), deleteRequest.id(), deleteRequest.versionType().versionTypeForReplicationAndRecovery());
+                    deleteRequest.type(), deleteRequest.id());
                 break;
             default:
                 throw new IllegalStateException("Unexpected request operation type on replica: "
diff --git a/server/src/main/java/org/elasticsearch/index/VersionType.java b/server/src/main/java/org/elasticsearch/index/VersionType.java
index b350252dc9c41..eb05ff809076c 100644
--- a/server/src/main/java/org/elasticsearch/index/VersionType.java
+++ b/server/src/main/java/org/elasticsearch/index/VersionType.java
@@ -85,13 +85,6 @@ public boolean validateVersionForReads(long version) {
             // not allowing Versions.NOT_FOUND as it is not a valid input value.
             return version > 0L || version == Versions.MATCH_ANY;
         }
-
-        @Override
-        public VersionType versionTypeForReplicationAndRecovery() {
-            // replicas get the version from the primary after increment. The same version is stored in
-            // the transaction log. -> the should use the external semantics.
-            return EXTERNAL;
-        }
     },
     EXTERNAL((byte) 1) {
         @Override
@@ -333,14 +326,6 @@ public byte getValue() {
      */
     public abstract boolean validateVersionForReads(long version);
 
-    /**
-     * Some version types require different semantics for primary and replicas. This version allows
-     * the type to override the default behavior.
-     */
-    public VersionType versionTypeForReplicationAndRecovery() {
-        return this;
-    }
-
     public static VersionType fromString(String versionType) {
         if ("internal".equals(versionType)) {
             return INTERNAL;
diff --git a/server/src/main/java/org/elasticsearch/index/engine/Engine.java b/server/src/main/java/org/elasticsearch/index/engine/Engine.java
index 8a560e02fe449..53a7baa60f6ca 100644
--- a/server/src/main/java/org/elasticsearch/index/engine/Engine.java
+++ b/server/src/main/java/org/elasticsearch/index/engine/Engine.java
@@ -1168,6 +1168,7 @@ public static class Index extends Operation {
         public Index(Term uid, ParsedDocument doc, long seqNo, long primaryTerm, long version, VersionType versionType,
                      Origin origin, long startTime, long autoGeneratedIdTimestamp, boolean isRetry) {
             super(uid, seqNo, primaryTerm, version, versionType, origin, startTime);
+            assert (origin == Origin.PRIMARY) == (versionType != null) : "invalid version_type=" + versionType + " for origin=" + origin;
             this.doc = doc;
             this.isRetry = isRetry;
             this.autoGeneratedIdTimestamp = autoGeneratedIdTimestamp;
@@ -1245,6 +1246,7 @@ public static class Delete extends Operation {
         public Delete(String type, String id, Term uid, long seqNo, long primaryTerm, long version, VersionType versionType,
                       Origin origin, long startTime) {
             super(uid, seqNo, primaryTerm, version, versionType, origin, startTime);
+            assert (origin == Origin.PRIMARY) == (versionType != null) : "invalid version_type=" + versionType + " for origin=" + origin;
             this.type = Objects.requireNonNull(type);
             this.id = Objects.requireNonNull(id);
         }
diff --git a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java
index 92c64d415ad0b..bdcfb2fc7313f 100644
--- a/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java
+++ b/server/src/main/java/org/elasticsearch/index/engine/InternalEngine.java
@@ -691,7 +691,7 @@ private boolean canOptimizeAddDocument(Index index) {
                 return true;
             case PEER_RECOVERY:
             case REPLICA:
-                assert index.version() == 1 && index.versionType() == VersionType.EXTERNAL
+                assert index.version() == 1 && index.versionType() == null
                     : "version: " + index.version() + " type: " + index.versionType();
                 return true;
             case LOCAL_TRANSLOG_RECOVERY:
@@ -704,20 +704,6 @@ private boolean canOptimizeAddDocument(Index index) {
         return false;
     }
 
-    private boolean assertVersionType(final Engine.Operation operation) {
-        if
(operation.origin() == Operation.Origin.REPLICA || - operation.origin() == Operation.Origin.PEER_RECOVERY || - operation.origin() == Operation.Origin.LOCAL_TRANSLOG_RECOVERY) { - // ensure that replica operation has expected version type for replication - // ensure that versionTypeForReplicationAndRecovery is idempotent - assert operation.versionType() == operation.versionType().versionTypeForReplicationAndRecovery() - : "unexpected version type in request from [" + operation.origin().name() + "] " + - "found [" + operation.versionType().name() + "] " + - "expected [" + operation.versionType().versionTypeForReplicationAndRecovery().name() + "]"; - } - return true; - } - private boolean assertIncomingSequenceNumber(final Engine.Operation.Origin origin, final long seqNo) { if (origin == Operation.Origin.PRIMARY) { assert assertOriginPrimarySequenceNumber(seqNo); @@ -757,7 +743,6 @@ public IndexResult index(Index index) throws IOException { try (ReleasableLock releasableLock = readLock.acquire()) { ensureOpen(); assert assertIncomingSequenceNumber(index.origin(), index.seqNo()); - assert assertVersionType(index); try (Releasable ignored = versionMap.acquireLock(index.uid().bytes()); Releasable indexThrottle = doThrottle ? () -> {} : throttle.acquireThrottle()) { lastWriteNanos = index.startTime(); @@ -860,9 +845,6 @@ private IndexingStrategy planIndexingAsNonPrimary(Index index) throws IOExceptio "max_seqno non-append-only [" + maxSeqNoOfNonAppendOnlyOperations.get() + "], seqno of index [" + index.seqNo() + "]"; } versionMap.enforceSafeAccess(); - // drop out of order operations - assert index.versionType().versionTypeForReplicationAndRecovery() == index.versionType() : - "resolving out of order delivery based on versioning but version type isn't fit for it. got [" + index.versionType() + "]"; // unlike the primary, replicas don't really care to about creation status of documents // this allows to ignore the case where a document was found in the live version maps in // a delete state and return false for the created flag in favor of code simplicity @@ -1096,7 +1078,6 @@ private void updateDocs(final Term uid, final List docs, public DeleteResult delete(Delete delete) throws IOException { versionMap.enforceSafeAccess(); assert Objects.equals(delete.uid().field(), IdFieldMapper.NAME) : delete.uid().field(); - assert assertVersionType(delete); assert assertIncomingSequenceNumber(delete.origin(), delete.seqNo()); final DeleteResult deleteResult; // NOTE: we don't throttle this when merges fall behind because delete-by-id does not create new segments: @@ -1149,10 +1130,6 @@ public DeleteResult delete(Delete delete) throws IOException { private DeletionStrategy planDeletionAsNonPrimary(Delete delete) throws IOException { assert delete.origin() != Operation.Origin.PRIMARY : "planing as primary but got " + delete.origin(); - // drop out of order operations - assert delete.versionType().versionTypeForReplicationAndRecovery() == delete.versionType() : - "resolving out of order delivery based on versioning but version type isn't fit for it. 
got [" - + delete.versionType() + "]"; maxSeqNoOfNonAppendOnlyOperations.updateAndGet(curr -> Math.max(delete.seqNo(), curr)); assert maxSeqNoOfNonAppendOnlyOperations.get() >= delete.seqNo() : "max_seqno of non-append-only was not updated;" + "max_seqno non-append-only [" + maxSeqNoOfNonAppendOnlyOperations.get() + "], seqno of delete [" + delete.seqNo() + "]"; diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java index 5bd8f9abc6e04..b07e22875e81f 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -645,22 +645,22 @@ private IndexShardState changeState(IndexShardState newState, String reason) { public Engine.IndexResult applyIndexOperationOnPrimary(long version, VersionType versionType, SourceToParse sourceToParse, long autoGeneratedTimestamp, boolean isRetry) throws IOException { + assert versionType.validateVersionForWrites(version); return applyIndexOperation(SequenceNumbers.UNASSIGNED_SEQ_NO, primaryTerm, version, versionType, autoGeneratedTimestamp, isRetry, Engine.Operation.Origin.PRIMARY, sourceToParse); } - public Engine.IndexResult applyIndexOperationOnReplica(long seqNo, long version, VersionType versionType, - long autoGeneratedTimeStamp, boolean isRetry, SourceToParse sourceToParse) + public Engine.IndexResult applyIndexOperationOnReplica(long seqNo, long version, long autoGeneratedTimeStamp, + boolean isRetry, SourceToParse sourceToParse) throws IOException { - return applyIndexOperation(seqNo, primaryTerm, version, versionType, autoGeneratedTimeStamp, isRetry, + return applyIndexOperation(seqNo, primaryTerm, version, null, autoGeneratedTimeStamp, isRetry, Engine.Operation.Origin.REPLICA, sourceToParse); } - private Engine.IndexResult applyIndexOperation(long seqNo, long opPrimaryTerm, long version, VersionType versionType, + private Engine.IndexResult applyIndexOperation(long seqNo, long opPrimaryTerm, long version, @Nullable VersionType versionType, long autoGeneratedTimeStamp, boolean isRetry, Engine.Operation.Origin origin, SourceToParse sourceToParse) throws IOException { assert opPrimaryTerm <= this.primaryTerm : "op term [ " + opPrimaryTerm + " ] > shard term [" + this.primaryTerm + "]"; - assert versionType.validateVersionForWrites(version); ensureWriteAllowed(origin); Engine.Index operation; try { @@ -736,19 +736,18 @@ private Engine.NoOpResult noOp(Engine engine, Engine.NoOp noOp) { public Engine.DeleteResult applyDeleteOperationOnPrimary(long version, String type, String id, VersionType versionType) throws IOException { + assert versionType.validateVersionForWrites(version); return applyDeleteOperation(SequenceNumbers.UNASSIGNED_SEQ_NO, primaryTerm, version, type, id, versionType, Engine.Operation.Origin.PRIMARY); } - public Engine.DeleteResult applyDeleteOperationOnReplica(long seqNo, long version, String type, String id, - VersionType versionType) throws IOException { - return applyDeleteOperation(seqNo, primaryTerm, version, type, id, versionType, Engine.Operation.Origin.REPLICA); + public Engine.DeleteResult applyDeleteOperationOnReplica(long seqNo, long version, String type, String id) throws IOException { + return applyDeleteOperation(seqNo, primaryTerm, version, type, id, null, Engine.Operation.Origin.REPLICA); } private Engine.DeleteResult applyDeleteOperation(long seqNo, long opPrimaryTerm, long version, String type, String id, - VersionType versionType, 
Engine.Operation.Origin origin) throws IOException { + @Nullable VersionType versionType, Engine.Operation.Origin origin) throws IOException { assert opPrimaryTerm <= this.primaryTerm : "op term [ " + opPrimaryTerm + " ] > shard term [" + this.primaryTerm + "]"; - assert versionType.validateVersionForWrites(version); ensureWriteAllowed(origin); // When there is a single type, the unique identifier is only composed of the _id, // so there is no way to differenciate foo#1 from bar#1. This is especially an issue @@ -1211,14 +1210,14 @@ public Engine.Result applyTranslogOperation(Translog.Operation operation, Engine // we set canHaveDuplicates to true all the time such that we de-optimze the translog case and ensure that all // autoGeneratedID docs that are coming from the primary are updated correctly. result = applyIndexOperation(index.seqNo(), index.primaryTerm(), index.version(), - index.versionType().versionTypeForReplicationAndRecovery(), index.getAutoGeneratedIdTimestamp(), true, origin, + null, index.getAutoGeneratedIdTimestamp(), true, origin, source(shardId.getIndexName(), index.type(), index.id(), index.source(), XContentHelper.xContentType(index.source())).routing(index.routing())); break; case DELETE: final Translog.Delete delete = (Translog.Delete) operation; result = applyDeleteOperation(delete.seqNo(), delete.primaryTerm(), delete.version(), delete.type(), delete.id(), - delete.versionType().versionTypeForReplicationAndRecovery(), origin); + null, origin); break; case NO_OP: final Translog.NoOp noOp = (Translog.NoOp) operation; diff --git a/server/src/main/java/org/elasticsearch/index/translog/Translog.java b/server/src/main/java/org/elasticsearch/index/translog/Translog.java index 63055d933e43e..31404b7874a92 100644 --- a/server/src/main/java/org/elasticsearch/index/translog/Translog.java +++ b/server/src/main/java/org/elasticsearch/index/translog/Translog.java @@ -1011,7 +1011,8 @@ public static class Index implements Operation { public static final int FORMAT_6_0 = 8; // since 6.0.0 public static final int FORMAT_NO_PARENT = FORMAT_6_0 + 1; // since 7.0 - public static final int SERIALIZATION_FORMAT = FORMAT_NO_PARENT; + public static final int FORMAT_NO_VERSION_TYPE = FORMAT_NO_PARENT + 1; + public static final int SERIALIZATION_FORMAT = FORMAT_NO_VERSION_TYPE; private final String id; private final long autoGeneratedIdTimestamp; @@ -1019,7 +1020,6 @@ public static class Index implements Operation { private final long seqNo; private final long primaryTerm; private final long version; - private final VersionType versionType; private final BytesReference source; private final String routing; @@ -1034,8 +1034,9 @@ private Index(final StreamInput in) throws IOException { in.readOptionalString(); // _parent } this.version = in.readLong(); - this.versionType = VersionType.fromValue(in.readByte()); - assert versionType.validateVersionForWrites(this.version) : "invalid version for writes: " + this.version; + if (format < FORMAT_NO_VERSION_TYPE) { + in.readByte(); // _version_type + } this.autoGeneratedIdTimestamp = in.readLong(); seqNo = in.readLong(); primaryTerm = in.readLong(); @@ -1049,15 +1050,14 @@ public Index(Engine.Index index, Engine.IndexResult indexResult) { this.seqNo = indexResult.getSeqNo(); this.primaryTerm = index.primaryTerm(); this.version = indexResult.getVersion(); - this.versionType = index.versionType(); this.autoGeneratedIdTimestamp = index.getAutoGeneratedIdTimestamp(); } public Index(String type, String id, long seqNo, long primaryTerm, byte[] source) { - 
this(type, id, seqNo, primaryTerm, Versions.MATCH_ANY, VersionType.INTERNAL, source, null, -1); + this(type, id, seqNo, primaryTerm, Versions.MATCH_ANY, source, null, -1); } - public Index(String type, String id, long seqNo, long primaryTerm, long version, VersionType versionType, + public Index(String type, String id, long seqNo, long primaryTerm, long version, byte[] source, String routing, long autoGeneratedIdTimestamp) { this.type = type; this.id = id; @@ -1065,7 +1065,6 @@ public Index(String type, String id, long seqNo, long primaryTerm, long version, this.seqNo = seqNo; this.primaryTerm = primaryTerm; this.version = version; - this.versionType = versionType; this.routing = routing; this.autoGeneratedIdTimestamp = autoGeneratedIdTimestamp; } @@ -1110,24 +1109,22 @@ public long version() { return this.version; } - public VersionType versionType() { - return versionType; - } - @Override public Source getSource() { return new Source(source, routing); } private void write(final StreamOutput out) throws IOException { - out.writeVInt(SERIALIZATION_FORMAT); + final int format = out.getVersion().onOrAfter(Version.V_7_0_0_alpha1) ? SERIALIZATION_FORMAT : FORMAT_6_0; + out.writeVInt(format); out.writeString(id); out.writeString(type); out.writeBytesReference(source); out.writeOptionalString(routing); out.writeLong(version); - - out.writeByte(versionType.getValue()); + if (format < FORMAT_NO_VERSION_TYPE) { + out.writeByte(VersionType.EXTERNAL.getValue()); + } out.writeLong(autoGeneratedIdTimestamp); out.writeLong(seqNo); out.writeLong(primaryTerm); @@ -1149,7 +1146,6 @@ public boolean equals(Object o) { primaryTerm != index.primaryTerm || id.equals(index.id) == false || type.equals(index.type) == false || - versionType != index.versionType || autoGeneratedIdTimestamp != index.autoGeneratedIdTimestamp || source.equals(index.source) == false) { return false; @@ -1168,7 +1164,6 @@ public int hashCode() { result = 31 * result + Long.hashCode(seqNo); result = 31 * result + Long.hashCode(primaryTerm); result = 31 * result + Long.hashCode(version); - result = 31 * result + versionType.hashCode(); result = 31 * result + source.hashCode(); result = 31 * result + (routing != null ? 
routing.hashCode() : 0); result = 31 * result + Long.hashCode(autoGeneratedIdTimestamp); @@ -1194,14 +1189,15 @@ public long getAutoGeneratedIdTimestamp() { public static class Delete implements Operation { private static final int FORMAT_6_0 = 4; // 6.0 - * - public static final int SERIALIZATION_FORMAT = FORMAT_6_0; + public static final int FORMAT_NO_PARENT = FORMAT_6_0 + 1; // since 7.0 + public static final int FORMAT_NO_VERSION_TYPE = FORMAT_NO_PARENT + 1; + public static final int SERIALIZATION_FORMAT = FORMAT_NO_VERSION_TYPE; private final String type, id; private final Term uid; private final long seqNo; private final long primaryTerm; private final long version; - private final VersionType versionType; private Delete(final StreamInput in) throws IOException { final int format = in.readVInt();// SERIALIZATION_FORMAT @@ -1210,29 +1206,29 @@ private Delete(final StreamInput in) throws IOException { id = in.readString(); uid = new Term(in.readString(), in.readBytesRef()); this.version = in.readLong(); - this.versionType = VersionType.fromValue(in.readByte()); - assert versionType.validateVersionForWrites(this.version); + if (format < FORMAT_NO_VERSION_TYPE) { + in.readByte(); // versionType + } seqNo = in.readLong(); primaryTerm = in.readLong(); } public Delete(Engine.Delete delete, Engine.DeleteResult deleteResult) { - this(delete.type(), delete.id(), delete.uid(), deleteResult.getSeqNo(), delete.primaryTerm(), deleteResult.getVersion(), delete.versionType()); + this(delete.type(), delete.id(), delete.uid(), deleteResult.getSeqNo(), delete.primaryTerm(), deleteResult.getVersion()); } /** utility for testing */ public Delete(String type, String id, long seqNo, long primaryTerm, Term uid) { - this(type, id, uid, seqNo, primaryTerm, Versions.MATCH_ANY, VersionType.INTERNAL); + this(type, id, uid, seqNo, primaryTerm, Versions.MATCH_ANY); } - public Delete(String type, String id, Term uid, long seqNo, long primaryTerm, long version, VersionType versionType) { + public Delete(String type, String id, Term uid, long seqNo, long primaryTerm, long version) { this.type = Objects.requireNonNull(type); this.id = Objects.requireNonNull(id); this.uid = uid; this.seqNo = seqNo; this.primaryTerm = primaryTerm; this.version = version; - this.versionType = versionType; } @Override @@ -1271,23 +1267,22 @@ public long version() { return this.version; } - public VersionType versionType() { - return this.versionType; - } - @Override public Source getSource() { throw new IllegalStateException("trying to read doc source from delete operation"); } private void write(final StreamOutput out) throws IOException { - out.writeVInt(SERIALIZATION_FORMAT); + final int format = out.getVersion().onOrAfter(Version.V_7_0_0_alpha1) ? 
SERIALIZATION_FORMAT : FORMAT_6_0; + out.writeVInt(format); out.writeString(type); out.writeString(id); out.writeString(uid.field()); out.writeBytesRef(uid.bytes()); out.writeLong(version); - out.writeByte(versionType.getValue()); + if (format < FORMAT_NO_VERSION_TYPE) { + out.writeByte(VersionType.EXTERNAL.getValue()); + } out.writeLong(seqNo); out.writeLong(primaryTerm); } @@ -1306,8 +1301,7 @@ public boolean equals(Object o) { return version == delete.version && seqNo == delete.seqNo && primaryTerm == delete.primaryTerm && - uid.equals(delete.uid) && - versionType == delete.versionType; + uid.equals(delete.uid); } @Override @@ -1316,7 +1310,6 @@ public int hashCode() { result = 31 * result + Long.hashCode(seqNo); result = 31 * result + Long.hashCode(primaryTerm); result = 31 * result + Long.hashCode(version); - result = 31 * result + versionType.hashCode(); return result; } diff --git a/server/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java b/server/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java index b89b21c52588a..c135facc67f5f 100644 --- a/server/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java +++ b/server/src/main/java/org/elasticsearch/index/translog/TranslogWriter.java @@ -202,9 +202,11 @@ private synchronized boolean assertNoSeqNumberConflict(long seqNo, BytesReferenc if (previous.v1().equals(data) == false) { Translog.Operation newOp = Translog.readOperation(new BufferedChecksumStreamInput(data.streamInput())); Translog.Operation prvOp = Translog.readOperation(new BufferedChecksumStreamInput(previous.v1().streamInput())); - throw new AssertionError( - "seqNo [" + seqNo + "] was processed twice in generation [" + generation + "], with different data. " + - "prvOp [" + prvOp + "], newOp [" + newOp + "]", previous.v2()); + if (newOp.equals(prvOp) == false) { + throw new AssertionError( + "seqNo [" + seqNo + "] was processed twice in generation [" + generation + "], with different data. 
" + + "prvOp [" + prvOp + "], newOp [" + newOp + "]", previous.v2()); + } } } else { seenSequenceNumbers.put(seqNo, diff --git a/server/src/test/java/org/elasticsearch/action/resync/ResyncReplicationRequestTests.java b/server/src/test/java/org/elasticsearch/action/resync/ResyncReplicationRequestTests.java index 914c2b87422db..15b8e1c99d266 100644 --- a/server/src/test/java/org/elasticsearch/action/resync/ResyncReplicationRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/resync/ResyncReplicationRequestTests.java @@ -21,9 +21,7 @@ import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; -import org.elasticsearch.common.lucene.uid.Versions; import org.elasticsearch.index.Index; -import org.elasticsearch.index.VersionType; import org.elasticsearch.index.shard.ShardId; import org.elasticsearch.index.translog.Translog; import org.elasticsearch.test.ESTestCase; @@ -38,7 +36,7 @@ public class ResyncReplicationRequestTests extends ESTestCase { public void testSerialization() throws IOException { final byte[] bytes = "{}".getBytes(Charset.forName("UTF-8")); final Translog.Index index = new Translog.Index("type", "id", 0, randomNonNegativeLong(), - Versions.MATCH_ANY, VersionType.INTERNAL, bytes, null, -1); + randomNonNegativeLong(), bytes, null, -1); final ShardId shardId = new ShardId(new Index("index", "uuid"), 0); final ResyncReplicationRequest before = new ResyncReplicationRequest(shardId, 42L, new Translog.Operation[]{index}); diff --git a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java index 2e89a66805ce1..87b63dfdef832 100644 --- a/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java +++ b/server/src/test/java/org/elasticsearch/index/engine/InternalEngineTests.java @@ -1183,7 +1183,7 @@ public void testVersioningNewCreate() throws IOException { assertThat(indexResult.getVersion(), equalTo(1L)); create = new Engine.Index(newUid(doc), doc, indexResult.getSeqNo(), create.primaryTerm(), indexResult.getVersion(), - create.versionType().versionTypeForReplicationAndRecovery(), REPLICA, 0, -1, false); + null, REPLICA, 0, -1, false); indexResult = replicaEngine.index(create); assertThat(indexResult.getVersion(), equalTo(1L)); } @@ -1197,7 +1197,7 @@ public void testReplicatedVersioningWithFlush() throws IOException { create = new Engine.Index(newUid(doc), doc, indexResult.getSeqNo(), create.primaryTerm(), indexResult.getVersion(), - create.versionType().versionTypeForReplicationAndRecovery(), REPLICA, 0, -1, false); + null, REPLICA, 0, -1, false); indexResult = replicaEngine.index(create); assertThat(indexResult.getVersion(), equalTo(1L)); assertTrue(indexResult.isCreated()); @@ -1216,7 +1216,7 @@ public void testReplicatedVersioningWithFlush() throws IOException { update = new Engine.Index(newUid(doc), doc, updateResult.getSeqNo(), update.primaryTerm(), updateResult.getVersion(), - update.versionType().versionTypeForReplicationAndRecovery(), REPLICA, 0, -1, false); + null, REPLICA, 0, -1, false); updateResult = replicaEngine.index(update); assertThat(updateResult.getVersion(), equalTo(2L)); assertFalse(updateResult.isCreated()); @@ -1269,7 +1269,7 @@ public void testVersioningNewIndex() throws IOException { Engine.IndexResult indexResult = engine.index(index); assertThat(indexResult.getVersion(), equalTo(1L)); - index = new Engine.Index(newUid(doc), doc, indexResult.getSeqNo(), 
index.primaryTerm(), indexResult.getVersion(), index.versionType().versionTypeForReplicationAndRecovery(), REPLICA, 0, -1, false); + index = new Engine.Index(newUid(doc), doc, indexResult.getSeqNo(), index.primaryTerm(), indexResult.getVersion(), null, REPLICA, 0, -1, false); indexResult = replicaEngine.index(index); assertThat(indexResult.getVersion(), equalTo(1L)); } @@ -1418,7 +1418,7 @@ protected List generateSingleDocHistory(boolean forReplica, Ve forReplica && i >= startWithSeqNo ? i * 2 : SequenceNumbers.UNASSIGNED_SEQ_NO, forReplica && i >= startWithSeqNo && incrementTermWhenIntroducingSeqNo ? primaryTerm + 1 : primaryTerm, version, - forReplica ? versionType.versionTypeForReplicationAndRecovery() : versionType, + forReplica ? null : versionType, forReplica ? REPLICA : PRIMARY, System.currentTimeMillis(), -1, false ); @@ -1427,7 +1427,7 @@ protected List generateSingleDocHistory(boolean forReplica, Ve forReplica && i >= startWithSeqNo ? i * 2 : SequenceNumbers.UNASSIGNED_SEQ_NO, forReplica && i >= startWithSeqNo && incrementTermWhenIntroducingSeqNo ? primaryTerm + 1 : primaryTerm, version, - forReplica ? versionType.versionTypeForReplicationAndRecovery() : versionType, + forReplica ? null : versionType, forReplica ? REPLICA : PRIMARY, System.currentTimeMillis()); } @@ -3221,7 +3221,7 @@ public void testRetryWithAutogeneratedIdWorksAndNoDuplicateDocs() throws IOExcep Engine.IndexResult indexResult = engine.index(index); assertThat(indexResult.getVersion(), equalTo(1L)); - index = new Engine.Index(newUid(doc), doc, indexResult.getSeqNo(), index.primaryTerm(), indexResult.getVersion(), index.versionType().versionTypeForReplicationAndRecovery(), REPLICA, System.nanoTime(), autoGeneratedIdTimestamp, isRetry); + index = new Engine.Index(newUid(doc), doc, indexResult.getSeqNo(), index.primaryTerm(), indexResult.getVersion(), null, REPLICA, System.nanoTime(), autoGeneratedIdTimestamp, isRetry); indexResult = replicaEngine.index(index); assertThat(indexResult.getVersion(), equalTo(1L)); @@ -3235,7 +3235,7 @@ public void testRetryWithAutogeneratedIdWorksAndNoDuplicateDocs() throws IOExcep assertEquals(1, topDocs.totalHits); } - index = new Engine.Index(newUid(doc), doc, indexResult.getSeqNo(), index.primaryTerm(), indexResult.getVersion(), index.versionType().versionTypeForReplicationAndRecovery(), REPLICA, System.nanoTime(), autoGeneratedIdTimestamp, isRetry); + index = new Engine.Index(newUid(doc), doc, indexResult.getSeqNo(), index.primaryTerm(), indexResult.getVersion(), null, REPLICA, System.nanoTime(), autoGeneratedIdTimestamp, isRetry); indexResult = replicaEngine.index(index); assertThat(indexResult.getResultType(), equalTo(Engine.Result.Type.SUCCESS)); replicaEngine.refresh("test"); @@ -3255,7 +3255,7 @@ public void testRetryWithAutogeneratedIdsAndWrongOrderWorksAndNoDuplicateDocs() Engine.IndexResult result = engine.index(firstIndexRequest); assertThat(result.getVersion(), equalTo(1L)); - Engine.Index firstIndexRequestReplica = new Engine.Index(newUid(doc), doc, result.getSeqNo(), firstIndexRequest.primaryTerm(), result.getVersion(), firstIndexRequest.versionType().versionTypeForReplicationAndRecovery(), REPLICA, System.nanoTime(), autoGeneratedIdTimestamp, isRetry); + Engine.Index firstIndexRequestReplica = new Engine.Index(newUid(doc), doc, result.getSeqNo(), firstIndexRequest.primaryTerm(), result.getVersion(), null, REPLICA, System.nanoTime(), autoGeneratedIdTimestamp, isRetry); Engine.IndexResult indexReplicaResult = replicaEngine.index(firstIndexRequestReplica); 
assertThat(indexReplicaResult.getVersion(), equalTo(1L)); @@ -3269,7 +3269,7 @@ public void testRetryWithAutogeneratedIdsAndWrongOrderWorksAndNoDuplicateDocs() assertEquals(1, topDocs.totalHits); } - Engine.Index secondIndexRequestReplica = new Engine.Index(newUid(doc), doc, result.getSeqNo(), secondIndexRequest.primaryTerm(), result.getVersion(), firstIndexRequest.versionType().versionTypeForReplicationAndRecovery(), REPLICA, System.nanoTime(), autoGeneratedIdTimestamp, isRetry); + Engine.Index secondIndexRequestReplica = new Engine.Index(newUid(doc), doc, result.getSeqNo(), secondIndexRequest.primaryTerm(), result.getVersion(), null, REPLICA, System.nanoTime(), autoGeneratedIdTimestamp, isRetry); replicaEngine.index(secondIndexRequestReplica); replicaEngine.refresh("test"); try (Engine.Searcher searcher = replicaEngine.acquireSearcher("test")) { @@ -3292,7 +3292,7 @@ public Engine.Index appendOnlyPrimary(ParsedDocument doc, boolean retry, final l } public Engine.Index appendOnlyReplica(ParsedDocument doc, boolean retry, final long autoGeneratedIdTimestamp, final long seqNo) { - return new Engine.Index(newUid(doc), doc, seqNo, 2, 1, VersionType.EXTERNAL, + return new Engine.Index(newUid(doc), doc, seqNo, 2, 1, null, Engine.Operation.Origin.REPLICA, System.nanoTime(), autoGeneratedIdTimestamp, retry); } @@ -3694,7 +3694,7 @@ public void testOutOfOrderSequenceNumbersWithVersionConflict() throws IOExceptio sequenceNumberSupplier.getAsLong(), 1, i, - VersionType.EXTERNAL, + origin == PRIMARY ? VersionType.EXTERNAL : null, origin, System.nanoTime(), IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, @@ -3708,7 +3708,7 @@ public void testOutOfOrderSequenceNumbersWithVersionConflict() throws IOExceptio sequenceNumberSupplier.getAsLong(), 1, i, - VersionType.EXTERNAL, + origin == PRIMARY ? 
VersionType.EXTERNAL : null, origin, System.nanoTime()); operations.add(delete); @@ -3928,7 +3928,7 @@ public void markSeqNoAsCompleted(long seqNo) { final ParsedDocument doc = testParsedDocument(id, null, testDocumentWithTextField(), SOURCE, null); final Term uid = newUid(doc); final long time = System.nanoTime(); - actualEngine.index(new Engine.Index(uid, doc, seqNo, 1, 1, VersionType.EXTERNAL, REPLICA, time, time, false)); + actualEngine.index(new Engine.Index(uid, doc, seqNo, 1, 1, null, REPLICA, time, time, false)); if (rarely()) { actualEngine.rollTranslogGeneration(); } @@ -4686,7 +4686,7 @@ public void testTrimUnsafeCommits() throws Exception { for (int i = 0; i < seqNos.size(); i++) { ParsedDocument doc = testParsedDocument(Long.toString(seqNos.get(i)), null, testDocument(), new BytesArray("{}"), null); Engine.Index index = new Engine.Index(newUid(doc), doc, seqNos.get(i), 0, - 1, VersionType.EXTERNAL, REPLICA, System.nanoTime(), -1, false); + 1, null, REPLICA, System.nanoTime(), -1, false); engine.index(index); if (randomBoolean()) { engine.flush(); diff --git a/server/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java b/server/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java index ee97ba14fe09e..f01d5e54a2e16 100644 --- a/server/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java +++ b/server/src/test/java/org/elasticsearch/index/replication/RecoveryDuringReplicationTests.java @@ -186,7 +186,6 @@ public void testRecoveryToReplicaThatReceivedExtraDocument() throws Exception { remainingReplica.applyIndexOperationOnReplica( remainingReplica.getLocalCheckpoint() + 1, 1, - VersionType.EXTERNAL, randomNonNegativeLong(), false, SourceToParse.source("index", "type", "replica", new BytesArray("{}"), XContentType.JSON)); diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index ac52378fc6b9d..2e07ec950a572 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -72,7 +72,6 @@ import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.env.NodeEnvironment; import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.VersionType; import org.elasticsearch.index.engine.CommitStats; import org.elasticsearch.index.engine.Engine; import org.elasticsearch.index.engine.EngineException; @@ -1545,17 +1544,17 @@ public void testRecoverFromStoreWithOutOfOrderDelete() throws IOException { * - If flush and then recover from the existing store, delete #1 will be removed while index #0 is still retained and replayed. 
*/ final IndexShard shard = newStartedShard(false); - shard.applyDeleteOperationOnReplica(1, 2, "_doc", "id", VersionType.EXTERNAL); + shard.applyDeleteOperationOnReplica(1, 2, "_doc", "id"); shard.getEngine().rollTranslogGeneration(); // isolate the delete in it's own generation - shard.applyIndexOperationOnReplica(0, 1, VersionType.EXTERNAL, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, + shard.applyIndexOperationOnReplica(0, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, SourceToParse.source(shard.shardId().getIndexName(), "_doc", "id", new BytesArray("{}"), XContentType.JSON)); - shard.applyIndexOperationOnReplica(3, 3, VersionType.EXTERNAL, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, + shard.applyIndexOperationOnReplica(3, 3, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, SourceToParse.source(shard.shardId().getIndexName(), "_doc", "id-3", new BytesArray("{}"), XContentType.JSON)); // Flushing a new commit with local checkpoint=1 allows to skip the translog gen #1 in recovery. shard.flush(new FlushRequest().force(true).waitIfOngoing(true)); - shard.applyIndexOperationOnReplica(2, 3, VersionType.EXTERNAL, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, + shard.applyIndexOperationOnReplica(2, 3, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, SourceToParse.source(shard.shardId().getIndexName(), "_doc", "id-2", new BytesArray("{}"), XContentType.JSON)); - shard.applyIndexOperationOnReplica(5, 1, VersionType.EXTERNAL, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, + shard.applyIndexOperationOnReplica(5, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, SourceToParse.source(shard.shardId().getIndexName(), "_doc", "id-5", new BytesArray("{}"), XContentType.JSON)); final int translogOps; @@ -1646,8 +1645,7 @@ public void testRecoverFromStoreWithNoOps() throws IOException { updateMappings(otherShard, shard.indexSettings().getIndexMetaData()); SourceToParse sourceToParse = SourceToParse.source(shard.shardId().getIndexName(), "_doc", "1", new BytesArray("{}"), XContentType.JSON); - otherShard.applyIndexOperationOnReplica(1, 1, - VersionType.EXTERNAL, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, sourceToParse); + otherShard.applyIndexOperationOnReplica(1, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, sourceToParse); final ShardRouting primaryShardRouting = shard.routingEntry(); IndexShard newShard = reinitShard(otherShard, ShardRoutingHelper.initWithSameId(primaryShardRouting, @@ -1763,18 +1761,18 @@ public void testRecoverFromStoreRemoveStaleOperations() throws Exception { final IndexShard shard = newStartedShard(false); final String indexName = shard.shardId().getIndexName(); // Index #0, index #1 - shard.applyIndexOperationOnReplica(0, 1, VersionType.EXTERNAL, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, + shard.applyIndexOperationOnReplica(0, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, SourceToParse.source(indexName, "_doc", "doc-0", new BytesArray("{}"), XContentType.JSON)); flushShard(shard); shard.updateGlobalCheckpointOnReplica(0, "test"); // stick the global checkpoint here. 
- shard.applyIndexOperationOnReplica(1, 1, VersionType.EXTERNAL, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, + shard.applyIndexOperationOnReplica(1, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, SourceToParse.source(indexName, "_doc", "doc-1", new BytesArray("{}"), XContentType.JSON)); flushShard(shard); assertThat(getShardDocUIDs(shard), containsInAnyOrder("doc-0", "doc-1")); // Simulate resync (without rollback): Noop #1, index #2 acquireReplicaOperationPermitBlockingly(shard, shard.primaryTerm + 1); shard.markSeqNoAsNoop(1, "test"); - shard.applyIndexOperationOnReplica(2, 1, VersionType.EXTERNAL, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, + shard.applyIndexOperationOnReplica(2, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, SourceToParse.source(indexName, "_doc", "doc-2", new BytesArray("{}"), XContentType.JSON)); flushShard(shard); assertThat(getShardDocUIDs(shard), containsInAnyOrder("doc-0", "doc-1", "doc-2")); @@ -2104,11 +2102,11 @@ public void testRecoverFromTranslog() throws IOException { int numCorruptEntries = 0; for (int i = 0; i < numTotalEntries; i++) { if (randomBoolean()) { - operations.add(new Translog.Index("_doc", "1", 0, primary.getPrimaryTerm(), 1, VersionType.INTERNAL, + operations.add(new Translog.Index("_doc", "1", 0, primary.getPrimaryTerm(), 1, "{\"foo\" : \"bar\"}".getBytes(Charset.forName("UTF-8")), null, -1)); } else { // corrupt entry - operations.add(new Translog.Index("_doc", "2", 1, primary.getPrimaryTerm(), 1, VersionType.INTERNAL, + operations.add(new Translog.Index("_doc", "2", 1, primary.getPrimaryTerm(), 1, "{\"foo\" : \"bar}".getBytes(Charset.forName("UTF-8")), null, -1)); numCorruptEntries++; } @@ -2603,8 +2601,7 @@ private Result indexOnReplicaWithGaps( final String id = Integer.toString(i); SourceToParse sourceToParse = SourceToParse.source(indexShard.shardId().getIndexName(), "_doc", id, new BytesArray("{}"), XContentType.JSON); - indexShard.applyIndexOperationOnReplica(i, - 1, VersionType.EXTERNAL, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, sourceToParse); + indexShard.applyIndexOperationOnReplica(i, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, sourceToParse); if (!gap && i == localCheckpoint + 1) { localCheckpoint++; } diff --git a/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java b/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java index dbbb38090bc3b..b255238c8648c 100644 --- a/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java +++ b/server/src/test/java/org/elasticsearch/index/translog/TranslogTests.java @@ -416,9 +416,9 @@ public void testStats() throws IOException { { final TranslogStats stats = stats(); assertThat(stats.estimatedNumberOfOperations(), equalTo(1)); - assertThat(stats.getTranslogSizeInBytes(), equalTo(163L)); + assertThat(stats.getTranslogSizeInBytes(), equalTo(162L)); assertThat(stats.getUncommittedOperations(), equalTo(1)); - assertThat(stats.getUncommittedSizeInBytes(), equalTo(163L)); + assertThat(stats.getUncommittedSizeInBytes(), equalTo(162L)); assertThat(stats.getEarliestLastModifiedAge(), greaterThan(1L)); } @@ -426,9 +426,9 @@ public void testStats() throws IOException { { final TranslogStats stats = stats(); assertThat(stats.estimatedNumberOfOperations(), equalTo(2)); - assertThat(stats.getTranslogSizeInBytes(), equalTo(212L)); + assertThat(stats.getTranslogSizeInBytes(), equalTo(210L)); assertThat(stats.getUncommittedOperations(), equalTo(2)); - assertThat(stats.getUncommittedSizeInBytes(), 
equalTo(212L)); + assertThat(stats.getUncommittedSizeInBytes(), equalTo(210L)); assertThat(stats.getEarliestLastModifiedAge(), greaterThan(1L)); } @@ -436,9 +436,9 @@ public void testStats() throws IOException { { final TranslogStats stats = stats(); assertThat(stats.estimatedNumberOfOperations(), equalTo(3)); - assertThat(stats.getTranslogSizeInBytes(), equalTo(261L)); + assertThat(stats.getTranslogSizeInBytes(), equalTo(258L)); assertThat(stats.getUncommittedOperations(), equalTo(3)); - assertThat(stats.getUncommittedSizeInBytes(), equalTo(261L)); + assertThat(stats.getUncommittedSizeInBytes(), equalTo(258L)); assertThat(stats.getEarliestLastModifiedAge(), greaterThan(1L)); } @@ -446,13 +446,13 @@ public void testStats() throws IOException { { final TranslogStats stats = stats(); assertThat(stats.estimatedNumberOfOperations(), equalTo(4)); - assertThat(stats.getTranslogSizeInBytes(), equalTo(303L)); + assertThat(stats.getTranslogSizeInBytes(), equalTo(300L)); assertThat(stats.getUncommittedOperations(), equalTo(4)); - assertThat(stats.getUncommittedSizeInBytes(), equalTo(303L)); + assertThat(stats.getUncommittedSizeInBytes(), equalTo(300L)); assertThat(stats.getEarliestLastModifiedAge(), greaterThan(1L)); } - final long expectedSizeInBytes = 358L; + final long expectedSizeInBytes = 355L; translog.rollGeneration(); { final TranslogStats stats = stats(); @@ -725,14 +725,12 @@ public void testConcurrentWritesWithVaryingSize() throws Throwable { assertEquals(expIndexOp.type(), indexOp.type()); assertEquals(expIndexOp.source(), indexOp.source()); assertEquals(expIndexOp.version(), indexOp.version()); - assertEquals(expIndexOp.versionType(), indexOp.versionType()); break; case DELETE: Translog.Delete delOp = (Translog.Delete) op; Translog.Delete expDelOp = (Translog.Delete) expectedOp; assertEquals(expDelOp.uid(), delOp.uid()); assertEquals(expDelOp.version(), delOp.version()); - assertEquals(expDelOp.versionType(), delOp.versionType()); break; case NO_OP: final Translog.NoOp noOp = (Translog.NoOp) op; @@ -1478,7 +1476,7 @@ public void testRecoveryUncommittedCorruptedCheckpoint() throws IOException { try (Translog ignored = new Translog(config, translogUUID, deletionPolicy, () -> SequenceNumbers.NO_OPS_PERFORMED, primaryTerm::get)) { fail("corrupted"); } catch (IllegalStateException ex) { - assertEquals("Checkpoint file translog-3.ckp already exists but has corrupted content expected: Checkpoint{offset=3080, " + + assertEquals("Checkpoint file translog-3.ckp already exists but has corrupted content expected: Checkpoint{offset=3025, " + "numOps=55, generation=3, minSeqNo=45, maxSeqNo=99, globalCheckpoint=-1, minTranslogGeneration=1, trimmedAboveSeqNo=-2} but got: Checkpoint{offset=0, numOps=0, " + "generation=0, minSeqNo=-1, maxSeqNo=-1, globalCheckpoint=-1, minTranslogGeneration=0, trimmedAboveSeqNo=-2}", ex.getMessage()); } @@ -1842,8 +1840,7 @@ public void run() { new Term("_uid", threadId + "_" + opCount), seqNoGenerator.getAndIncrement(), primaryTerm.get(), - 1 + randomInt(100000), - randomFrom(VersionType.values())); + 1 + randomInt(100000)); break; case NO_OP: op = new Translog.NoOp(seqNoGenerator.getAndIncrement(), primaryTerm.get(), randomAlphaOfLength(16)); diff --git a/server/src/test/java/org/elasticsearch/indices/recovery/RecoveryTests.java b/server/src/test/java/org/elasticsearch/indices/recovery/RecoveryTests.java index 537409f35d175..e7606328c7665 100644 --- a/server/src/test/java/org/elasticsearch/indices/recovery/RecoveryTests.java +++ 
b/server/src/test/java/org/elasticsearch/indices/recovery/RecoveryTests.java @@ -122,22 +122,22 @@ public void testRecoveryWithOutOfOrderDelete() throws Exception { final String indexName = orgReplica.shardId().getIndexName(); // delete #1 - orgReplica.applyDeleteOperationOnReplica(1, 2, "type", "id", VersionType.EXTERNAL); + orgReplica.applyDeleteOperationOnReplica(1, 2, "type", "id"); getTranslog(orgReplica).rollGeneration(); // isolate the delete in it's own generation // index #0 - orgReplica.applyIndexOperationOnReplica(0, 1, VersionType.EXTERNAL, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, + orgReplica.applyIndexOperationOnReplica(0, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, SourceToParse.source(indexName, "type", "id", new BytesArray("{}"), XContentType.JSON)); // index #3 - orgReplica.applyIndexOperationOnReplica(3, 1, VersionType.EXTERNAL, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, + orgReplica.applyIndexOperationOnReplica(3, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, SourceToParse.source(indexName, "type", "id-3", new BytesArray("{}"), XContentType.JSON)); // Flushing a new commit with local checkpoint=1 allows to delete the translog gen #1. orgReplica.flush(new FlushRequest().force(true).waitIfOngoing(true)); // index #2 - orgReplica.applyIndexOperationOnReplica(2, 1, VersionType.EXTERNAL, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, + orgReplica.applyIndexOperationOnReplica(2, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, SourceToParse.source(indexName, "type", "id-2", new BytesArray("{}"), XContentType.JSON)); orgReplica.updateGlobalCheckpointOnReplica(3L, "test"); // index #5 -> force NoOp #4. - orgReplica.applyIndexOperationOnReplica(5, 1, VersionType.EXTERNAL, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, + orgReplica.applyIndexOperationOnReplica(5, 1, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, SourceToParse.source(indexName, "type", "id-5", new BytesArray("{}"), XContentType.JSON)); final int translogOps; diff --git a/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java index a23e29b0bcd6b..f26522245493f 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/engine/EngineTestCase.java @@ -52,7 +52,6 @@ import org.elasticsearch.core.internal.io.IOUtils; import org.elasticsearch.index.Index; import org.elasticsearch.index.IndexSettings; -import org.elasticsearch.index.VersionType; import org.elasticsearch.index.codec.CodecService; import org.elasticsearch.index.mapper.IdFieldMapper; import org.elasticsearch.index.mapper.Mapping; @@ -493,14 +492,12 @@ protected Engine.Index indexForDoc(ParsedDocument doc) { protected Engine.Index replicaIndexForDoc(ParsedDocument doc, long version, long seqNo, boolean isRetry) { - return new Engine.Index(newUid(doc), doc, seqNo, primaryTerm.get(), version, VersionType.EXTERNAL, - Engine.Operation.Origin.REPLICA, System.nanoTime(), - IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, isRetry); + return new Engine.Index(newUid(doc), doc, seqNo, primaryTerm.get(), version, null, Engine.Operation.Origin.REPLICA, + System.nanoTime(), IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, isRetry); } protected Engine.Delete replicaDeleteForDoc(String id, long version, long seqNo, long startTime) { - return new Engine.Delete("test", id, newUid(id), seqNo, 1, version, VersionType.EXTERNAL, - 
Engine.Operation.Origin.REPLICA, startTime); + return new Engine.Delete("test", id, newUid(id), seqNo, 1, version, null, Engine.Operation.Origin.REPLICA, startTime); } /** diff --git a/test/framework/src/main/java/org/elasticsearch/index/engine/TranslogHandler.java b/test/framework/src/main/java/org/elasticsearch/index/engine/TranslogHandler.java index 53fe89ac17ea5..9999a3b3748f1 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/engine/TranslogHandler.java +++ b/test/framework/src/main/java/org/elasticsearch/index/engine/TranslogHandler.java @@ -124,14 +124,12 @@ private Engine.Operation convertToEngineOp(Translog.Operation operation, Engine. source(indexName, index.type(), index.id(), index.source(), XContentHelper.xContentType(index.source())) .routing(index.routing()), index.seqNo(), index.primaryTerm(), - index.version(), index.versionType().versionTypeForReplicationAndRecovery(), origin, - index.getAutoGeneratedIdTimestamp(), true); + index.version(), null, origin, index.getAutoGeneratedIdTimestamp(), true); return engineIndex; case DELETE: final Translog.Delete delete = (Translog.Delete) operation; final Engine.Delete engineDelete = new Engine.Delete(delete.type(), delete.id(), delete.uid(), delete.seqNo(), - delete.primaryTerm(), delete.version(), delete.versionType().versionTypeForReplicationAndRecovery(), - origin, System.nanoTime()); + delete.primaryTerm(), delete.version(), null, origin, System.nanoTime()); return engineDelete; case NO_OP: final Translog.NoOp noOp = (Translog.NoOp) operation; diff --git a/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java index 5a8e91841c5a7..e4849be20e16e 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java @@ -573,7 +573,7 @@ protected Engine.IndexResult indexDoc(IndexShard shard, String type, String id, shard.getLocalCheckpoint()); } else { result = shard.applyIndexOperationOnReplica(shard.seqNoStats().getMaxSeqNo() + 1, 0, - VersionType.EXTERNAL, IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, sourceToParse); + IndexRequest.UNSET_AUTO_GENERATED_TIMESTAMP, false, sourceToParse); if (result.getResultType() == Engine.Result.Type.MAPPING_UPDATE_REQUIRED) { throw new TransportReplicationAction.RetryOnReplicaException(shard.shardId, "Mappings are not available on the replica yet, triggered update: " + result.getRequiredMappingUpdate()); @@ -591,7 +591,7 @@ protected Engine.DeleteResult deleteDoc(IndexShard shard, String type, String id if (shard.routingEntry().primary()) { return shard.applyDeleteOperationOnPrimary(Versions.MATCH_ANY, type, id, VersionType.INTERNAL); } else { - return shard.applyDeleteOperationOnReplica(shard.seqNoStats().getMaxSeqNo() + 1, 0L, type, id, VersionType.EXTERNAL); + return shard.applyDeleteOperationOnReplica(shard.seqNoStats().getMaxSeqNo() + 1, 0L, type, id); } } From 413d2111e618eef7b4fc1b3cc48770f5101c1fa3 Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Tue, 17 Jul 2018 18:06:16 +0200 Subject: [PATCH 088/107] ESIndexLevelReplicationTestCase doesn't support replicated failures but it's good to know what they are Sometimes we have a test failure that hits an `UnsupportedOperationException` in this infrastructure. When debugging you want to know what caused this unexpected failure, but right now we're silent about it. 
This commit adds some information to the `UnsupportedOperationException`. Relates to #32127 --- .../index/replication/ESIndexLevelReplicationTestCase.java | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java index ab18e359458bd..9de8821682216 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java @@ -477,7 +477,7 @@ public ShardRouting routingEntry() { @Override public void failShard(String message, Exception exception) { - throw new UnsupportedOperationException(); + throw new UnsupportedOperationException("failing a primary isn't supported. failure: " + message, exception); } @Override @@ -550,13 +550,13 @@ public void onFailure(Exception e) { public void failShardIfNeeded(ShardRouting replica, String message, Exception exception, Runnable onSuccess, Consumer onPrimaryDemoted, Consumer onIgnoredFailure) { - throw new UnsupportedOperationException(); + throw new UnsupportedOperationException("failing shard " + replica + " isn't supported. failure: " + message, exception); } @Override public void markShardCopyAsStaleIfNeeded(ShardId shardId, String allocationId, Runnable onSuccess, Consumer onPrimaryDemoted, Consumer onIgnoredFailure) { - throw new UnsupportedOperationException(); + throw new UnsupportedOperationException("can't mark " + shardId + ", aid [" + allocationId + "] as stale"); } } From fa168755c2eb178b248bd366f715f4638cc07a5a Mon Sep 17 00:00:00 2001 From: Toby McLaughlin Date: Wed, 18 Jul 2018 17:07:31 +1000 Subject: [PATCH 089/107] [DOCS] Update TLS on Docker for 6.3 (#32114) Remove references to the `platinum` image and add a self-generated trial licence to the example for TLS on Docker. Fixes elastic/elasticsearch-docker#176 --- .../configuring-tls-docker.asciidoc | 29 ++++++++++++++----- 1 file changed, 21 insertions(+), 8 deletions(-) diff --git a/x-pack/docs/en/security/securing-communications/configuring-tls-docker.asciidoc b/x-pack/docs/en/security/securing-communications/configuring-tls-docker.asciidoc index d93d4e523d9f2..49913382482bd 100644 --- a/x-pack/docs/en/security/securing-communications/configuring-tls-docker.asciidoc +++ b/x-pack/docs/en/security/securing-communications/configuring-tls-docker.asciidoc @@ -1,12 +1,13 @@ [role="xpack"] [[configuring-tls-docker]] -=== Encrypting Communications in an {es} Docker Image +=== Encrypting Communications in an {es} Docker Container Starting with version 6.0.0, {security} (Gold, Platinum or Enterprise subscriptions) https://www.elastic.co/guide/en/elasticsearch/reference/6.0/breaking-6.0.0-xes.html[requires SSL/TLS] encryption for the transport networking layer. This section demonstrates an easy path to get started with SSL/TLS for both -HTTPS and transport using the `elasticsearch-platinum` docker image. +HTTPS and transport using the {es} Docker image. The example uses +Docker Compose to manage the containers. For further details, please refer to {xpack-ref}/encrypting-communications.html[Encrypting Communications] and https://www.elastic.co/subscriptions[available subscriptions]. <>.
-Inside a new, empty, directory create the following **four files**: +Inside a new, empty directory, create the following four files: `instances.yml`: ["source","yaml"] ---- @@ -29,6 +30,7 @@ instances: - localhost ip: - 127.0.0.1 + - name: es02 dns: - es02 @@ -60,6 +62,7 @@ ifeval::["{release-state}"!="unreleased"] ["source","yaml",subs="attributes"] ---- version: '2.2' + services: create_certs: container_name: create_certs @@ -96,6 +99,7 @@ ifeval::["{release-state}"!="unreleased"] ["source","yaml",subs="attributes"] ---- version: '2.2' + services: es01: container_name: es01 @@ -105,9 +109,11 @@ services: - discovery.zen.minimum_master_nodes=2 - ELASTIC_PASSWORD=$ELASTIC_PASSWORD <1> - "ES_JAVA_OPTS=-Xms512m -Xmx512m" + - xpack.license.self_generated.type=trial <2> + - xpack.security.enabled=true - xpack.security.http.ssl.enabled=true - xpack.security.transport.ssl.enabled=true - - xpack.security.transport.ssl.verification_mode=certificate <2> + - xpack.security.transport.ssl.verification_mode=certificate <3> - xpack.ssl.certificate_authorities=$CERTS_DIR/ca/ca.crt - xpack.ssl.certificate=$CERTS_DIR/es01/es01.crt - xpack.ssl.key=$CERTS_DIR/es01/es01.key @@ -119,15 +125,18 @@ services: interval: 30s timeout: 10s retries: 5 + es02: container_name: es02 - image: docker.elastic.co/elasticsearch/elasticsearch-platinum:{version} + image: docker.elastic.co/elasticsearch/elasticsearch:{version} environment: - node.name=es02 - discovery.zen.minimum_master_nodes=2 - ELASTIC_PASSWORD=$ELASTIC_PASSWORD - discovery.zen.ping.unicast.hosts=es01 - "ES_JAVA_OPTS=-Xms512m -Xmx512m" + - xpack.license.self_generated.type=trial + - xpack.security.enabled=true - xpack.security.http.ssl.enabled=true - xpack.security.transport.ssl.enabled=true - xpack.security.transport.ssl.verification_mode=certificate @@ -135,16 +144,20 @@ services: - xpack.ssl.certificate_authorities=$CERTS_DIR/ca/ca.crt - xpack.ssl.certificate=$CERTS_DIR/es02/es02.crt - xpack.ssl.key=$CERTS_DIR/es02/es02.key volumes: ['esdata_02:/usr/share/elasticsearch/data', './certs:$CERTS_DIR'] + wait_until_ready: - image: docker.elastic.co/elasticsearch/elasticsearch-platinum:{version} + image: docker.elastic.co/elasticsearch/elasticsearch:{version} command: /usr/bin/true depends_on: {"es01": {"condition": "service_healthy"}} + volumes: {"esdata_01": {"driver": "local"}, "esdata_02": {"driver": "local"}} ---- -<1> Bootstrap `elastic` with the password defined in `.env`. See +<1> Bootstrap `elastic` with the password defined in `.env`. See {stack-ov}/built-in-users.html#bootstrap-elastic-passwords[the Elastic Bootstrap Password]. -<2> Disable verification of authenticity for inter-node communication. Allows +<2> Automatically generate and apply a trial subscription, in order to enable +{security}. +<3> Disable verification of authenticity for inter-node communication. Allows creating self-signed certificates without having to pin specific internal IP addresses. endif::[] From 688deeb344593f82960ccbbf1b9d9a12356765de Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Wed, 18 Jul 2018 09:12:28 +0200 Subject: [PATCH 090/107] Fix `range` queries on `_type` field for single type indices (#31756) With the introduction of single types in 6.x, the `_type` field is no longer indexed, which leads to errors for certain queries that used to work. One such query is the `range` query, which, if performed on a single-type index, currently throws an IAE since the field is not indexed.
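As a concrete illustration, a minimal sketch of a query that hits this (the index name and bounds are made up; the builder calls are the same ones the new test below exercises):

    // on a 6.x index with a single mapping type, this previously failed with an
    // IllegalArgumentException because [_type] is not an indexed field
    RangeQueryBuilder range = new RangeQueryBuilder("_type").from("ape").to("zebra");
    SearchResponse searchResponse = client().prepareSearch("test").setQuery(range).get();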
This change adds special treatment for this case in the TypeFieldMapper, comparing the range query's lower and upper bounds to the one existing type and returning either a MatchAllDocs or a MatchNoDocs query. Relates to #31632 Closes #31476 --- .../index/mapper/TypeFieldMapper.java | 27 ++++++++++++++ .../search/query/SearchQueryIT.java | 36 +++++++++++++++++++ 2 files changed, 63 insertions(+) diff --git a/server/src/main/java/org/elasticsearch/index/mapper/TypeFieldMapper.java b/server/src/main/java/org/elasticsearch/index/mapper/TypeFieldMapper.java index ffb548fd0f10f..71bd2e93d3039 100644 --- a/server/src/main/java/org/elasticsearch/index/mapper/TypeFieldMapper.java +++ b/server/src/main/java/org/elasticsearch/index/mapper/TypeFieldMapper.java @@ -35,6 +35,8 @@ import org.apache.lucene.search.TermInSetQuery; import org.apache.lucene.search.TermQuery; import org.apache.lucene.util.BytesRef; +import org.elasticsearch.common.logging.DeprecationLogger; +import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.lucene.search.Queries; import org.elasticsearch.common.xcontent.XContentBuilder; @@ -90,6 +92,8 @@ public MetadataFieldMapper getDefault(MappedFieldType fieldType, ParserContext c static final class TypeFieldType extends StringFieldType { + private static final DeprecationLogger DEPRECATION_LOGGER = new DeprecationLogger(ESLoggerFactory.getLogger(TypeFieldType.class)); + TypeFieldType() { } @@ -154,6 +158,29 @@ public Query termsQuery(List values, QueryShardContext context) { } } + @Override + public Query rangeQuery(Object lowerTerm, Object upperTerm, boolean includeLower, boolean includeUpper, QueryShardContext context) { + DEPRECATION_LOGGER.deprecatedAndMaybeLog("range_single_type", + "Running [range] query on [_type] field for an index with a single type.
As types are deprecated, this functionality will be removed in future releases."); + Query result = new MatchAllDocsQuery(); + String type = context.getMapperService().documentMapper().type(); + if (type != null) { + BytesRef typeBytes = new BytesRef(type); + if (lowerTerm != null) { + int comp = indexedValueForSearch(lowerTerm).compareTo(typeBytes); + if (comp > 0 || (comp == 0 && includeLower == false)) { + result = new MatchNoDocsQuery("[_type] was lexicographically smaller than lower bound of range"); + } + } + if (upperTerm != null) { + int comp = indexedValueForSearch(upperTerm).compareTo(typeBytes); + if (comp < 0 || (comp == 0 && includeUpper == false)) { + result = new MatchNoDocsQuery("[_type] was lexicographically greater than upper bound of range"); + } + } + } + return result; + } } /** diff --git a/server/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java b/server/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java index be71867edd2a0..1694f86c53eac 100644 --- a/server/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java +++ b/server/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java @@ -1822,4 +1822,40 @@ public void testRangeQueryRangeFields_24744() throws Exception { SearchResponse searchResponse = client().prepareSearch("test").setQuery(range).get(); assertHitCount(searchResponse, 1); } + + public void testRangeQueryTypeField_31476() throws Exception { + assertAcked(prepareCreate("test").addMapping("foo", "field", "type=keyword")); + + client().prepareIndex("test", "foo", "1").setSource("field", "value").get(); + refresh(); + + RangeQueryBuilder range = new RangeQueryBuilder("_type").from("ape").to("zebra"); + SearchResponse searchResponse = client().prepareSearch("test").setQuery(range).get(); + assertHitCount(searchResponse, 1); + + range = new RangeQueryBuilder("_type").from("monkey").to("zebra"); + searchResponse = client().prepareSearch("test").setQuery(range).get(); + assertHitCount(searchResponse, 0); + + range = new RangeQueryBuilder("_type").from("ape").to("donkey"); + searchResponse = client().prepareSearch("test").setQuery(range).get(); + assertHitCount(searchResponse, 0); + + range = new RangeQueryBuilder("_type").from("ape").to("foo").includeUpper(false); + searchResponse = client().prepareSearch("test").setQuery(range).get(); + assertHitCount(searchResponse, 0); + + range = new RangeQueryBuilder("_type").from("ape").to("foo").includeUpper(true); + searchResponse = client().prepareSearch("test").setQuery(range).get(); + assertHitCount(searchResponse, 1); + + range = new RangeQueryBuilder("_type").from("foo").to("zebra").includeLower(false); + searchResponse = client().prepareSearch("test").setQuery(range).get(); + assertHitCount(searchResponse, 0); + + range = new RangeQueryBuilder("_type").from("foo").to("zebra").includeLower(true); + searchResponse = client().prepareSearch("test").setQuery(range).get(); + assertHitCount(searchResponse, 1); + } + } From ace377100c5d3dcf11118856bff2c4fb246c0e32 Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Wed, 18 Jul 2018 08:55:05 +0100 Subject: [PATCH 091/107] Term -> Token; deps --- .../ScriptedConditionTokenFilterFactory.java | 5 +++++ .../ScriptedConditionTokenFilterTests.java | 12 +++++----- .../analysis-common/60_analysis_scripting.yml | 2 +- .../analyze/TransportAnalyzeAction.java | 22 +++++++++++++++++++ 4 files changed, 34 insertions(+), 7 deletions(-) diff --git 
a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterFactory.java index 6ceebf07389b6..ad4cc2dc51599 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterFactory.java @@ -1,5 +1,6 @@ package org.elasticsearch.analysis.common; +import org.apache.logging.log4j.Logger; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.miscellaneous.ConditionalTokenFilter; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; @@ -8,6 +9,7 @@ import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute; import org.apache.lucene.analysis.tokenattributes.PositionLengthAttribute; import org.apache.lucene.analysis.tokenattributes.TypeAttribute; +import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.analysis.AbstractTokenFilterFactory; @@ -44,6 +46,9 @@ public class ScriptedConditionTokenFilterFactory extends AbstractTokenFilterFact this.factory = scriptService.compile(script, AnalysisPredicateScript.CONTEXT); this.filterNames = settings.getAsList("filter"); + if (this.filterNames.isEmpty()) { + throw new IllegalArgumentException("Empty list of filters provided to tokenfilter [" + name + "]"); + } } @Override diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterTests.java index c7e47959bc67c..e61b7627a8eb0 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterTests.java @@ -26,7 +26,7 @@ public void testSimpleCondition() throws Exception { Settings indexSettings = Settings.builder() .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) .put("index.analysis.filter.cond.type", "condition") - .put("index.analysis.filter.cond.script.source", "return \"two\".equals(term.term)") + .put("index.analysis.filter.cond.script.source", "token.getTerm().length() > 5") .putList("index.analysis.filter.cond.filter", "uppercase") .put("index.analysis.analyzer.myAnalyzer.type", "custom") .put("index.analysis.analyzer.myAnalyzer.tokenizer", "standard") @@ -36,8 +36,8 @@ public void testSimpleCondition() throws Exception { AnalysisPredicateScript.Factory factory = () -> new AnalysisPredicateScript() { @Override - public boolean execute(Token term) { - return "two".contentEquals(term.term); + public boolean execute(Token token) { + return token.getTerm().length() > 5; } }; @@ -46,7 +46,7 @@ public boolean execute(Token term) { @Override public FactoryType compile(Script script, ScriptContext context) { assertEquals(context, AnalysisPredicateScript.CONTEXT); - assertEquals(new Script("return \"two\".equals(term.term)"), script); + assertEquals(new Script("token.getTerm().length() > 5"), script); return (FactoryType) factory; } }; @@ -60,8 +60,8 @@ public FactoryType compile(Script script, ScriptContext 5" - filter: [ lowercase ] - length: { tokens: 3 } - match: { 
tokens.0.token: "vorsprung" } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java index 35f1f725b65ad..30b212175ca25 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java @@ -52,6 +52,7 @@ import org.elasticsearch.index.analysis.IndexAnalyzers; import org.elasticsearch.index.analysis.MultiTermAwareComponent; import org.elasticsearch.index.analysis.NamedAnalyzer; +import org.elasticsearch.index.analysis.ReferringFilterFactory; import org.elasticsearch.index.analysis.TokenFilterFactory; import org.elasticsearch.index.analysis.TokenizerFactory; import org.elasticsearch.index.mapper.KeywordFieldMapper; @@ -574,6 +575,7 @@ private static List parseTokenFilterFactories(AnalyzeRequest Environment environment, Tuple tokenizerFactory, List charFilterFactoryList, boolean normalizer) throws IOException { List tokenFilterFactoryList = new ArrayList<>(); + List referringFilters = new ArrayList<>(); if (request.tokenFilters() != null && request.tokenFilters().size() > 0) { List tokenFilters = request.tokenFilters(); for (AnalyzeRequest.NameOrDefinition tokenFilter : tokenFilters) { @@ -627,7 +629,27 @@ private static List parseTokenFilterFactories(AnalyzeRequest tokenFilterFactory = (TokenFilterFactory) ((MultiTermAwareComponent) tokenFilterFactory).getMultiTermComponent(); } tokenFilterFactoryList.add(tokenFilterFactory); + if (tokenFilterFactory instanceof ReferringFilterFactory) { + referringFilters.add((ReferringFilterFactory)tokenFilterFactory); + } + } + } + if (referringFilters.isEmpty() == false) { + if (indexSettings == null) { + Settings settings = Settings.builder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetaData.SETTING_INDEX_UUID, UUIDs.randomBase64UUID()) + .build(); + IndexMetaData metaData = IndexMetaData.builder(IndexMetaData.INDEX_UUID_NA_VALUE).settings(settings).build(); + indexSettings = new IndexSettings(metaData, Settings.EMPTY); } + Map prebuiltFilters = analysisRegistry.buildTokenFilterFactories(indexSettings); + for (ReferringFilterFactory rff : referringFilters) { + rff.setReferences(prebuiltFilters); + } + } return tokenFilterFactoryList; } From 84ee20e65f9e485479d8545307389ddd36581663 Mon Sep 17 00:00:00 2001 From: Alpar Torok Date: Wed, 18 Jul 2018 08:23:59 +0000 Subject: [PATCH 092/107] Fix CP for namingConventions when gradle home has spaces (#31914) * Fix CP for namingConventions when gradle home has spaces Closes #31736. Probably not Windows specific, just not common to have spaces on Linux. 
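The failure is easy to reproduce outside Gradle; a minimal standalone sketch (the `PathDemo` class is hypothetical, not part of the patch) of why the raw code-source URL path breaks when the location contains spaces:

    import java.net.URL;

    public class PathDemo {
        public static void main(String[] args) throws Exception {
            URL location = PathDemo.class.getProtectionDomain().getCodeSource().getLocation();
            // the URL percent-encodes the space, so the raw path is not a valid file path
            System.out.println(location.getPath());         // e.g. /gradle%20home/lib/demo.jar
            // converting through a URI decodes it; this is the approach the patch takes
            System.out.println(location.toURI().getPath()); // e.g. /gradle home/lib/demo.jar
        }
    }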
--- .../precommit/NamingConventionsTask.java | 35 +++++++++++-------- 1 file changed, 21 insertions(+), 14 deletions(-) diff --git a/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/NamingConventionsTask.java b/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/NamingConventionsTask.java index cfbb75456bc6c..297586e9ac6f3 100644 --- a/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/NamingConventionsTask.java +++ b/buildSrc/src/main/java/org/elasticsearch/gradle/precommit/NamingConventionsTask.java @@ -16,6 +16,8 @@ import java.io.File; import java.io.FileWriter; import java.io.IOException; +import java.net.URISyntaxException; +import java.net.URL; import java.util.Objects; /** @@ -30,16 +32,25 @@ public NamingConventionsTask() { final Project project = getProject(); SourceSetContainer sourceSets = getJavaSourceSets(); - final FileCollection classpath = project.files( - // This works because the class only depends on one class from junit that will be available from the - // tests compile classpath. It's the most straight forward way of telling Java where to find the main - // class. - NamingConventionsCheck.class.getProtectionDomain().getCodeSource().getLocation().getPath(), - // the tests to be loaded - checkForTestsInMain ? sourceSets.getByName("main").getRuntimeClasspath() : project.files(), - sourceSets.getByName("test").getCompileClasspath(), - sourceSets.getByName("test").getOutput() - ); + final FileCollection classpath; + try { + URL location = NamingConventionsCheck.class.getProtectionDomain().getCodeSource().getLocation(); + if (location.getProtocol().equals("file") == false) { + throw new GradleException("Unexpected location for NamingConventionCheck class: "+ location); + } + classpath = project.files( + // This works because the class only depends on one class from junit that will be available from the + // tests compile classpath. It's the most straight forward way of telling Java where to find the main + // class. + location.toURI().getPath(), + // the tests to be loaded + checkForTestsInMain ? sourceSets.getByName("main").getRuntimeClasspath() : project.files(), + sourceSets.getByName("test").getCompileClasspath(), + sourceSets.getByName("test").getOutput() + ); + } catch (URISyntaxException e) { + throw new AssertionError(e); + } dependsOn(project.getTasks().matching(it -> "testCompileClasspath".equals(it.getName()))); getInputs().files(classpath); @@ -111,10 +122,6 @@ public void setSuccessMarker(File successMarker) { this.successMarker = successMarker; } - public boolean getSkipIntegTestInDisguise() { - return skipIntegTestInDisguise; - } - public boolean isSkipIntegTestInDisguise() { return skipIntegTestInDisguise; } From 947fa2e63db3e7d6ba8bbb490d89dbde49bec3a7 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Christoph=20B=C3=BCscher?= Date: Wed, 18 Jul 2018 10:34:42 +0200 Subject: [PATCH 093/107] Fix Java 11 javadoc compile problem Java 11 complains with a "type arguments not allowed here" error when types are used in javadoc links it seems. Simply removing it. 
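A minimal sketch of the offending pattern (hypothetical doc comment; this assumes the removed type argument was a wildcard, matching the hunk below):

    /** Pick the javadoc root for a {@link Class<?>}. */  // Java 11 javadoc: "type arguments not allowed here"
    /** Pick the javadoc root for a {@link Class}. */     // accepted by both Java 8 and Java 11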
--- .../java/org/elasticsearch/painless/PainlessDocGenerator.java | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java index 4486a52ccb1be..cc596dcc39564 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessDocGenerator.java @@ -328,7 +328,7 @@ private static String javadocRoot(PainlessField field) { } /** - * Pick the javadoc root for a {@link Class<?>}. + * Pick the javadoc root for a {@link Class}. */ private static String javadocRoot(Class<?> clazz) { String classPackage = clazz.getPackage().getName(); From e67cede7da23f197d6f352793753e25187f6d604 Mon Sep 17 00:00:00 2001 From: Boaz Leskes Date: Wed, 18 Jul 2018 11:30:44 +0200 Subject: [PATCH 094/107] A replica can be promoted and started in one cluster state update (#32042) When a replica is fully recovered (i.e., in `POST_RECOVERY` state) we send a request to the master to start the shard. The master changes the state of the replica and publishes a cluster state to that effect. In certain cases, that cluster state can be processed on the node hosting the replica *together* with a cluster state that promotes that, now started, replica to a primary. This can happen due to cluster state batched processing or if the master died after having committed the cluster state that starts the shard but before publishing it to the node with the replica. If the master also held the primary shard, the new master node will remove the primary (as it failed) and will also immediately promote the replica (thinking it is started). Sadly our code in IndexShard didn't allow for this, which caused [assertions](https://github.com/elastic/elasticsearch/blob/13917162ad5c59a96ccb4d6a81a5044546c45c22/server/src/main/java/org/elasticsearch/index/seqno/ReplicationTracker.java#L482) to be tripped in some of our test runs.
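Condensed from the IndexShard change below, the newly recognized case in updateShardState is roughly:

    if (newRouting.primary() && newPrimaryTerm == primaryTerm
            && currentRouting.initializing() && currentRouting.isRelocationTarget() == false
            && newRouting.active()) {
        // same term, so this is not a promotion of an already started replica:
        // the master started a recovering shard directly as a primary, and primary
        // mode has to be activated here instead of in the POST_RECOVERY handling
        replicationTracker.activatePrimaryMode(getLocalCheckpoint());
    }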
--- .../elasticsearch/index/shard/IndexShard.java | 19 ++-- .../IndexLevelReplicationTests.java | 2 +- .../index/shard/IndexShardTests.java | 67 +++++--------- ...actIndicesClusterStateServiceTestCase.java | 8 ++ .../PeerRecoveryTargetServiceTests.java | 2 +- .../indices/recovery/RecoveryTests.java | 2 +- .../ESIndexLevelReplicationTestCase.java | 9 +- .../index/shard/IndexShardTestCase.java | 90 +++++++++++++++---- 8 files changed, 126 insertions(+), 73 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java index b07e22875e81f..fc08438a7d9c5 100644 --- a/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java +++ b/server/src/main/java/org/elasticsearch/index/shard/IndexShard.java @@ -413,10 +413,9 @@ public void updateShardState(final ShardRouting newRouting, if (state == IndexShardState.POST_RECOVERY && newRouting.active()) { assert currentRouting.active() == false : "we are in POST_RECOVERY, but our shard routing is active " + currentRouting; - - if (newRouting.primary() && currentRouting.isRelocationTarget() == false) { - replicationTracker.activatePrimaryMode(getLocalCheckpoint()); - } + assert currentRouting.isRelocationTarget() == false || currentRouting.primary() == false || + replicationTracker.isPrimaryMode() : + "a primary relocation is completed by the master, but primary mode is not active " + currentRouting; changeState(IndexShardState.STARTED, "global state is [" + newRouting.state() + "]"); } else if (currentRouting.primary() && currentRouting.relocating() && replicationTracker.isPrimaryMode() == false && @@ -432,7 +431,12 @@ public void updateShardState(final ShardRouting newRouting, final CountDownLatch shardStateUpdated = new CountDownLatch(1); if (newRouting.primary()) { - if (newPrimaryTerm != primaryTerm) { + if (newPrimaryTerm == primaryTerm) { + if (currentRouting.initializing() && currentRouting.isRelocationTarget() == false && newRouting.active()) { + // the master started a recovering primary, activate primary mode. + replicationTracker.activatePrimaryMode(getLocalCheckpoint()); + } + } else { assert currentRouting.primary() == false : "term is only increased as part of primary promotion"; /* Note that due to cluster state batching an initializing primary shard term can failed and re-assigned * in one state causing it's term to be incremented. Note that if both current shard state and new @@ -521,6 +525,11 @@ public void onFailure(Exception e) { } // set this last, once we finished updating all internal state. 
this.shardRouting = newRouting; + + assert this.shardRouting.primary() == false || + this.shardRouting.started() == false || // note that we use started and not active to avoid relocating shards + this.replicationTracker.isPrimaryMode() + : "a started primary must be in primary mode " + this.shardRouting; shardStateUpdated.countDown(); } if (currentRouting != null && currentRouting.active() == false && newRouting.active()) { diff --git a/server/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java b/server/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java index 018548be9629f..b05b1e5cc5ca0 100644 --- a/server/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java +++ b/server/src/test/java/org/elasticsearch/index/replication/IndexLevelReplicationTests.java @@ -363,7 +363,7 @@ public void testSeqNoCollision() throws Exception { logger.info("--> Promote replica2 as the primary"); shards.promoteReplicaToPrimary(replica2); logger.info("--> Recover replica3 from replica2"); - recoverReplica(replica3, replica2); + recoverReplica(replica3, replica2, true); try (Translog.Snapshot snapshot = getTranslog(replica3).newSnapshot()) { assertThat(snapshot.totalOperations(), equalTo(initDocs + 1)); assertThat(snapshot.next(), equalTo(op2)); diff --git a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java index 2e07ec950a572..15e6151457fa2 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/IndexShardTests.java @@ -226,6 +226,7 @@ public void testPersistenceStateMetadataPersistence() throws Exception { } public void testFailShard() throws Exception { + allowShardFailures(); IndexShard shard = newStartedShard(); final ShardPath shardPath = shard.shardPath(); assertNotNull(shardPath); @@ -309,7 +310,8 @@ public void testRejectOperationPermitWithHigherTermWhenNotStarted() throws IOExc } public void testPrimaryPromotionDelaysOperations() throws IOException, BrokenBarrierException, InterruptedException { - final IndexShard indexShard = newStartedShard(false); + final IndexShard indexShard = newShard(false); + recoveryEmptyReplica(indexShard, randomBoolean()); final int operations = scaledRandomIntBetween(1, 64); final CyclicBarrier barrier = new CyclicBarrier(1 + operations); @@ -353,20 +355,10 @@ public void onFailure(Exception e) { barrier.await(); latch.await(); - // promote the replica final ShardRouting replicaRouting = indexShard.routingEntry(); final ShardRouting primaryRouting = - newShardRouting( - replicaRouting.shardId(), - replicaRouting.currentNodeId(), - null, - true, - ShardRoutingState.STARTED, - replicaRouting.allocationId()); - indexShard.updateShardState(primaryRouting, indexShard.getPrimaryTerm() + 1, (shard, listener) -> {}, - 0L, Collections.singleton(primaryRouting.allocationId().getId()), - new IndexShardRoutingTable.Builder(primaryRouting.shardId()).addShard(primaryRouting).build(), - Collections.emptySet()); + promoteReplica(indexShard, Collections.singleton(replicaRouting.allocationId().getId()), + new IndexShardRoutingTable.Builder(replicaRouting.shardId()).addShard(replicaRouting).build()); + final int delayedOperations = scaledRandomIntBetween(1, 64); final CyclicBarrier delayedOperationsBarrier = new CyclicBarrier(1 + delayedOperations); @@ -428,8 +420,9 @@ public void onFailure(Exception e) { * 1) Internal
state (a la ReplicationTracker) has been updated * 2) Primary term is set to the new term */ - public void testPublishingOrderOnPromotion() throws IOException, BrokenBarrierException, InterruptedException { - final IndexShard indexShard = newStartedShard(false); + public void testPublishingOrderOnPromotion() throws IOException, InterruptedException, BrokenBarrierException { + final IndexShard indexShard = newShard(false); + recoveryEmptyReplica(indexShard, randomBoolean()); final long promotedTerm = indexShard.getPrimaryTerm() + 1; final CyclicBarrier barrier = new CyclicBarrier(2); final AtomicBoolean stop = new AtomicBoolean(); @@ -448,18 +441,10 @@ public void testPublishingOrderOnPromotion() throws IOException, BrokenBarrierEx }); thread.start(); - final ShardRouting replicaRouting = indexShard.routingEntry(); - final ShardRouting primaryRouting = newShardRouting(replicaRouting.shardId(), replicaRouting.currentNodeId(), null, true, - ShardRoutingState.STARTED, replicaRouting.allocationId()); - - - final Set inSyncAllocationIds = Collections.singleton(primaryRouting.allocationId().getId()); - final IndexShardRoutingTable routingTable = - new IndexShardRoutingTable.Builder(primaryRouting.shardId()).addShard(primaryRouting).build(); barrier.await(); - // promote the replica - indexShard.updateShardState(primaryRouting, promotedTerm, (shard, listener) -> {}, 0L, inSyncAllocationIds, routingTable, - Collections.emptySet()); + final ShardRouting replicaRouting = indexShard.routingEntry(); + promoteReplica(indexShard, Collections.singleton(replicaRouting.allocationId().getId()), + new IndexShardRoutingTable.Builder(replicaRouting.shardId()).addShard(replicaRouting).build()); stop.set(true); thread.join(); @@ -468,7 +453,8 @@ public void testPublishingOrderOnPromotion() throws IOException, BrokenBarrierEx public void testPrimaryFillsSeqNoGapsOnPromotion() throws Exception { - final IndexShard indexShard = newStartedShard(false); + final IndexShard indexShard = newShard(false); + recoveryEmptyReplica(indexShard, randomBoolean()); // most of the time this is large enough that most of the time there will be at least one gap final int operations = 1024 - scaledRandomIntBetween(0, 1024); @@ -479,17 +465,8 @@ public void testPrimaryFillsSeqNoGapsOnPromotion() throws Exception { // promote the replica final ShardRouting replicaRouting = indexShard.routingEntry(); - final ShardRouting primaryRouting = - newShardRouting( - replicaRouting.shardId(), - replicaRouting.currentNodeId(), - null, - true, - ShardRoutingState.STARTED, - replicaRouting.allocationId()); - indexShard.updateShardState(primaryRouting, indexShard.getPrimaryTerm() + 1, (shard, listener) -> {}, - 0L, Collections.singleton(primaryRouting.allocationId().getId()), - new IndexShardRoutingTable.Builder(primaryRouting.shardId()).addShard(primaryRouting).build(), Collections.emptySet()); + promoteReplica(indexShard, Collections.singleton(replicaRouting.allocationId().getId()), + new IndexShardRoutingTable.Builder(replicaRouting.shardId()).addShard(replicaRouting).build()); /* * This operation completing means that the delay operation executed as part of increasing the primary term has completed and the @@ -506,7 +483,7 @@ public void onResponse(Releasable releasable) { @Override public void onFailure(Exception e) { - throw new RuntimeException(e); + throw new AssertionError(e); } }, ThreadPool.Names.GENERIC, ""); @@ -846,7 +823,7 @@ public void testGlobalCheckpointSync() throws IOException { // add a replica
recoverShardFromStore(primaryShard); final IndexShard replicaShard = newShard(shardId, false); - recoverReplica(replicaShard, primaryShard); + recoverReplica(replicaShard, primaryShard, true); final int maxSeqNo = randomIntBetween(0, 128); for (int i = 0; i <= maxSeqNo; i++) { EngineTestCase.generateNewSeqNo(primaryShard.getEngine()); @@ -1625,7 +1602,7 @@ public void testPrimaryHandOffUpdatesLocalCheckpoint() throws IOException { IndexShardTestCase.updateRoutingEntry(primarySource, primarySource.routingEntry().relocate(randomAlphaOfLength(10), -1)); final IndexShard primaryTarget = newShard(primarySource.routingEntry().getTargetRelocatingShard()); updateMappings(primaryTarget, primarySource.indexSettings().getIndexMetaData()); - recoverReplica(primaryTarget, primarySource); + recoverReplica(primaryTarget, primarySource, true); // check that local checkpoint of new primary is properly tracked after primary relocation assertThat(primaryTarget.getLocalCheckpoint(), equalTo(totalOps - 1L)); @@ -2082,7 +2059,7 @@ public long indexTranslogOperations(List operations, int tot assertFalse(replica.isSyncNeeded()); return localCheckpoint; } - }, true); + }, true, true); closeShards(primary, replica); } @@ -2189,7 +2166,7 @@ public long indexTranslogOperations(List operations, int tot assertTrue(replica.isActive()); return localCheckpoint; } - }, false); + }, false, true); closeShards(primary, replica); } @@ -2241,7 +2218,7 @@ public void finalizeRecovery(long globalCheckpoint) throws IOException { super.finalizeRecovery(globalCheckpoint); assertListenerCalled.accept(replica); } - }, false); + }, false, true); closeShards(primary, replica); } diff --git a/server/src/test/java/org/elasticsearch/indices/cluster/AbstractIndicesClusterStateServiceTestCase.java b/server/src/test/java/org/elasticsearch/indices/cluster/AbstractIndicesClusterStateServiceTestCase.java index 35bbc497838f2..5c6b000f7e519 100644 --- a/server/src/test/java/org/elasticsearch/indices/cluster/AbstractIndicesClusterStateServiceTestCase.java +++ b/server/src/test/java/org/elasticsearch/indices/cluster/AbstractIndicesClusterStateServiceTestCase.java @@ -357,6 +357,14 @@ public void updateShardState(ShardRouting shardRouting, assertTrue("and active shard must stay active, current: " + this.shardRouting + ", got: " + shardRouting, shardRouting.active()); } + if (this.shardRouting.primary()) { + assertTrue("a primary shard can't be demoted", shardRouting.primary()); + } else if (shardRouting.primary()) { + // note: it's ok for a replica in post recovery to be started and promoted at once + // this can happen when the primary failed after we sent the start shard message + assertTrue("a replica can only be promoted when active. 
current: " + this.shardRouting + " new: " + shardRouting, + shardRouting.active()); + } this.shardRouting = shardRouting; if (shardRouting.primary()) { term = newPrimaryTerm; diff --git a/server/src/test/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetServiceTests.java b/server/src/test/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetServiceTests.java index 3b50fa649150c..4b1419375e6e5 100644 --- a/server/src/test/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetServiceTests.java +++ b/server/src/test/java/org/elasticsearch/indices/recovery/PeerRecoveryTargetServiceTests.java @@ -43,7 +43,7 @@ public void testGetStartingSeqNo() throws Exception { try { // Empty store { - recoveryEmptyReplica(replica); + recoveryEmptyReplica(replica, true); final RecoveryTarget recoveryTarget = new RecoveryTarget(replica, null, null, null); assertThat(PeerRecoveryTargetService.getStartingSeqNo(logger, recoveryTarget), equalTo(0L)); recoveryTarget.decRef(); diff --git a/server/src/test/java/org/elasticsearch/indices/recovery/RecoveryTests.java b/server/src/test/java/org/elasticsearch/indices/recovery/RecoveryTests.java index e7606328c7665..aaba17c315187 100644 --- a/server/src/test/java/org/elasticsearch/indices/recovery/RecoveryTests.java +++ b/server/src/test/java/org/elasticsearch/indices/recovery/RecoveryTests.java @@ -261,7 +261,7 @@ public void testPeerRecoverySendSafeCommitInFileBased() throws Exception { } IndexShard replicaShard = newShard(primaryShard.shardId(), false); updateMappings(replicaShard, primaryShard.indexSettings().getIndexMetaData()); - recoverReplica(replicaShard, primaryShard); + recoverReplica(replicaShard, primaryShard, true); List commits = DirectoryReader.listCommits(replicaShard.store().directory()); long maxSeqNo = Long.parseLong(commits.get(0).getUserData().get(SequenceNumbers.MAX_SEQ_NO)); assertThat(maxSeqNo, lessThanOrEqualTo(globalCheckpoint)); diff --git a/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java index 9de8821682216..5a5ee12065c9c 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/replication/ESIndexLevelReplicationTestCase.java @@ -265,7 +265,7 @@ public synchronized IndexShard addReplicaWithExistingPath(final ShardPath shardP RecoverySource.PeerRecoverySource.INSTANCE); final IndexShard newReplica = - newShard(shardRouting, shardPath, indexMetaData, null, getEngineFactory(shardRouting), () -> {}, EMPTY_EVENT_LISTENER); + newShard(shardRouting, shardPath, indexMetaData, null, getEngineFactory(shardRouting), () -> {}, EMPTY_EVENT_LISTENER); replicas.add(newReplica); updateAllocationIDsOnPrimary(); return newReplica; @@ -341,8 +341,11 @@ public void recoverReplica( IndexShard replica, BiFunction targetSupplier, boolean markAsRecovering) throws IOException { - ESIndexLevelReplicationTestCase.this.recoverReplica(replica, primary, targetSupplier, markAsRecovering, activeIds(), - routingTable(Function.identity())); + final IndexShardRoutingTable routingTable = routingTable(Function.identity()); + final Set inSyncIds = activeIds(); + ESIndexLevelReplicationTestCase.this.recoverUnstartedReplica(replica, primary, targetSupplier, markAsRecovering, inSyncIds, + routingTable); + ESIndexLevelReplicationTestCase.this.startReplicaAfterRecovery(replica, primary, inSyncIds, 
routingTable); } public synchronized DiscoveryNode getPrimaryNode() { diff --git a/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java b/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java index e4849be20e16e..0cbc6e44502fe 100644 --- a/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/index/shard/IndexShardTestCase.java @@ -92,8 +92,10 @@ import java.util.HashSet; import java.util.Set; import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicLong; import java.util.function.BiFunction; +import java.util.function.Consumer; import static org.elasticsearch.cluster.routing.TestShardRouting.newShardRouting; import static org.hamcrest.Matchers.contains; @@ -108,6 +110,14 @@ public abstract class IndexShardTestCase extends ESTestCase { public static final IndexEventListener EMPTY_EVENT_LISTENER = new IndexEventListener() {}; + private static final AtomicBoolean failOnShardFailures = new AtomicBoolean(true); + + private static final Consumer DEFAULT_SHARD_FAILURE_HANDLER = failure -> { + if (failOnShardFailures.get()) { + throw new AssertionError(failure.reason, failure.cause); + } + }; + protected static final PeerRecoveryTargetService.RecoveryListener recoveryListener = new PeerRecoveryTargetService.RecoveryListener() { @Override public void onRecoveryDone(RecoveryState state) { @@ -128,6 +138,7 @@ public void setUp() throws Exception { super.setUp(); threadPool = new TestThreadPool(getClass().getName(), threadPoolSettings()); primaryTerm = randomIntBetween(1, 100); // use random but fixed term for creating shards + failOnShardFailures.set(true); } @Override @@ -139,6 +150,15 @@ public void tearDown() throws Exception { } } + /** + * by default, tests will fail if any shard created by this class fails. Tests that cause failures by design + * can call this method to ignore those failures + * + */ + protected void allowShardFailures() { + failOnShardFailures.set(false); + } + public Settings threadPoolSettings() { return Settings.EMPTY; } @@ -270,7 +290,7 @@ protected IndexShard newShard(ShardRouting routing, IndexMetaData indexMetaData, /** * creates a new initializing shard. 
- * @param routing shard routing to use + * @param routing shard routing to use * @param shardPath path to use for shard data * @param indexMetaData indexMetaData for the shard, including any mapping * @param indexSearcherWrapper an optional wrapper to be used during searchers @@ -302,6 +322,7 @@ protected IndexShard newShard(ShardRouting routing, ShardPath shardPath, IndexMe engineFactory, indexEventListener, indexSearcherWrapper, threadPool, BigArrays.NON_RECYCLING_INSTANCE, warmer, Collections.emptyList(), Arrays.asList(listeners), globalCheckpointSyncer, breakerService); + indexShard.addShardFailureCallback(DEFAULT_SHARD_FAILURE_HANDLER); success = true; } finally { if (success == false) { @@ -358,7 +379,7 @@ protected IndexShard newStartedShard(boolean primary) throws IOException { if (primary) { recoverShardFromStore(shard); } else { - recoveryEmptyReplica(shard); + recoveryEmptyReplica(shard, true); } return shard; } @@ -399,11 +420,11 @@ public static void updateRoutingEntry(IndexShard shard, ShardRouting shardRoutin inSyncIds, newRoutingTable, Collections.emptySet()); } - protected void recoveryEmptyReplica(IndexShard replica) throws IOException { + protected void recoveryEmptyReplica(IndexShard replica, boolean startReplica) throws IOException { IndexShard primary = null; try { primary = newStartedShard(true); - recoverReplica(replica, primary); + recoverReplica(replica, primary, startReplica); } finally { closeShards(primary); } @@ -415,42 +436,48 @@ protected DiscoveryNode getFakeDiscoNode(String id) { } /** recovers a replica from the given primary **/ - protected void recoverReplica(IndexShard replica, IndexShard primary) throws IOException { + protected void recoverReplica(IndexShard replica, IndexShard primary, boolean startReplica) throws IOException { recoverReplica(replica, primary, (r, sourceNode) -> new RecoveryTarget(r, sourceNode, recoveryListener, version -> { }), - true); + true, true); } /** recovers a replica from the given primary **/ protected void recoverReplica(final IndexShard replica, final IndexShard primary, final BiFunction targetSupplier, - final boolean markAsRecovering) throws IOException { + final boolean markAsRecovering, final boolean markAsStarted) throws IOException { IndexShardRoutingTable.Builder newRoutingTable = new IndexShardRoutingTable.Builder(replica.shardId()); newRoutingTable.addShard(primary.routingEntry()); if (replica.routingEntry().isRelocationTarget() == false) { newRoutingTable.addShard(replica.routingEntry()); } - recoverReplica(replica, primary, targetSupplier, markAsRecovering, - Collections.singleton(primary.routingEntry().allocationId().getId()), - newRoutingTable.build()); + final Set inSyncIds = Collections.singleton(primary.routingEntry().allocationId().getId()); + final IndexShardRoutingTable routingTable = newRoutingTable.build(); + recoverUnstartedReplica(replica, primary, targetSupplier, markAsRecovering, inSyncIds, routingTable); + if (markAsStarted) { + startReplicaAfterRecovery(replica, primary, inSyncIds, routingTable); + } } /** * Recovers a replica from the given primary, allowing the user to supply a custom recovery target. A typical usage of a custom recovery * target is to assert things in the various stages of recovery. + * + * Note: this method keeps the shard in {@link IndexShardState#POST_RECOVERY} and doesn't start it.
+ * + * @param replica the recovery target shard + * @param primary the recovery source shard + * @param targetSupplier supplies an instance of {@link RecoveryTarget} + * @param markAsRecovering set to {@code false} if the replica is already marked as recovering */ - protected final void recoverReplica(final IndexShard replica, - final IndexShard primary, - final BiFunction targetSupplier, - final boolean markAsRecovering, - final Set inSyncIds, - final IndexShardRoutingTable routingTable) throws IOException { + protected final void recoverUnstartedReplica(final IndexShard replica, + final IndexShard primary, + final BiFunction targetSupplier, + final boolean markAsRecovering, + final Set inSyncIds, + final IndexShardRoutingTable routingTable) throws IOException { final DiscoveryNode pNode = getFakeDiscoNode(primary.routingEntry().currentNodeId()); final DiscoveryNode rNode = getFakeDiscoNode(replica.routingEntry().currentNodeId()); if (markAsRecovering) { @@ -478,11 +505,15 @@ protected final void recoverReplica(final IndexShard replica, request, (int) ByteSizeUnit.MB.toBytes(1), Settings.builder().put(Node.NODE_NAME_SETTING.getKey(), pNode.getName()).build()); - final ShardRouting initializingReplicaRouting = replica.routingEntry(); primary.updateShardState(primary.routingEntry(), primary.getPrimaryTerm(), null, currentClusterStateVersion.incrementAndGet(), inSyncIds, routingTable, Collections.emptySet()); recovery.recoverToTarget(); recoveryTarget.markAsDone(); + } + + protected void startReplicaAfterRecovery(IndexShard replica, IndexShard primary, Set inSyncIds, + IndexShardRoutingTable routingTable) throws IOException { + ShardRouting initializingReplicaRouting = replica.routingEntry(); IndexShardRoutingTable newRoutingTable = initializingReplicaRouting.isRelocationTarget() ?
new IndexShardRoutingTable.Builder(routingTable) @@ -502,6 +533,31 @@ protected final void recoverReplica(final IndexShard replica, currentClusterStateVersion.get(), inSyncIdsWithReplica, newRoutingTable, Collections.emptySet()); } + + /** + * promotes a replica to primary, incrementing its term and starting it if needed + */ + protected void promoteReplica(IndexShard replica, Set inSyncIds, IndexShardRoutingTable routingTable) throws IOException { + assertThat(inSyncIds, contains(replica.routingEntry().allocationId().getId())); + final ShardRouting routingEntry = newShardRouting( + replica.routingEntry().shardId(), + replica.routingEntry().currentNodeId(), + null, + true, + ShardRoutingState.STARTED, + replica.routingEntry().allocationId()); + + final IndexShardRoutingTable newRoutingTable = new IndexShardRoutingTable.Builder(routingTable) + .removeShard(replica.routingEntry()) + .addShard(routingEntry) + .build(); + replica.updateShardState(routingEntry, replica.getPrimaryTerm() + 1, + (is, listener) -> + listener.onResponse(new PrimaryReplicaSyncer.ResyncTask(1, "type", "action", "desc", null, Collections.emptyMap())), + currentClusterStateVersion.incrementAndGet(), + inSyncIds, newRoutingTable, Collections.emptySet()); + } + private Store.MetadataSnapshot getMetadataSnapshotOrEmpty(IndexShard replica) throws IOException { Store.MetadataSnapshot result; try { From 8924ac326ea5536082835f938fb4c12fdd329629 Mon Sep 17 00:00:00 2001 From: Vladimir Dolzhenko Date: Wed, 18 Jul 2018 12:18:00 +0200 Subject: [PATCH 095/107] Add EC2 credential test for repository-s3 (#31918) Relates to #26913 --- .../gradle/test/ClusterConfiguration.groovy | 7 +- .../gradle/test/ClusterFormationTasks.groovy | 29 ++- .../elasticsearch/gradle/test/NodeInfo.groovy | 10 +- plugins/repository-s3/build.gradle | 50 +++- .../repositories/s3/AmazonS3Fixture.java | 222 ++++++++++++---- .../40_repository_ec2_credentials.yml | 243 ++++++++++++++++++ 6 files changed, 483 insertions(+), 78 deletions(-) create mode 100644 plugins/repository-s3/src/test/resources/rest-api-spec/test/repository_s3/40_repository_ec2_credentials.yml diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy index 5c363ac043aff..d6477e05b15d5 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterConfiguration.groovy @@ -137,7 +137,10 @@ class ClusterConfiguration { this.project = project } - Map systemProperties = new HashMap<>() + // **Note** for systemProperties, settings, keystoreFiles etc: + // a value could be a GString that is evaluated to just a String + // there are cases when the value depends on a task that has not been executed yet at configuration stage + Map systemProperties = new HashMap<>() Map settings = new HashMap<>() @@ -157,7 +160,7 @@ class ClusterConfiguration { List dependencies = new ArrayList<>() @Input - void systemProperty(String property, String value) { + void systemProperty(String property, Object value) { systemProperties.put(property, value) } diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy index 0349130076cfc..4ede349b206d6 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy +++
b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/ClusterFormationTasks.groovy @@ -609,7 +609,6 @@ class ClusterFormationTasks { /** Adds a task to start an elasticsearch node with the given configuration */ static Task configureStartTask(String name, Project project, Task setup, NodeInfo node) { - // this closure is converted into ant nodes by groovy's AntBuilder Closure antRunner = { AntBuilder ant -> ant.exec(executable: node.executable, spawn: node.config.daemonize, dir: node.cwd, taskname: 'elasticsearch') { @@ -630,13 +629,6 @@ class ClusterFormationTasks { node.writeWrapperScript() } - // we must add debug options inside the closure so the config is read at execution time, as - // gradle task options are not processed until the end of the configuration phase - if (node.config.debug) { - println 'Running elasticsearch in debug mode, suspending until connected on port 8000' - node.env['ES_JAVA_OPTS'] = '-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=8000' - } - node.getCommandString().eachLine { line -> logger.info(line) } if (logger.isInfoEnabled() || node.config.daemonize == false) { @@ -654,6 +646,27 @@ class ClusterFormationTasks { } start.doLast(elasticsearchRunner) start.doFirst { + // Configure ES JAVA OPTS - adds system properties, assertion flags, remote debug etc + List esJavaOpts = [node.env.get('ES_JAVA_OPTS', '')] + String collectedSystemProperties = node.config.systemProperties.collect { key, value -> "-D${key}=${value}" }.join(" ") + esJavaOpts.add(collectedSystemProperties) + esJavaOpts.add(node.config.jvmArgs) + if (Boolean.parseBoolean(System.getProperty('tests.asserts', 'true'))) { + // put the enable assertions options before other options to allow + // flexibility to disable assertions for specific packages or classes + // in the cluster-specific options + esJavaOpts.add("-ea") + esJavaOpts.add("-esa") + } + // we must add debug options inside the closure so the config is read at execution time, as + // gradle task options are not processed until the end of the configuration phase + if (node.config.debug) { + println 'Running elasticsearch in debug mode, suspending until connected on port 8000' + esJavaOpts.add('-agentlib:jdwp=transport=dt_socket,server=y,suspend=y,address=8000') + } + node.env['ES_JAVA_OPTS'] = esJavaOpts.join(" ") + + // project.logger.info("Starting node in ${node.clusterName} distribution: ${node.config.distribution}") } return start diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy index 5e67dfa55cfd4..7844ea77fc18f 100644 --- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy +++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/test/NodeInfo.groovy @@ -180,15 +180,7 @@ class NodeInfo { } args.addAll("-E", "node.portsfile=true") - String collectedSystemProperties = config.systemProperties.collect { key, value -> "-D${key}=${value}" }.join(" ") - String esJavaOpts = config.jvmArgs.isEmpty() ? 
collectedSystemProperties : collectedSystemProperties + " " + config.jvmArgs - if (Boolean.parseBoolean(System.getProperty('tests.asserts', 'true'))) { - // put the enable assertions options before other options to allow - // flexibility to disable assertions for specific packages or classes - // in the cluster-specific options - esJavaOpts = String.join(" ", "-ea", "-esa", esJavaOpts) - } - env = ['ES_JAVA_OPTS': esJavaOpts] + env = [:] for (Map.Entry property : System.properties.entrySet()) { if (property.key.startsWith('tests.es.')) { args.add("-E") diff --git a/plugins/repository-s3/build.gradle b/plugins/repository-s3/build.gradle index 5af0a412b4cc0..225d523817e7d 100644 --- a/plugins/repository-s3/build.gradle +++ b/plugins/repository-s3/build.gradle @@ -89,18 +89,26 @@ String s3TemporarySessionToken = System.getenv("amazon_s3_session_token_temporar String s3TemporaryBucket = System.getenv("amazon_s3_bucket_temporary") String s3TemporaryBasePath = System.getenv("amazon_s3_base_path_temporary") +String s3EC2Bucket = System.getenv("amazon_s3_bucket_ec2") +String s3EC2BasePath = System.getenv("amazon_s3_base_path_ec2") + // If all these variables are missing then we are testing against the internal fixture instead, which has the following // credentials hard-coded in. -if (!s3PermanentAccessKey && !s3PermanentSecretKey && !s3PermanentBucket && !s3PermanentBasePath) { +if (!s3PermanentAccessKey && !s3PermanentSecretKey && !s3PermanentBucket && !s3PermanentBasePath + && !s3EC2Bucket && !s3EC2BasePath) { s3PermanentAccessKey = 's3_integration_test_permanent_access_key' s3PermanentSecretKey = 's3_integration_test_permanent_secret_key' s3PermanentBucket = 'permanent-bucket-test' s3PermanentBasePath = 'integration_test' + s3EC2Bucket = 'ec2-bucket-test' + s3EC2BasePath = 'integration_test' + useFixture = true -} else if (!s3PermanentAccessKey || !s3PermanentSecretKey || !s3PermanentBucket || !s3PermanentBasePath) { +} else if (!s3PermanentAccessKey || !s3PermanentSecretKey || !s3PermanentBucket || !s3PermanentBasePath + || !s3EC2Bucket || !s3EC2BasePath) { throw new IllegalArgumentException("not all options specified to run against external S3 service") } @@ -274,24 +282,52 @@ if (useFixture && minioDistribution) { integTestMinioRunner.dependsOn(startMinio) integTestMinioRunner.finalizedBy(stopMinio) // Minio only supports a single access key, see https://github.com/minio/minio/pull/5968 - integTestMinioRunner.systemProperty 'tests.rest.blacklist', 'repository_s3/30_repository_temporary_credentials/*' + integTestMinioRunner.systemProperty 'tests.rest.blacklist', [ + 'repository_s3/30_repository_temporary_credentials/*', + 'repository_s3/40_repository_ec2_credentials/*' + ].join(",") project.check.dependsOn(integTestMinio) } +File parentFixtures = new File(project.buildDir, "fixtures") +File s3FixtureFile = new File(parentFixtures, 's3Fixture.properties') + +task s3FixtureProperties { + outputs.file(s3FixtureFile) + def s3FixtureOptions = [ + "tests.seed" : project.testSeed, + "s3Fixture.permanent_bucket_name" : s3PermanentBucket, + "s3Fixture.permanent_key" : s3PermanentAccessKey, + "s3Fixture.temporary_bucket_name" : s3TemporaryBucket, + "s3Fixture.temporary_key" : s3TemporaryAccessKey, + "s3Fixture.temporary_session_token": s3TemporarySessionToken, + "s3Fixture.ec2_bucket_name" : s3EC2Bucket + ] + + doLast { + file(s3FixtureFile).text = s3FixtureOptions.collect { k, v -> "$k = $v" }.join("\n") + } +} + /** A task to start the AmazonS3Fixture which emulates an S3 service **/ task 
s3Fixture(type: AntFixture) { dependsOn testClasses + dependsOn s3FixtureProperties + inputs.file(s3FixtureFile) + env 'CLASSPATH', "${ -> project.sourceSets.test.runtimeClasspath.asPath }" executable = new File(project.runtimeJavaHome, 'bin/java') - args 'org.elasticsearch.repositories.s3.AmazonS3Fixture', baseDir, s3PermanentBucket, s3TemporaryBucket + args 'org.elasticsearch.repositories.s3.AmazonS3Fixture', baseDir, s3FixtureFile.getAbsolutePath() } Map expansions = [ 'permanent_bucket': s3PermanentBucket, 'permanent_base_path': s3PermanentBasePath, 'temporary_bucket': s3TemporaryBucket, - 'temporary_base_path': s3TemporaryBasePath + 'temporary_base_path': s3TemporaryBasePath, + 'ec2_bucket': s3EC2Bucket, + 'ec2_base_path': s3EC2BasePath ] processTestResources { @@ -319,6 +355,10 @@ integTestCluster { /* Use a closure on the string to delay evaluation until tests are executed */ setting 's3.client.integration_test_permanent.endpoint', "http://${-> s3Fixture.addressAndPort}" setting 's3.client.integration_test_temporary.endpoint', "http://${-> s3Fixture.addressAndPort}" + setting 's3.client.integration_test_ec2.endpoint', "http://${-> s3Fixture.addressAndPort}" + + // to redirect InstanceProfileCredentialsProvider to custom auth point + systemProperty "com.amazonaws.sdk.ec2MetadataServiceEndpointOverride", "http://${-> s3Fixture.addressAndPort}" } else { println "Using an external service to test the repository-s3 plugin" } diff --git a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AmazonS3Fixture.java b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AmazonS3Fixture.java index 9b38669da2563..ce6c472314999 100644 --- a/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AmazonS3Fixture.java +++ b/plugins/repository-s3/src/test/java/org/elasticsearch/repositories/s3/AmazonS3Fixture.java @@ -18,6 +18,14 @@ */ package org.elasticsearch.repositories.s3; +import org.apache.http.client.methods.HttpDelete; +import org.apache.http.client.methods.HttpGet; +import org.apache.http.client.methods.HttpHead; +import org.apache.http.client.methods.HttpPost; +import org.apache.http.client.methods.HttpPut; +import org.elasticsearch.common.TriFunction; +import org.elasticsearch.common.io.PathUtils; +import org.elasticsearch.test.fixture.AbstractHttpFixture; import com.amazonaws.util.DateUtils; import org.elasticsearch.common.Strings; @@ -26,20 +34,26 @@ import org.elasticsearch.common.util.concurrent.ConcurrentCollections; import org.elasticsearch.rest.RestStatus; import org.elasticsearch.rest.RestUtils; -import org.elasticsearch.test.fixture.AbstractHttpFixture; import java.io.BufferedInputStream; import java.io.ByteArrayInputStream; import java.io.IOException; +import java.io.InputStream; import java.io.InputStreamReader; +import java.nio.file.Files; import java.util.ArrayList; import java.util.Date; import java.util.HashMap; import java.util.List; import java.util.Map; -import java.util.Objects; +import java.util.Properties; +import java.util.Random; +import java.util.concurrent.TimeUnit; +import static com.carrotsearch.randomizedtesting.generators.RandomStrings.randomAsciiAlphanumOfLength; +import static com.carrotsearch.randomizedtesting.generators.RandomStrings.randomAsciiAlphanumOfLengthBetween; import static java.nio.charset.StandardCharsets.UTF_8; +import static java.util.Objects.requireNonNull; /** * {@link AmazonS3Fixture} emulates an AWS S3 service * The implementation is based on official documentation
available at https://docs.aws.amazon.com/AmazonS3/latest/API/. */ public class AmazonS3Fixture extends AbstractHttpFixture { + private static final String AUTH = "AUTH"; + private static final String NON_AUTH = "NON_AUTH"; + + private static final String EC2_PROFILE = "ec2Profile"; + + private final Properties properties; + private final Random random; /** List of the buckets stored on this test server **/ private final Map buckets = ConcurrentCollections.newConcurrentMap(); /** Request handlers for the requests made by the S3 client **/ private final PathTrie handlers; - private final String permanentBucketName; - private final String temporaryBucketName; /** * Creates a {@link AmazonS3Fixture} */ - private AmazonS3Fixture(final String workingDir, final String permanentBucketName, final String temporaryBucketName) { + private AmazonS3Fixture(final String workingDir, Properties properties) { super(workingDir); - this.permanentBucketName = permanentBucketName; - this.temporaryBucketName = temporaryBucketName; + this.properties = properties; + this.random = new Random(Long.parseUnsignedLong(requireNonNull(properties.getProperty("tests.seed")), 16)); + + new Bucket("s3Fixture.permanent", false); + new Bucket("s3Fixture.temporary", true); + final Bucket ec2Bucket = new Bucket("s3Fixture.ec2", + randomAsciiAlphanumOfLength(random, 10), randomAsciiAlphanumOfLength(random, 10)); + + this.handlers = defaultHandlers(buckets, ec2Bucket); + } + + private static String nonAuthPath(Request request) { + return nonAuthPath(request.getMethod(), request.getPath()); + } + + private static String nonAuthPath(String method, String path) { + return NON_AUTH + " " + method + " " + path; + } + + private static String authPath(Request request) { + return authPath(request.getMethod(), request.getPath()); + } - this.buckets.put(permanentBucketName, new Bucket(permanentBucketName)); - this.buckets.put(temporaryBucketName, new Bucket(temporaryBucketName)); - this.handlers = defaultHandlers(buckets); + private static String authPath(String method, String path) { + return AUTH + " " + method + " " + path; } @Override protected Response handle(final Request request) throws IOException { - final RequestHandler handler = handlers.retrieve(request.getMethod() + " " + request.getPath(), request.getParameters()); + final String nonAuthorizedPath = nonAuthPath(request); + final RequestHandler nonAuthorizedHandler = handlers.retrieve(nonAuthorizedPath, request.getParameters()); + if (nonAuthorizedHandler != null) { + return nonAuthorizedHandler.handle(request); + } + + final String authorizedPath = authPath(request); + final RequestHandler handler = handlers.retrieve(authorizedPath, request.getParameters()); if (handler != null) { - final String authorization = request.getHeader("Authorization"); - final String permittedBucket; - if (authorization.contains("s3_integration_test_permanent_access_key")) { - final String sessionToken = request.getHeader("x-amz-security-token"); + final String bucketName = request.getParam("bucket"); + if (bucketName == null) { + return newError(request.getId(), RestStatus.FORBIDDEN, "AccessDenied", "Bad access key", ""); + } + final Bucket bucket = buckets.get(bucketName); + if (bucket == null) { + return newBucketNotFoundError(request.getId(), bucketName); + } + final Response authResponse = authenticateBucket(request, bucket); + if (authResponse != null) { + return authResponse; + } + + return handler.handle(request); + + } else { + return newInternalError(request.getId(), "No handler defined 
for request [" + request + "]"); + } + } + + private Response authenticateBucket(Request request, Bucket bucket) { + final String authorization = request.getHeader("Authorization"); + if (authorization == null) { + return newError(request.getId(), RestStatus.FORBIDDEN, "AccessDenied", "Bad access key", ""); + } + if (authorization.contains(bucket.key)) { + final String sessionToken = request.getHeader("x-amz-security-token"); + if (bucket.token == null) { if (sessionToken != null) { return newError(request.getId(), RestStatus.FORBIDDEN, "AccessDenied", "Unexpected session token", ""); } - permittedBucket = permanentBucketName; - } else if (authorization.contains("s3_integration_test_temporary_access_key")) { - final String sessionToken = request.getHeader("x-amz-security-token"); + } else { if (sessionToken == null) { return newError(request.getId(), RestStatus.FORBIDDEN, "AccessDenied", "No session token", ""); } - if (sessionToken.equals("s3_integration_test_temporary_session_token") == false) { + if (sessionToken.equals(bucket.token) == false) { return newError(request.getId(), RestStatus.FORBIDDEN, "AccessDenied", "Bad session token", ""); } - permittedBucket = temporaryBucketName; - } else { - return newError(request.getId(), RestStatus.FORBIDDEN, "AccessDenied", "Bad access key", ""); - } - - final String bucket = request.getParam("bucket"); - if (bucket != null && permittedBucket.equals(bucket) == false) { - // allow a null bucket to support the multi-object-delete API which - // passes the bucket name in the host header instead of the URL. - if (buckets.containsKey(bucket)) { - return newError(request.getId(), RestStatus.FORBIDDEN, "AccessDenied", "Bad bucket", ""); - } else { - return newBucketNotFoundError(request.getId(), bucket); - } } - return handler.handle(request); - - } else { - return newInternalError(request.getId(), "No handler defined for request [" + request + "]"); } + return null; } public static void main(final String[] args) throws Exception { - if (args == null || args.length != 3) { - throw new IllegalArgumentException( - "AmazonS3Fixture "); + if (args == null || args.length != 2) { + throw new IllegalArgumentException("AmazonS3Fixture "); } - - final AmazonS3Fixture fixture = new AmazonS3Fixture(args[0], args[1], args[2]); + final Properties properties = new Properties(); + try (InputStream is = Files.newInputStream(PathUtils.get(args[1]))) { + properties.load(is); + } + final AmazonS3Fixture fixture = new AmazonS3Fixture(args[0], properties); fixture.listen(); } /** Builds the default request handlers **/ - private static PathTrie defaultHandlers(final Map buckets) { + private PathTrie defaultHandlers(final Map buckets, final Bucket ec2Bucket) { final PathTrie handlers = new PathTrie<>(RestUtils.REST_DECODER); // HEAD Object // // https://docs.aws.amazon.com/AmazonS3/latest/API/RESTObjectHEAD.html - objectsPaths("HEAD /{bucket}").forEach(path -> + objectsPaths(authPath(HttpHead.METHOD_NAME, "/{bucket}")).forEach(path -> handlers.insert(path, (request) -> { final String bucketName = request.getParam("bucket"); @@ -150,7 +202,7 @@ private static PathTrie defaultHandlers(final Map + objectsPaths(authPath(HttpPut.METHOD_NAME, "/{bucket}")).forEach(path -> handlers.insert(path, (request) -> { final String destBucketName = request.getParam("bucket"); @@ -200,7 +252,7 @@ private static PathTrie defaultHandlers(final Map + objectsPaths(authPath(HttpDelete.METHOD_NAME, "/{bucket}")).forEach(path -> handlers.insert(path, (request) -> { final String bucketName = 
request.getParam("bucket"); @@ -218,7 +270,7 @@ private static PathTrie defaultHandlers(final Map + objectsPaths(authPath(HttpGet.METHOD_NAME, "/{bucket}")).forEach(path -> handlers.insert(path, (request) -> { final String bucketName = request.getParam("bucket"); @@ -239,7 +291,7 @@ private static PathTrie defaultHandlers(final Map { + handlers.insert(authPath(HttpHead.METHOD_NAME, "/{bucket}"), (request) -> { String bucket = request.getParam("bucket"); if (Strings.hasText(bucket) && buckets.containsKey(bucket)) { return new Response(RestStatus.OK.getStatus(), TEXT_PLAIN_CONTENT_TYPE, EMPTY_BYTE); @@ -251,7 +303,7 @@ private static PathTrie defaultHandlers(final Map { + handlers.insert(authPath(HttpGet.METHOD_NAME, "/{bucket}/"), (request) -> { final String bucketName = request.getParam("bucket"); final Bucket bucket = buckets.get(bucketName); @@ -269,7 +321,7 @@ private static PathTrie defaultHandlers(final Map { + handlers.insert(nonAuthPath(HttpPost.METHOD_NAME, "/"), (request) -> { final List deletes = new ArrayList<>(); final List errors = new ArrayList<>(); @@ -292,7 +344,12 @@ private static PathTrie defaultHandlers(final Map defaultHandlers(final Map credentialResponseFunction = (profileName, key, token) -> { + final Date expiration = new Date(new Date().getTime() + TimeUnit.DAYS.toMillis(1)); + final String response = "{" + + "\"AccessKeyId\": \"" + key + "\"," + + "\"Expiration\": \"" + DateUtils.formatISO8601Date(expiration) + "\"," + + "\"RoleArn\": \"" + randomAsciiAlphanumOfLengthBetween(random, 1, 20) + "\"," + + "\"SecretAccessKey\": \"" + randomAsciiAlphanumOfLengthBetween(random, 1, 20) + "\"," + + "\"Token\": \"" + token + "\"" + + "}"; + + final Map headers = new HashMap<>(contentType("application/json")); + return new Response(RestStatus.OK.getStatus(), headers, response.getBytes(UTF_8)); + }; + + // GET + // + // http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/iam-roles-for-amazon-ec2.html + handlers.insert(nonAuthPath(HttpGet.METHOD_NAME, "/latest/meta-data/iam/security-credentials/"), (request) -> { + final String response = EC2_PROFILE; + + final Map headers = new HashMap<>(contentType("text/plain")); + return new Response(RestStatus.OK.getStatus(), headers, response.getBytes(UTF_8)); + }); + + // GET + // + // http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/iam-roles-for-amazon-ec2.html + handlers.insert(nonAuthPath(HttpGet.METHOD_NAME, "/latest/meta-data/iam/security-credentials/{profileName}"), (request) -> { + final String profileName = request.getParam("profileName"); + if (EC2_PROFILE.equals(profileName) == false) { + return new Response(RestStatus.NOT_FOUND.getStatus(), new HashMap<>(), "unknown credentials".getBytes(UTF_8)); + } + return credentialResponseFunction.apply(profileName, ec2Bucket.key, ec2Bucket.token); + }); + return handlers; } + private static String prop(Properties properties, String propertyName) { + return requireNonNull(properties.getProperty(propertyName), + "property '" + propertyName + "' is missing"); + } + /** * Represents a S3 bucket. */ - static class Bucket { + class Bucket { /** Bucket name **/ final String name; + final String key; + + final String token; + /** Blobs contained in the bucket **/ final Map objects; - Bucket(final String name) { - this.name = Objects.requireNonNull(name); + private Bucket(final String prefix, final boolean tokenRequired) { + this(prefix, prop(properties, prefix + "_key"), + tokenRequired ? 
prop(properties, prefix + "_session_token") : null); + } + + private Bucket(final String prefix, final String key, final String token) { + this.name = prop(properties, prefix + "_bucket_name"); + this.key = key; + this.token = token; + this.objects = ConcurrentCollections.newConcurrentMap(); + if (buckets.put(name, this) != null) { + throw new IllegalArgumentException("bucket " + name + " is already registered"); + } } } diff --git a/plugins/repository-s3/src/test/resources/rest-api-spec/test/repository_s3/40_repository_ec2_credentials.yml b/plugins/repository-s3/src/test/resources/rest-api-spec/test/repository_s3/40_repository_ec2_credentials.yml new file mode 100644 index 0000000000000..2df3b8290a19b --- /dev/null +++ b/plugins/repository-s3/src/test/resources/rest-api-spec/test/repository_s3/40_repository_ec2_credentials.yml @@ -0,0 +1,243 @@ +# Integration tests for repository-s3 + +--- +setup: + + # Register repository with ec2 credentials + - do: + snapshot.create_repository: + repository: repository_ec2 + body: + type: s3 + settings: + bucket: ${ec2_bucket} + client: integration_test_ec2 + base_path: ${ec2_base_path} + canned_acl: private + storage_class: standard + +--- +"Snapshot and Restore with repository-s3 using ec2 credentials": + + # Get repository + - do: + snapshot.get_repository: + repository: repository_ec2 + + - match: { repository_ec2.settings.bucket : ${ec2_bucket} } + - match: { repository_ec2.settings.client : "integration_test_ec2" } + - match: { repository_ec2.settings.base_path : ${ec2_base_path} } + - match: { repository_ec2.settings.canned_acl : "private" } + - match: { repository_ec2.settings.storage_class : "standard" } + - is_false: repository_ec2.settings.access_key + - is_false: repository_ec2.settings.secret_key + - is_false: repository_ec2.settings.session_token + + # Index documents + - do: + bulk: + refresh: true + body: + - index: + _index: docs + _type: doc + _id: 1 + - snapshot: one + - index: + _index: docs + _type: doc + _id: 2 + - snapshot: one + - index: + _index: docs + _type: doc + _id: 3 + - snapshot: one + + - do: + count: + index: docs + + - match: {count: 3} + + # Create a first snapshot + - do: + snapshot.create: + repository: repository_ec2 + snapshot: snapshot-one + wait_for_completion: true + + - match: { snapshot.snapshot: snapshot-one } + - match: { snapshot.state : SUCCESS } + - match: { snapshot.include_global_state: true } + - match: { snapshot.shards.failed : 0 } + + - do: + snapshot.status: + repository: repository_ec2 + snapshot: snapshot-one + + - is_true: snapshots + - match: { snapshots.0.snapshot: snapshot-one } + - match: { snapshots.0.state : SUCCESS } + + # Index more documents + - do: + bulk: + refresh: true + body: + - index: + _index: docs + _type: doc + _id: 4 + - snapshot: two + - index: + _index: docs + _type: doc + _id: 5 + - snapshot: two + - index: + _index: docs + _type: doc + _id: 6 + - snapshot: two + - index: + _index: docs + _type: doc + _id: 7 + - snapshot: two + + - do: + count: + index: docs + + - match: {count: 7} + + # Create a second snapshot + - do: + snapshot.create: + repository: repository_ec2 + snapshot: snapshot-two + wait_for_completion: true + + - match: { snapshot.snapshot: snapshot-two } + - match: { snapshot.state : SUCCESS } + - match: { snapshot.shards.failed : 0 } + + - do: + snapshot.get: + repository: repository_ec2 + snapshot: snapshot-one,snapshot-two + + - is_true: snapshots + - match: { snapshots.0.state : SUCCESS } + - match: { snapshots.1.state : SUCCESS } + + # Delete the index 
+ - do: + indices.delete: + index: docs + + # Restore the second snapshot + - do: + snapshot.restore: + repository: repository_ec2 + snapshot: snapshot-two + wait_for_completion: true + + - do: + count: + index: docs + + - match: {count: 7} + + # Delete the index again + - do: + indices.delete: + index: docs + + # Restore the first snapshot + - do: + snapshot.restore: + repository: repository_ec2 + snapshot: snapshot-one + wait_for_completion: true + + - do: + count: + index: docs + + - match: {count: 3} + + # Remove the snapshots + - do: + snapshot.delete: + repository: repository_ec2 + snapshot: snapshot-two + + - do: + snapshot.delete: + repository: repository_ec2 + snapshot: snapshot-one + +--- +"Register a repository with a non existing bucket": + + - do: + catch: /repository_exception/ + snapshot.create_repository: + repository: repository_ec2 + body: + type: s3 + settings: + bucket: zHHkfSqlbnBsbpSgvCYtxrEfFLqghXtyPvvvKPNBnRCicNHQLE + client: integration_test_temporary + +--- +"Register a repository with a non existing client": + + - do: + catch: /repository_exception/ + snapshot.create_repository: + repository: repository_ec2 + body: + type: s3 + settings: + bucket: repository_ec2 + client: unknown + +--- +"Get a non existing snapshot": + + - do: + catch: /snapshot_missing_exception/ + snapshot.get: + repository: repository_ec2 + snapshot: missing + +--- +"Delete a non existing snapshot": + + - do: + catch: /snapshot_missing_exception/ + snapshot.delete: + repository: repository_ec2 + snapshot: missing + +--- +"Restore a non existing snapshot": + + - do: + catch: /snapshot_restore_exception/ + snapshot.restore: + repository: repository_ec2 + snapshot: missing + wait_for_completion: true + +--- +teardown: + + # Remove our repository + - do: + snapshot.delete_repository: + repository: repository_ec2 From b79dc6adb8850e054f72a566265c0dd126c714ba Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Wed, 18 Jul 2018 12:42:07 +0200 Subject: [PATCH 096/107] Add more contexts to painless execute api (#30511) This change adds two contexts to execute scripts against: * SEARCH_SCRIPT: Allows running scripts in a search script context. This context is used in `function_score` query's script function, script fields, script sorting and `terms_set` query. * FILTER_SCRIPT: Allows running scripts in a filter script context. This context is used in the `script` query. In both contexts an index name and a sample document need to be specified. The document is needed to create an in-memory index that the script can access via the `doc[...]` and other notations. The index name is needed because a mapping is needed to index the document. Examples: ``` POST /_scripts/painless/_execute { "script": { "source": "doc['field'].value.length()" }, "context" : { "search_script": { "document": { "field": "four" }, "index": "my-index" } } } ``` Returns: ``` { "result": 4 } ``` ``` POST /_scripts/painless/_execute { "script": { "source": "doc['field'].value.length() <= params.max_length", "params": { "max_length": 4 } }, "context" : { "filter_script": { "document": { "field": "four" }, "index": "my-index" } } } ``` Returns: ``` { "result": true } ``` Also changed PainlessExecuteAction.TransportAction to use TransportSingleShardAction instead of HandledAction, because now, when the score or filter context is used, the request needs to be redirected to a node that has an active IndexService for the index being referenced (a node with a shard copy for that index).
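The heart of the two new contexts, indexing the sample document into a throwaway in-memory Lucene index that the script then reads, can be shown in isolation. Below is a hedged sketch using plain Lucene (the class name and field values are illustrative; the real wiring, including applying the index's actual mapping, lives in PainlessExecuteAction in this change):

```java
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.document.StringField;
import org.apache.lucene.index.DirectoryReader;
import org.apache.lucene.index.IndexWriter;
import org.apache.lucene.index.IndexWriterConfig;
import org.apache.lucene.store.RAMDirectory;

/** Illustrative only: index one sample document in memory and open a reader over it. */
public class InMemoryDocSketch {
    public static void main(String[] args) throws Exception {
        try (RAMDirectory directory = new RAMDirectory();
             IndexWriter writer = new IndexWriter(directory, new IndexWriterConfig(new StandardAnalyzer()))) {
            Document doc = new Document();
            // stands in for the user-supplied sample document from the request body
            doc.add(new StringField("field", "four", Field.Store.NO));
            writer.addDocument(doc);
            writer.commit();
            try (DirectoryReader reader = DirectoryReader.open(writer)) {
                // a filter or score script would now run against reader.leaves().get(0)
                System.out.println("docs visible to the script: " + reader.numDocs());
            }
        }
    }
}
```

Because the document must be indexed with the target index's real mapping, the request is routed to a node that holds a shard copy of that index, which is what the switch to TransportSingleShardAction provides.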
--- .../painless/painless-execute-script.asciidoc | 133 +++++- .../painless/PainlessExecuteAction.java | 439 +++++++++++++++--- .../painless/PainlessExecuteApiTests.java | 113 +++++ .../painless/PainlessExecuteRequestTests.java | 44 +- .../painless/70_execute_painless_scripts.yml | 52 ++- 5 files changed, 694 insertions(+), 87 deletions(-) create mode 100644 modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessExecuteApiTests.java diff --git a/docs/painless/painless-execute-script.asciidoc b/docs/painless/painless-execute-script.asciidoc index a3ac5b578d781..2aca959778699 100644 --- a/docs/painless/painless-execute-script.asciidoc +++ b/docs/painless/painless-execute-script.asciidoc @@ -9,23 +9,24 @@ The Painless execute API allows an arbitrary script to be executed and a result .Parameters [options="header"] |====== -| Name | Required | Default | Description -| `script` | yes | - | The script to execute -| `context` | no | `painless_test` | The context the script should be executed in. +| Name | Required | Default | Description +| `script` | yes | - | The script to execute +| `context` | no | `painless_test` | The context the script should be executed in. +| `context_setup` | no | - | Additional parameters to the context. |====== ==== Contexts Contexts control how scripts are executed, what variables are available at runtime and what the return type is. -===== Painless test script context +===== Painless test context The `painless_test` context executes scripts as is and does not add any special parameters. The only variable that is available is `params`, which can be used to access user-defined values. The result of the script is always converted to a string. If no context is specified then this context is used by default. -==== Example +====== Example Request: @@ -52,4 +53,124 @@ Response: "result": "0.1" } -------------------------------------------------- -// TESTRESPONSE \ No newline at end of file +// TESTRESPONSE + +===== Filter context + +The `filter` context executes scripts as if they were executed inside a `script` query. +For testing purposes, a document must be provided that will be indexed temporarily in-memory and +is accessible to the script being tested. Because of this, the _source, stored fields and doc values +are available in the script being tested. + +The following parameters may be specified in `context_setup` for a filter context: + +document:: Contains the document that will be temporarily indexed in-memory and is accessible from the script. +index:: The name of an index containing a mapping that is compatible with the document being indexed. + +====== Example + +[source,js] +---------------------------------------------------------------- +PUT /my-index +{ + "mappings": { + "_doc": { + "properties": { + "field": { + "type": "keyword" + } + } + } + } +} + +POST /_scripts/painless/_execute +{ + "script": { + "source": "doc['field'].value.length() <= params.max_length", + "params": { + "max_length": 4 + } + }, + "context": "filter", + "context_setup": { + "index": "my-index", + "document": { + "field": "four" + } + } +} +---------------------------------------------------------------- +// CONSOLE + +Response: + +[source,js] +-------------------------------------------------- +{ + "result": true +} +-------------------------------------------------- +// TESTRESPONSE + + +===== Score context + +The `score` context executes scripts as if they were executed inside a `script_score` function in +`function_score` query.
+ +The following parameters may be specified in `context_setup` for a score context: + +document:: Contains the document that will be temporarily indexed in-memory and is accessible from the script. +index:: The name of an index containing a mapping that is compatible with the document being indexed. +query:: If `_score` is used in the script then a query can be specified that will be used to compute a score. + +====== Example + +[source,js] +---------------------------------------------------------------- +PUT /my-index +{ + "mappings": { + "_doc": { + "properties": { + "field": { + "type": "keyword" + }, + "rank": { + "type": "long" + } + } + } + } +} + + +POST /_scripts/painless/_execute +{ + "script": { + "source": "doc['rank'].value / params.max_rank", + "params": { + "max_rank": 5.0 + } + }, + "context": "score", + "context_setup": { + "index": "my-index", + "document": { + "rank": 4 + } + } +} +---------------------------------------------------------------- +// CONSOLE + +Response: + +[source,js] +-------------------------------------------------- +{ + "result": 0.8 +} +-------------------------------------------------- +// TESTRESPONSE diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java index 01139f6cf2e70..229c919a2e65d 100644 --- a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java @@ -18,41 +18,75 @@ */ package org.elasticsearch.painless; +import org.apache.lucene.analysis.Analyzer; +import org.apache.lucene.index.DirectoryReader; +import org.apache.lucene.index.IndexReader; +import org.apache.lucene.index.IndexWriter; +import org.apache.lucene.index.IndexWriterConfig; +import org.apache.lucene.index.LeafReaderContext; +import org.apache.lucene.search.IndexSearcher; +import org.apache.lucene.search.Query; +import org.apache.lucene.search.Scorer; +import org.apache.lucene.search.Weight; +import org.apache.lucene.store.RAMDirectory; +import org.elasticsearch.Version; import org.elasticsearch.action.Action; -import org.elasticsearch.action.ActionListener; -import org.elasticsearch.action.ActionRequest; -import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.action.support.ActionFilters; -import org.elasticsearch.action.support.HandledTransportAction; -import org.elasticsearch.client.ElasticsearchClient; +import org.elasticsearch.action.support.IndicesOptions; +import org.elasticsearch.action.support.single.shard.SingleShardRequest; +import org.elasticsearch.action.support.single.shard.TransportSingleShardAction; import org.elasticsearch.client.node.NodeClient; +import org.elasticsearch.cluster.ClusterState; +import org.elasticsearch.cluster.block.ClusterBlockException; +import org.elasticsearch.cluster.metadata.IndexNameExpressionResolver; +import org.elasticsearch.cluster.routing.ShardsIterator; +import org.elasticsearch.cluster.service.ClusterService; +import org.elasticsearch.common.CheckedBiFunction; import org.elasticsearch.common.ParseField; +import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.inject.Inject; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; +import
org.elasticsearch.common.io.stream.Writeable; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.ConstructingObjectParser; +import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; +import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.Index; +import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.mapper.ParsedDocument; +import org.elasticsearch.index.mapper.SourceToParse; +import org.elasticsearch.index.query.AbstractQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.index.query.QueryShardContext; +import org.elasticsearch.index.shard.ShardId; +import org.elasticsearch.indices.IndicesService; import org.elasticsearch.rest.BaseRestHandler; import org.elasticsearch.rest.BytesRestResponse; import org.elasticsearch.rest.RestController; import org.elasticsearch.rest.RestRequest; import org.elasticsearch.rest.RestResponse; import org.elasticsearch.rest.action.RestBuilderListener; +import org.elasticsearch.script.FilterScript; +import org.elasticsearch.script.ScoreScript; import org.elasticsearch.script.Script; import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptService; import org.elasticsearch.script.ScriptType; -import org.elasticsearch.tasks.Task; +import org.elasticsearch.threadpool.ThreadPool; import org.elasticsearch.transport.TransportService; import java.io.IOException; -import java.util.Locale; +import java.util.Collections; +import java.util.HashMap; import java.util.Map; import java.util.Objects; @@ -75,40 +109,181 @@ public Response newResponse() { return new Response(); } - public static class Request extends ActionRequest implements ToXContent { + public static class Request extends SingleShardRequest implements ToXContent { private static final ParseField SCRIPT_FIELD = new ParseField("script"); private static final ParseField CONTEXT_FIELD = new ParseField("context"); + private static final ParseField CONTEXT_SETUP_FIELD = new ParseField("context_setup"); private static final ConstructingObjectParser PARSER = new ConstructingObjectParser<>( - "painless_execute_request", args -> new Request((Script) args[0], (SupportedContext) args[1])); + "painless_execute_request", args -> new Request((Script) args[0], (String) args[1], (ContextSetup) args[2])); static { PARSER.declareObject(ConstructingObjectParser.constructorArg(), (p, c) -> Script.parse(p), SCRIPT_FIELD); - PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), (p, c) -> { - // For now only accept an empty json object: - XContentParser.Token token = p.nextToken(); - assert token == XContentParser.Token.FIELD_NAME; - String contextType = p.currentName(); - token = p.nextToken(); - assert token == XContentParser.Token.START_OBJECT; - token = p.nextToken(); - assert token == XContentParser.Token.END_OBJECT; - token = p.nextToken(); - assert token == XContentParser.Token.END_OBJECT; - return SupportedContext.valueOf(contextType.toUpperCase(Locale.ROOT)); - }, CONTEXT_FIELD); + PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), CONTEXT_FIELD); + 
PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), ContextSetup::parse, CONTEXT_SETUP_FIELD); + } + + static final Map> SUPPORTED_CONTEXTS; + + static { + Map> supportedContexts = new HashMap<>(); + supportedContexts.put("painless_test", PainlessTestScript.CONTEXT); + supportedContexts.put("filter", FilterScript.CONTEXT); + supportedContexts.put("score", ScoreScript.CONTEXT); + SUPPORTED_CONTEXTS = Collections.unmodifiableMap(supportedContexts); + } + + static ScriptContext fromScriptContextName(String name) { + ScriptContext scriptContext = SUPPORTED_CONTEXTS.get(name); + if (scriptContext == null) { + throw new UnsupportedOperationException("unsupported script context name [" + name + "]"); + } + return scriptContext; + } + + static class ContextSetup implements Writeable, ToXContentObject { + + private static final ParseField INDEX_FIELD = new ParseField("index"); + private static final ParseField DOCUMENT_FIELD = new ParseField("document"); + private static final ParseField QUERY_FIELD = new ParseField("query"); + private static final ConstructingObjectParser PARSER = + new ConstructingObjectParser<>("execute_script_context", + args -> new ContextSetup((String) args[0], (BytesReference) args[1], (QueryBuilder) args[2])); + + static { + PARSER.declareString(ConstructingObjectParser.optionalConstructorArg(), INDEX_FIELD); + PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), (p, c) -> { + try (XContentBuilder b = XContentBuilder.builder(p.contentType().xContent())) { + b.copyCurrentStructure(p); + return BytesReference.bytes(b); + } + }, DOCUMENT_FIELD); + PARSER.declareObject(ConstructingObjectParser.optionalConstructorArg(), (p, c) -> + AbstractQueryBuilder.parseInnerQueryBuilder(p), QUERY_FIELD); + } + + private final String index; + private final BytesReference document; + private final QueryBuilder query; + + private XContentType xContentType; + + static ContextSetup parse(XContentParser parser, Void context) throws IOException { + ContextSetup contextSetup = PARSER.parse(parser, null); + contextSetup.setXContentType(parser.contentType()); + return contextSetup; + } + + ContextSetup(String index, BytesReference document, QueryBuilder query) { + this.index = index; + this.document = document; + this.query = query; + } + + ContextSetup(StreamInput in) throws IOException { + index = in.readOptionalString(); + document = in.readOptionalBytesReference(); + String xContentType = in.readOptionalString(); + if (xContentType != null) { + this.xContentType = XContentType.fromMediaType(xContentType); + } + query = in.readOptionalNamedWriteable(QueryBuilder.class); + } + + public String getIndex() { + return index; + } + + public BytesReference getDocument() { + return document; + } + + public QueryBuilder getQuery() { + return query; + } + + public XContentType getXContentType() { + return xContentType; + } + + public void setXContentType(XContentType xContentType) { + this.xContentType = xContentType; + } + + @Override + public boolean equals(Object o) { + if (this == o) return true; + if (o == null || getClass() != o.getClass()) return false; + ContextSetup that = (ContextSetup) o; + return Objects.equals(index, that.index) && + Objects.equals(document, that.document) && + Objects.equals(query, that.query); + } + + @Override + public int hashCode() { + return Objects.hash(index, document, query); + } + + @Override + public void writeTo(StreamOutput out) throws IOException { + out.writeOptionalString(index); + out.writeOptionalBytesReference(document); 
+ out.writeOptionalString(xContentType != null ? xContentType.mediaType(): null); + out.writeOptionalNamedWriteable(query); + } + + @Override + public String toString() { + return "ContextSetup{" + + "index='" + index + '\'' + + ", document=" + document + + ", query=" + query + + ", xContentType=" + xContentType + + '}'; + } + + @Override + public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException { + builder.startObject(); + { + if (index != null) { + builder.field(INDEX_FIELD.getPreferredName(), index); + } + if (document != null) { + builder.field(DOCUMENT_FIELD.getPreferredName()); + try (XContentParser parser = XContentHelper.createParser(NamedXContentRegistry.EMPTY, + LoggingDeprecationHandler.INSTANCE, document, xContentType)) { + builder.generator().copyCurrentStructure(parser); + } + } + if (query != null) { + builder.field(QUERY_FIELD.getPreferredName(), query); + } + } + builder.endObject(); + return builder; + } + } private Script script; - private SupportedContext context; + private ScriptContext context = PainlessTestScript.CONTEXT; + private ContextSetup contextSetup; static Request parse(XContentParser parser) throws IOException { return PARSER.parse(parser, null); } - Request(Script script, SupportedContext context) { + Request(Script script, String scriptContextName, ContextSetup setup) { this.script = Objects.requireNonNull(script); - this.context = context != null ? context : SupportedContext.PAINLESS_TEST; + if (scriptContextName != null) { + this.context = fromScriptContextName(scriptContextName); + } + if (setup != null) { + this.contextSetup = setup; + index(contextSetup.index); + } } Request() { @@ -118,16 +293,28 @@ public Script getScript() { return script; } - public SupportedContext getContext() { + public ScriptContext getContext() { return context; } + public ContextSetup getContextSetup() { + return contextSetup; + } + @Override public ActionRequestValidationException validate() { ActionRequestValidationException validationException = null; if (script.getType() != ScriptType.INLINE) { validationException = addValidationError("only inline scripts are supported", validationException); } + if (needDocumentAndIndex(context)) { + if (contextSetup.index == null) { + validationException = addValidationError("index is a required parameter for current context", validationException); + } + if (contextSetup.document == null) { + validationException = addValidationError("document is a required parameter for current context", validationException); + } + } return validationException; } @@ -135,26 +322,35 @@ public ActionRequestValidationException validate() { public void readFrom(StreamInput in) throws IOException { super.readFrom(in); script = new Script(in); - context = SupportedContext.fromId(in.readByte()); + if (in.getVersion().onOrBefore(Version.V_6_4_0)) { + byte scriptContextId = in.readByte(); + assert scriptContextId == 0; + } else { + context = fromScriptContextName(in.readString()); + contextSetup = in.readOptionalWriteable(ContextSetup::new); + } } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); script.writeTo(out); - out.writeByte(context.id); + if (out.getVersion().onOrBefore(Version.V_6_4_0)) { + out.writeByte((byte) 0); + } else { + out.writeString(context.name); + out.writeOptionalWriteable(contextSetup); + } } // For testing only: @Override public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
builder.field(SCRIPT_FIELD.getPreferredName(), script); - builder.startObject(CONTEXT_FIELD.getPreferredName()); - { - builder.startObject(context.name()); - builder.endObject(); + builder.field(CONTEXT_FIELD.getPreferredName(), context.name); + if (contextSetup != null) { + builder.field(CONTEXT_SETUP_FIELD.getPreferredName(), contextSetup); } - builder.endObject(); return builder; } @@ -164,41 +360,28 @@ public boolean equals(Object o) { if (o == null || getClass() != o.getClass()) return false; Request request = (Request) o; return Objects.equals(script, request.script) && - context == request.context; + Objects.equals(context, request.context) && + Objects.equals(contextSetup, request.contextSetup); } @Override public int hashCode() { - return Objects.hash(script, context); + return Objects.hash(script, context, contextSetup); } - public enum SupportedContext { - - PAINLESS_TEST((byte) 0); - - private final byte id; - - SupportedContext(byte id) { - this.id = id; - } - - public static SupportedContext fromId(byte id) { - switch (id) { - case 0: - return PAINLESS_TEST; - default: - throw new IllegalArgumentException("unknown context [" + id + "]"); - } - } + @Override + public String toString() { + return "Request{" + + "script=" + script + + ", context=" + context + + ", contextSetup=" + contextSetup + + '}'; } - } - - public static class RequestBuilder extends ActionRequestBuilder { - - RequestBuilder(ElasticsearchClient client) { - super(client, INSTANCE, new Request()); + static boolean needDocumentAndIndex(ScriptContext scriptContext) { + return scriptContext == FilterScript.CONTEXT || scriptContext == ScoreScript.CONTEXT; } + } public static class Response extends ActionResponse implements ToXContentObject { @@ -274,31 +457,139 @@ public interface Factory { } - public static class TransportAction extends HandledTransportAction { - + public static class TransportAction extends TransportSingleShardAction { private final ScriptService scriptService; + private final IndicesService indicesServices; @Inject - public TransportAction(Settings settings, TransportService transportService, - ActionFilters actionFilters, ScriptService scriptService) { - super(settings, NAME, transportService, actionFilters, Request::new); + public TransportAction(Settings settings, ThreadPool threadPool, TransportService transportService, + ActionFilters actionFilters, IndexNameExpressionResolver indexNameExpressionResolver, + ScriptService scriptService, ClusterService clusterService, IndicesService indicesServices) { + super(settings, NAME, threadPool, clusterService, transportService, actionFilters, indexNameExpressionResolver, + // Forking a thread here, because only lightweight operations should happen on the network thread, and + // creating an in-memory index is not lightweight + // TODO: is MANAGEMENT TP the right TP? Right now this is an admin API (see action name).
+ Request::new, ThreadPool.Names.MANAGEMENT); this.scriptService = scriptService; + this.indicesServices = indicesServices; } + @Override - protected void doExecute(Task task, Request request, ActionListener listener) { - switch (request.context) { - case PAINLESS_TEST: - PainlessTestScript.Factory factory = scriptService.compile(request.script, PainlessTestScript.CONTEXT); - PainlessTestScript painlessTestScript = factory.newInstance(request.script.getParams()); - String result = Objects.toString(painlessTestScript.execute()); - listener.onResponse(new Response(result)); - break; - default: - throw new UnsupportedOperationException("unsupported context [" + request.context + "]"); + protected Response newResponse() { + return new Response(); + } + + @Override + protected ClusterBlockException checkRequestBlock(ClusterState state, InternalRequest request) { + if (request.concreteIndex() != null) { + return super.checkRequestBlock(state, request); } + return null; + } + + @Override + protected boolean resolveIndex(Request request) { + return request.contextSetup != null && request.contextSetup.getIndex() != null; } + @Override + protected ShardsIterator shards(ClusterState state, InternalRequest request) { + if (request.concreteIndex() == null) { + return null; + } + return state.routingTable().index(request.concreteIndex()).randomAllActiveShardsIt(); + } + + @Override + protected Response shardOperation(Request request, ShardId shardId) throws IOException { + IndexService indexService; + if (request.contextSetup != null && request.contextSetup.getIndex() != null) { + ClusterState clusterState = clusterService.state(); + IndicesOptions indicesOptions = IndicesOptions.strictSingleIndexNoExpandForbidClosed(); + String indexExpression = request.contextSetup.index; + Index[] concreteIndices = + indexNameExpressionResolver.concreteIndices(clusterState, indicesOptions, indexExpression); + if (concreteIndices.length != 1) { + throw new IllegalArgumentException("[" + indexExpression + "] does not resolve to a single index"); + } + Index concreteIndex = concreteIndices[0]; + indexService = indicesServices.indexServiceSafe(concreteIndex); + } else { + indexService = null; + } + return innerShardOperation(request, scriptService, indexService); + } + + static Response innerShardOperation(Request request, ScriptService scriptService, IndexService indexService) throws IOException { + final ScriptContext scriptContext = request.context; + if (scriptContext == PainlessTestScript.CONTEXT) { + PainlessTestScript.Factory factory = scriptService.compile(request.script, PainlessTestScript.CONTEXT); + PainlessTestScript painlessTestScript = factory.newInstance(request.script.getParams()); + String result = Objects.toString(painlessTestScript.execute()); + return new Response(result); + } else if (scriptContext == FilterScript.CONTEXT) { + return prepareRamIndex(request, (context, leafReaderContext) -> { + FilterScript.Factory factory = scriptService.compile(request.script, FilterScript.CONTEXT); + FilterScript.LeafFactory leafFactory = + factory.newFactory(request.getScript().getParams(), context.lookup()); + FilterScript filterScript = leafFactory.newInstance(leafReaderContext); + filterScript.setDocument(0); + boolean result = filterScript.execute(); + return new Response(result); + }, indexService); + } else if (scriptContext == ScoreScript.CONTEXT) { + return prepareRamIndex(request, (context, leafReaderContext) -> { + ScoreScript.Factory factory = scriptService.compile(request.script, ScoreScript.CONTEXT); 
+ ScoreScript.LeafFactory leafFactory = + factory.newFactory(request.getScript().getParams(), context.lookup()); + ScoreScript scoreScript = leafFactory.newInstance(leafReaderContext); + scoreScript.setDocument(0); + + if (request.contextSetup.query != null) { + Query luceneQuery = request.contextSetup.query.rewrite(context).toQuery(context); + IndexSearcher indexSearcher = new IndexSearcher(leafReaderContext.reader()); + luceneQuery = indexSearcher.rewrite(luceneQuery); + Weight weight = indexSearcher.createWeight(luceneQuery, true, 1f); + Scorer scorer = weight.scorer(indexSearcher.getIndexReader().leaves().get(0)); + // Consume the first (and only) match. + int docID = scorer.iterator().nextDoc(); + assert docID == scorer.docID(); + scoreScript.setScorer(scorer); + } + + double result = scoreScript.execute(); + return new Response(result); + }, indexService); + } else { + throw new UnsupportedOperationException("unsupported context [" + scriptContext.name + "]"); + } + } + + private static Response prepareRamIndex(Request request, + CheckedBiFunction handler, + IndexService indexService) throws IOException { + + Analyzer defaultAnalyzer = indexService.getIndexAnalyzers().getDefaultIndexAnalyzer(); + + try (RAMDirectory ramDirectory = new RAMDirectory()) { + try (IndexWriter indexWriter = new IndexWriter(ramDirectory, new IndexWriterConfig(defaultAnalyzer))) { + String index = indexService.index().getName(); + String type = indexService.mapperService().documentMapper().type(); + BytesReference document = request.contextSetup.document; + XContentType xContentType = request.contextSetup.xContentType; + SourceToParse sourceToParse = SourceToParse.source(index, type, "_id", document, xContentType); + ParsedDocument parsedDocument = indexService.mapperService().documentMapper().parse(sourceToParse); + indexWriter.addDocuments(parsedDocument.docs()); + try (IndexReader indexReader = DirectoryReader.open(indexWriter)) { + final long absoluteStartMillis = System.currentTimeMillis(); + QueryShardContext context = + indexService.newQueryShardContext(0, indexReader, () -> absoluteStartMillis, null); + return handler.apply(context, indexReader.leaves().get(0)); + } + } + } + } } static class RestAction extends BaseRestHandler { diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessExecuteApiTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessExecuteApiTests.java new file mode 100644 index 0000000000000..ce92a224f4e90 --- /dev/null +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessExecuteApiTests.java @@ -0,0 +1,113 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.elasticsearch.painless; + +import org.elasticsearch.common.bytes.BytesArray; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.XContentType; +import org.elasticsearch.index.IndexService; +import org.elasticsearch.index.query.MatchQueryBuilder; +import org.elasticsearch.painless.PainlessExecuteAction.Request; +import org.elasticsearch.painless.PainlessExecuteAction.Response; +import org.elasticsearch.plugins.Plugin; +import org.elasticsearch.script.Script; +import org.elasticsearch.script.ScriptException; +import org.elasticsearch.script.ScriptService; +import org.elasticsearch.script.ScriptType; +import org.elasticsearch.test.ESSingleNodeTestCase; + +import java.io.IOException; +import java.util.Collection; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +import static java.util.Collections.singletonMap; +import static org.elasticsearch.painless.PainlessExecuteAction.TransportAction.innerShardOperation; +import static org.hamcrest.Matchers.equalTo; + +public class PainlessExecuteApiTests extends ESSingleNodeTestCase { + + @Override + protected Collection> getPlugins() { + return Collections.singleton(PainlessPlugin.class); + } + + public void testDefaults() throws IOException { + ScriptService scriptService = getInstanceFromNode(ScriptService.class); + Request request = new Request(new Script("100.0 / 1000.0"), null, null); + Response response = innerShardOperation(request, scriptService, null); + assertThat(response.getResult(), equalTo("0.1")); + + Map params = new HashMap<>(); + params.put("count", 100.0D); + params.put("total", 1000.0D); + request = new Request(new Script(ScriptType.INLINE, "painless", "params.count / params.total", params), null, null); + response = innerShardOperation(request, scriptService, null); + assertThat(response.getResult(), equalTo("0.1")); + + Exception e = expectThrows(ScriptException.class, + () -> { + Request r = new Request(new Script(ScriptType.INLINE, + "painless", "params.count / params.total + doc['constant']", params), null, null); + innerShardOperation(r, scriptService, null); + }); + assertThat(e.getCause().getMessage(), equalTo("Variable [doc] is not defined.")); + } + + public void testFilterExecutionContext() throws IOException { + ScriptService scriptService = getInstanceFromNode(ScriptService.class); + IndexService indexService = createIndex("index", Settings.EMPTY, "doc", "field", "type=long"); + + Request.ContextSetup contextSetup = new Request.ContextSetup("index", new BytesArray("{\"field\": 3}"), null); + contextSetup.setXContentType(XContentType.JSON); + Request request = new Request(new Script("doc['field'].value >= 3"), "filter", contextSetup); + Response response = innerShardOperation(request, scriptService, indexService); + assertThat(response.getResult(), equalTo(true)); + + contextSetup = new Request.ContextSetup("index", new BytesArray("{\"field\": 3}"), null); + contextSetup.setXContentType(XContentType.JSON); + request = new Request(new Script(ScriptType.INLINE, "painless", "doc['field'].value >= params.max", + singletonMap("max", 3)), "filter", contextSetup); + response = innerShardOperation(request, scriptService, indexService); + assertThat(response.getResult(), equalTo(true)); + + contextSetup = new Request.ContextSetup("index", new BytesArray("{\"field\": 2}"), null); + contextSetup.setXContentType(XContentType.JSON); + request = new Request(new Script(ScriptType.INLINE, "painless", "doc['field'].value >= params.max", + 
singletonMap("max", 3)), "filter", contextSetup); + response = innerShardOperation(request, scriptService, indexService); + assertThat(response.getResult(), equalTo(false)); + } + + public void testScoreExecutionContext() throws IOException { + ScriptService scriptService = getInstanceFromNode(ScriptService.class); + IndexService indexService = createIndex("index", Settings.EMPTY, "doc", "rank", "type=long", "text", "type=text"); + + Request.ContextSetup contextSetup = new Request.ContextSetup("index", + new BytesArray("{\"rank\": 4.0, \"text\": \"quick brown fox\"}"), new MatchQueryBuilder("text", "fox")); + contextSetup.setXContentType(XContentType.JSON); + Request request = new Request(new Script(ScriptType.INLINE, "painless", + "Math.round((_score + (doc['rank'].value / params.max_rank)) * 100.0) / 100.0", singletonMap("max_rank", 5.0)), "score", + contextSetup); + Response response = innerShardOperation(request, scriptService, indexService); + assertThat(response.getResult(), equalTo(1.09D)); + } + +} diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessExecuteRequestTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessExecuteRequestTests.java index 488ae0e1643bc..44cd6b5304dc4 100644 --- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessExecuteRequestTests.java +++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/PainlessExecuteRequestTests.java @@ -18,9 +18,18 @@ */ package org.elasticsearch.painless; +import org.elasticsearch.common.bytes.BytesReference; +import org.elasticsearch.common.io.stream.NamedWriteableRegistry; +import org.elasticsearch.common.settings.Settings; +import org.elasticsearch.common.xcontent.NamedXContentRegistry; import org.elasticsearch.common.xcontent.XContentParser; +import org.elasticsearch.index.query.MatchAllQueryBuilder; +import org.elasticsearch.index.query.QueryBuilder; +import org.elasticsearch.painless.PainlessExecuteAction.Request.ContextSetup; import org.elasticsearch.script.Script; +import org.elasticsearch.script.ScriptContext; import org.elasticsearch.script.ScriptType; +import org.elasticsearch.search.SearchModule; import org.elasticsearch.test.AbstractStreamableXContentTestCase; import java.io.IOException; @@ -28,12 +37,22 @@ public class PainlessExecuteRequestTests extends AbstractStreamableXContentTestCase { + @Override + protected NamedWriteableRegistry getNamedWriteableRegistry() { + return new NamedWriteableRegistry(new SearchModule(Settings.EMPTY, false, Collections.emptyList()).getNamedWriteables()); + } + + @Override + protected NamedXContentRegistry xContentRegistry() { + return new NamedXContentRegistry(new SearchModule(Settings.EMPTY, false, Collections.emptyList()).getNamedXContents()); + } + @Override protected PainlessExecuteAction.Request createTestInstance() { Script script = new Script(randomAlphaOfLength(10)); - PainlessExecuteAction.Request.SupportedContext context = randomBoolean() ? - PainlessExecuteAction.Request.SupportedContext.PAINLESS_TEST : null; - return new PainlessExecuteAction.Request(script, context); + ScriptContext context = randomBoolean() ? randomFrom(PainlessExecuteAction.Request.SUPPORTED_CONTEXTS.values()) : null; + ContextSetup contextSetup = randomBoolean() ? randomContextSetup() : null; + return new PainlessExecuteAction.Request(script, context != null ? 
context.name : null, contextSetup); } @Override @@ -53,9 +72,26 @@ protected boolean supportsUnknownFields() { public void testValidate() { Script script = new Script(ScriptType.STORED, null, randomAlphaOfLength(10), Collections.emptyMap()); - PainlessExecuteAction.Request request = new PainlessExecuteAction.Request(script, null); + PainlessExecuteAction.Request request = new PainlessExecuteAction.Request(script, null, null); Exception e = request.validate(); assertNotNull(e); assertEquals("Validation Failed: 1: only inline scripts are supported;", e.getMessage()); } + + private static ContextSetup randomContextSetup() { + String index = randomBoolean() ? randomAlphaOfLength(4) : null; + QueryBuilder query = randomBoolean() ? new MatchAllQueryBuilder() : null; + // TODO: pass down XContentType to createTestInstance() method. + // otherwise the document itself is different, causing test failures. + // This should be done in a separate change as the test instance is created before the xcontent type is randomly picked and + // all the createTestInstance() methods need to be changed, which will make this a big change +// BytesReference doc = randomBoolean() ? new BytesArray("{}") : null; + BytesReference doc = null; + + ContextSetup contextSetup = new ContextSetup(index, doc, query); +// if (doc != null) { +// contextSetup.setXContentType(XContentType.JSON); +// } + return contextSetup; + } } diff --git a/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/70_execute_painless_scripts.yml b/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/70_execute_painless_scripts.yml index 7b915cc38dbc0..1e34a776189b8 100644 --- a/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/70_execute_painless_scripts.yml +++ b/modules/lang-painless/src/test/resources/rest-api-spec/test/painless/70_execute_painless_scripts.yml @@ -1,3 +1,18 @@ +setup: + - do: + indices.create: + index: my-index + body: + mappings: + doc: + properties: + rank: + type: long + field: + type: keyword + text: + type: text + --- "Execute with defaults": - do: @@ -11,7 +26,7 @@ - match: { result: "0.1" } --- -"Execute with execute_api_script context": +"Execute with painless_test context": - do: scripts_painless_execute: body: @@ -20,6 +35,37 @@ params: var1: 10 var2: 100 - context: - painless_test: {} + context: "painless_test" - match: { result: "-90" } + +--- +"Execute with filter context": + - do: + scripts_painless_execute: + body: + script: + source: "doc['field'].value.length() <= params.max_length" + params: + max_length: 4 + context: "filter" + context_setup: + document: + field: "four" + index: "my-index" + - match: { result: true } + +--- +"Execute with score context": + - do: + scripts_painless_execute: + body: + script: + source: "doc['rank'].value / params.max_rank" + params: + max_rank: 5.0 + context: "score" + context_setup: + document: + rank: 4 + index: "my-index" + - match: { result: 0.8 } From d2250482f4be556a444ad89249f3c08d6950939d Mon Sep 17 00:00:00 2001 From: Martijn van Groningen Date: Wed, 18 Jul 2018 13:33:57 +0200 Subject: [PATCH 097/107] use before instead of onOrBefore --- .../org/elasticsearch/painless/PainlessExecuteAction.java | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java index 229c919a2e65d..094a62d188baf 100644 ---
a/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java +++ b/modules/lang-painless/src/main/java/org/elasticsearch/painless/PainlessExecuteAction.java @@ -322,7 +322,7 @@ public ActionRequestValidationException validate() { public void readFrom(StreamInput in) throws IOException { super.readFrom(in); script = new Script(in); - if (in.getVersion().onOrBefore(Version.V_6_4_0)) { + if (in.getVersion().before(Version.V_6_4_0)) { byte scriptContextId = in.readByte(); assert scriptContextId == 0; } else { @@ -335,7 +335,7 @@ public void readFrom(StreamInput in) throws IOException { public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); script.writeTo(out); - if (out.getVersion().onOrBefore(Version.V_6_4_0)) { + if (out.getVersion().before(Version.V_6_4_0)) { out.writeByte((byte) 0); } else { out.writeString(context.name); From 997ebe8861493804c78edc4b9c35e45c78b19370 Mon Sep 17 00:00:00 2001 From: David Turner Date: Wed, 18 Jul 2018 12:58:17 +0100 Subject: [PATCH 098/107] Improve docs for search preferences (#32159) Today it is unclear what guarantees are offered by the search preference feature, and we claim a guarantee that is stronger than what we really offer: > A custom value will be used to guarantee that the same shards will be used > for the same custom value. This commit clarifies this documentation. Forward-port of #32098 to `master`. --- .../search/request/preference.asciidoc | 67 +++++++++++++------ 1 file changed, 45 insertions(+), 22 deletions(-) diff --git a/docs/reference/search/request/preference.asciidoc b/docs/reference/search/request/preference.asciidoc index 4fd801c5f76e3..5f3fcb2efa6b7 100644 --- a/docs/reference/search/request/preference.asciidoc +++ b/docs/reference/search/request/preference.asciidoc @@ -1,38 +1,55 @@ [[search-request-preference]] === Preference -Controls a `preference` of which shard copies on which to execute the -search. By default, the operation is randomized among the available shard -copies, unless allocation awareness is used. +Controls a `preference` of the shard copies on which to execute the search. By +default, Elasticsearch selects from the available shard copies in an +unspecified order, taking the <> and +<> configuration into +account. However, it may sometimes be desirable to try and route certain +searches to certain sets of shard copies, for instance to make better use of +per-copy caches. The `preference` is a query string parameter which can be set to: [horizontal] -`_local`:: - The operation will prefer to be executed on a local - allocated shard if possible. +`_only_local`:: + The operation will be executed only on shards allocated to the local + node. + +`_local`:: + The operation will be executed on shards allocated to the local node if + possible, and will fall back to other shards if not. `_prefer_nodes:abc,xyz`:: - Prefers execution on the nodes with the provided - node ids (`abc` or `xyz` in this case) if applicable. + The operation will be executed on nodes with one of the provided node + ids (`abc` or `xyz` in this case) if possible. If suitable shard copies + exist on more than one of the selected nodes then the order of + preference between these copies is unspecified. -`_shards:2,3`:: - Restricts the operation to the specified shards. (`2` - and `3` in this case). This preference can be combined with other - preferences but it has to appear first: `_shards:2,3|_local` +`_shards:2,3`:: + Restricts the operation to the specified shards. 
(`2` and `3` in this + case). This preference can be combined with other preferences but it + has to appear first: `_shards:2,3|_local` -`_only_nodes`:: - Restricts the operation to nodes specified in <> +`_only_nodes:abc*,x*yz,...`:: + Restricts the operation to nodes specified according to the + <>. If suitable shard copies exist on more + than one of the selected nodes then the order of preference between + these copies is unspecified. -Custom (string) value:: - A custom value will be used to guarantee that - the same shards will be used for the same custom value. This can help - with "jumping values" when hitting different shards in different refresh - states. A sample value can be something like the web session id, or the - user name. +Custom (string) value:: + Any value that does not start with `_`. If two searches both give the same + custom string value for their preference and the underlying cluster state + does not change then the same ordering of shards will be used for the + searches. This does not guarantee that the exact same shards will be used + each time: the cluster state, and therefore the selected shards, may change + for a number of reasons including shard relocations and shard failures, and + nodes may sometimes reject searches causing fallbacks to alternative nodes. + However, in practice the ordering of shards tends to remain stable for long + periods of time. A good candidate for a custom preference value is something + like the web session id or the user name. -For instance, use the user's session ID to ensure consistent ordering of results -for the user: +For instance, use the user's session ID `xyzabc123` as follows: [source,js] ------------------------------------------------ @@ -47,3 +64,9 @@ GET /_search?preference=xyzabc123 ------------------------------------------------ // CONSOLE +NOTE: The `_only_local` preference guarantees only to use shard copies on the +local node, which is sometimes useful for troubleshooting. All other options do +not _fully_ guarantee that any particular shard copies are used in a search, +and on a changing index this may mean that repeated searches may yield +different results if they are executed on different shard copies which are in +different refresh states. 
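As a minimal client-side sketch (assuming the transport-layer `SearchRequest` API from this repository; the index name `my-index` and the match-all query are illustrative placeholders), a user's searches can be pinned to a stable shard ordering by reusing their session id as the custom preference value:

[source,java]
----------------------------------------------------------------
import org.elasticsearch.action.search.SearchRequest;
import org.elasticsearch.index.query.QueryBuilders;
import org.elasticsearch.search.builder.SearchSourceBuilder;

public class SessionPreferenceSketch {
    // Reusing the session id as the preference string means repeated searches
    // see the same shard ordering while the cluster state is stable, avoiding
    // "jumping values" between shard copies in different refresh states.
    static SearchRequest searchForSession(String sessionId) {
        SearchRequest request = new SearchRequest("my-index");
        request.preference(sessionId); // any value not starting with '_' is a custom preference
        request.source(new SearchSourceBuilder().query(QueryBuilders.matchAllQuery()));
        return request;
    }
}
----------------------------------------------------------------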
From f5910950601062526c493c6ccab2f2dd060a0c36 Mon Sep 17 00:00:00 2001 From: Armin Braun Date: Wed, 18 Jul 2018 15:32:36 +0200 Subject: [PATCH 099/107] Fix BwC Tests looking for UUID Pre 6.4 (#32158) * UUID field was added for #31791 and only went into 6.4 and 7.0 * Fixes #32119 --- qa/mixed-cluster/build.gradle | 1 - .../resources/rest-api-spec/test/indices.stats/10_index.yml | 4 ++++ 2 files changed, 4 insertions(+), 1 deletion(-) diff --git a/qa/mixed-cluster/build.gradle b/qa/mixed-cluster/build.gradle index db743cb12b19d..ac57d51def7c6 100644 --- a/qa/mixed-cluster/build.gradle +++ b/qa/mixed-cluster/build.gradle @@ -57,7 +57,6 @@ for (Version version : bwcVersions.wireCompatible) { tasks.getByName("${baseName}#mixedClusterTestRunner").configure { /* To support taking index snapshots, we have to set path.repo setting */ systemProperty 'tests.path.repo', new File(buildDir, "cluster/shared/repo") - systemProperty 'tests.rest.blacklist', ['indices.stats/10_index/Index - all'].join(',') } } diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/10_index.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/10_index.yml index 42847b05cd149..564a482727fa7 100644 --- a/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/10_index.yml +++ b/rest-api-spec/src/main/resources/rest-api-spec/test/indices.stats/10_index.yml @@ -41,6 +41,10 @@ setup: --- "Index - all": + - skip: + version: " - 6.3.99" + reason: "uuid is only available from 6.4.0 on" + - do: indices.stats: { index: _all } From 67a4dcb26d344045b1d46eec2a9377040a624a67 Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Wed, 18 Jul 2018 14:43:20 +0100 Subject: [PATCH 100/107] Call setReferences() on custom referring tokenfilters in _analyze (#32157) When building custom tokenfilters without an index in the _analyze endpoint, we need to ensure that referring filters are correctly built by calling their #setReferences() method Fixes #32154 --- .../test/analysis-common/40_token_filters.yml | 15 ++++++++++++ .../analyze/TransportAnalyzeAction.java | 24 ++++++++++++++++++- 2 files changed, 38 insertions(+), 1 deletion(-) diff --git a/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/40_token_filters.yml b/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/40_token_filters.yml index 3dca3bfd7770c..150fa39dcb956 100644 --- a/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/40_token_filters.yml +++ b/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/40_token_filters.yml @@ -1557,3 +1557,18 @@ filter: [my_bengali_stem] - length: { tokens: 1 } - match: { tokens.0.token: কর } + +--- +"multiplexer": + - do: + indices.analyze: + body: + text: "The quick fox" + tokenizer: "standard" + filter: + - type: multiplexer + filters: [ lowercase, uppercase ] + preserve_original: false + - length: { tokens: 6 } + - match: { tokens.0.token: the } + - match: { tokens.1.token: THE } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java index 30b212175ca25..2662be6bc5036 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java @@ -596,7 +596,9 @@ private static List 
parseTokenFilterFactories(AnalyzeRequest tokenFilterFactory = tokenFilterFactoryFactory.get(getNaIndexSettings(settings), environment, "_anonymous_tokenfilter", settings); tokenFilterFactory = CustomAnalyzerProvider.checkAndApplySynonymFilter(tokenFilterFactory, tokenizerFactory.v1(), tokenizerFactory.v2(), tokenFilterFactoryList, charFilterFactoryList, environment); - + if (tokenFilterFactory instanceof ReferringFilterFactory) { + referringFilters.add((ReferringFilterFactory)tokenFilterFactory); + } } else { AnalysisModule.AnalysisProvider tokenFilterFactoryFactory; @@ -650,6 +652,26 @@ private static List parseTokenFilterFactories(AnalyzeRequest rff.setReferences(prebuiltFilters); } + } + if (referringFilters.isEmpty() == false) { + // The request included at least one custom referring tokenfilter that has not already been built by the + // analysis registry, so we need to set its references. Note that this will only apply to pre-built + // tokenfilters + if (indexSettings == null) { + Settings settings = Settings.builder() + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) + .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) + .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) + .put(IndexMetaData.SETTING_INDEX_UUID, UUIDs.randomBase64UUID()) + .build(); + IndexMetaData metaData = IndexMetaData.builder(IndexMetaData.INDEX_UUID_NA_VALUE).settings(settings).build(); + indexSettings = new IndexSettings(metaData, Settings.EMPTY); + } + Map prebuiltFilters = analysisRegistry.buildTokenFilterFactories(indexSettings); + for (ReferringFilterFactory rff : referringFilters) { + rff.setReferences(prebuiltFilters); + } + } return tokenFilterFactoryList; } From 93ecf1d11f04cb0263b1468f993afe199b2c3408 Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Wed, 18 Jul 2018 14:57:52 +0100 Subject: [PATCH 101/107] Merge conflicts --- .../analyze/TransportAnalyzeAction.java | 20 ------------------- 1 file changed, 20 deletions(-) diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java index 2662be6bc5036..5c5da62571f66 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/TransportAnalyzeAction.java @@ -631,27 +631,7 @@ private static List parseTokenFilterFactories(AnalyzeRequest tokenFilterFactory = (TokenFilterFactory) ((MultiTermAwareComponent) tokenFilterFactory).getMultiTermComponent(); } tokenFilterFactoryList.add(tokenFilterFactory); - if (tokenFilterFactory instanceof ReferringFilterFactory) { - referringFilters.add((ReferringFilterFactory)tokenFilterFactory); - } - } - } - if (referringFilters.isEmpty() == false) { - if (indexSettings == null) { - Settings settings = Settings.builder() - .put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) - .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) - .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) - .put(IndexMetaData.SETTING_INDEX_UUID, UUIDs.randomBase64UUID()) - .build(); - IndexMetaData metaData = IndexMetaData.builder(IndexMetaData.INDEX_UUID_NA_VALUE).settings(settings).build(); - indexSettings = new IndexSettings(metaData, Settings.EMPTY); } - Map prebuiltFilters = analysisRegistry.buildTokenFilterFactories(indexSettings); - for (ReferringFilterFactory rff : referringFilters) { - rff.setReferences(prebuiltFilters); - } - } if (referringFilters.isEmpty() ==
false) { // The request included at least one custom referring tokenfilter that has not already been built by the From 945fadf20d589fbe5144a51c392e6ce22a7f2fa7 Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Wed, 18 Jul 2018 15:35:57 +0100 Subject: [PATCH 102/107] more docs --- .../painless/painless-contexts/index.asciidoc | 2 + ...inless-analysis-predicate-context.asciidoc | 43 +++++++++++++++++++ .../condition-tokenfilter.asciidoc | 6 +-- 3 files changed, 48 insertions(+), 3 deletions(-) create mode 100644 docs/painless/painless-contexts/painless-analysis-predicate-context.asciidoc diff --git a/docs/painless/painless-contexts/index.asciidoc b/docs/painless/painless-contexts/index.asciidoc index 64e4326e052f2..ffa477df9c141 100644 --- a/docs/painless/painless-contexts/index.asciidoc +++ b/docs/painless/painless-contexts/index.asciidoc @@ -30,6 +30,8 @@ include::painless-bucket-agg-context.asciidoc[] include::painless-ingest-processor-context.asciidoc[] +include::painless-analysis-predicate-context.asciidoc[] + include::painless-watcher-condition-context.asciidoc[] include::painless-watcher-transform-context.asciidoc[] diff --git a/docs/painless/painless-contexts/painless-analysis-predicate-context.asciidoc b/docs/painless/painless-contexts/painless-analysis-predicate-context.asciidoc new file mode 100644 index 0000000000000..07914b671e781 --- /dev/null +++ b/docs/painless/painless-contexts/painless-analysis-predicate-context.asciidoc @@ -0,0 +1,43 @@ +[[painless-analysis-predicate-context]] +=== Analysis Predicate Context + +Use a Painless script to determine whether or not the current token in an +analysis chain matches a predicate. + +*Variables* + +`params` (`Map`, read-only):: + User-defined parameters passed in as part of the query. + +`token.term` (`CharSequence`, read-only):: + The characters of the current token + +`token.position` (`int`, read-only):: + The position of the current token + +`token.positionIncrement` (`int`, read-only):: + The position increment of the current token + +`token.positionLength` (`int`, read-only):: + The position length of the current token + +`token.startOffset` (`int`, read-only):: + The start offset of the current token + +`token.endOffset` (`int`, read-only):: + The end offset of the current token + +`token.type` (`String`, read-only):: + The type of the current token + +`token.keyword` (`boolean`, read-only):: + Whether or not the current token is marked as a keyword + +*Return* + +`boolean`:: + Whether or not the current token matches the predicate + +*API* + +The standard <> is available. \ No newline at end of file diff --git a/docs/reference/analysis/tokenfilters/condition-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/condition-tokenfilter.asciidoc index ab997adf2bd6a..cff05559ab9e6 100644 --- a/docs/reference/analysis/tokenfilters/condition-tokenfilter.asciidoc +++ b/docs/reference/analysis/tokenfilters/condition-tokenfilter.asciidoc @@ -7,7 +7,7 @@ only applies the subfilters to the current token if it matches the predicate. [float] === Options [horizontal] -filters:: a list of token filters to apply to the current token if the predicate +filter:: a chain of token filters to apply to the current token if the predicate matches. These can be any token filters defined elsewhere in the index mappings.
script:: a predicate script that determines whether or not the filters will be applied @@ -35,7 +35,7 @@ PUT /condition_example "type" : "condition", "filter" : [ "lowercase" ], "script" : { - "source" : "term.term().length() < 5" <1> + "source" : "token.getTerm().length() < 5" <1> } } } @@ -79,7 +79,7 @@ And it'd respond: "start_offset": 5, "end_offset": 15, "type": "", - "position": 0 + "position": 1 } ] } From 303de4f6e191da630618ad07e76e91efbdaa94b8 Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Wed, 18 Jul 2018 15:43:41 +0100 Subject: [PATCH 103/107] tests for all script variables --- .../analysis-common/60_analysis_scripting.yml | 20 ++++++++++++++++++- 1 file changed, 19 insertions(+), 1 deletion(-) diff --git a/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/60_analysis_scripting.yml b/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/60_analysis_scripting.yml index d3a1a36e633ee..fe33b09d13ea9 100644 --- a/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/60_analysis_scripting.yml +++ b/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/60_analysis_scripting.yml @@ -10,9 +10,27 @@ - type: condition filter: [ "lowercase" ] script: - source: "token.getTerm().length() > 5" + source: "token.term.length() > 5" - length: { tokens: 3 } - match: { tokens.0.token: "vorsprung" } - match: { tokens.1.token: "Durch" } - match: { tokens.2.token: "technik" } + +--- +"condition-vars": + - do: + indices.analyze: + body: + text: "Vorsprung Durch Technik" + tokenizer: "whitespace" + filter: + - type: condition + filter: [ "lowercase" ] + script: + source: "token.position > 1 and token.positionIncrement > 0 and token.startOffset > 0 and token.endOffset > 0 and (token.positionLength == 1 or token.type == \"a\" or token.keyword)" + + - length: { tokens: 3 } + - match: { tokens.0.token: "Vorsprung" } + - match: { tokens.1.token: "durch" } + - match: { tokens.2.token: "technik" } From 1dff0f642a498e726f59b072310c0afa02f0667b Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Tue, 21 Aug 2018 09:12:03 +0100 Subject: [PATCH 104/107] merge error --- docs/painless/painless-contexts/index.asciidoc | 2 -- 1 file changed, 2 deletions(-) diff --git a/docs/painless/painless-contexts/index.asciidoc b/docs/painless/painless-contexts/index.asciidoc index 780e385204e25..a71fde0be32a0 100644 --- a/docs/painless/painless-contexts/index.asciidoc +++ b/docs/painless/painless-contexts/index.asciidoc @@ -30,8 +30,6 @@ include::painless-metric-agg-reduce-context.asciidoc[] include::painless-bucket-agg-context.asciidoc[] -include::painless-ingest-processor-context.asciidoc[] - include::painless-analysis-predicate-context.asciidoc[] include::painless-watcher-condition-context.asciidoc[] From 546aa11585541332364c27dfb74590939afa0f1e Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Tue, 21 Aug 2018 11:37:36 +0100 Subject: [PATCH 105/107] checkstyle --- .../elasticsearch/analysis/common/CommonAnalysisPlugin.java | 5 ++++- .../analysis/common/ScriptedConditionTokenFilterFactory.java | 2 -- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java index f43b2c03d82f1..bbd721169c6c7 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java +++ 
b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java @@ -158,7 +158,10 @@ public class CommonAnalysisPlugin extends Plugin implements AnalysisPlugin, Scri private final SetOnce scriptService = new SetOnce<>(); @Override - public Collection createComponents(Client client, ClusterService clusterService, ThreadPool threadPool, ResourceWatcherService resourceWatcherService, ScriptService scriptService, NamedXContentRegistry xContentRegistry, Environment environment, NodeEnvironment nodeEnvironment, NamedWriteableRegistry namedWriteableRegistry) { + public Collection createComponents(Client client, ClusterService clusterService, ThreadPool threadPool, + ResourceWatcherService resourceWatcherService, ScriptService scriptService, + NamedXContentRegistry xContentRegistry, Environment environment, + NodeEnvironment nodeEnvironment, NamedWriteableRegistry namedWriteableRegistry) { this.scriptService.set(scriptService); return Collections.emptyList(); } diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterFactory.java index ad4cc2dc51599..2ccf322e7d200 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterFactory.java @@ -1,6 +1,5 @@ package org.elasticsearch.analysis.common; -import org.apache.logging.log4j.Logger; import org.apache.lucene.analysis.TokenStream; import org.apache.lucene.analysis.miscellaneous.ConditionalTokenFilter; import org.apache.lucene.analysis.tokenattributes.CharTermAttribute; @@ -9,7 +8,6 @@ import org.apache.lucene.analysis.tokenattributes.PositionIncrementAttribute; import org.apache.lucene.analysis.tokenattributes.PositionLengthAttribute; import org.apache.lucene.analysis.tokenattributes.TypeAttribute; -import org.elasticsearch.common.logging.ESLoggerFactory; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.IndexSettings; import org.elasticsearch.index.analysis.AbstractTokenFilterFactory; From 701fbf2dfb13b98d2fd8277dbfced2efee70cc53 Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Tue, 21 Aug 2018 13:06:08 +0100 Subject: [PATCH 106/107] headers --- .../common/AnalysisPainlessExtension.java | 19 +++++++++++++++++++ .../ScriptedConditionTokenFilterFactory.java | 19 +++++++++++++++++++ .../ScriptedConditionTokenFilterTests.java | 19 +++++++++++++++++++ 3 files changed, 57 insertions(+) diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/AnalysisPainlessExtension.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/AnalysisPainlessExtension.java index ab2a9a4263391..85abec4ce915c 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/AnalysisPainlessExtension.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/AnalysisPainlessExtension.java @@ -1,3 +1,22 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + package org.elasticsearch.analysis.common; import org.elasticsearch.painless.spi.PainlessExtension; diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterFactory.java index 2ccf322e7d200..cf7fd5b047a89 100644 --- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterFactory.java +++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterFactory.java @@ -1,3 +1,22 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + package org.elasticsearch.analysis.common; import org.apache.lucene.analysis.TokenStream; diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterTests.java index e61b7627a8eb0..39134ef1f532b 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/ScriptedConditionTokenFilterTests.java @@ -1,3 +1,22 @@ +/* + * Licensed to Elasticsearch under one or more contributor + * license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright + * ownership. Elasticsearch licenses this file to you under + * the Apache License, Version 2.0 (the "License"); you may + * not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + package org.elasticsearch.analysis.common; import org.elasticsearch.Version; From 396843f5e5d792049500eb2fb938a98be221b5de Mon Sep 17 00:00:00 2001 From: Alan Woodward Date: Tue, 21 Aug 2018 14:41:26 +0100 Subject: [PATCH 107/107] Use actual painless syntax, not my own made-up syntax --- .../test/analysis-common/60_analysis_scripting.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/60_analysis_scripting.yml b/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/60_analysis_scripting.yml index fe33b09d13ea9..4305e5db0af37 100644 --- a/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/60_analysis_scripting.yml +++ b/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/60_analysis_scripting.yml @@ -28,7 +28,7 @@ - type: condition filter: [ "lowercase" ] script: - source: "token.position > 1 and token.positionIncrement > 0 and token.startOffset > 0 and token.endOffset > 0 and (token.positionLength == 1 or token.type == \"a\" or token.keyword)" + source: "token.position > 1 && token.positionIncrement > 0 && token.startOffset > 0 && token.endOffset > 0 && (token.positionLength == 1 || token.type == \"a\" || token.keyword)" - length: { tokens: 3 } - match: { tokens.0.token: "Vorsprung" }