INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING =
- Setting.timeSetting("index.unassigned.node_left.delayed_timeout", TimeValue.timeValueMinutes(1), Property.Dynamic,
- Property.IndexScope);
+ new Setting<>("index.unassigned.node_left.delayed_timeout", (s) -> TimeValue.timeValueMinutes(1).getStringRep(), (s) -> {
+ TimeValue parsedValue = TimeValue.parseTimeValue(s, "index.unassigned.node_left.delayed_timeout");
+ if (parsedValue.getNanos() < 0) {
+ DEPRECATION_LOGGER.deprecated(
+ "Negative values for index.unassigned.node_left.delayed_timeout [{}]" +
+ " are deprecated and should now be set to \"0\".", s);
+ }
+ return parsedValue;
+ }, Property.Dynamic, Property.IndexScope);
+
/**
* Reason why the shard is in unassigned state.
*
diff --git a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java
index 4160fd224aa14..f00e9cdc3ce8f 100644
--- a/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java
+++ b/core/src/main/java/org/elasticsearch/cluster/routing/allocation/decider/AwarenessAllocationDecider.java
@@ -20,6 +20,7 @@
package org.elasticsearch.cluster.routing.allocation.decider;
import java.util.HashMap;
+import java.util.List;
import java.util.Map;
import com.carrotsearch.hppc.ObjectIntHashMap;
@@ -85,7 +86,7 @@ public class AwarenessAllocationDecider extends AllocationDecider {
private volatile String[] awarenessAttributes;
- private volatile Map forcedAwarenessAttributes;
+ private volatile Map> forcedAwarenessAttributes;
public AwarenessAllocationDecider(Settings settings, ClusterSettings clusterSettings) {
super(settings);
@@ -97,11 +98,11 @@ public AwarenessAllocationDecider(Settings settings, ClusterSettings clusterSett
}
private void setForcedAwarenessAttributes(Settings forceSettings) {
- Map forcedAwarenessAttributes = new HashMap<>();
+ Map> forcedAwarenessAttributes = new HashMap<>();
Map forceGroups = forceSettings.getAsGroups();
for (Map.Entry entry : forceGroups.entrySet()) {
- String[] aValues = entry.getValue().getAsArray("values");
- if (aValues.length > 0) {
+ List aValues = entry.getValue().getAsList("values");
+ if (aValues.size() > 0) {
forcedAwarenessAttributes.put(entry.getKey(), aValues);
}
}
@@ -169,7 +170,7 @@ private Decision underCapacity(ShardRouting shardRouting, RoutingNode node, Rout
}
int numberOfAttributes = nodesPerAttribute.size();
- String[] fullValues = forcedAwarenessAttributes.get(awarenessAttribute);
+ List fullValues = forcedAwarenessAttributes.get(awarenessAttribute);
if (fullValues != null) {
for (String fullValue : fullValues) {
if (!shardPerAttribute.containsKey(fullValue)) {
diff --git a/core/src/main/java/org/elasticsearch/common/settings/Setting.java b/core/src/main/java/org/elasticsearch/common/settings/Setting.java
index ee6e422e82676..f35df27e3b338 100644
--- a/core/src/main/java/org/elasticsearch/common/settings/Setting.java
+++ b/core/src/main/java/org/elasticsearch/common/settings/Setting.java
@@ -804,14 +804,14 @@ private static class ListSetting extends Setting> {
private ListSetting(String key, Function> defaultStringValue, Function> parser,
Property... properties) {
- super(new ListKey(key), (s) -> Setting.arrayToParsableString(defaultStringValue.apply(s).toArray(Strings.EMPTY_ARRAY)), parser,
+ super(new ListKey(key), (s) -> Setting.arrayToParsableString(defaultStringValue.apply(s)), parser,
properties);
this.defaultStringValue = defaultStringValue;
}
@Override
public String getRaw(Settings settings) {
- String[] array = settings.getAsArray(getKey(), null);
+ List array = settings.getAsList(getKey(), null);
return array == null ? defaultValue.apply(settings) : arrayToParsableString(array);
}
@@ -823,11 +823,11 @@ boolean hasComplexMatcher() {
@Override
public void diff(Settings.Builder builder, Settings source, Settings defaultSettings) {
if (exists(source) == false) {
- String[] asArray = defaultSettings.getAsArray(getKey(), null);
- if (asArray == null) {
- builder.putArray(getKey(), defaultStringValue.apply(defaultSettings));
+ List asList = defaultSettings.getAsList(getKey(), null);
+ if (asList == null) {
+ builder.putList(getKey(), defaultStringValue.apply(defaultSettings));
} else {
- builder.putArray(getKey(), asArray);
+ builder.putList(getKey(), asList);
}
}
}
@@ -1087,7 +1087,7 @@ private static List parseableStringToList(String parsableString) {
}
}
- private static String arrayToParsableString(String[] array) {
+ private static String arrayToParsableString(List array) {
try {
XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent());
builder.startArray();
diff --git a/core/src/main/java/org/elasticsearch/common/settings/Settings.java b/core/src/main/java/org/elasticsearch/common/settings/Settings.java
index 0b03cdea01f4c..1ee76588d6564 100644
--- a/core/src/main/java/org/elasticsearch/common/settings/Settings.java
+++ b/core/src/main/java/org/elasticsearch/common/settings/Settings.java
@@ -396,48 +396,48 @@ public SizeValue getAsSize(String setting, SizeValue defaultValue) throws Settin
}
/**
- * The values associated with a setting key as an array.
+ * The values associated with a setting key as an immutable list.
*
* It will also automatically load a comma separated list under the settingPrefix and merge with
* the numbered format.
*
- * @param key The setting prefix to load the array by
- * @return The setting array values
+ * @param key The setting key to load the list by
+ * @return The setting list values
*/
- public String[] getAsArray(String key) throws SettingsException {
- return getAsArray(key, Strings.EMPTY_ARRAY, true);
+ public List getAsList(String key) throws SettingsException {
+ return getAsList(key, Collections.emptyList());
}
/**
- * The values associated with a setting key as an array.
+ * The values associated with a setting key as an immutable list.
*
* If commaDelimited is true, it will automatically load a comma separated list under the settingPrefix and merge with
* the numbered format.
*
- * @param key The setting key to load the array by
- * @return The setting array values
+ * @param key The setting key to load the list by
+ * @return The setting list values
*/
- public String[] getAsArray(String key, String[] defaultArray) throws SettingsException {
- return getAsArray(key, defaultArray, true);
+ public List getAsList(String key, List defaultValue) throws SettingsException {
+ return getAsList(key, defaultValue, true);
}
/**
- * The values associated with a setting key as an array.
+ * The values associated with a setting key as an immutable list.
*
* It will also automatically load a comma separated list under the settingPrefix and merge with
* the numbered format.
*
- * @param key The setting key to load the array by
- * @param defaultArray The default array to use if no value is specified
+ * @param key The setting key to load the list by
+ * @param defaultValue The default value to use if no value is specified
* @param commaDelimited Whether to try to parse a string as a comma-delimited value
- * @return The setting array values
+ * @return The setting list values
*/
- public String[] getAsArray(String key, String[] defaultArray, Boolean commaDelimited) throws SettingsException {
+ public List getAsList(String key, List defaultValue, Boolean commaDelimited) throws SettingsException {
List result = new ArrayList<>();
final Object valueFromPrefix = settings.get(key);
if (valueFromPrefix != null) {
if (valueFromPrefix instanceof List) {
- result = ((List) valueFromPrefix);
+ return ((List) valueFromPrefix); // it's already unmodifiable since the builder puts it as such
} else if (commaDelimited) {
String[] strings = Strings.splitStringByCommaToArray(get(key));
if (strings.length > 0) {
@@ -451,9 +451,9 @@ public String[] getAsArray(String key, String[] defaultArray, Boolean commaDelim
}
if (result.isEmpty()) {
- return defaultArray;
+ return defaultValue;
}
- return result.toArray(new String[result.size()]);
+ return Collections.unmodifiableList(result);
}
@@ -582,7 +582,7 @@ public static Settings readSettingsFromStream(StreamInput in) throws IOException
if (value == null) {
builder.putNull(key);
} else if (value instanceof List) {
- builder.putArray(key, (List) value);
+ builder.putList(key, (List) value);
} else {
builder.put(key, value.toString());
}
@@ -709,7 +709,7 @@ private static void fromXContent(XContentParser parser, StringBuilder keyBuilder
}
String key = keyBuilder.toString();
validateValue(key, list, builder, parser, allowNullValues);
- builder.putArray(key, list);
+ builder.putList(key, list);
} else if (parser.currentToken() == XContentParser.Token.VALUE_NULL) {
String key = keyBuilder.toString();
validateValue(key, null, builder, parser, allowNullValues);
@@ -928,7 +928,7 @@ public Builder copy(String key, String sourceKey, Settings source) {
}
final Object value = source.settings.get(sourceKey);
if (value instanceof List) {
- return putArray(key, (List)value);
+ return putList(key, (List)value);
} else if (value == null) {
return putNull(key);
} else {
@@ -1052,8 +1052,8 @@ public Builder put(String setting, long value, ByteSizeUnit sizeUnit) {
* @param values The values
* @return The builder
*/
- public Builder putArray(String setting, String... values) {
- return putArray(setting, Arrays.asList(values));
+ public Builder putList(String setting, String... values) {
+ return putList(setting, Arrays.asList(values));
}
/**
@@ -1063,7 +1063,7 @@ public Builder putArray(String setting, String... values) {
* @param values The values
* @return The builder
*/
- public Builder putArray(String setting, List values) {
+ public Builder putList(String setting, List values) {
remove(setting);
map.put(setting, Collections.unmodifiableList(new ArrayList<>(values)));
return this;
diff --git a/core/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java b/core/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java
index 1ce119636f734..95c08e8889857 100644
--- a/core/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java
+++ b/core/src/main/java/org/elasticsearch/common/util/concurrent/ThreadContext.java
@@ -407,11 +407,10 @@ private ThreadContextStruct putHeaders(Map headers) {
if (headers.isEmpty()) {
return this;
} else {
- final Map newHeaders = new HashMap<>();
+ final Map newHeaders = new HashMap<>(this.requestHeaders);
for (Map.Entry entry : headers.entrySet()) {
putSingleHeader(entry.getKey(), entry.getValue(), newHeaders);
}
- newHeaders.putAll(this.requestHeaders);
return new ThreadContextStruct(newHeaders, responseHeaders, transientHeaders, isSystemContext);
}
}
diff --git a/core/src/main/java/org/elasticsearch/env/Environment.java b/core/src/main/java/org/elasticsearch/env/Environment.java
index 27fe23048d79c..31a67333a810f 100644
--- a/core/src/main/java/org/elasticsearch/env/Environment.java
+++ b/core/src/main/java/org/elasticsearch/env/Environment.java
@@ -153,7 +153,7 @@ public Environment(final Settings settings, final Path configPath) {
Settings.Builder finalSettings = Settings.builder().put(settings);
finalSettings.put(PATH_HOME_SETTING.getKey(), homeFile);
if (PATH_DATA_SETTING.exists(settings)) {
- finalSettings.putArray(PATH_DATA_SETTING.getKey(), dataPaths);
+ finalSettings.putList(PATH_DATA_SETTING.getKey(), dataPaths);
}
finalSettings.put(PATH_LOGS_SETTING.getKey(), logsFile.toString());
this.settings = finalSettings.build();
diff --git a/core/src/main/java/org/elasticsearch/index/analysis/Analysis.java b/core/src/main/java/org/elasticsearch/index/analysis/Analysis.java
index a0d0df921de20..dad3768189f24 100644
--- a/core/src/main/java/org/elasticsearch/index/analysis/Analysis.java
+++ b/core/src/main/java/org/elasticsearch/index/analysis/Analysis.java
@@ -71,7 +71,6 @@
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
-import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.List;
@@ -110,10 +109,10 @@ public static CharArraySet parseStemExclusion(Settings settings, CharArraySet de
if ("_none_".equals(value)) {
return CharArraySet.EMPTY_SET;
}
- String[] stemExclusion = settings.getAsArray("stem_exclusion", null);
+ List stemExclusion = settings.getAsList("stem_exclusion", null);
if (stemExclusion != null) {
// LUCENE 4 UPGRADE: Should be settings.getAsBoolean("stem_exclusion_case", false)?
- return new CharArraySet(Arrays.asList(stemExclusion), false);
+ return new CharArraySet(stemExclusion, false);
} else {
return defaultStemExclusion;
}
@@ -166,7 +165,7 @@ public static CharArraySet parseWords(Environment env, Settings settings, String
if ("_none_".equals(value)) {
return CharArraySet.EMPTY_SET;
} else {
- return resolveNamedWords(Arrays.asList(settings.getAsArray(name)), namedWords, ignoreCase);
+ return resolveNamedWords(settings.getAsList(name), namedWords, ignoreCase);
}
}
List pathLoadedWords = getWordList(env, settings, name);
@@ -233,11 +232,11 @@ public static List getWordList(Environment env, Settings settings, Strin
String wordListPath = settings.get(settingPrefix + "_path", null);
if (wordListPath == null) {
- String[] explicitWordList = settings.getAsArray(settingPrefix, null);
+ List explicitWordList = settings.getAsList(settingPrefix, null);
if (explicitWordList == null) {
return null;
} else {
- return Arrays.asList(explicitWordList);
+ return explicitWordList;
}
}
diff --git a/core/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java b/core/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java
index e8134244f04d8..194dc0eb4034e 100644
--- a/core/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java
+++ b/core/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java
@@ -571,7 +571,7 @@ private void processAnalyzerFactory(DeprecationLogger deprecationLogger,
// the setting is now removed but we only support it for loading indices created before v5.0
deprecationLogger.deprecated("setting [{}] is only allowed on index [{}] because it was created before 5.x; " +
"analyzer aliases can no longer be created on new indices.", analyzerAliasKey, indexSettings.getIndex().getName());
- Set aliases = Sets.newHashSet(indexSettings.getSettings().getAsArray(analyzerAliasKey));
+ Set aliases = Sets.newHashSet(indexSettings.getSettings().getAsList(analyzerAliasKey));
for (String alias : aliases) {
if (analyzerAliases.putIfAbsent(alias, analyzer) != null) {
throw new IllegalStateException("alias [" + alias + "] is already used by [" + analyzerAliases.get(alias).name() + "]");
diff --git a/core/src/main/java/org/elasticsearch/index/analysis/CustomAnalyzerProvider.java b/core/src/main/java/org/elasticsearch/index/analysis/CustomAnalyzerProvider.java
index e9654719bdc11..4ba078051640a 100644
--- a/core/src/main/java/org/elasticsearch/index/analysis/CustomAnalyzerProvider.java
+++ b/core/src/main/java/org/elasticsearch/index/analysis/CustomAnalyzerProvider.java
@@ -58,8 +58,8 @@ public void build(final Map tokenizers, final Map charFiltersList = new ArrayList<>(charFilterNames.length);
+ List charFilterNames = analyzerSettings.getAsList("char_filter");
+ List charFiltersList = new ArrayList<>(charFilterNames.size());
for (String charFilterName : charFilterNames) {
CharFilterFactory charFilter = charFilters.get(charFilterName);
if (charFilter == null) {
@@ -74,8 +74,8 @@ public void build(final Map tokenizers, final Map tokenFilterList = new ArrayList<>(tokenFilterNames.length);
+ List tokenFilterNames = analyzerSettings.getAsList("filter");
+ List tokenFilterList = new ArrayList<>(tokenFilterNames.size());
for (String tokenFilterName : tokenFilterNames) {
TokenFilterFactory tokenFilter = tokenFilters.get(tokenFilterName);
if (tokenFilter == null) {
diff --git a/core/src/main/java/org/elasticsearch/index/analysis/CustomNormalizerProvider.java b/core/src/main/java/org/elasticsearch/index/analysis/CustomNormalizerProvider.java
index a375c1e8e3b9d..a0a7859d50cfd 100644
--- a/core/src/main/java/org/elasticsearch/index/analysis/CustomNormalizerProvider.java
+++ b/core/src/main/java/org/elasticsearch/index/analysis/CustomNormalizerProvider.java
@@ -50,8 +50,8 @@ public void build(final TokenizerFactory keywordTokenizerFactory, final Map charFiltersList = new ArrayList<>(charFilterNames.length);
+ List charFilterNames = analyzerSettings.getAsList("char_filter");
+ List charFiltersList = new ArrayList<>(charFilterNames.size());
for (String charFilterName : charFilterNames) {
CharFilterFactory charFilter = charFilters.get(charFilterName);
if (charFilter == null) {
@@ -66,8 +66,8 @@ public void build(final TokenizerFactory keywordTokenizerFactory, final Map tokenFilterList = new ArrayList<>(tokenFilterNames.length);
+ List tokenFilterNames = analyzerSettings.getAsList("filter");
+ List tokenFilterList = new ArrayList<>(tokenFilterNames.size());
for (String tokenFilterName : tokenFilterNames) {
TokenFilterFactory tokenFilter = tokenFilters.get(tokenFilterName);
if (tokenFilter == null) {
diff --git a/core/src/main/java/org/elasticsearch/index/analysis/EdgeNGramTokenizerFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/EdgeNGramTokenizerFactory.java
index cb696219f4ed6..8210883b2f8f5 100644
--- a/core/src/main/java/org/elasticsearch/index/analysis/EdgeNGramTokenizerFactory.java
+++ b/core/src/main/java/org/elasticsearch/index/analysis/EdgeNGramTokenizerFactory.java
@@ -41,7 +41,7 @@ public EdgeNGramTokenizerFactory(IndexSettings indexSettings, Environment enviro
super(indexSettings, name, settings);
this.minGram = settings.getAsInt("min_gram", NGramTokenizer.DEFAULT_MIN_NGRAM_SIZE);
this.maxGram = settings.getAsInt("max_gram", NGramTokenizer.DEFAULT_MAX_NGRAM_SIZE);
- this.matcher = parseTokenChars(settings.getAsArray("token_chars"));
+ this.matcher = parseTokenChars(settings.getAsList("token_chars"));
}
@Override
diff --git a/core/src/main/java/org/elasticsearch/index/analysis/NGramTokenizerFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/NGramTokenizerFactory.java
index 17acddf55e1a3..2a31f1eb26a3e 100644
--- a/core/src/main/java/org/elasticsearch/index/analysis/NGramTokenizerFactory.java
+++ b/core/src/main/java/org/elasticsearch/index/analysis/NGramTokenizerFactory.java
@@ -28,6 +28,7 @@
import java.lang.reflect.Field;
import java.lang.reflect.Modifier;
import java.util.HashMap;
+import java.util.List;
import java.util.Locale;
import java.util.Map;
@@ -65,8 +66,8 @@ public class NGramTokenizerFactory extends AbstractTokenizerFactory {
MATCHERS = unmodifiableMap(matchers);
}
- static CharMatcher parseTokenChars(String[] characterClasses) {
- if (characterClasses == null || characterClasses.length == 0) {
+ static CharMatcher parseTokenChars(List characterClasses) {
+ if (characterClasses == null || characterClasses.isEmpty()) {
return null;
}
CharMatcher.Builder builder = new CharMatcher.Builder();
@@ -85,7 +86,7 @@ public NGramTokenizerFactory(IndexSettings indexSettings, Environment environmen
super(indexSettings, name, settings);
this.minGram = settings.getAsInt("min_gram", NGramTokenizer.DEFAULT_MIN_NGRAM_SIZE);
this.maxGram = settings.getAsInt("max_gram", NGramTokenizer.DEFAULT_MAX_NGRAM_SIZE);
- this.matcher = parseTokenChars(settings.getAsArray("token_chars"));
+ this.matcher = parseTokenChars(settings.getAsList("token_chars"));
}
@Override
diff --git a/core/src/main/java/org/elasticsearch/index/analysis/SynonymTokenFilterFactory.java b/core/src/main/java/org/elasticsearch/index/analysis/SynonymTokenFilterFactory.java
index 88190d88993fd..7b8d2e7a4a753 100644
--- a/core/src/main/java/org/elasticsearch/index/analysis/SynonymTokenFilterFactory.java
+++ b/core/src/main/java/org/elasticsearch/index/analysis/SynonymTokenFilterFactory.java
@@ -91,7 +91,7 @@ public TokenStream create(TokenStream tokenStream) {
protected Reader getRulesFromSettings(Environment env) {
Reader rulesReader;
- if (settings.getAsArray("synonyms", null) != null) {
+ if (settings.getAsList("synonyms", null) != null) {
List rulesList = Analysis.getWordList(env, settings, "synonyms");
StringBuilder sb = new StringBuilder();
for (String line : rulesList) {
diff --git a/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java b/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java
index 17f93803f6ec0..e6e32fb8acffd 100644
--- a/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java
+++ b/core/src/main/java/org/elasticsearch/index/mapper/DocumentParser.java
@@ -27,6 +27,7 @@
import org.elasticsearch.common.joda.FormatDateTimeFormatter;
import org.elasticsearch.common.xcontent.XContentHelper;
import org.elasticsearch.common.xcontent.XContentParser;
+import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.mapper.DynamicTemplate.XContentFieldType;
import org.elasticsearch.index.mapper.KeywordFieldMapper.KeywordFieldType;
@@ -58,9 +59,10 @@ ParsedDocument parseDocument(SourceToParse source) throws MapperParsingException
final Mapping mapping = docMapper.mapping();
final ParseContext.InternalParseContext context;
- try (XContentParser parser = XContentHelper.createParser(docMapperParser.getXContentRegistry(), source.source())) {
- context = new ParseContext.InternalParseContext(indexSettings.getSettings(),
- docMapperParser, docMapper, source, parser);
+ final XContentType xContentType = source.getXContentType();
+
+ try (XContentParser parser = XContentHelper.createParser(docMapperParser.getXContentRegistry(), source.source(), xContentType)) {
+ context = new ParseContext.InternalParseContext(indexSettings.getSettings(), docMapperParser, docMapper, source, parser);
validateStart(parser);
internalParseDocument(mapping, context, parser);
validateEnd(parser);
@@ -74,8 +76,7 @@ ParsedDocument parseDocument(SourceToParse source) throws MapperParsingException
reverseOrder(context);
- ParsedDocument doc = parsedDocument(source, context, createDynamicUpdate(mapping, docMapper, context.getDynamicMappers()));
- return doc;
+ return parsedDocument(source, context, createDynamicUpdate(mapping, docMapper, context.getDynamicMappers()));
}
private static void internalParseDocument(Mapping mapping, ParseContext.InternalParseContext context, XContentParser parser) throws IOException {
@@ -89,7 +90,7 @@ private static void internalParseDocument(Mapping mapping, ParseContext.Internal
// entire type is disabled
parser.skipChildren();
} else if (emptyDoc == false) {
- parseObjectOrNested(context, mapping.root, true);
+ parseObjectOrNested(context, mapping.root);
}
for (MetadataFieldMapper metadataMapper : mapping.metadataMappers) {
@@ -338,7 +339,7 @@ private static ObjectMapper createUpdate(ObjectMapper parent, String[] nameParts
return parent.mappingUpdate(mapper);
}
- static void parseObjectOrNested(ParseContext context, ObjectMapper mapper, boolean atRoot) throws IOException {
+ static void parseObjectOrNested(ParseContext context, ObjectMapper mapper) throws IOException {
if (mapper.isEnabled() == false) {
context.parser().skipChildren();
return;
@@ -473,7 +474,7 @@ private static ParseContext nestedContext(ParseContext context, ObjectMapper map
private static void parseObjectOrField(ParseContext context, Mapper mapper) throws IOException {
if (mapper instanceof ObjectMapper) {
- parseObjectOrNested(context, (ObjectMapper) mapper, false);
+ parseObjectOrNested(context, (ObjectMapper) mapper);
} else {
FieldMapper fieldMapper = (FieldMapper)mapper;
Mapper update = fieldMapper.parse(context);
@@ -487,14 +488,13 @@ private static void parseObjectOrField(ParseContext context, Mapper mapper) thro
private static void parseObject(final ParseContext context, ObjectMapper mapper, String currentFieldName) throws IOException {
assert currentFieldName != null;
- Mapper objectMapper = getMapper(mapper, currentFieldName);
+ final String[] paths = splitAndValidatePath(currentFieldName);
+ Mapper objectMapper = getMapper(mapper, currentFieldName, paths);
if (objectMapper != null) {
context.path().add(currentFieldName);
parseObjectOrField(context, objectMapper);
context.path().remove();
} else {
-
- final String[] paths = splitAndValidatePath(currentFieldName);
currentFieldName = paths[paths.length - 1];
Tuple parentMapperTuple = getDynamicParentMapper(context, paths, mapper);
ObjectMapper parentMapper = parentMapperTuple.v2();
@@ -524,7 +524,9 @@ private static void parseObject(final ParseContext context, ObjectMapper mapper,
private static void parseArray(ParseContext context, ObjectMapper parentMapper, String lastFieldName) throws IOException {
String arrayFieldName = lastFieldName;
- Mapper mapper = getMapper(parentMapper, lastFieldName);
+
+ final String[] paths = splitAndValidatePath(arrayFieldName);
+ Mapper mapper = getMapper(parentMapper, lastFieldName, paths);
if (mapper != null) {
// There is a concrete mapper for this field already. Need to check if the mapper
// expects an array, if so we pass the context straight to the mapper and if not
@@ -535,8 +537,6 @@ private static void parseArray(ParseContext context, ObjectMapper parentMapper,
parseNonDynamicArray(context, parentMapper, lastFieldName, arrayFieldName);
}
} else {
-
- final String[] paths = splitAndValidatePath(arrayFieldName);
arrayFieldName = paths[paths.length - 1];
lastFieldName = arrayFieldName;
Tuple parentMapperTuple = getDynamicParentMapper(context, paths, parentMapper);
@@ -595,12 +595,12 @@ private static void parseValue(final ParseContext context, ObjectMapper parentMa
if (currentFieldName == null) {
throw new MapperParsingException("object mapping [" + parentMapper.name() + "] trying to serialize a value with no field associated with it, current value [" + context.parser().textOrNull() + "]");
}
- Mapper mapper = getMapper(parentMapper, currentFieldName);
+
+ final String[] paths = splitAndValidatePath(currentFieldName);
+ Mapper mapper = getMapper(parentMapper, currentFieldName, paths);
if (mapper != null) {
parseObjectOrField(context, mapper);
} else {
-
- final String[] paths = splitAndValidatePath(currentFieldName);
currentFieldName = paths[paths.length - 1];
Tuple parentMapperTuple = getDynamicParentMapper(context, paths, parentMapper);
parentMapper = parentMapperTuple.v2();
@@ -613,7 +613,7 @@ private static void parseValue(final ParseContext context, ObjectMapper parentMa
private static void parseNullValue(ParseContext context, ObjectMapper parentMapper, String lastFieldName) throws IOException {
// we can only handle null values if we have mappings for them
- Mapper mapper = getMapper(parentMapper, lastFieldName);
+ Mapper mapper = getMapper(parentMapper, lastFieldName, splitAndValidatePath(lastFieldName));
if (mapper != null) {
// TODO: passing null to an object seems bogus?
parseObjectOrField(context, mapper);
@@ -899,7 +899,7 @@ private static Tuple getDynamicParentMapper(ParseContext
break;
case FALSE:
// Should not dynamically create any more mappers so return the last mapper
- return new Tuple(pathsAdded, parent);
+ return new Tuple<>(pathsAdded, parent);
}
}
@@ -907,7 +907,7 @@ private static Tuple getDynamicParentMapper(ParseContext
pathsAdded++;
parent = mapper;
}
- return new Tuple(pathsAdded, mapper);
+ return new Tuple<>(pathsAdded, mapper);
}
// find what the dynamic setting is given the current parse context and parent
@@ -935,8 +935,7 @@ private static ObjectMapper.Dynamic dynamicOrDefault(ObjectMapper parentMapper,
}
// looks up a child mapper, but takes into account field names that expand to objects
- static Mapper getMapper(ObjectMapper objectMapper, String fieldName) {
- String[] subfields = splitAndValidatePath(fieldName);
+ private static Mapper getMapper(ObjectMapper objectMapper, String fieldName, String[] subfields) {
for (int i = 0; i < subfields.length - 1; ++i) {
Mapper mapper = objectMapper.getMapper(subfields[i]);
if (mapper == null || (mapper instanceof ObjectMapper) == false) {
diff --git a/core/src/main/java/org/elasticsearch/index/seqno/GlobalCheckpointSyncAction.java b/core/src/main/java/org/elasticsearch/index/seqno/GlobalCheckpointSyncAction.java
index 3570ade09adec..2c60ebfac6b6c 100644
--- a/core/src/main/java/org/elasticsearch/index/seqno/GlobalCheckpointSyncAction.java
+++ b/core/src/main/java/org/elasticsearch/index/seqno/GlobalCheckpointSyncAction.java
@@ -35,6 +35,7 @@
import org.elasticsearch.cluster.service.ClusterService;
import org.elasticsearch.common.inject.Inject;
import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.util.concurrent.ThreadContext;
import org.elasticsearch.index.shard.IndexShard;
import org.elasticsearch.index.shard.IndexShardClosedException;
import org.elasticsearch.index.shard.ShardId;
@@ -80,13 +81,18 @@ public GlobalCheckpointSyncAction(
}
public void updateGlobalCheckpointForShard(final ShardId shardId) {
- execute(
- new Request(shardId),
- ActionListener.wrap(r -> {}, e -> {
- if (ExceptionsHelper.unwrap(e, AlreadyClosedException.class, IndexShardClosedException.class) == null) {
- logger.info(new ParameterizedMessage("{} global checkpoint sync failed", shardId), e);
- }
- }));
+ final ThreadContext threadContext = threadPool.getThreadContext();
+ try (ThreadContext.StoredContext ignore = threadContext.stashContext()) {
+ threadContext.markAsSystemContext();
+ execute(
+ new Request(shardId),
+ ActionListener.wrap(r -> {
+ }, e -> {
+ if (ExceptionsHelper.unwrap(e, AlreadyClosedException.class, IndexShardClosedException.class) == null) {
+ logger.info(new ParameterizedMessage("{} global checkpoint sync failed", shardId), e);
+ }
+ }));
+ }
}
@Override
diff --git a/core/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java b/core/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java
index 6bcb073d1106e..52da10a378576 100644
--- a/core/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java
+++ b/core/src/main/java/org/elasticsearch/rest/action/cat/RestIndicesAction.java
@@ -23,6 +23,7 @@
import org.elasticsearch.action.admin.cluster.health.ClusterHealthResponse;
import org.elasticsearch.action.admin.cluster.state.ClusterStateRequest;
import org.elasticsearch.action.admin.cluster.state.ClusterStateResponse;
+import org.elasticsearch.action.admin.indices.stats.CommonStats;
import org.elasticsearch.action.admin.indices.stats.IndexStats;
import org.elasticsearch.action.admin.indices.stats.IndicesStatsRequest;
import org.elasticsearch.action.admin.indices.stats.IndicesStatsResponse;
@@ -363,6 +364,9 @@ Table buildTable(RestRequest request, Index[] indices, ClusterHealthResponse res
}
}
+ final CommonStats primaryStats = indexStats == null ? new CommonStats() : indexStats.getPrimaries();
+ final CommonStats totalStats = indexStats == null ? new CommonStats() : indexStats.getTotal();
+
table.startRow();
table.addCell(state == IndexMetaData.State.OPEN ? (indexHealth == null ? "red*" : indexHealth.getStatus().toString().toLowerCase(Locale.ROOT)) : null);
table.addCell(state.toString().toLowerCase(Locale.ROOT));
@@ -370,182 +374,183 @@ Table buildTable(RestRequest request, Index[] indices, ClusterHealthResponse res
table.addCell(index.getUUID());
table.addCell(indexHealth == null ? null : indexHealth.getNumberOfShards());
table.addCell(indexHealth == null ? null : indexHealth.getNumberOfReplicas());
- table.addCell(indexStats == null ? null : indexStats.getPrimaries().getDocs().getCount());
- table.addCell(indexStats == null ? null : indexStats.getPrimaries().getDocs().getDeleted());
+
+ table.addCell(primaryStats.getDocs() == null ? null : primaryStats.getDocs().getCount());
+ table.addCell(primaryStats.getDocs() == null ? null : primaryStats.getDocs().getDeleted());
table.addCell(indexMetaData.getCreationDate());
table.addCell(new DateTime(indexMetaData.getCreationDate(), DateTimeZone.UTC));
- table.addCell(indexStats == null ? null : indexStats.getTotal().getStore().size());
- table.addCell(indexStats == null ? null : indexStats.getPrimaries().getStore().size());
+ table.addCell(totalStats.getStore() == null ? null : totalStats.getStore().size());
+ table.addCell(primaryStats.getStore() == null ? null : primaryStats.getStore().size());
- table.addCell(indexStats == null ? null : indexStats.getTotal().getCompletion().getSize());
- table.addCell(indexStats == null ? null : indexStats.getPrimaries().getCompletion().getSize());
+ table.addCell(totalStats.getCompletion() == null ? null : totalStats.getCompletion().getSize());
+ table.addCell(primaryStats.getCompletion() == null ? null : primaryStats.getCompletion().getSize());
- table.addCell(indexStats == null ? null : indexStats.getTotal().getFieldData().getMemorySize());
- table.addCell(indexStats == null ? null : indexStats.getPrimaries().getFieldData().getMemorySize());
+ table.addCell(totalStats.getFieldData() == null ? null : totalStats.getFieldData().getMemorySize());
+ table.addCell(primaryStats.getFieldData() == null ? null : primaryStats.getFieldData().getMemorySize());
- table.addCell(indexStats == null ? null : indexStats.getTotal().getFieldData().getEvictions());
- table.addCell(indexStats == null ? null : indexStats.getPrimaries().getFieldData().getEvictions());
+ table.addCell(totalStats.getFieldData() == null ? null : totalStats.getFieldData().getEvictions());
+ table.addCell(primaryStats.getFieldData() == null ? null : primaryStats.getFieldData().getEvictions());
- table.addCell(indexStats == null ? null : indexStats.getTotal().getQueryCache().getMemorySize());
- table.addCell(indexStats == null ? null : indexStats.getPrimaries().getQueryCache().getMemorySize());
+ table.addCell(totalStats.getQueryCache() == null ? null : totalStats.getQueryCache().getMemorySize());
+ table.addCell(primaryStats.getQueryCache() == null ? null : primaryStats.getQueryCache().getMemorySize());
- table.addCell(indexStats == null ? null : indexStats.getTotal().getQueryCache().getEvictions());
- table.addCell(indexStats == null ? null : indexStats.getPrimaries().getQueryCache().getEvictions());
+ table.addCell(totalStats.getQueryCache() == null ? null : totalStats.getQueryCache().getEvictions());
+ table.addCell(primaryStats.getQueryCache() == null ? null : primaryStats.getQueryCache().getEvictions());
- table.addCell(indexStats == null ? null : indexStats.getTotal().getRequestCache().getMemorySize());
- table.addCell(indexStats == null ? null : indexStats.getPrimaries().getRequestCache().getMemorySize());
+ table.addCell(totalStats.getRequestCache() == null ? null : totalStats.getRequestCache().getMemorySize());
+ table.addCell(primaryStats.getRequestCache() == null ? null : primaryStats.getRequestCache().getMemorySize());
- table.addCell(indexStats == null ? null : indexStats.getTotal().getRequestCache().getEvictions());
- table.addCell(indexStats == null ? null : indexStats.getPrimaries().getRequestCache().getEvictions());
+ table.addCell(totalStats.getRequestCache() == null ? null : totalStats.getRequestCache().getEvictions());
+ table.addCell(primaryStats.getRequestCache() == null ? null : primaryStats.getRequestCache().getEvictions());
- table.addCell(indexStats == null ? null : indexStats.getTotal().getRequestCache().getHitCount());
- table.addCell(indexStats == null ? null : indexStats.getPrimaries().getRequestCache().getHitCount());
+ table.addCell(totalStats.getRequestCache() == null ? null : totalStats.getRequestCache().getHitCount());
+ table.addCell(primaryStats.getRequestCache() == null ? null : primaryStats.getRequestCache().getHitCount());
- table.addCell(indexStats == null ? null : indexStats.getTotal().getRequestCache().getMissCount());
- table.addCell(indexStats == null ? null : indexStats.getPrimaries().getRequestCache().getMissCount());
+ table.addCell(totalStats.getRequestCache() == null ? null : totalStats.getRequestCache().getMissCount());
+ table.addCell(primaryStats.getRequestCache() == null ? null : primaryStats.getRequestCache().getMissCount());
- table.addCell(indexStats == null ? null : indexStats.getTotal().getFlush().getTotal());
- table.addCell(indexStats == null ? null : indexStats.getPrimaries().getFlush().getTotal());
+ table.addCell(totalStats.getFlush() == null ? null : totalStats.getFlush().getTotal());
+ table.addCell(primaryStats.getFlush() == null ? null : primaryStats.getFlush().getTotal());
- table.addCell(indexStats == null ? null : indexStats.getTotal().getFlush().getTotalTime());
- table.addCell(indexStats == null ? null : indexStats.getPrimaries().getFlush().getTotalTime());
+ table.addCell(totalStats.getFlush() == null ? null : totalStats.getFlush().getTotalTime());
+ table.addCell(primaryStats.getFlush() == null ? null : primaryStats.getFlush().getTotalTime());
- table.addCell(indexStats == null ? null : indexStats.getTotal().getGet().current());
- table.addCell(indexStats == null ? null : indexStats.getPrimaries().getGet().current());
+ table.addCell(totalStats.getGet() == null ? null : totalStats.getGet().current());
+ table.addCell(primaryStats.getGet() == null ? null : primaryStats.getGet().current());
- table.addCell(indexStats == null ? null : indexStats.getTotal().getGet().getTime());
- table.addCell(indexStats == null ? null : indexStats.getPrimaries().getGet().getTime());
+ table.addCell(totalStats.getGet() == null ? null : totalStats.getGet().getTime());
+ table.addCell(primaryStats.getGet() == null ? null : primaryStats.getGet().getTime());
- table.addCell(indexStats == null ? null : indexStats.getTotal().getGet().getCount());
- table.addCell(indexStats == null ? null : indexStats.getPrimaries().getGet().getCount());
+ table.addCell(totalStats.getGet() == null ? null : totalStats.getGet().getCount());
+ table.addCell(primaryStats.getGet() == null ? null : primaryStats.getGet().getCount());
- table.addCell(indexStats == null ? null : indexStats.getTotal().getGet().getExistsTime());
- table.addCell(indexStats == null ? null : indexStats.getPrimaries().getGet().getExistsTime());
+ table.addCell(totalStats.getGet() == null ? null : totalStats.getGet().getExistsTime());
+ table.addCell(primaryStats.getGet() == null ? null : primaryStats.getGet().getExistsTime());
- table.addCell(indexStats == null ? null : indexStats.getTotal().getGet().getExistsCount());
- table.addCell(indexStats == null ? null : indexStats.getPrimaries().getGet().getExistsCount());
+ table.addCell(totalStats.getGet() == null ? null : totalStats.getGet().getExistsCount());
+ table.addCell(primaryStats.getGet() == null ? null : primaryStats.getGet().getExistsCount());
- table.addCell(indexStats == null ? null : indexStats.getTotal().getGet().getMissingTime());
- table.addCell(indexStats == null ? null : indexStats.getPrimaries().getGet().getMissingTime());
+ table.addCell(totalStats.getGet() == null ? null : totalStats.getGet().getMissingTime());
+ table.addCell(primaryStats.getGet() == null ? null : primaryStats.getGet().getMissingTime());
- table.addCell(indexStats == null ? null : indexStats.getTotal().getGet().getMissingCount());
- table.addCell(indexStats == null ? null : indexStats.getPrimaries().getGet().getMissingCount());
+ table.addCell(totalStats.getGet() == null ? null : totalStats.getGet().getMissingCount());
+ table.addCell(primaryStats.getGet() == null ? null : primaryStats.getGet().getMissingCount());
- table.addCell(indexStats == null ? null : indexStats.getTotal().getIndexing().getTotal().getDeleteCurrent());
- table.addCell(indexStats == null ? null : indexStats.getPrimaries().getIndexing().getTotal().getDeleteCurrent());
+ table.addCell(totalStats.getIndexing() == null ? null : totalStats.getIndexing().getTotal().getDeleteCurrent());
+ table.addCell(primaryStats.getIndexing() == null ? null : primaryStats.getIndexing().getTotal().getDeleteCurrent());
- table.addCell(indexStats == null ? null : indexStats.getTotal().getIndexing().getTotal().getDeleteTime());
- table.addCell(indexStats == null ? null : indexStats.getPrimaries().getIndexing().getTotal().getDeleteTime());
+ table.addCell(totalStats.getIndexing() == null ? null : totalStats.getIndexing().getTotal().getDeleteTime());
+ table.addCell(primaryStats.getIndexing() == null ? null : primaryStats.getIndexing().getTotal().getDeleteTime());
- table.addCell(indexStats == null ? null : indexStats.getTotal().getIndexing().getTotal().getDeleteCount());
- table.addCell(indexStats == null ? null : indexStats.getPrimaries().getIndexing().getTotal().getDeleteCount());
+ table.addCell(totalStats.getIndexing() == null ? null : totalStats.getIndexing().getTotal().getDeleteCount());
+ table.addCell(primaryStats.getIndexing() == null ? null : primaryStats.getIndexing().getTotal().getDeleteCount());
- table.addCell(indexStats == null ? null : indexStats.getTotal().getIndexing().getTotal().getIndexCurrent());
- table.addCell(indexStats == null ? null : indexStats.getPrimaries().getIndexing().getTotal().getIndexCurrent());
+ table.addCell(totalStats.getIndexing() == null ? null : totalStats.getIndexing().getTotal().getIndexCurrent());
+ table.addCell(primaryStats.getIndexing() == null ? null : primaryStats.getIndexing().getTotal().getIndexCurrent());
- table.addCell(indexStats == null ? null : indexStats.getTotal().getIndexing().getTotal().getIndexTime());
- table.addCell(indexStats == null ? null : indexStats.getPrimaries().getIndexing().getTotal().getIndexTime());
+ table.addCell(totalStats.getIndexing() == null ? null : totalStats.getIndexing().getTotal().getIndexTime());
+ table.addCell(primaryStats.getIndexing() == null ? null : primaryStats.getIndexing().getTotal().getIndexTime());
- table.addCell(indexStats == null ? null : indexStats.getTotal().getIndexing().getTotal().getIndexCount());
- table.addCell(indexStats == null ? null : indexStats.getPrimaries().getIndexing().getTotal().getIndexCount());
+ table.addCell(totalStats.getIndexing() == null ? null : totalStats.getIndexing().getTotal().getIndexCount());
+ table.addCell(primaryStats.getIndexing() == null ? null : primaryStats.getIndexing().getTotal().getIndexCount());
- table.addCell(indexStats == null ? null : indexStats.getTotal().getIndexing().getTotal().getIndexFailedCount());
- table.addCell(indexStats == null ? null : indexStats.getPrimaries().getIndexing().getTotal().getIndexFailedCount());
+ table.addCell(totalStats.getIndexing() == null ? null : totalStats.getIndexing().getTotal().getIndexFailedCount());
+ table.addCell(primaryStats.getIndexing() == null ? null : primaryStats.getIndexing().getTotal().getIndexFailedCount());
- table.addCell(indexStats == null ? null : indexStats.getTotal().getMerge().getCurrent());
- table.addCell(indexStats == null ? null : indexStats.getPrimaries().getMerge().getCurrent());
+ table.addCell(totalStats.getMerge() == null ? null : totalStats.getMerge().getCurrent());
+ table.addCell(primaryStats.getMerge() == null ? null : primaryStats.getMerge().getCurrent());
- table.addCell(indexStats == null ? null : indexStats.getTotal().getMerge().getCurrentNumDocs());
- table.addCell(indexStats == null ? null : indexStats.getPrimaries().getMerge().getCurrentNumDocs());
+ table.addCell(totalStats.getMerge() == null ? null : totalStats.getMerge().getCurrentNumDocs());
+ table.addCell(primaryStats.getMerge() == null ? null : primaryStats.getMerge().getCurrentNumDocs());
- table.addCell(indexStats == null ? null : indexStats.getTotal().getMerge().getCurrentSize());
- table.addCell(indexStats == null ? null : indexStats.getPrimaries().getMerge().getCurrentSize());
+ table.addCell(totalStats.getMerge() == null ? null : totalStats.getMerge().getCurrentSize());
+ table.addCell(primaryStats.getMerge() == null ? null : primaryStats.getMerge().getCurrentSize());
- table.addCell(indexStats == null ? null : indexStats.getTotal().getMerge().getTotal());
- table.addCell(indexStats == null ? null : indexStats.getPrimaries().getMerge().getTotal());
+ table.addCell(totalStats.getMerge() == null ? null : totalStats.getMerge().getTotal());
+ table.addCell(primaryStats.getMerge() == null ? null : primaryStats.getMerge().getTotal());
- table.addCell(indexStats == null ? null : indexStats.getTotal().getMerge().getTotalNumDocs());
- table.addCell(indexStats == null ? null : indexStats.getPrimaries().getMerge().getTotalNumDocs());
+ table.addCell(totalStats.getMerge() == null ? null : totalStats.getMerge().getTotalNumDocs());
+ table.addCell(primaryStats.getMerge() == null ? null : primaryStats.getMerge().getTotalNumDocs());
- table.addCell(indexStats == null ? null : indexStats.getTotal().getMerge().getTotalSize());
- table.addCell(indexStats == null ? null : indexStats.getPrimaries().getMerge().getTotalSize());
+ table.addCell(totalStats.getMerge() == null ? null : totalStats.getMerge().getTotalSize());
+ table.addCell(primaryStats.getMerge() == null ? null : primaryStats.getMerge().getTotalSize());
- table.addCell(indexStats == null ? null : indexStats.getTotal().getMerge().getTotalTime());
- table.addCell(indexStats == null ? null : indexStats.getPrimaries().getMerge().getTotalTime());
+ table.addCell(totalStats.getMerge() == null ? null : totalStats.getMerge().getTotalTime());
+ table.addCell(primaryStats.getMerge() == null ? null : primaryStats.getMerge().getTotalTime());
- table.addCell(indexStats == null ? null : indexStats.getTotal().getRefresh().getTotal());
- table.addCell(indexStats == null ? null : indexStats.getPrimaries().getRefresh().getTotal());
+ table.addCell(totalStats.getRefresh() == null ? null : totalStats.getRefresh().getTotal());
+ table.addCell(primaryStats.getRefresh() == null ? null : primaryStats.getRefresh().getTotal());
- table.addCell(indexStats == null ? null : indexStats.getTotal().getRefresh().getTotalTime());
- table.addCell(indexStats == null ? null : indexStats.getPrimaries().getRefresh().getTotalTime());
+ table.addCell(totalStats.getRefresh() == null ? null : totalStats.getRefresh().getTotalTime());
+ table.addCell(primaryStats.getRefresh() == null ? null : primaryStats.getRefresh().getTotalTime());
- table.addCell(indexStats == null ? null : indexStats.getTotal().getRefresh().getListeners());
- table.addCell(indexStats == null ? null : indexStats.getPrimaries().getRefresh().getListeners());
+ table.addCell(totalStats.getRefresh() == null ? null : totalStats.getRefresh().getListeners());
+ table.addCell(primaryStats.getRefresh() == null ? null : primaryStats.getRefresh().getListeners());
- table.addCell(indexStats == null ? null : indexStats.getTotal().getSearch().getTotal().getFetchCurrent());
- table.addCell(indexStats == null ? null : indexStats.getPrimaries().getSearch().getTotal().getFetchCurrent());
+ table.addCell(totalStats.getSearch() == null ? null : totalStats.getSearch().getTotal().getFetchCurrent());
+ table.addCell(primaryStats.getSearch() == null ? null : primaryStats.getSearch().getTotal().getFetchCurrent());
- table.addCell(indexStats == null ? null : indexStats.getTotal().getSearch().getTotal().getFetchTime());
- table.addCell(indexStats == null ? null : indexStats.getPrimaries().getSearch().getTotal().getFetchTime());
+ table.addCell(totalStats.getSearch() == null ? null : totalStats.getSearch().getTotal().getFetchTime());
+ table.addCell(primaryStats.getSearch() == null ? null : primaryStats.getSearch().getTotal().getFetchTime());
- table.addCell(indexStats == null ? null : indexStats.getTotal().getSearch().getTotal().getFetchCount());
- table.addCell(indexStats == null ? null : indexStats.getPrimaries().getSearch().getTotal().getFetchCount());
+ table.addCell(totalStats.getSearch() == null ? null : totalStats.getSearch().getTotal().getFetchCount());
+ table.addCell(primaryStats.getSearch() == null ? null : primaryStats.getSearch().getTotal().getFetchCount());
- table.addCell(indexStats == null ? null : indexStats.getTotal().getSearch().getOpenContexts());
- table.addCell(indexStats == null ? null : indexStats.getPrimaries().getSearch().getOpenContexts());
+ table.addCell(totalStats.getSearch() == null ? null : totalStats.getSearch().getOpenContexts());
+ table.addCell(primaryStats.getSearch() == null ? null : primaryStats.getSearch().getOpenContexts());
- table.addCell(indexStats == null ? null : indexStats.getTotal().getSearch().getTotal().getQueryCurrent());
- table.addCell(indexStats == null ? null : indexStats.getPrimaries().getSearch().getTotal().getQueryCurrent());
+ table.addCell(totalStats.getSearch() == null ? null : totalStats.getSearch().getTotal().getQueryCurrent());
+ table.addCell(primaryStats.getSearch() == null ? null : primaryStats.getSearch().getTotal().getQueryCurrent());
- table.addCell(indexStats == null ? null : indexStats.getTotal().getSearch().getTotal().getQueryTime());
- table.addCell(indexStats == null ? null : indexStats.getPrimaries().getSearch().getTotal().getQueryTime());
+ table.addCell(totalStats.getSearch() == null ? null : totalStats.getSearch().getTotal().getQueryTime());
+ table.addCell(primaryStats.getSearch() == null ? null : primaryStats.getSearch().getTotal().getQueryTime());
- table.addCell(indexStats == null ? null : indexStats.getTotal().getSearch().getTotal().getQueryCount());
- table.addCell(indexStats == null ? null : indexStats.getPrimaries().getSearch().getTotal().getQueryCount());
+ table.addCell(totalStats.getSearch() == null ? null : totalStats.getSearch().getTotal().getQueryCount());
+ table.addCell(primaryStats.getSearch() == null ? null : primaryStats.getSearch().getTotal().getQueryCount());
- table.addCell(indexStats == null ? null : indexStats.getTotal().getSearch().getTotal().getScrollCurrent());
- table.addCell(indexStats == null ? null : indexStats.getPrimaries().getSearch().getTotal().getScrollCurrent());
+ table.addCell(totalStats.getSearch() == null ? null : totalStats.getSearch().getTotal().getScrollCurrent());
+ table.addCell(primaryStats.getSearch() == null ? null : primaryStats.getSearch().getTotal().getScrollCurrent());
- table.addCell(indexStats == null ? null : indexStats.getTotal().getSearch().getTotal().getScrollTime());
- table.addCell(indexStats == null ? null : indexStats.getPrimaries().getSearch().getTotal().getScrollTime());
+ table.addCell(totalStats.getSearch() == null ? null : totalStats.getSearch().getTotal().getScrollTime());
+ table.addCell(primaryStats.getSearch() == null ? null : primaryStats.getSearch().getTotal().getScrollTime());
- table.addCell(indexStats == null ? null : indexStats.getTotal().getSearch().getTotal().getScrollCount());
- table.addCell(indexStats == null ? null : indexStats.getPrimaries().getSearch().getTotal().getScrollCount());
+ table.addCell(totalStats.getSearch() == null ? null : totalStats.getSearch().getTotal().getScrollCount());
+ table.addCell(primaryStats.getSearch() == null ? null : primaryStats.getSearch().getTotal().getScrollCount());
- table.addCell(indexStats == null ? null : indexStats.getTotal().getSegments().getCount());
- table.addCell(indexStats == null ? null : indexStats.getPrimaries().getSegments().getCount());
+ table.addCell(totalStats.getSegments() == null ? null : totalStats.getSegments().getCount());
+ table.addCell(primaryStats.getSegments() == null ? null : primaryStats.getSegments().getCount());
- table.addCell(indexStats == null ? null : indexStats.getTotal().getSegments().getMemory());
- table.addCell(indexStats == null ? null : indexStats.getPrimaries().getSegments().getMemory());
+ table.addCell(totalStats.getSegments() == null ? null : totalStats.getSegments().getMemory());
+ table.addCell(primaryStats.getSegments() == null ? null : primaryStats.getSegments().getMemory());
- table.addCell(indexStats == null ? null : indexStats.getTotal().getSegments().getIndexWriterMemory());
- table.addCell(indexStats == null ? null : indexStats.getPrimaries().getSegments().getIndexWriterMemory());
+ table.addCell(totalStats.getSegments() == null ? null : totalStats.getSegments().getIndexWriterMemory());
+ table.addCell(primaryStats.getSegments() == null ? null : primaryStats.getSegments().getIndexWriterMemory());
- table.addCell(indexStats == null ? null : indexStats.getTotal().getSegments().getVersionMapMemory());
- table.addCell(indexStats == null ? null : indexStats.getPrimaries().getSegments().getVersionMapMemory());
+ table.addCell(totalStats.getSegments() == null ? null : totalStats.getSegments().getVersionMapMemory());
+ table.addCell(primaryStats.getSegments() == null ? null : primaryStats.getSegments().getVersionMapMemory());
- table.addCell(indexStats == null ? null : indexStats.getTotal().getSegments().getBitsetMemory());
- table.addCell(indexStats == null ? null : indexStats.getPrimaries().getSegments().getBitsetMemory());
+ table.addCell(totalStats.getSegments() == null ? null : totalStats.getSegments().getBitsetMemory());
+ table.addCell(primaryStats.getSegments() == null ? null : primaryStats.getSegments().getBitsetMemory());
- table.addCell(indexStats == null ? null : indexStats.getTotal().getWarmer().current());
- table.addCell(indexStats == null ? null : indexStats.getPrimaries().getWarmer().current());
+ table.addCell(totalStats.getWarmer() == null ? null : totalStats.getWarmer().current());
+ table.addCell(primaryStats.getWarmer() == null ? null : primaryStats.getWarmer().current());
- table.addCell(indexStats == null ? null : indexStats.getTotal().getWarmer().total());
- table.addCell(indexStats == null ? null : indexStats.getPrimaries().getWarmer().total());
+ table.addCell(totalStats.getWarmer() == null ? null : totalStats.getWarmer().total());
+ table.addCell(primaryStats.getWarmer() == null ? null : primaryStats.getWarmer().total());
- table.addCell(indexStats == null ? null : indexStats.getTotal().getWarmer().totalTime());
- table.addCell(indexStats == null ? null : indexStats.getPrimaries().getWarmer().totalTime());
+ table.addCell(totalStats.getWarmer() == null ? null : totalStats.getWarmer().totalTime());
+ table.addCell(primaryStats.getWarmer() == null ? null : primaryStats.getWarmer().totalTime());
- table.addCell(indexStats == null ? null : indexStats.getTotal().getSearch().getTotal().getSuggestCurrent());
- table.addCell(indexStats == null ? null : indexStats.getPrimaries().getSearch().getTotal().getSuggestCurrent());
+ table.addCell(totalStats.getSearch() == null ? null : totalStats.getSearch().getTotal().getSuggestCurrent());
+ table.addCell(primaryStats.getSearch() == null ? null : primaryStats.getSearch().getTotal().getSuggestCurrent());
- table.addCell(indexStats == null ? null : indexStats.getTotal().getSearch().getTotal().getSuggestTime());
- table.addCell(indexStats == null ? null : indexStats.getPrimaries().getSearch().getTotal().getSuggestTime());
+ table.addCell(totalStats.getSearch() == null ? null : totalStats.getSearch().getTotal().getSuggestTime());
+ table.addCell(primaryStats.getSearch() == null ? null : primaryStats.getSearch().getTotal().getSuggestTime());
- table.addCell(indexStats == null ? null : indexStats.getTotal().getSearch().getTotal().getSuggestCount());
- table.addCell(indexStats == null ? null : indexStats.getPrimaries().getSearch().getTotal().getSuggestCount());
+ table.addCell(totalStats.getSearch() == null ? null : totalStats.getSearch().getTotal().getSuggestCount());
+ table.addCell(primaryStats.getSearch() == null ? null : primaryStats.getSearch().getTotal().getSuggestCount());
table.addCell(indexStats == null ? null : indexStats.getTotal().getTotalMemory());
table.addCell(indexStats == null ? null : indexStats.getPrimaries().getTotalMemory());
diff --git a/core/src/main/java/org/elasticsearch/search/MultiValueMode.java b/core/src/main/java/org/elasticsearch/search/MultiValueMode.java
index 2d85c379f54fd..b2ee4b8ffbd5f 100644
--- a/core/src/main/java/org/elasticsearch/search/MultiValueMode.java
+++ b/core/src/main/java/org/elasticsearch/search/MultiValueMode.java
@@ -416,11 +416,11 @@ public NumericDocValues select(final SortedNumericDocValues values, final long m
if (singleton != null) {
return new AbstractNumericDocValues() {
- private boolean hasValue;
+ private long value;
@Override
public boolean advanceExact(int target) throws IOException {
- hasValue = singleton.advanceExact(target);
+ this.value = singleton.advanceExact(target) ? singleton.longValue() : missingValue;
return true;
}
@@ -431,17 +431,17 @@ public int docID() {
@Override
public long longValue() throws IOException {
- return hasValue ? singleton.longValue() : missingValue;
+ return this.value;
}
};
} else {
return new AbstractNumericDocValues() {
- private boolean hasValue;
+ private long value;
@Override
public boolean advanceExact(int target) throws IOException {
- hasValue = values.advanceExact(target);
+ this.value = values.advanceExact(target) ? pick(values) : missingValue;
return true;
}
@@ -452,7 +452,7 @@ public int docID() {
@Override
public long longValue() throws IOException {
- return hasValue ? pick(values) : missingValue;
+ return value;
}
};
}
@@ -533,35 +533,33 @@ public NumericDoubleValues select(final SortedNumericDoubleValues values, final
final NumericDoubleValues singleton = FieldData.unwrapSingleton(values);
if (singleton != null) {
return new NumericDoubleValues() {
-
- private boolean hasValue;
+ private double value;
@Override
public boolean advanceExact(int doc) throws IOException {
- hasValue = singleton.advanceExact(doc);
+ this.value = singleton.advanceExact(doc) ? singleton.doubleValue() : missingValue;
return true;
}
@Override
public double doubleValue() throws IOException {
- return hasValue ? singleton.doubleValue() : missingValue;
+ return this.value;
}
-
};
} else {
return new NumericDoubleValues() {
- private boolean hasValue;
+ private double value;
@Override
public boolean advanceExact(int target) throws IOException {
- hasValue = values.advanceExact(target);
+ value = values.advanceExact(target) ? pick(values) : missingValue;
return true;
}
@Override
public double doubleValue() throws IOException {
- return hasValue ? pick(values) : missingValue;
+ return this.value;
}
};
}
@@ -638,17 +636,17 @@ public BinaryDocValues select(final SortedBinaryDocValues values, final BytesRef
}
return new AbstractBinaryDocValues() {
- private boolean hasValue;
+ private BytesRef value;
@Override
public boolean advanceExact(int target) throws IOException {
- hasValue = singleton.advanceExact(target);
+ this.value = singleton.advanceExact(target) ? singleton.binaryValue() : missingValue;
return true;
}
@Override
public BytesRef binaryValue() throws IOException {
- return hasValue ? singleton.binaryValue() : missingValue;
+ return this.value;
}
};
} else {
diff --git a/core/src/main/java/org/elasticsearch/transport/TcpTransport.java b/core/src/main/java/org/elasticsearch/transport/TcpTransport.java
index fb312dc6e498c..b673b02496e35 100644
--- a/core/src/main/java/org/elasticsearch/transport/TcpTransport.java
+++ b/core/src/main/java/org/elasticsearch/transport/TcpTransport.java
@@ -599,6 +599,9 @@ public final NodeChannels openConnection(DiscoveryNode node, ConnectionProfile c
nodeChannels = new NodeChannels(nodeChannels, version); // clone the channels - we now have the correct version
transportService.onConnectionOpened(nodeChannels);
connectionRef.set(nodeChannels);
+ if (Arrays.stream(nodeChannels.channels).allMatch(this::isOpen) == false) {
+ throw new ConnectTransportException(node, "a channel closed while connecting");
+ }
success = true;
return nodeChannels;
} catch (ConnectTransportException e) {
@@ -1034,7 +1037,18 @@ protected void innerOnFailure(Exception e) {
*/
protected abstract void sendMessage(Channel channel, BytesReference reference, ActionListener<Void> listener);
- protected abstract NodeChannels connectToChannels(DiscoveryNode node, ConnectionProfile connectionProfile,
+ /**
+ * Connect to the node with channels as defined by the specified connection profile. Implementations must invoke the specified channel
+ * close callback when a channel is closed.
+ *
+ * @param node the node to connect to
+ * @param connectionProfile the connection profile
+ * @param onChannelClose callback to invoke when a channel is closed
+ * @return the channels
+ * @throws IOException if an I/O exception occurs while opening channels
+ */
+ protected abstract NodeChannels connectToChannels(DiscoveryNode node,
+ ConnectionProfile connectionProfile,
Consumer<Channel> onChannelClose) throws IOException;
/**
diff --git a/core/src/main/java/org/elasticsearch/transport/TransportService.java b/core/src/main/java/org/elasticsearch/transport/TransportService.java
index 00564e37bd616..29e08a7d84b47 100644
--- a/core/src/main/java/org/elasticsearch/transport/TransportService.java
+++ b/core/src/main/java/org/elasticsearch/transport/TransportService.java
@@ -61,6 +61,7 @@
import java.util.Objects;
import java.util.concurrent.CopyOnWriteArrayList;
import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.ExecutorService;
import java.util.concurrent.ScheduledFuture;
import java.util.function.Function;
import java.util.function.Predicate;
@@ -185,6 +186,15 @@ protected TaskManager createTaskManager() {
return new TaskManager(settings);
}
+ /**
+ * The executor service for this transport service.
+ *
+ * @return the executor service
+ */
+ protected ExecutorService getExecutorService() {
+ return threadPool.generic();
+ }
+
void setTracerLogInclude(List<String> tracerLogInclude) {
this.tracerLogInclude = tracerLogInclude.toArray(Strings.EMPTY_ARRAY);
}
@@ -230,7 +240,7 @@ protected void doStop() {
if (holderToNotify != null) {
// callback that an exception happened, but on a different thread since we don't
// want handlers to worry about stack overflows
- threadPool.generic().execute(new AbstractRunnable() {
+ getExecutorService().execute(new AbstractRunnable() {
@Override
public void onRejection(Exception e) {
// if we get rejected during node shutdown we don't wanna bubble it up
@@ -839,7 +849,7 @@ void onNodeConnected(final DiscoveryNode node) {
// connectToNode(); connection is completed successfully
// addConnectionListener(); this listener shouldn't be called
final Stream<TransportConnectionListener> listenersToNotify = TransportService.this.connectionListeners.stream();
- threadPool.generic().execute(() -> listenersToNotify.forEach(listener -> listener.onNodeConnected(node)));
+ getExecutorService().execute(() -> listenersToNotify.forEach(listener -> listener.onNodeConnected(node)));
}
void onConnectionOpened(Transport.Connection connection) {
@@ -847,12 +857,12 @@ void onConnectionOpened(Transport.Connection connection) {
// connectToNode(); connection is completed successfully
// addConnectionListener(); this listener shouldn't be called
final Stream<TransportConnectionListener> listenersToNotify = TransportService.this.connectionListeners.stream();
- threadPool.generic().execute(() -> listenersToNotify.forEach(listener -> listener.onConnectionOpened(connection)));
+ getExecutorService().execute(() -> listenersToNotify.forEach(listener -> listener.onConnectionOpened(connection)));
}
public void onNodeDisconnected(final DiscoveryNode node) {
try {
- threadPool.generic().execute( () -> {
+ getExecutorService().execute( () -> {
for (final TransportConnectionListener connectionListener : connectionListeners) {
connectionListener.onNodeDisconnected(node);
}
@@ -871,7 +881,7 @@ void onConnectionClosed(Transport.Connection connection) {
if (holderToNotify != null) {
// callback that an exception happened, but on a different thread since we don't
// want handlers to worry about stack overflows
- threadPool.generic().execute(() -> holderToNotify.handler().handleException(new NodeDisconnectedException(
+ getExecutorService().execute(() -> holderToNotify.handler().handleException(new NodeDisconnectedException(
connection.getNode(), holderToNotify.action())));
}
}
diff --git a/core/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java b/core/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java
index 54c7ba3aab084..2cd8281282ca6 100644
--- a/core/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java
+++ b/core/src/test/java/org/elasticsearch/action/admin/indices/TransportAnalyzeActionTests.java
@@ -41,7 +41,6 @@
import org.elasticsearch.indices.analysis.AnalysisModule.AnalysisProvider;
import org.elasticsearch.indices.analysis.AnalysisModuleTests.AppendCharFilter;
import org.elasticsearch.plugins.AnalysisPlugin;
-import static org.elasticsearch.plugins.AnalysisPlugin.requriesAnalysisSettings;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.IndexSettingsModule;
@@ -74,7 +73,7 @@ public void setUp() throws Exception {
.put("index.analysis.analyzer.custom_analyzer.tokenizer", "standard")
.put("index.analysis.analyzer.custom_analyzer.filter", "mock")
.put("index.analysis.normalizer.my_normalizer.type", "custom")
- .putArray("index.analysis.normalizer.my_normalizer.filter", "lowercase").build();
+ .putList("index.analysis.normalizer.my_normalizer.filter", "lowercase").build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings);
environment = new Environment(settings);
AnalysisPlugin plugin = new AnalysisPlugin() {
diff --git a/core/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java b/core/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java
index 15a2f9e74a461..bd76557f9a86f 100644
--- a/core/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java
+++ b/core/src/test/java/org/elasticsearch/action/termvectors/AbstractTermVectorsTestCase.java
@@ -210,7 +210,7 @@ protected void createIndexBasedOnFieldSettings(String index, String alias, TestF
Settings.Builder settings = Settings.builder()
.put(indexSettings())
.put("index.analysis.analyzer.tv_test.tokenizer", "standard")
- .putArray("index.analysis.analyzer.tv_test.filter", "lowercase");
+ .putList("index.analysis.analyzer.tv_test.filter", "lowercase");
assertAcked(prepareCreate(index).addMapping("type1", mappingBuilder).setSettings(settings).addAlias(new Alias(alias)));
}
diff --git a/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsIT.java b/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsIT.java
index bbd7d5501783c..520c881aa7e62 100644
--- a/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsIT.java
+++ b/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsIT.java
@@ -189,7 +189,7 @@ public void testSimpleTermVectors() throws IOException {
.setSettings(Settings.builder()
.put(indexSettings())
.put("index.analysis.analyzer.tv_test.tokenizer", "whitespace")
- .putArray("index.analysis.analyzer.tv_test.filter", "lowercase")));
+ .putList("index.analysis.analyzer.tv_test.filter", "lowercase")));
for (int i = 0; i < 10; i++) {
client().prepareIndex("test", "type1", Integer.toString(i))
.setSource(jsonBuilder().startObject().field("field", "the quick brown fox jumps over the lazy dog")
@@ -261,7 +261,7 @@ public void testRandomSingleTermVectors() throws IOException {
assertAcked(prepareCreate("test").addMapping("type1", mapping)
.setSettings(Settings.builder()
.put("index.analysis.analyzer.tv_test.tokenizer", "whitespace")
- .putArray("index.analysis.analyzer.tv_test.filter", "lowercase")));
+ .putList("index.analysis.analyzer.tv_test.filter", "lowercase")));
for (int i = 0; i < 10; i++) {
client().prepareIndex("test", "type1", Integer.toString(i))
.setSource(jsonBuilder().startObject().field("field", "the quick brown fox jumps over the lazy dog")
@@ -395,7 +395,7 @@ public void testSimpleTermVectorsWithGenerate() throws IOException {
.setSettings(Settings.builder()
.put(indexSettings())
.put("index.analysis.analyzer.tv_test.tokenizer", "whitespace")
- .putArray("index.analysis.analyzer.tv_test.filter", "lowercase")));
+ .putList("index.analysis.analyzer.tv_test.filter", "lowercase")));
ensureGreen();
diff --git a/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsTests.java b/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsTests.java
index e4d55da9f92b1..5e81949402055 100644
--- a/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsTests.java
+++ b/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsTests.java
@@ -152,7 +152,7 @@ public void testRandomPayloadWithDelimitedPayloadTokenFilter() throws IOExceptio
.field("analyzer", "payload_test").endObject().endObject().endObject().endObject();
Settings setting = Settings.builder()
.put("index.analysis.analyzer.payload_test.tokenizer", "whitespace")
- .putArray("index.analysis.analyzer.payload_test.filter", "my_delimited_payload_filter")
+ .putList("index.analysis.analyzer.payload_test.filter", "my_delimited_payload_filter")
.put("index.analysis.filter.my_delimited_payload_filter.delimiter", delimiter)
.put("index.analysis.filter.my_delimited_payload_filter.encoding", encodingString)
.put("index.analysis.filter.my_delimited_payload_filter.type", "mock_payload_filter").build();
diff --git a/core/src/test/java/org/elasticsearch/cluster/routing/UnassignedInfoTests.java b/core/src/test/java/org/elasticsearch/cluster/routing/UnassignedInfoTests.java
index b7adc66a55705..e6626b5678718 100644
--- a/core/src/test/java/org/elasticsearch/cluster/routing/UnassignedInfoTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/routing/UnassignedInfoTests.java
@@ -356,4 +356,11 @@ public void testAllocationStatusSerialization() throws IOException {
assertThat(readStatus, equalTo(allocationStatus));
}
}
+
+ public void testNegativeTimeoutDeprecated() {
+ Settings settings = settings(Version.CURRENT).put(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.getKey(), "-1s").build();
+ assertThat(UnassignedInfo.INDEX_DELAYED_NODE_LEFT_TIMEOUT_SETTING.get(settings).seconds(), equalTo(-1L));
+ assertWarnings("Negative values for index.unassigned.node_left.delayed_timeout [-1s]" +
+ " are deprecated and should now be set to \"0\".");
+ }
}
diff --git a/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java b/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java
index 7b1ac5116f2dd..cdcaf4a1b9c20 100644
--- a/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java
+++ b/core/src/test/java/org/elasticsearch/cluster/settings/ClusterSettingsIT.java
@@ -34,6 +34,8 @@
import org.elasticsearch.test.ESIntegTestCase;
import org.junit.After;
+import java.util.Arrays;
+
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertBlocked;
import static org.hamcrest.Matchers.containsString;
@@ -242,11 +244,11 @@ public void testClusterSettingsUpdateResponse() {
public void testCanUpdateTracerSettings() {
ClusterUpdateSettingsResponse clusterUpdateSettingsResponse = client().admin().cluster()
.prepareUpdateSettings()
- .setTransientSettings(Settings.builder().putArray("transport.tracer.include", "internal:index/shard/recovery/*",
+ .setTransientSettings(Settings.builder().putList("transport.tracer.include", "internal:index/shard/recovery/*",
"internal:gateway/local*"))
.get();
- assertArrayEquals(clusterUpdateSettingsResponse.getTransientSettings().getAsArray("transport.tracer.include"), new String[] {"internal:index/shard/recovery/*",
- "internal:gateway/local*"});
+ assertEquals(clusterUpdateSettingsResponse.getTransientSettings().getAsList("transport.tracer.include"),
+ Arrays.asList("internal:index/shard/recovery/*", "internal:gateway/local*"));
}
public void testUpdateDiscoveryPublishTimeout() {
diff --git a/core/src/test/java/org/elasticsearch/cluster/structure/RoutingIteratorTests.java b/core/src/test/java/org/elasticsearch/cluster/structure/RoutingIteratorTests.java
index 172bcd6bd558b..05d37365b5c9c 100644
--- a/core/src/test/java/org/elasticsearch/cluster/structure/RoutingIteratorTests.java
+++ b/core/src/test/java/org/elasticsearch/cluster/structure/RoutingIteratorTests.java
@@ -444,6 +444,7 @@ public void testReplicaShardPreferenceIters() throws Exception {
// When replicas haven't initialized, it comes back with the primary first, then initializing replicas
GroupShardsIterator shardIterators = operationRouting.searchShards(clusterState, new String[]{"test"}, null, "_replica_first");
+ assertWarnings("[_replica_first] has been deprecated in 6.1+, and will be removed in 7.0; use [_only_nodes] or [_prefer_nodes]");
assertThat(shardIterators.size(), equalTo(2)); // two potential shards
ShardIterator iter = shardIterators.iterator().next();
assertThat(iter.size(), equalTo(3)); // three potential candidates for the shard
@@ -463,10 +464,8 @@ public void testReplicaShardPreferenceIters() throws Exception {
clusterState = strategy.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING));
- clusterState = strategy.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING));
-
-
shardIterators = operationRouting.searchShards(clusterState, new String[]{"test"}, null, "_replica");
+ assertWarnings("[_replica] has been deprecated in 6.1+, and will be removed in 7.0; use [_only_nodes] or [_prefer_nodes]");
assertThat(shardIterators.size(), equalTo(2)); // two potential shards
iter = shardIterators.iterator().next();
assertThat(iter.size(), equalTo(2)); // two potential replicas for the shard
@@ -479,6 +478,7 @@ public void testReplicaShardPreferenceIters() throws Exception {
assertFalse(routing.primary());
shardIterators = operationRouting.searchShards(clusterState, new String[]{"test"}, null, "_replica_first");
+ assertWarnings("[_replica_first] has been deprecated in 6.1+, and will be removed in 7.0; use [_only_nodes] or [_prefer_nodes]");
assertThat(shardIterators.size(), equalTo(2)); // two potential shards
iter = shardIterators.iterator().next();
assertThat(iter.size(), equalTo(3)); // three potential candidates for the shard
@@ -495,4 +495,45 @@ public void testReplicaShardPreferenceIters() throws Exception {
assertTrue(routing.primary());
}
+ public void testDeprecatedPreferences() throws Exception {
+ AllocationService strategy = createAllocationService(Settings.builder()
+ .put("cluster.routing.allocation.node_concurrent_recoveries", 10)
+ .build());
+
+ OperationRouting operationRouting = new OperationRouting(Settings.EMPTY, new ClusterSettings(Settings.EMPTY,
+ ClusterSettings.BUILT_IN_CLUSTER_SETTINGS));
+
+ MetaData metaData = MetaData.builder()
+ .put(IndexMetaData.builder("test").settings(settings(Version.CURRENT)).numberOfShards(2).numberOfReplicas(2))
+ .build();
+
+ RoutingTable routingTable = RoutingTable.builder()
+ .addAsNew(metaData.index("test"))
+ .build();
+
+ ClusterState clusterState = ClusterState.builder(ClusterName.CLUSTER_NAME_SETTING.getDefault(Settings.EMPTY))
+ .metaData(metaData)
+ .routingTable(routingTable)
+ .build();
+
+ clusterState = ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder()
+ .add(newNode("node1"))
+ .add(newNode("node2"))
+ .localNodeId("node1")
+ ).build();
+
+ clusterState = strategy.applyStartedShards(clusterState, clusterState.getRoutingNodes().shardsWithState(INITIALIZING));
+
+ operationRouting.searchShards(clusterState, new String[]{"test"}, null, "_primary");
+ assertWarnings("[_primary] has been deprecated in 6.1+, and will be removed in 7.0; use [_only_nodes] or [_prefer_nodes]");
+
+ operationRouting.searchShards(clusterState, new String[]{"test"}, null, "_primary_first");
+ assertWarnings("[_primary_first] has been deprecated in 6.1+, and will be removed in 7.0; use [_only_nodes] or [_prefer_nodes]");
+
+ operationRouting.searchShards(clusterState, new String[]{"test"}, null, "_replica");
+ assertWarnings("[_replica] has been deprecated in 6.1+, and will be removed in 7.0; use [_only_nodes] or [_prefer_nodes]");
+
+ operationRouting.searchShards(clusterState, new String[]{"test"}, null, "_replica_first");
+ assertWarnings("[_replica_first] has been deprecated in 6.1+, and will be removed in 7.0; use [_only_nodes] or [_prefer_nodes]");
+ }
}
diff --git a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java
index 24f9550a78de6..bd4ac25a8747b 100644
--- a/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java
+++ b/core/src/test/java/org/elasticsearch/common/settings/ScopedSettingsTests.java
@@ -179,8 +179,8 @@ public void testAddConsumerAffix() {
service.applySettings(Settings.builder()
.put("foo.test.bar", 2)
.put("foo.test_1.bar", 7)
- .putArray("foo.test_list.list", "16", "17")
- .putArray("foo.test_list_1.list", "18", "19", "20")
+ .putList("foo.test_list.list", "16", "17")
+ .putList("foo.test_list_1.list", "18", "19", "20")
.build());
assertEquals(2, intResults.get("test").intValue());
assertEquals(7, intResults.get("test_1").intValue());
@@ -195,7 +195,7 @@ public void testAddConsumerAffix() {
service.applySettings(Settings.builder()
.put("foo.test.bar", 2)
.put("foo.test_1.bar", 8)
- .putArray("foo.test_list.list", "16", "17")
+ .putList("foo.test_list.list", "16", "17")
.putNull("foo.test_list_1.list")
.build());
assertNull("test wasn't changed", intResults.get("test"));
@@ -231,8 +231,8 @@ public void testAddConsumerAffixMap() {
service.applySettings(Settings.builder()
.put("foo.test.bar", 2)
.put("foo.test_1.bar", 7)
- .putArray("foo.test_list.list", "16", "17")
- .putArray("foo.test_list_1.list", "18", "19", "20")
+ .putList("foo.test_list.list", "16", "17")
+ .putList("foo.test_list_1.list", "18", "19", "20")
.build());
assertEquals(2, intResults.get("test").intValue());
assertEquals(7, intResults.get("test_1").intValue());
@@ -247,7 +247,7 @@ public void testAddConsumerAffixMap() {
service.applySettings(Settings.builder()
.put("foo.test.bar", 2)
.put("foo.test_1.bar", 8)
- .putArray("foo.test_list.list", "16", "17")
+ .putList("foo.test_list.list", "16", "17")
.putNull("foo.test_list_1.list")
.build());
assertNull("test wasn't changed", intResults.get("test"));
@@ -470,14 +470,14 @@ public void testDiff() throws IOException {
Settings diff = settings.diff(Settings.builder().put("foo.bar", 5).build(), Settings.EMPTY);
assertEquals(2, diff.size());
assertThat(diff.getAsInt("foo.bar.baz", null), equalTo(1));
- assertArrayEquals(diff.getAsArray("foo.bar.quux", null), new String[] {"a", "b", "c"});
+ assertEquals(diff.getAsList("foo.bar.quux", null), Arrays.asList("a", "b", "c"));
diff = settings.diff(
Settings.builder().put("foo.bar", 5).build(),
- Settings.builder().put("foo.bar.baz", 17).putArray("foo.bar.quux", "d", "e", "f").build());
+ Settings.builder().put("foo.bar.baz", 17).putList("foo.bar.quux", "d", "e", "f").build());
assertEquals(2, diff.size());
assertThat(diff.getAsInt("foo.bar.baz", null), equalTo(17));
- assertArrayEquals(diff.getAsArray("foo.bar.quux", null), new String[] {"d", "e", "f"});
+ assertEquals(diff.getAsList("foo.bar.quux", null), Arrays.asList("d", "e", "f"));
diff = settings.diff(
Settings.builder().put("some.group.foo", 5).build(),
@@ -485,7 +485,7 @@ public void testDiff() throws IOException {
assertEquals(4, diff.size());
assertThat(diff.getAsInt("some.group.foobar", null), equalTo(17));
assertNull(diff.get("some.group.foo"));
- assertArrayEquals(diff.getAsArray("foo.bar.quux", null), new String[] {"a", "b", "c"});
+ assertEquals(diff.getAsList("foo.bar.quux", null), Arrays.asList("a", "b", "c"));
assertThat(diff.getAsInt("foo.bar.baz", null), equalTo(1));
assertThat(diff.getAsInt("foo.bar", null), equalTo(1));
@@ -495,7 +495,7 @@ public void testDiff() throws IOException {
assertEquals(4, diff.size());
assertThat(diff.getAsInt("some.prefix.foobar.somekey", null), equalTo(17));
assertNull(diff.get("some.prefix.foo.somekey"));
- assertArrayEquals(diff.getAsArray("foo.bar.quux", null), new String[] {"a", "b", "c"});
+ assertEquals(diff.getAsList("foo.bar.quux", null), Arrays.asList("a", "b", "c"));
assertThat(diff.getAsInt("foo.bar.baz", null), equalTo(1));
assertThat(diff.getAsInt("foo.bar", null), equalTo(1));
}
@@ -513,14 +513,14 @@ public void testDiffWithAffixAndComplexMatcher() {
Settings diff = settings.diff(Settings.builder().put("foo.bar", 5).build(), Settings.EMPTY);
assertEquals(1, diff.size());
assertThat(diff.getAsInt("foo.bar.baz", null), equalTo(1));
- assertNull(diff.getAsArray("foo.bar.quux", null)); // affix settings don't know their concrete keys
+ assertNull(diff.getAsList("foo.bar.quux", null)); // affix settings don't know their concrete keys
diff = settings.diff(
Settings.builder().put("foo.bar", 5).build(),
- Settings.builder().put("foo.bar.baz", 17).putArray("foo.bar.quux", "d", "e", "f").build());
+ Settings.builder().put("foo.bar.baz", 17).putList("foo.bar.quux", "d", "e", "f").build());
assertEquals(2, diff.size());
assertThat(diff.getAsInt("foo.bar.baz", null), equalTo(17));
- assertArrayEquals(diff.getAsArray("foo.bar.quux", null), new String[] {"d", "e", "f"});
+ assertEquals(diff.getAsList("foo.bar.quux", null), Arrays.asList("d", "e", "f"));
diff = settings.diff(
Settings.builder().put("some.group.foo", 5).build(),
@@ -528,7 +528,7 @@ public void testDiffWithAffixAndComplexMatcher() {
assertEquals(3, diff.size());
assertThat(diff.getAsInt("some.group.foobar", null), equalTo(17));
assertNull(diff.get("some.group.foo"));
- assertNull(diff.getAsArray("foo.bar.quux", null)); // affix settings don't know their concrete keys
+ assertNull(diff.getAsList("foo.bar.quux", null)); // affix settings don't know their concrete keys
assertThat(diff.getAsInt("foo.bar.baz", null), equalTo(1));
assertThat(diff.getAsInt("foo.bar", null), equalTo(1));
@@ -538,21 +538,21 @@ public void testDiffWithAffixAndComplexMatcher() {
assertEquals(3, diff.size());
assertThat(diff.getAsInt("some.prefix.foobar.somekey", null), equalTo(17));
assertNull(diff.get("some.prefix.foo.somekey"));
- assertNull(diff.getAsArray("foo.bar.quux", null)); // affix settings don't know their concrete keys
+ assertNull(diff.getAsList("foo.bar.quux", null)); // affix settings don't know their concrete keys
assertThat(diff.getAsInt("foo.bar.baz", null), equalTo(1));
assertThat(diff.getAsInt("foo.bar", null), equalTo(1));
diff = settings.diff(
Settings.builder().put("some.prefix.foo.somekey", 5).build(),
Settings.builder().put("some.prefix.foobar.somekey", 17).put("some.prefix.foo.somekey", 18)
- .putArray("foo.bar.quux", "x", "y", "z")
- .putArray("foo.baz.quux", "d", "e", "f")
+ .putList("foo.bar.quux", "x", "y", "z")
+ .putList("foo.baz.quux", "d", "e", "f")
.build());
assertEquals(5, diff.size());
assertThat(diff.getAsInt("some.prefix.foobar.somekey", null), equalTo(17));
assertNull(diff.get("some.prefix.foo.somekey"));
- assertArrayEquals(diff.getAsArray("foo.bar.quux", null), new String[] {"x", "y", "z"});
- assertArrayEquals(diff.getAsArray("foo.baz.quux", null), new String[] {"d", "e", "f"});
+ assertEquals(diff.getAsList("foo.bar.quux", null), Arrays.asList("x", "y", "z"));
+ assertEquals(diff.getAsList("foo.baz.quux", null), Arrays.asList("d", "e", "f"));
assertThat(diff.getAsInt("foo.bar.baz", null), equalTo(1));
assertThat(diff.getAsInt("foo.bar", null), equalTo(1));
}
@@ -562,7 +562,7 @@ public void testUpdateTracer() {
AtomicReference> ref = new AtomicReference<>();
settings.addSettingsUpdateConsumer(TransportService.TRACE_LOG_INCLUDE_SETTING, ref::set);
settings.applySettings(Settings.builder()
- .putArray("transport.tracer.include", "internal:index/shard/recovery/*", "internal:gateway/local*").build());
+ .putList("transport.tracer.include", "internal:index/shard/recovery/*", "internal:gateway/local*").build());
assertNotNull(ref.get().size());
assertEquals(ref.get().size(), 2);
assertTrue(ref.get().contains("internal:index/shard/recovery/*"));
diff --git a/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java b/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java
index 4dfedf519bd16..65d51e126c9f6 100644
--- a/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java
+++ b/core/src/test/java/org/elasticsearch/common/settings/SettingTests.java
@@ -441,7 +441,7 @@ public void testListSettings() {
assertEquals("foo,bar", value.get(0));
List input = Arrays.asList("test", "test1, test2", "test", ",,,,");
- Settings.Builder builder = Settings.builder().putArray("foo.bar", input.toArray(new String[0]));
+ Settings.Builder builder = Settings.builder().putList("foo.bar", input.toArray(new String[0]));
assertTrue(listSetting.exists(builder.build()));
value = listSetting.get(builder.build());
assertEquals(input.size(), value.size());
@@ -464,11 +464,11 @@ public void testListSettings() {
assertEquals(input.size(), ref.get().size());
assertArrayEquals(ref.get().toArray(new String[0]), input.toArray(new String[0]));
- settingUpdater.apply(Settings.builder().putArray("foo.bar", "123").build(), builder.build());
+ settingUpdater.apply(Settings.builder().putList("foo.bar", "123").build(), builder.build());
assertEquals(1, ref.get().size());
assertArrayEquals(ref.get().toArray(new String[0]), new String[] {"123"});
- settingUpdater.apply(Settings.builder().put("foo.bar", "1,2,3").build(), Settings.builder().putArray("foo.bar", "123").build());
+ settingUpdater.apply(Settings.builder().put("foo.bar", "1,2,3").build(), Settings.builder().putList("foo.bar", "123").build());
assertEquals(3, ref.get().size());
assertArrayEquals(ref.get().toArray(new String[0]), new String[] {"1", "2", "3"});
@@ -492,17 +492,17 @@ public void testListSettings() {
assertEquals(1, value.size());
assertEquals("foo,bar", value.get(0));
- value = settingWithFallback.get(Settings.builder().putArray("foo.bar", "1", "2").build());
+ value = settingWithFallback.get(Settings.builder().putList("foo.bar", "1", "2").build());
assertEquals(2, value.size());
assertEquals("1", value.get(0));
assertEquals("2", value.get(1));
- value = settingWithFallback.get(Settings.builder().putArray("foo.baz", "3", "4").build());
+ value = settingWithFallback.get(Settings.builder().putList("foo.baz", "3", "4").build());
assertEquals(2, value.size());
assertEquals("3", value.get(0));
assertEquals("4", value.get(1));
- value = settingWithFallback.get(Settings.builder().putArray("foo.baz", "3", "4").putArray("foo.bar", "1", "2").build());
+ value = settingWithFallback.get(Settings.builder().putList("foo.baz", "3", "4").putList("foo.bar", "1", "2").build());
assertEquals(2, value.size());
assertEquals("3", value.get(0));
assertEquals("4", value.get(1));
@@ -512,7 +512,7 @@ public void testListSettingAcceptsNumberSyntax() {
Setting> listSetting = Setting.listSetting("foo.bar", Arrays.asList("foo,bar"), (s) -> s.toString(),
Property.Dynamic, Property.NodeScope);
List input = Arrays.asList("test", "test1, test2", "test", ",,,,");
- Settings.Builder builder = Settings.builder().putArray("foo.bar", input.toArray(new String[0]));
+ Settings.Builder builder = Settings.builder().putList("foo.bar", input.toArray(new String[0]));
// try to parse this really annoying format
for (String key : builder.keys()) {
assertTrue("key: " + key + " doesn't match", listSetting.match(key));
@@ -601,11 +601,11 @@ public void testGetAllConcreteSettings() {
(key) -> Setting.listSetting(key, Collections.emptyList(), Function.identity(), Property.NodeScope));
Settings settings = Settings.builder()
- .putArray("foo.1.bar", "1", "2")
- .putArray("foo.2.bar", "3", "4", "5")
- .putArray("foo.bar", "6")
- .putArray("some.other", "6")
- .putArray("foo.3.bar", "6")
+ .putList("foo.1.bar", "1", "2")
+ .putList("foo.2.bar", "3", "4", "5")
+ .putList("foo.bar", "6")
+ .putList("some.other", "6")
+ .putList("foo.3.bar", "6")
.build();
Stream>> allConcreteSettings = listAffixSetting.getAllConcreteSettings(settings);
Map> collect = allConcreteSettings.collect(Collectors.toMap(Setting::getKey, (s) -> s.get(settings)));
diff --git a/core/src/test/java/org/elasticsearch/common/settings/SettingsTests.java b/core/src/test/java/org/elasticsearch/common/settings/SettingsTests.java
index 0edb0801382ea..d2f1ac9a3f7ce 100644
--- a/core/src/test/java/org/elasticsearch/common/settings/SettingsTests.java
+++ b/core/src/test/java/org/elasticsearch/common/settings/SettingsTests.java
@@ -38,12 +38,11 @@
import java.io.ByteArrayInputStream;
import java.io.IOException;
-import java.io.StringBufferInputStream;
-import java.io.StringReader;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
+import java.util.Arrays;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
@@ -52,7 +51,7 @@
import java.util.NoSuchElementException;
import java.util.Set;
-import static org.hamcrest.Matchers.arrayContaining;
+import static org.hamcrest.Matchers.contains;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.equalTo;
@@ -236,99 +235,99 @@ public void testNames() {
public void testThatArraysAreOverriddenCorrectly() throws IOException {
// overriding a single value with an array
Settings settings = Settings.builder()
- .put(Settings.builder().putArray("value", "1").build())
- .put(Settings.builder().putArray("value", "2", "3").build())
+ .put(Settings.builder().putList("value", "1").build())
+ .put(Settings.builder().putList("value", "2", "3").build())
.build();
- assertThat(settings.getAsArray("value"), arrayContaining("2", "3"));
+ assertThat(settings.getAsList("value"), contains("2", "3"));
settings = Settings.builder()
.put(Settings.builder().put("value", "1").build())
- .put(Settings.builder().putArray("value", "2", "3").build())
+ .put(Settings.builder().putList("value", "2", "3").build())
.build();
- assertThat(settings.getAsArray("value"), arrayContaining("2", "3"));
+ assertThat(settings.getAsList("value"), contains("2", "3"));
settings = Settings.builder().loadFromSource("value: 1", XContentType.YAML)
.loadFromSource("value: [ 2, 3 ]", XContentType.YAML)
.build();
- assertThat(settings.getAsArray("value"), arrayContaining("2", "3"));
+ assertThat(settings.getAsList("value"), contains("2", "3"));
settings = Settings.builder()
.put(Settings.builder().put("value.with.deep.key", "1").build())
- .put(Settings.builder().putArray("value.with.deep.key", "2", "3").build())
+ .put(Settings.builder().putList("value.with.deep.key", "2", "3").build())
.build();
- assertThat(settings.getAsArray("value.with.deep.key"), arrayContaining("2", "3"));
+ assertThat(settings.getAsList("value.with.deep.key"), contains("2", "3"));
// overriding an array with a shorter array
settings = Settings.builder()
- .put(Settings.builder().putArray("value", "1", "2").build())
- .put(Settings.builder().putArray("value", "3").build())
+ .put(Settings.builder().putList("value", "1", "2").build())
+ .put(Settings.builder().putList("value", "3").build())
.build();
- assertThat(settings.getAsArray("value"), arrayContaining("3"));
+ assertThat(settings.getAsList("value"), contains("3"));
settings = Settings.builder()
- .put(Settings.builder().putArray("value", "1", "2", "3").build())
- .put(Settings.builder().putArray("value", "4", "5").build())
+ .put(Settings.builder().putList("value", "1", "2", "3").build())
+ .put(Settings.builder().putList("value", "4", "5").build())
.build();
- assertThat(settings.getAsArray("value"), arrayContaining("4", "5"));
+ assertThat(settings.getAsList("value"), contains("4", "5"));
settings = Settings.builder()
- .put(Settings.builder().putArray("value.deep.key", "1", "2", "3").build())
- .put(Settings.builder().putArray("value.deep.key", "4", "5").build())
+ .put(Settings.builder().putList("value.deep.key", "1", "2", "3").build())
+ .put(Settings.builder().putList("value.deep.key", "4", "5").build())
.build();
- assertThat(settings.getAsArray("value.deep.key"), arrayContaining("4", "5"));
+ assertThat(settings.getAsList("value.deep.key"), contains("4", "5"));
// overriding an array with a longer array
settings = Settings.builder()
- .put(Settings.builder().putArray("value", "1", "2").build())
- .put(Settings.builder().putArray("value", "3", "4", "5").build())
+ .put(Settings.builder().putList("value", "1", "2").build())
+ .put(Settings.builder().putList("value", "3", "4", "5").build())
.build();
- assertThat(settings.getAsArray("value"), arrayContaining("3", "4", "5"));
+ assertThat(settings.getAsList("value"), contains("3", "4", "5"));
settings = Settings.builder()
- .put(Settings.builder().putArray("value.deep.key", "1", "2", "3").build())
- .put(Settings.builder().putArray("value.deep.key", "4", "5").build())
+ .put(Settings.builder().putList("value.deep.key", "1", "2", "3").build())
+ .put(Settings.builder().putList("value.deep.key", "4", "5").build())
.build();
- assertThat(settings.getAsArray("value.deep.key"), arrayContaining("4", "5"));
+ assertThat(settings.getAsList("value.deep.key"), contains("4", "5"));
// overriding an array with a single value
settings = Settings.builder()
- .put(Settings.builder().putArray("value", "1", "2").build())
+ .put(Settings.builder().putList("value", "1", "2").build())
.put(Settings.builder().put("value", "3").build())
.build();
- assertThat(settings.getAsArray("value"), arrayContaining("3"));
+ assertThat(settings.getAsList("value"), contains("3"));
settings = Settings.builder()
- .put(Settings.builder().putArray("value.deep.key", "1", "2").build())
+ .put(Settings.builder().putList("value.deep.key", "1", "2").build())
.put(Settings.builder().put("value.deep.key", "3").build())
.build();
- assertThat(settings.getAsArray("value.deep.key"), arrayContaining("3"));
+ assertThat(settings.getAsList("value.deep.key"), contains("3"));
// test that other arrays are not overridden
settings = Settings.builder()
- .put(Settings.builder().putArray("value", "1", "2", "3").putArray("a", "b", "c").build())
- .put(Settings.builder().putArray("value", "4", "5").putArray("d", "e", "f").build())
+ .put(Settings.builder().putList("value", "1", "2", "3").putList("a", "b", "c").build())
+ .put(Settings.builder().putList("value", "4", "5").putList("d", "e", "f").build())
.build();
- assertThat(settings.getAsArray("value"), arrayContaining("4", "5"));
- assertThat(settings.getAsArray("a"), arrayContaining("b", "c"));
- assertThat(settings.getAsArray("d"), arrayContaining("e", "f"));
+ assertThat(settings.getAsList("value"), contains("4", "5"));
+ assertThat(settings.getAsList("a"), contains("b", "c"));
+ assertThat(settings.getAsList("d"), contains("e", "f"));
settings = Settings.builder()
- .put(Settings.builder().putArray("value.deep.key", "1", "2", "3").putArray("a", "b", "c").build())
- .put(Settings.builder().putArray("value.deep.key", "4", "5").putArray("d", "e", "f").build())
+ .put(Settings.builder().putList("value.deep.key", "1", "2", "3").putList("a", "b", "c").build())
+ .put(Settings.builder().putList("value.deep.key", "4", "5").putList("d", "e", "f").build())
.build();
- assertThat(settings.getAsArray("value.deep.key"), arrayContaining("4", "5"));
- assertThat(settings.getAsArray("a"), notNullValue());
- assertThat(settings.getAsArray("d"), notNullValue());
+ assertThat(settings.getAsList("value.deep.key"), contains("4", "5"));
+ assertThat(settings.getAsList("a"), notNullValue());
+ assertThat(settings.getAsList("d"), notNullValue());
// overriding a deeper structure with an array
settings = Settings.builder()
.put(Settings.builder().put("value.data", "1").build())
- .put(Settings.builder().putArray("value", "4", "5").build())
+ .put(Settings.builder().putList("value", "4", "5").build())
.build();
- assertThat(settings.getAsArray("value"), arrayContaining("4", "5"));
+ assertThat(settings.getAsList("value"), contains("4", "5"));
// overriding an array with a deeper structure
settings = Settings.builder()
- .put(Settings.builder().putArray("value", "4", "5").build())
+ .put(Settings.builder().putList("value", "4", "5").build())
.put(Settings.builder().put("value.data", "1").build())
.build();
assertThat(settings.get("value.data"), is("1"));
@@ -548,7 +547,7 @@ public void testWriteSettingsToStream() throws IOException {
Settings.Builder builder = Settings.builder();
builder.put("test.key1.baz", "blah1");
builder.putNull("test.key3.bar");
- builder.putArray("test.key4.foo", "1", "2");
+ builder.putList("test.key4.foo", "1", "2");
builder.setSecureSettings(secureSettings);
assertEquals(7, builder.build().size());
Settings.writeSettingsToStream(builder.build(), out);
@@ -558,7 +557,7 @@ public void testWriteSettingsToStream() throws IOException {
assertEquals("blah1", settings.get("test.key1.baz"));
assertNull(settings.get("test.key3.bar"));
assertTrue(settings.keySet().contains("test.key3.bar"));
- assertArrayEquals(new String[] {"1", "2"}, settings.getAsArray("test.key4.foo"));
+ assertEquals(Arrays.asList("1", "2"), settings.getAsList("test.key4.foo"));
}
public void testSecureSettingConflict() {
@@ -579,7 +578,7 @@ public void testGetAsArrayFailsOnDuplicates() {
public void testToAndFromXContent() throws IOException {
Settings settings = Settings.builder()
- .putArray("foo.bar.baz", "1", "2", "3")
+ .putList("foo.bar.baz", "1", "2", "3")
.put("foo.foobar", 2)
.put("rootfoo", "test")
.put("foo.baz", "1,2,3,4")
@@ -593,7 +592,7 @@ public void testToAndFromXContent() throws IOException {
XContentParser parser = createParser(builder);
Settings build = Settings.fromXContent(parser);
assertEquals(5, build.size());
- assertArrayEquals(new String[] {"1", "2", "3"}, build.getAsArray("foo.bar.baz"));
+ assertEquals(Arrays.asList("1", "2", "3"), build.getAsList("foo.bar.baz"));
assertEquals(2, build.getAsInt("foo.foobar", 0).intValue());
assertEquals("test", build.get("rootfoo"));
assertEquals("1,2,3,4", build.get("foo.baz"));
@@ -613,9 +612,9 @@ public void testSimpleJsonSettings() throws Exception {
// check array
assertNull(settings.get("test1.test3.0"));
assertNull(settings.get("test1.test3.1"));
- assertThat(settings.getAsArray("test1.test3").length, equalTo(2));
- assertThat(settings.getAsArray("test1.test3")[0], equalTo("test3-1"));
- assertThat(settings.getAsArray("test1.test3")[1], equalTo("test3-2"));
+ assertThat(settings.getAsList("test1.test3").size(), equalTo(2));
+ assertThat(settings.getAsList("test1.test3").get(0), equalTo("test3-1"));
+ assertThat(settings.getAsList("test1.test3").get(1), equalTo("test3-2"));
}
public void testDuplicateKeysThrowsException() {
@@ -646,14 +645,14 @@ public void testDuplicateKeysThrowsException() {
public void testToXContent() throws IOException {
// this is just terrible but it's the existing behavior!
- Settings test = Settings.builder().putArray("foo.bar", "1", "2", "3").put("foo.bar.baz", "test").build();
+ Settings test = Settings.builder().putList("foo.bar", "1", "2", "3").put("foo.bar.baz", "test").build();
XContentBuilder builder = XContentBuilder.builder(XContentType.JSON.xContent());
builder.startObject();
test.toXContent(builder, new ToXContent.MapParams(Collections.emptyMap()));
builder.endObject();
assertEquals("{\"foo\":{\"bar.baz\":\"test\",\"bar\":[\"1\",\"2\",\"3\"]}}", builder.string());
- test = Settings.builder().putArray("foo.bar", "1", "2", "3").build();
+ test = Settings.builder().putList("foo.bar", "1", "2", "3").build();
builder = XContentBuilder.builder(XContentType.JSON.xContent());
builder.startObject();
test.toXContent(builder, new ToXContent.MapParams(Collections.emptyMap()));
@@ -686,18 +685,18 @@ public void testSimpleYamlSettings() throws Exception {
// check array
assertNull(settings.get("test1.test3.0"));
assertNull(settings.get("test1.test3.1"));
- assertThat(settings.getAsArray("test1.test3").length, equalTo(2));
- assertThat(settings.getAsArray("test1.test3")[0], equalTo("test3-1"));
- assertThat(settings.getAsArray("test1.test3")[1], equalTo("test3-2"));
+ assertThat(settings.getAsList("test1.test3").size(), equalTo(2));
+ assertThat(settings.getAsList("test1.test3").get(0), equalTo("test3-1"));
+ assertThat(settings.getAsList("test1.test3").get(1), equalTo("test3-2"));
}
public void testYamlLegacyList() throws IOException {
Settings settings = Settings.builder()
.loadFromStream("foo.yml", new ByteArrayInputStream("foo.bar.baz.0: 1\nfoo.bar.baz.1: 2".getBytes(StandardCharsets.UTF_8)),
false).build();
- assertThat(settings.getAsArray("foo.bar.baz").length, equalTo(2));
- assertThat(settings.getAsArray("foo.bar.baz")[0], equalTo("1"));
- assertThat(settings.getAsArray("foo.bar.baz")[1], equalTo("2"));
+ assertThat(settings.getAsList("foo.bar.baz").size(), equalTo(2));
+ assertThat(settings.getAsList("foo.bar.baz").get(0), equalTo("1"));
+ assertThat(settings.getAsList("foo.bar.baz").get(1), equalTo("2"));
}
public void testIndentation() throws Exception {
@@ -746,14 +745,14 @@ public void testReadLegacyFromStream() throws IOException {
in.setVersion(VersionUtils.getPreviousVersion(Version.CURRENT));
Settings settings = Settings.readSettingsFromStream(in);
assertEquals(2, settings.size());
- assertArrayEquals(new String[]{"0", "1", "2", "3"}, settings.getAsArray("foo.bar"));
+ assertEquals(Arrays.asList("0", "1", "2", "3"), settings.getAsList("foo.bar"));
assertEquals("baz", settings.get("foo.bar.baz"));
}
public void testWriteLegacyOutput() throws IOException {
BytesStreamOutput output = new BytesStreamOutput();
- output.setVersion(VersionUtils.getPreviousVersion(Version.CURRENT));
- Settings settings = Settings.builder().putArray("foo.bar", "0", "1", "2", "3")
+ output.setVersion(VersionUtils.getPreviousVersion(Version.V_6_1_0));
+ Settings settings = Settings.builder().putList("foo.bar", "0", "1", "2", "3")
.put("foo.bar.baz", "baz").putNull("foo.null").build();
Settings.writeSettingsToStream(settings, output);
StreamInput in = StreamInput.wrap(BytesReference.toBytes(output.bytes()));
@@ -774,7 +773,7 @@ public void testWriteLegacyOutput() throws IOException {
in.setVersion(output.getVersion());
Settings readSettings = Settings.readSettingsFromStream(in);
assertEquals(3, readSettings.size());
- assertArrayEquals(new String[] {"0", "1", "2", "3"}, readSettings.getAsArray("foo.bar"));
+ assertEquals(Arrays.asList("0", "1", "2", "3"), readSettings.getAsList("foo.bar"));
assertEquals(readSettings.get("foo.bar.baz"), "baz");
assertTrue(readSettings.keySet().contains("foo.null"));
assertNull(readSettings.get("foo.null"));
@@ -782,19 +781,19 @@ public void testWriteLegacyOutput() throws IOException {
public void testReadWriteArray() throws IOException {
BytesStreamOutput output = new BytesStreamOutput();
- output.setVersion(Version.CURRENT);
- Settings settings = Settings.builder().putArray("foo.bar", "0", "1", "2", "3").put("foo.bar.baz", "baz").build();
+ Settings settings = Settings.builder().putList("foo.bar", "0", "1", "2", "3").put("foo.bar.baz", "baz").build();
+ output.setVersion(randomFrom(Version.CURRENT, Version.V_6_1_0));
Settings.writeSettingsToStream(settings, output);
StreamInput in = StreamInput.wrap(BytesReference.toBytes(output.bytes()));
Settings build = Settings.readSettingsFromStream(in);
assertEquals(2, build.size());
- assertArrayEquals(build.getAsArray("foo.bar"), new String[] {"0", "1", "2", "3"});
+ assertEquals(build.getAsList("foo.bar"), Arrays.asList("0", "1", "2", "3"));
assertEquals(build.get("foo.bar.baz"), "baz");
}
public void testCopy() {
- Settings settings = Settings.builder().putArray("foo.bar", "0", "1", "2", "3").put("foo.bar.baz", "baz").putNull("test").build();
- assertArrayEquals(new String[] {"0", "1", "2", "3"}, Settings.builder().copy("foo.bar", settings).build().getAsArray("foo.bar"));
+ Settings settings = Settings.builder().putList("foo.bar", "0", "1", "2", "3").put("foo.bar.baz", "baz").putNull("test").build();
+ assertEquals(Arrays.asList("0", "1", "2", "3"), Settings.builder().copy("foo.bar", settings).build().getAsList("foo.bar"));
assertEquals("baz", Settings.builder().copy("foo.bar.baz", settings).build().get("foo.bar.baz"));
assertNull(Settings.builder().copy("foo.bar.baz", settings).build().get("test"));
assertTrue(Settings.builder().copy("test", settings).build().keySet().contains("test"));
diff --git a/core/src/test/java/org/elasticsearch/common/util/concurrent/ThreadContextTests.java b/core/src/test/java/org/elasticsearch/common/util/concurrent/ThreadContextTests.java
index bee56c229c02a..e71efa46424b2 100644
--- a/core/src/test/java/org/elasticsearch/common/util/concurrent/ThreadContextTests.java
+++ b/core/src/test/java/org/elasticsearch/common/util/concurrent/ThreadContextTests.java
@@ -29,7 +29,6 @@
import java.util.List;
import java.util.Map;
import java.util.function.Supplier;
-
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.hasItem;
import static org.hamcrest.Matchers.hasSize;
@@ -215,8 +214,8 @@ public void testResponseHeaders() {
public void testCopyHeaders() {
Settings build = Settings.builder().put("request.headers.default", "1").build();
ThreadContext threadContext = new ThreadContext(build);
- threadContext.copyHeaders(Collections.<String,String>emptyMap().entrySet());
- threadContext.copyHeaders(Collections.<String,String>singletonMap("foo", "bar").entrySet());
+ threadContext.copyHeaders(Collections.<String, String>emptyMap().entrySet());
+ threadContext.copyHeaders(Collections.<String, String>singletonMap("foo", "bar").entrySet());
assertEquals("bar", threadContext.getHeader("foo"));
}
@@ -443,7 +442,7 @@ public void onAfter() {
assertEquals("bar", threadContext.getHeader("foo"));
assertEquals("bar_transient", threadContext.getTransient("foo"));
assertNotNull(threadContext.getTransient("failure"));
- assertEquals("exception from doRun", ((RuntimeException)threadContext.getTransient("failure")).getMessage());
+ assertEquals("exception from doRun", ((RuntimeException) threadContext.getTransient("failure")).getMessage());
assertFalse(threadContext.isDefaultContext());
threadContext.putTransient("after", "after");
}
@@ -604,7 +603,7 @@ protected void doRun() throws Exception {
public void testMarkAsSystemContext() throws IOException {
try (ThreadContext threadContext = new ThreadContext(Settings.EMPTY)) {
assertFalse(threadContext.isSystemContext());
- try(ThreadContext.StoredContext context = threadContext.stashContext()){
+ try (ThreadContext.StoredContext context = threadContext.stashContext()) {
assertFalse(threadContext.isSystemContext());
threadContext.markAsSystemContext();
assertTrue(threadContext.isSystemContext());
@@ -613,6 +612,17 @@ public void testMarkAsSystemContext() throws IOException {
}
}
+ public void testPutHeaders() {
+ Settings build = Settings.builder().put("request.headers.default", "1").build();
+ ThreadContext threadContext = new ThreadContext(build);
+ threadContext.putHeader(Collections.emptyMap());
+ threadContext.putHeader(Collections.singletonMap("foo", "bar"));
+ assertEquals("bar", threadContext.getHeader("foo"));
+ IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () ->
+ threadContext.putHeader(Collections.singletonMap("foo", "boom")));
+ assertEquals("value for key [foo] already present", e.getMessage());
+ }
+
/**
* Sometimes wraps a Runnable in an AbstractRunnable.
*/
diff --git a/core/src/test/java/org/elasticsearch/discovery/ZenFaultDetectionTests.java b/core/src/test/java/org/elasticsearch/discovery/ZenFaultDetectionTests.java
index ed7cdd4d4243d..1a837b825d867 100644
--- a/core/src/test/java/org/elasticsearch/discovery/ZenFaultDetectionTests.java
+++ b/core/src/test/java/org/elasticsearch/discovery/ZenFaultDetectionTests.java
@@ -59,7 +59,6 @@
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
-import static java.util.Collections.singleton;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.greaterThanOrEqualTo;
@@ -137,7 +136,7 @@ protected MockTransportService build(Settings settings, Version version) {
Settings.builder()
.put(settings)
// trace zenfd actions but keep the default otherwise
- .putArray(TransportService.TRACE_LOG_EXCLUDE_SETTING.getKey(), TransportLivenessAction.NAME)
+ .putList(TransportService.TRACE_LOG_EXCLUDE_SETTING.getKey(), TransportLivenessAction.NAME)
.build(),
new MockTcpTransport(settings, threadPool, BigArrays.NON_RECYCLING_INSTANCE, circuitBreakerService,
namedWriteableRegistry, new NetworkService(Collections.emptyList()), version),
diff --git a/core/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java b/core/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java
index 0492bc82e5f73..3c7a49a176635 100644
--- a/core/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java
+++ b/core/src/test/java/org/elasticsearch/discovery/zen/UnicastZenPingTests.java
@@ -179,7 +179,7 @@ public void connectToNode(DiscoveryNode node, ConnectionProfile connectionProfil
final ClusterState stateMismatch = ClusterState.builder(new ClusterName("mismatch")).version(randomNonNegativeLong()).build();
Settings hostsSettings = Settings.builder()
- .putArray("discovery.zen.ping.unicast.hosts",
+ .putList("discovery.zen.ping.unicast.hosts",
NetworkAddress.format(new InetSocketAddress(handleA.address.address().getAddress(), handleA.address.address().getPort())),
NetworkAddress.format(new InetSocketAddress(handleB.address.address().getAddress(), handleB.address.address().getPort())),
NetworkAddress.format(new InetSocketAddress(handleC.address.address().getAddress(), handleC.address.address().getPort())),
@@ -305,7 +305,7 @@ public TransportAddress[] addressesFromString(String address, int perAddressLimi
new InetSocketAddress(handleC.address.address().getAddress(), handleC.address.address().getPort()))});
final Settings hostsSettings = Settings.builder()
- .putArray("discovery.zen.ping.unicast.hosts", "UZP_A", "UZP_B", "UZP_C")
+ .putList("discovery.zen.ping.unicast.hosts", "UZP_A", "UZP_B", "UZP_C")
.put("cluster.name", "test")
.build();
@@ -589,7 +589,7 @@ public void testResolveReuseExistingNodeConnections() throws ExecutionException,
final boolean useHosts = randomBoolean();
final Settings.Builder hostsSettingsBuilder = Settings.builder().put("cluster.name", "test");
if (useHosts) {
- hostsSettingsBuilder.putArray("discovery.zen.ping.unicast.hosts",
+ hostsSettingsBuilder.putList("discovery.zen.ping.unicast.hosts",
NetworkAddress.format(new InetSocketAddress(handleB.address.address().getAddress(), handleB.address.address().getPort()))
);
} else {
diff --git a/core/src/test/java/org/elasticsearch/env/EnvironmentTests.java b/core/src/test/java/org/elasticsearch/env/EnvironmentTests.java
index 51391a8643b48..70df7d33f291c 100644
--- a/core/src/test/java/org/elasticsearch/env/EnvironmentTests.java
+++ b/core/src/test/java/org/elasticsearch/env/EnvironmentTests.java
@@ -42,7 +42,7 @@ public Environment newEnvironment(Settings settings) throws IOException {
Settings build = Settings.builder()
.put(settings)
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath())
- .putArray(Environment.PATH_DATA_SETTING.getKey(), tmpPaths()).build();
+ .putList(Environment.PATH_DATA_SETTING.getKey(), tmpPaths()).build();
return new Environment(build);
}
@@ -50,7 +50,7 @@ public void testRepositoryResolution() throws IOException {
Environment environment = newEnvironment();
assertThat(environment.resolveRepoFile("/test/repos/repo1"), nullValue());
assertThat(environment.resolveRepoFile("test/repos/repo1"), nullValue());
- environment = newEnvironment(Settings.builder().putArray(Environment.PATH_REPO_SETTING.getKey(), "/test/repos", "/another/repos", "/test/repos/../other").build());
+ environment = newEnvironment(Settings.builder().putList(Environment.PATH_REPO_SETTING.getKey(), "/test/repos", "/another/repos", "/test/repos/../other").build());
assertThat(environment.resolveRepoFile("/test/repos/repo1"), notNullValue());
assertThat(environment.resolveRepoFile("test/repos/repo1"), notNullValue());
assertThat(environment.resolveRepoFile("/another/repos/repo1"), notNullValue());
diff --git a/core/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java b/core/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java
index 42cb4a5811b2e..615a75dda025a 100644
--- a/core/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java
+++ b/core/src/test/java/org/elasticsearch/env/NodeEnvironmentTests.java
@@ -483,7 +483,7 @@ public NodeEnvironment newNodeEnvironment(Settings settings) throws IOException
public Settings buildEnvSettings(Settings settings) {
return Settings.builder()
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath().toString())
- .putArray(Environment.PATH_DATA_SETTING.getKey(), tmpPaths())
+ .putList(Environment.PATH_DATA_SETTING.getKey(), tmpPaths())
.put(settings).build();
}
@@ -491,7 +491,7 @@ public NodeEnvironment newNodeEnvironment(String[] dataPaths, Settings settings)
Settings build = Settings.builder()
.put(settings)
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath().toString())
- .putArray(Environment.PATH_DATA_SETTING.getKey(), dataPaths).build();
+ .putList(Environment.PATH_DATA_SETTING.getKey(), dataPaths).build();
return new NodeEnvironment(build, new Environment(build));
}
@@ -500,7 +500,7 @@ public NodeEnvironment newNodeEnvironment(String[] dataPaths, String sharedDataP
.put(settings)
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath().toString())
.put(Environment.PATH_SHARED_DATA_SETTING.getKey(), sharedDataPath)
- .putArray(Environment.PATH_DATA_SETTING.getKey(), dataPaths).build();
+ .putList(Environment.PATH_DATA_SETTING.getKey(), dataPaths).build();
return new NodeEnvironment(build, new Environment(build));
}
}
diff --git a/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java b/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java
index 8d8878fa28251..6be786aff88b5 100644
--- a/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java
+++ b/core/src/test/java/org/elasticsearch/index/IndexSettingsTests.java
@@ -543,7 +543,7 @@ public void testQueryDefaultField() {
);
assertThat(index.getDefaultFields(), equalTo(Collections.singletonList("body")));
index.updateIndexMetaData(
- newIndexMeta("index", Settings.builder().putArray("index.query.default_field", "body", "title").build())
+ newIndexMeta("index", Settings.builder().putList("index.query.default_field", "body", "title").build())
);
assertThat(index.getDefaultFields(), equalTo(Arrays.asList("body", "title")));
}
diff --git a/core/src/test/java/org/elasticsearch/index/IndexSortIT.java b/core/src/test/java/org/elasticsearch/index/IndexSortIT.java
index bb59bc948805c..c981d88a3d1a8 100644
--- a/core/src/test/java/org/elasticsearch/index/IndexSortIT.java
+++ b/core/src/test/java/org/elasticsearch/index/IndexSortIT.java
@@ -26,8 +26,6 @@
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.test.ESIntegTestCase;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
import java.io.IOException;
@@ -80,7 +78,7 @@ public void testIndexSort() {
.put(indexSettings())
.put("index.number_of_shards", "1")
.put("index.number_of_replicas", "1")
- .putArray("index.sort.field", "date", "numeric_dv", "keyword_dv")
+ .putList("index.sort.field", "date", "numeric_dv", "keyword_dv")
)
.addMapping("test", TEST_MAPPING)
.get();
@@ -99,7 +97,7 @@ public void testInvalidIndexSort() {
() -> prepareCreate("test")
.setSettings(Settings.builder()
.put(indexSettings())
- .putArray("index.sort.field", "invalid_field")
+ .putList("index.sort.field", "invalid_field")
)
.addMapping("test", TEST_MAPPING)
.get()
@@ -110,7 +108,7 @@ public void testInvalidIndexSort() {
() -> prepareCreate("test")
.setSettings(Settings.builder()
.put(indexSettings())
- .putArray("index.sort.field", "numeric")
+ .putList("index.sort.field", "numeric")
)
.addMapping("test", TEST_MAPPING)
.get()
@@ -121,7 +119,7 @@ public void testInvalidIndexSort() {
() -> prepareCreate("test")
.setSettings(Settings.builder()
.put(indexSettings())
- .putArray("index.sort.field", "keyword")
+ .putList("index.sort.field", "keyword")
)
.addMapping("test", TEST_MAPPING)
.get()
diff --git a/core/src/test/java/org/elasticsearch/index/IndexSortSettingsTests.java b/core/src/test/java/org/elasticsearch/index/IndexSortSettingsTests.java
index 74ec1cc02d93f..78569d927be76 100644
--- a/core/src/test/java/org/elasticsearch/index/IndexSortSettingsTests.java
+++ b/core/src/test/java/org/elasticsearch/index/IndexSortSettingsTests.java
@@ -76,9 +76,9 @@ public void testSimpleIndexSort() throws IOException {
public void testIndexSortWithArrays() throws IOException {
Settings settings = Settings.builder()
- .putArray("index.sort.field", "field1", "field2")
- .putArray("index.sort.order", "asc", "desc")
- .putArray("index.sort.missing", "_last", "_first")
+ .putList("index.sort.field", "field1", "field2")
+ .putList("index.sort.order", "asc", "desc")
+ .putList("index.sort.missing", "_last", "_first")
.build();
IndexSettings indexSettings = indexSettings(settings);
IndexSortConfig config = indexSettings.getIndexSortConfig();
@@ -108,7 +108,7 @@ public void testInvalidIndexSort() throws IOException {
public void testInvalidIndexSortWithArray() throws IOException {
final Settings settings = Settings.builder()
.put("index.sort.field", "field1")
- .putArray("index.sort.order", new String[] {"asc", "desc"})
+ .putList("index.sort.order", new String[] {"asc", "desc"})
.build();
IllegalArgumentException exc =
expectThrows(IllegalArgumentException.class, () -> indexSettings(settings));
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisRegistryTests.java b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisRegistryTests.java
index 9303159c265b9..d93533ffc80d3 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisRegistryTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisRegistryTests.java
@@ -129,9 +129,9 @@ public void testConfigureCamelCaseTokenFilter() throws IOException {
.put("index.analysis.filter.testFilter.type", "mock")
.put("index.analysis.filter.test_filter.type", "mock")
.put("index.analysis.analyzer.custom_analyzer_with_camel_case.tokenizer", "standard")
- .putArray("index.analysis.analyzer.custom_analyzer_with_camel_case.filter", "lowercase", "testFilter")
+ .putList("index.analysis.analyzer.custom_analyzer_with_camel_case.filter", "lowercase", "testFilter")
.put("index.analysis.analyzer.custom_analyzer_with_snake_case.tokenizer", "standard")
- .putArray("index.analysis.analyzer.custom_analyzer_with_snake_case.filter", "lowercase", "test_filter").build();
+ .putList("index.analysis.analyzer.custom_analyzer_with_snake_case.filter", "lowercase", "test_filter").build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", indexSettings);
@@ -209,8 +209,8 @@ public void testNoTypeOrTokenizerErrorMessage() throws IOException {
.builder()
.put(IndexMetaData.SETTING_VERSION_CREATED, version)
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
- .putArray("index.analysis.analyzer.test_analyzer.filter", new String[] {"lowercase", "stop", "shingle"})
- .putArray("index.analysis.analyzer.test_analyzer.char_filter", new String[] {"html_strip"})
+ .putList("index.analysis.analyzer.test_analyzer.filter", new String[] {"lowercase", "stop", "shingle"})
+ .putList("index.analysis.analyzer.test_analyzer.char_filter", new String[] {"html_strip"})
.build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisTests.java b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisTests.java
index 4073bbdbbc9c7..e07b4e5b9d435 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/AnalysisTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/AnalysisTests.java
@@ -29,7 +29,6 @@
import java.io.IOException;
import java.io.OutputStream;
import java.nio.charset.CharacterCodingException;
-import java.nio.charset.Charset;
import java.nio.charset.MalformedInputException;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
@@ -50,7 +49,7 @@ public void testParseStemExclusion() {
assertThat(set.contains("baz"), is(false));
/* Array */
- settings = Settings.builder().putArray("stem_exclusion", "foo","bar").build();
+ settings = Settings.builder().putList("stem_exclusion", "foo","bar").build();
set = Analysis.parseStemExclusion(settings, CharArraySet.EMPTY_SET);
assertThat(set.contains("foo"), is(true));
assertThat(set.contains("bar"), is(true));
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/CustomNormalizerTests.java b/core/src/test/java/org/elasticsearch/index/analysis/CustomNormalizerTests.java
index 66b28ec419a7f..7d8d64e6962d5 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/CustomNormalizerTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/CustomNormalizerTests.java
@@ -42,7 +42,7 @@ public class CustomNormalizerTests extends ESTokenStreamTestCase {
public void testBasics() throws IOException {
Settings settings = Settings.builder()
- .putArray("index.analysis.normalizer.my_normalizer.filter", "lowercase")
+ .putList("index.analysis.normalizer.my_normalizer.filter", "lowercase")
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings, MOCK_ANALYSIS_PLUGIN);
@@ -57,7 +57,7 @@ public void testBasics() throws IOException {
public void testUnknownType() {
Settings settings = Settings.builder()
.put("index.analysis.normalizer.my_normalizer.type", "foobar")
- .putArray("index.analysis.normalizer.my_normalizer.filter", "lowercase", "asciifolding")
+ .putList("index.analysis.normalizer.my_normalizer.filter", "lowercase", "asciifolding")
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
@@ -78,7 +78,7 @@ public void testTokenizer() throws IOException {
public void testCharFilters() throws IOException {
Settings settings = Settings.builder()
.put("index.analysis.char_filter.my_mapping.type", "mock_char_filter")
- .putArray("index.analysis.normalizer.my_normalizer.char_filter", "my_mapping")
+ .putList("index.analysis.normalizer.my_normalizer.char_filter", "my_mapping")
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings, MOCK_ANALYSIS_PLUGIN);
@@ -92,7 +92,7 @@ public void testCharFilters() throws IOException {
public void testIllegalFilters() throws IOException {
Settings settings = Settings.builder()
- .putArray("index.analysis.normalizer.my_normalizer.filter", "mock_forbidden")
+ .putList("index.analysis.normalizer.my_normalizer.filter", "mock_forbidden")
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
@@ -102,7 +102,7 @@ public void testIllegalFilters() throws IOException {
public void testIllegalCharFilters() throws IOException {
Settings settings = Settings.builder()
- .putArray("index.analysis.normalizer.my_normalizer.char_filter", "mock_forbidden")
+ .putList("index.analysis.normalizer.my_normalizer.char_filter", "mock_forbidden")
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
IllegalArgumentException e = expectThrows(IllegalArgumentException.class,
diff --git a/core/src/test/java/org/elasticsearch/index/analysis/synonyms/SynonymsAnalysisTests.java b/core/src/test/java/org/elasticsearch/index/analysis/synonyms/SynonymsAnalysisTests.java
index 5cf591516cedc..0bc229c9328cf 100644
--- a/core/src/test/java/org/elasticsearch/index/analysis/synonyms/SynonymsAnalysisTests.java
+++ b/core/src/test/java/org/elasticsearch/index/analysis/synonyms/SynonymsAnalysisTests.java
@@ -85,11 +85,11 @@ public void testSynonymWordDeleteByAnalyzer() throws IOException {
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put("path.home", createTempDir().toString())
.put("index.analysis.filter.synonym.type", "synonym")
- .putArray("index.analysis.filter.synonym.synonyms", "kimchy => shay", "dude => elasticsearch", "abides => man!")
+ .putList("index.analysis.filter.synonym.synonyms", "kimchy => shay", "dude => elasticsearch", "abides => man!")
.put("index.analysis.filter.stop_within_synonym.type", "stop")
- .putArray("index.analysis.filter.stop_within_synonym.stopwords", "kimchy", "elasticsearch")
+ .putList("index.analysis.filter.stop_within_synonym.stopwords", "kimchy", "elasticsearch")
.put("index.analysis.analyzer.synonymAnalyzerWithStopSynonymBeforeSynonym.tokenizer", "whitespace")
- .putArray("index.analysis.analyzer.synonymAnalyzerWithStopSynonymBeforeSynonym.filter", "stop_within_synonym","synonym")
+ .putList("index.analysis.analyzer.synonymAnalyzerWithStopSynonymBeforeSynonym.filter", "stop_within_synonym","synonym")
.build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
try {
@@ -106,11 +106,11 @@ public void testExpandSynonymWordDeleteByAnalyzer() throws IOException {
.put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT)
.put("path.home", createTempDir().toString())
.put("index.analysis.filter.synonym_expand.type", "synonym")
- .putArray("index.analysis.filter.synonym_expand.synonyms", "kimchy, shay", "dude, elasticsearch", "abides, man!")
+ .putList("index.analysis.filter.synonym_expand.synonyms", "kimchy, shay", "dude, elasticsearch", "abides, man!")
.put("index.analysis.filter.stop_within_synonym.type", "stop")
- .putArray("index.analysis.filter.stop_within_synonym.stopwords", "kimchy", "elasticsearch")
+ .putList("index.analysis.filter.stop_within_synonym.stopwords", "kimchy", "elasticsearch")
.put("index.analysis.analyzer.synonymAnalyzerExpandWithStopBeforeSynonym.tokenizer", "whitespace")
- .putArray("index.analysis.analyzer.synonymAnalyzerExpandWithStopBeforeSynonym.filter", "stop_within_synonym","synonym_expand")
+ .putList("index.analysis.analyzer.synonymAnalyzerExpandWithStopBeforeSynonym.filter", "stop_within_synonym","synonym_expand")
.build();
IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
try {
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java b/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java
index 06c31f4dd1849..023d2249f2f82 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/DynamicMappingTests.java
@@ -217,7 +217,7 @@ private Mapper parse(DocumentMapper mapper, DocumentMapperParser parser, XConten
ParseContext.InternalParseContext ctx = new ParseContext.InternalParseContext(settings, parser, mapper, source, xContentParser);
assertEquals(XContentParser.Token.START_OBJECT, ctx.parser().nextToken());
ctx.parser().nextToken();
- DocumentParser.parseObjectOrNested(ctx, mapper.root(), true);
+ DocumentParser.parseObjectOrNested(ctx, mapper.root());
Mapping mapping = DocumentParser.createDynamicUpdate(mapper.mapping(), mapper, ctx.getDynamicMappers());
return mapping == null ? null : mapping.root();
}
@@ -639,8 +639,7 @@ private void doTestDefaultFloatingPointMappings(DocumentMapper mapper, XContentB
.field("baz", (double) 3.2f) // double that can be accurately represented as a float
.field("quux", "3.2") // float detected through numeric detection
.endObject().bytes();
- ParsedDocument parsedDocument = mapper.parse(SourceToParse.source("index", "type", "id", source,
- XContentType.JSON));
+ ParsedDocument parsedDocument = mapper.parse(SourceToParse.source("index", "type", "id", source, builder.contentType()));
Mapping update = parsedDocument.dynamicMappingsUpdate();
assertNotNull(update);
assertThat(((FieldMapper) update.root().getMapper("foo")).fieldType().typeName(), equalTo("float"));
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java
index 3ecef3aa0f514..e67b25b051b4e 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/KeywordFieldMapperTests.java
@@ -70,9 +70,9 @@ protected Collection> getPlugins() {
public void setup() {
indexService = createIndex("test", Settings.builder()
.put("index.analysis.normalizer.my_lowercase.type", "custom")
- .putArray("index.analysis.normalizer.my_lowercase.filter", "lowercase")
+ .putList("index.analysis.normalizer.my_lowercase.filter", "lowercase")
.put("index.analysis.normalizer.my_other_lowercase.type", "custom")
- .putArray("index.analysis.normalizer.my_other_lowercase.filter", "mock_other_lowercase").build());
+ .putList("index.analysis.normalizer.my_other_lowercase.filter", "mock_other_lowercase").build());
parser = indexService.mapperService().documentMapperParser();
}
diff --git a/core/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java b/core/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java
index 3b73b5dfd3770..85017cb35cd39 100644
--- a/core/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java
+++ b/core/src/test/java/org/elasticsearch/index/mapper/SourceFieldMapperTests.java
@@ -65,7 +65,7 @@ public void testNoFormat() throws Exception {
doc = documentMapper.parse(SourceToParse.source("test", "type", "1", XContentFactory.smileBuilder().startObject()
.field("field", "value")
.endObject().bytes(),
- XContentType.JSON));
+ XContentType.SMILE));
assertThat(XContentFactory.xContentType(doc.source()), equalTo(XContentType.SMILE));
}
diff --git a/core/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java
index d800d60665063..d7d9a04d62b32 100644
--- a/core/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/index/query/QueryStringQueryBuilderTests.java
@@ -999,7 +999,7 @@ public void testDefaultField() throws Exception {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
QueryShardContext context = createShardContext();
context.getIndexSettings().updateIndexMetaData(
- newIndexMeta("index", context.getIndexSettings().getSettings(), Settings.builder().putArray("index.query.default_field",
+ newIndexMeta("index", context.getIndexSettings().getSettings(), Settings.builder().putList("index.query.default_field",
STRING_FIELD_NAME, STRING_FIELD_NAME_2 + "^5").build())
);
Query query = new QueryStringQueryBuilder("hello")
@@ -1014,7 +1014,7 @@ public void testDefaultField() throws Exception {
// Reset the default value
context.getIndexSettings().updateIndexMetaData(
newIndexMeta("index",
- context.getIndexSettings().getSettings(), Settings.builder().putArray("index.query.default_field", "*").build())
+ context.getIndexSettings().getSettings(), Settings.builder().putList("index.query.default_field", "*").build())
);
}
diff --git a/core/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java b/core/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java
index b5ef2c81d1c47..74793e83e644c 100644
--- a/core/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java
+++ b/core/src/test/java/org/elasticsearch/index/query/SimpleQueryStringBuilderTests.java
@@ -576,7 +576,7 @@ public void testDefaultField() throws Exception {
assumeTrue("test runs only when at least a type is registered", getCurrentTypes().length > 0);
QueryShardContext context = createShardContext();
context.getIndexSettings().updateIndexMetaData(
- newIndexMeta("index", context.getIndexSettings().getSettings(), Settings.builder().putArray("index.query.default_field",
+ newIndexMeta("index", context.getIndexSettings().getSettings(), Settings.builder().putList("index.query.default_field",
STRING_FIELD_NAME, STRING_FIELD_NAME_2 + "^5").build())
);
Query query = new SimpleQueryStringBuilder("hello")
@@ -591,7 +591,7 @@ public void testDefaultField() throws Exception {
// Reset the default value
context.getIndexSettings().updateIndexMetaData(
newIndexMeta("index",
- context.getIndexSettings().getSettings(), Settings.builder().putArray("index.query.default_field", "*").build())
+ context.getIndexSettings().getSettings(), Settings.builder().putList("index.query.default_field", "*").build())
);
}
diff --git a/core/src/test/java/org/elasticsearch/index/search/MatchQueryIT.java b/core/src/test/java/org/elasticsearch/index/search/MatchQueryIT.java
index ec5e92ef6e376..aa154d9392574 100644
--- a/core/src/test/java/org/elasticsearch/index/search/MatchQueryIT.java
+++ b/core/src/test/java/org/elasticsearch/index/search/MatchQueryIT.java
@@ -52,15 +52,15 @@ public void setUp() throws Exception {
Settings.builder()
.put(indexSettings())
.put("index.analysis.filter.syns.type", "synonym")
- .putArray("index.analysis.filter.syns.synonyms", "wtf, what the fudge", "foo, bar baz")
+ .putList("index.analysis.filter.syns.synonyms", "wtf, what the fudge", "foo, bar baz")
.put("index.analysis.analyzer.lower_syns.type", "custom")
.put("index.analysis.analyzer.lower_syns.tokenizer", "standard")
- .putArray("index.analysis.analyzer.lower_syns.filter", "lowercase", "syns")
+ .putList("index.analysis.analyzer.lower_syns.filter", "lowercase", "syns")
.put("index.analysis.filter.graphsyns.type", "synonym_graph")
- .putArray("index.analysis.filter.graphsyns.synonyms", "wtf, what the fudge", "foo, bar baz")
+ .putList("index.analysis.filter.graphsyns.synonyms", "wtf, what the fudge", "foo, bar baz")
.put("index.analysis.analyzer.lower_graphsyns.type", "custom")
.put("index.analysis.analyzer.lower_graphsyns.tokenizer", "standard")
- .putArray("index.analysis.analyzer.lower_graphsyns.filter", "lowercase", "graphsyns")
+ .putList("index.analysis.analyzer.lower_graphsyns.filter", "lowercase", "graphsyns")
);
assertAcked(builder.addMapping(INDEX, createMapping()));
diff --git a/core/src/test/java/org/elasticsearch/index/search/MultiMatchQueryTests.java b/core/src/test/java/org/elasticsearch/index/search/MultiMatchQueryTests.java
index 5b63fc4bdb011..d18e4307d36de 100644
--- a/core/src/test/java/org/elasticsearch/index/search/MultiMatchQueryTests.java
+++ b/core/src/test/java/org/elasticsearch/index/search/MultiMatchQueryTests.java
@@ -57,7 +57,7 @@ public class MultiMatchQueryTests extends ESSingleNodeTestCase {
public void setup() throws IOException {
Settings settings = Settings.builder()
.put("index.analysis.filter.syns.type","synonym")
- .putArray("index.analysis.filter.syns.synonyms","quick,fast")
+ .putList("index.analysis.filter.syns.synonyms","quick,fast")
.put("index.analysis.analyzer.syns.tokenizer","standard")
.put("index.analysis.analyzer.syns.filter","syns").build();
IndexService indexService = createIndex("test", settings);
diff --git a/core/src/test/java/org/elasticsearch/index/shard/NewPathForShardTests.java b/core/src/test/java/org/elasticsearch/index/shard/NewPathForShardTests.java
index fc8fc12e75d6a..c3d309b486fa0 100644
--- a/core/src/test/java/org/elasticsearch/index/shard/NewPathForShardTests.java
+++ b/core/src/test/java/org/elasticsearch/index/shard/NewPathForShardTests.java
@@ -26,7 +26,6 @@
import org.elasticsearch.env.Environment;
import org.elasticsearch.env.NodeEnvironment;
import org.elasticsearch.env.NodeEnvironment.NodePath;
-import org.elasticsearch.index.Index;
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.IndexSettingsModule;
@@ -168,7 +167,7 @@ public void testSelectNewPathForShard() throws Exception {
Settings settings = Settings.builder()
.put(Environment.PATH_HOME_SETTING.getKey(), path)
- .putArray(Environment.PATH_DATA_SETTING.getKey(), paths).build();
+ .putList(Environment.PATH_DATA_SETTING.getKey(), paths).build();
NodeEnvironment nodeEnv = new NodeEnvironment(settings, new Environment(settings));
// Make sure all our mocking above actually worked:
diff --git a/core/src/test/java/org/elasticsearch/index/store/FsDirectoryServiceTests.java b/core/src/test/java/org/elasticsearch/index/store/FsDirectoryServiceTests.java
index 0a72037b7d8c0..24ce9b487cc24 100644
--- a/core/src/test/java/org/elasticsearch/index/store/FsDirectoryServiceTests.java
+++ b/core/src/test/java/org/elasticsearch/index/store/FsDirectoryServiceTests.java
@@ -21,9 +21,7 @@
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.FileSwitchDirectory;
import org.apache.lucene.store.MMapDirectory;
-import org.apache.lucene.store.SimpleFSDirectory;
import org.apache.lucene.store.SleepingLockWrapper;
-import org.elasticsearch.cluster.metadata.IndexMetaData;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.index.IndexModule;
import org.elasticsearch.index.IndexSettings;
@@ -48,7 +46,7 @@ public void testPreload() throws IOException {
private void doTestPreload(String...preload) throws IOException {
Settings build = Settings.builder()
.put(IndexModule.INDEX_STORE_TYPE_SETTING.getKey(), "mmapfs")
- .putArray(IndexModule.INDEX_STORE_PRE_LOAD_SETTING.getKey(), preload)
+ .putList(IndexModule.INDEX_STORE_PRE_LOAD_SETTING.getKey(), preload)
.build();
IndexSettings settings = IndexSettingsModule.newIndexSettings("foo", build);
IndexStore store = new IndexStore(settings);
diff --git a/core/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java b/core/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java
index d53dba67e0dc4..9f214082d4b22 100644
--- a/core/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java
+++ b/core/src/test/java/org/elasticsearch/indices/analyze/AnalyzeActionIT.java
@@ -117,9 +117,9 @@ public void testAnalyzeWithNonDefaultPostionLength() throws Exception {
assertAcked(prepareCreate("test").addAlias(new Alias("alias"))
.setSettings(Settings.builder().put(indexSettings())
.put("index.analysis.filter.syns.type", "synonym")
- .putArray("index.analysis.filter.syns.synonyms", "wtf, what the fudge")
+ .putList("index.analysis.filter.syns.synonyms", "wtf, what the fudge")
.put("index.analysis.analyzer.custom_syns.tokenizer", "standard")
- .putArray("index.analysis.analyzer.custom_syns.filter", "lowercase", "syns")));
+ .putList("index.analysis.analyzer.custom_syns.filter", "lowercase", "syns")));
ensureGreen();
AnalyzeResponse analyzeResponse = client().admin().indices().prepareAnalyze("say what the fudge").setIndex("test").setAnalyzer("custom_syns").get();
@@ -446,7 +446,7 @@ public void testAnalyzeNormalizedKeywordField() throws IOException {
assertAcked(prepareCreate("test").addAlias(new Alias("alias"))
.setSettings(Settings.builder().put(indexSettings())
.put("index.analysis.normalizer.my_normalizer.type", "custom")
- .putArray("index.analysis.normalizer.my_normalizer.filter", "lowercase"))
+ .putList("index.analysis.normalizer.my_normalizer.filter", "lowercase"))
.addMapping("test", "keyword", "type=keyword,normalizer=my_normalizer"));
ensureGreen("test");
diff --git a/core/src/test/java/org/elasticsearch/rest/action/cat/RestIndicesActionTests.java b/core/src/test/java/org/elasticsearch/rest/action/cat/RestIndicesActionTests.java
index 998020cbd2659..cd592c9ed1e9c 100644
--- a/core/src/test/java/org/elasticsearch/rest/action/cat/RestIndicesActionTests.java
+++ b/core/src/test/java/org/elasticsearch/rest/action/cat/RestIndicesActionTests.java
@@ -136,11 +136,14 @@ public void testBuildTable() {
private IndicesStatsResponse randomIndicesStatsResponse(final Index[] indices) {
List shardStats = new ArrayList<>();
for (final Index index : indices) {
- for (int i = 0; i < 2; i++) {
+ int numShards = randomInt(5);
+ int primaryIdx = randomIntBetween(-1, numShards - 1); // -1 means there is no primary shard.
+ for (int i = 0; i < numShards; i++) {
ShardId shardId = new ShardId(index, i);
+ boolean primary = (i == primaryIdx);
Path path = createTempDir().resolve("indices").resolve(index.getUUID()).resolve(String.valueOf(i));
- ShardRouting shardRouting = ShardRouting.newUnassigned(shardId, i == 0,
- i == 0 ? StoreRecoverySource.EMPTY_STORE_INSTANCE : PeerRecoverySource.INSTANCE,
+ ShardRouting shardRouting = ShardRouting.newUnassigned(shardId, primary,
+ primary ? StoreRecoverySource.EMPTY_STORE_INSTANCE : PeerRecoverySource.INSTANCE,
new UnassignedInfo(UnassignedInfo.Reason.INDEX_CREATED, null)
);
shardRouting = shardRouting.initialize("node-0", null, ShardRouting.UNAVAILABLE_EXPECTED_SHARD_SIZE);
diff --git a/core/src/test/java/org/elasticsearch/search/MultiValueModeTests.java b/core/src/test/java/org/elasticsearch/search/MultiValueModeTests.java
index df18b00528c66..d9eb45013263d 100644
--- a/core/src/test/java/org/elasticsearch/search/MultiValueModeTests.java
+++ b/core/src/test/java/org/elasticsearch/search/MultiValueModeTests.java
@@ -19,8 +19,6 @@
package org.elasticsearch.search;
-import com.carrotsearch.randomizedtesting.generators.RandomStrings;
-
import org.apache.lucene.index.BinaryDocValues;
import org.apache.lucene.index.DocValues;
import org.apache.lucene.index.NumericDocValues;
@@ -160,6 +158,8 @@ private void verifySortedNumeric(Supplier supplier, int
for (int i = 0; i < maxDoc; ++i) {
assertTrue(selected.advanceExact(i));
final long actual = selected.longValue();
+ verifyLongValueCanCalledMoreThanOnce(selected, actual);
+
long expected = 0;
if (values.advanceExact(i) == false) {
expected = missingValue;
@@ -203,6 +203,12 @@ private void verifySortedNumeric(Supplier supplier, int
}
}
+ private void verifyLongValueCanCalledMoreThanOnce(NumericDocValues values, long expected) throws IOException {
+ for (int j = 0, numCall = randomIntBetween(1, 10); j < numCall; j++) {
+ assertEquals(expected, values.longValue());
+ }
+ }
+
private void verifySortedNumeric(Supplier supplier, int maxDoc, FixedBitSet rootDocs, FixedBitSet innerDocs) throws IOException {
for (long missingValue : new long[] { 0, randomLong() }) {
for (MultiValueMode mode : new MultiValueMode[] {MultiValueMode.MIN, MultiValueMode.MAX, MultiValueMode.SUM, MultiValueMode.AVG}) {
@@ -212,6 +218,8 @@ private void verifySortedNumeric(Supplier supplier, int
for (int root = rootDocs.nextSetBit(0); root != -1; root = root + 1 < maxDoc ? rootDocs.nextSetBit(root + 1) : -1) {
assertTrue(selected.advanceExact(root));
final long actual = selected.longValue();
+ verifyLongValueCanCalledMoreThanOnce(selected, actual);
+
long expected = 0;
if (mode == MultiValueMode.MAX) {
expected = Long.MIN_VALUE;
@@ -320,14 +328,13 @@ public int docValueCount() {
private void verifySortedNumericDouble(Supplier supplier, int maxDoc) throws IOException {
for (long missingValue : new long[] { 0, randomLong() }) {
for (MultiValueMode mode : MultiValueMode.values()) {
- if (MultiValueMode.MEDIAN.equals(mode)) {
- continue;
- }
SortedNumericDoubleValues values = supplier.get();
final NumericDoubleValues selected = mode.select(values, missingValue);
for (int i = 0; i < maxDoc; ++i) {
assertTrue(selected.advanceExact(i));
final double actual = selected.doubleValue();
+ verifyDoubleValueCanCalledMoreThanOnce(selected, actual);
+
double expected = 0.0;
if (values.advanceExact(i) == false) {
expected = missingValue;
@@ -371,6 +378,12 @@ private void verifySortedNumericDouble(Supplier suppl
}
}
+ private void verifyDoubleValueCanCalledMoreThanOnce(NumericDoubleValues values, double expected) throws IOException {
+ for (int j = 0, numCall = randomIntBetween(1, 10); j < numCall; j++) {
+ assertTrue(Double.compare(values.doubleValue(), expected) == 0);
+ }
+ }
+
private void verifySortedNumericDouble(Supplier supplier, int maxDoc, FixedBitSet rootDocs, FixedBitSet innerDocs) throws IOException {
for (long missingValue : new long[] { 0, randomLong() }) {
for (MultiValueMode mode : new MultiValueMode[] {MultiValueMode.MIN, MultiValueMode.MAX, MultiValueMode.SUM, MultiValueMode.AVG}) {
@@ -379,7 +392,9 @@ private void verifySortedNumericDouble(Supplier suppl
int prevRoot = -1;
for (int root = rootDocs.nextSetBit(0); root != -1; root = root + 1 < maxDoc ? rootDocs.nextSetBit(root + 1) : -1) {
assertTrue(selected.advanceExact(root));
- final double actual = selected.doubleValue();;
+ final double actual = selected.doubleValue();
+ verifyDoubleValueCanCalledMoreThanOnce(selected, actual);
+
double expected = 0.0;
if (mode == MultiValueMode.MAX) {
expected = Long.MIN_VALUE;
@@ -421,7 +436,7 @@ public void testSingleValuedStrings() throws Exception {
final FixedBitSet docsWithValue = randomBoolean() ? null : new FixedBitSet(numDocs);
for (int i = 0; i < array.length; ++i) {
if (randomBoolean()) {
- array[i] = new BytesRef(RandomStrings.randomAsciiOfLength(random(), 8));
+ array[i] = new BytesRef(randomAlphaOfLengthBetween(8, 8));
if (docsWithValue != null) {
docsWithValue.set(i);
}
@@ -456,7 +471,7 @@ public void testMultiValuedStrings() throws Exception {
for (int i = 0; i < numDocs; ++i) {
final BytesRef[] values = new BytesRef[randomInt(4)];
for (int j = 0; j < values.length; ++j) {
- values[j] = new BytesRef(RandomStrings.randomAsciiOfLength(random(), 8));
+ values[j] = new BytesRef(randomAlphaOfLengthBetween(8, 8));
}
Arrays.sort(values);
array[i] = values;
@@ -489,13 +504,15 @@ public int docValueCount() {
}
private void verifySortedBinary(Supplier supplier, int maxDoc) throws IOException {
- for (BytesRef missingValue : new BytesRef[] { new BytesRef(), new BytesRef(RandomStrings.randomAsciiOfLength(random(), 8)) }) {
+ for (BytesRef missingValue : new BytesRef[] { new BytesRef(), new BytesRef(randomAlphaOfLengthBetween(8, 8)) }) {
for (MultiValueMode mode : new MultiValueMode[] {MultiValueMode.MIN, MultiValueMode.MAX}) {
SortedBinaryDocValues values = supplier.get();
final BinaryDocValues selected = mode.select(values, missingValue);
for (int i = 0; i < maxDoc; ++i) {
assertTrue(selected.advanceExact(i));
final BytesRef actual = selected.binaryValue();
+ verifyBinaryValueCanCalledMoreThanOnce(selected, actual);
+
BytesRef expected = null;
if (values.advanceExact(i) == false) {
expected = missingValue;
@@ -524,8 +541,14 @@ private void verifySortedBinary(Supplier supplier, int ma
}
}
+ private void verifyBinaryValueCanCalledMoreThanOnce(BinaryDocValues values, BytesRef expected) throws IOException {
+ for (int j = 0, numCall = randomIntBetween(1, 10); j < numCall; j++) {
+ assertEquals(values.binaryValue(), expected);
+ }
+ }
+
private void verifySortedBinary(Supplier supplier, int maxDoc, FixedBitSet rootDocs, FixedBitSet innerDocs) throws IOException {
- for (BytesRef missingValue : new BytesRef[] { new BytesRef(), new BytesRef(RandomStrings.randomAsciiOfLength(random(), 8)) }) {
+ for (BytesRef missingValue : new BytesRef[] { new BytesRef(), new BytesRef(randomAlphaOfLengthBetween(8, 8)) }) {
for (MultiValueMode mode : new MultiValueMode[] {MultiValueMode.MIN, MultiValueMode.MAX}) {
SortedBinaryDocValues values = supplier.get();
final BinaryDocValues selected = mode.select(values, missingValue, rootDocs, new BitSetIterator(innerDocs, 0L), maxDoc);
@@ -533,6 +556,8 @@ private void verifySortedBinary(Supplier supplier, int ma
for (int root = rootDocs.nextSetBit(0); root != -1; root = root + 1 < maxDoc ? rootDocs.nextSetBit(root + 1) : -1) {
assertTrue(selected.advanceExact(root));
final BytesRef actual = selected.binaryValue();
+ verifyBinaryValueCanCalledMoreThanOnce(selected, actual);
+
BytesRef expected = null;
for (int child = innerDocs.nextSetBit(prevRoot + 1); child != -1 && child < root; child = innerDocs.nextSetBit(child + 1)) {
if (values.advanceExact(child)) {
@@ -658,7 +683,11 @@ private void verifySortedSet(Supplier supplier, int maxDoc)
SortedSetDocValues values = supplier.get();
final SortedDocValues selected = mode.select(values);
for (int i = 0; i < maxDoc; ++i) {
- final long actual = selected.advanceExact(i) ? selected.ordValue() : -1;
+ long actual = -1;
+ if (selected.advanceExact(i)) {
+ actual = selected.ordValue();
+ verifyOrdValueCanCalledMoreThanOnce(selected, selected.ordValue());
+ }
int expected = -1;
if (values.advanceExact(i)) {
for (long ord = values.nextOrd(); ord != SortedSetDocValues.NO_MORE_ORDS; ord = values.nextOrd()) {
@@ -679,13 +708,23 @@ private void verifySortedSet(Supplier supplier, int maxDoc)
}
}
+ private void verifyOrdValueCanCalledMoreThanOnce(SortedDocValues values, long expected) throws IOException {
+ for (int j = 0, numCall = randomIntBetween(1, 10); j < numCall; j++) {
+ assertEquals(values.ordValue(), expected);
+ }
+ }
+
private void verifySortedSet(Supplier supplier, int maxDoc, FixedBitSet rootDocs, FixedBitSet innerDocs) throws IOException {
for (MultiValueMode mode : new MultiValueMode[] {MultiValueMode.MIN, MultiValueMode.MAX}) {
SortedSetDocValues values = supplier.get();
final SortedDocValues selected = mode.select(values, rootDocs, new BitSetIterator(innerDocs, 0L));
int prevRoot = -1;
for (int root = rootDocs.nextSetBit(0); root != -1; root = root + 1 < maxDoc ? rootDocs.nextSetBit(root + 1) : -1) {
- final int actual = selected.advanceExact(root) ? selected.ordValue() : -1;
+ int actual = -1;
+ if (selected.advanceExact(root)) {
+ actual = selected.ordValue();
+ verifyOrdValueCanCalledMoreThanOnce(selected, actual);
+ }
int expected = -1;
for (int child = innerDocs.nextSetBit(prevRoot + 1); child != -1 && child < root; child = innerDocs.nextSetBit(child + 1)) {
if (values.advanceExact(child)) {
diff --git a/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java b/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java
index 34aa2ab3117aa..faf1f65f34bda 100644
--- a/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java
+++ b/core/src/test/java/org/elasticsearch/search/fetch/subphase/highlight/HighlighterSearchIT.java
@@ -1358,9 +1358,9 @@ public void testPhrasePrefix() throws IOException {
Builder builder = Settings.builder()
.put(indexSettings())
.put("index.analysis.analyzer.synonym.tokenizer", "whitespace")
- .putArray("index.analysis.analyzer.synonym.filter", "synonym", "lowercase")
+ .putList("index.analysis.analyzer.synonym.filter", "synonym", "lowercase")
.put("index.analysis.filter.synonym.type", "synonym")
- .putArray("index.analysis.filter.synonym.synonyms", "quick => fast");
+ .putList("index.analysis.filter.synonym.synonyms", "quick => fast");
assertAcked(prepareCreate("first_test_index").setSettings(builder.build()).addMapping("type1", type1TermVectorMapping()));
@@ -2773,9 +2773,9 @@ public void testSynonyms() throws IOException {
Builder builder = Settings.builder()
.put(indexSettings())
.put("index.analysis.analyzer.synonym.tokenizer", "whitespace")
- .putArray("index.analysis.analyzer.synonym.filter", "synonym", "lowercase")
+ .putList("index.analysis.analyzer.synonym.filter", "synonym", "lowercase")
.put("index.analysis.filter.synonym.type", "synonym")
- .putArray("index.analysis.filter.synonym.synonyms", "fast,quick");
+ .putList("index.analysis.filter.synonym.synonyms", "fast,quick");
assertAcked(prepareCreate("test").setSettings(builder.build())
.addMapping("type1", "field1",
diff --git a/core/src/test/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java b/core/src/test/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java
index ab23dfbe21928..58565b5f264b7 100644
--- a/core/src/test/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java
+++ b/core/src/test/java/org/elasticsearch/search/functionscore/QueryRescorerIT.java
@@ -38,9 +38,7 @@
import org.elasticsearch.search.SearchHits;
import org.elasticsearch.search.rescore.QueryRescoreMode;
import org.elasticsearch.search.rescore.QueryRescorerBuilder;
-import org.elasticsearch.search.sort.SortBuilder;
import org.elasticsearch.search.sort.SortBuilders;
-import org.elasticsearch.search.sort.SortOrder;
import org.elasticsearch.test.ESIntegTestCase;
import java.util.Arrays;
@@ -159,9 +157,9 @@ public void testRescorePhrase() throws Exception {
public void testMoreDocs() throws Exception {
Builder builder = Settings.builder();
builder.put("index.analysis.analyzer.synonym.tokenizer", "whitespace");
- builder.putArray("index.analysis.analyzer.synonym.filter", "synonym", "lowercase");
+ builder.putList("index.analysis.analyzer.synonym.filter", "synonym", "lowercase");
builder.put("index.analysis.filter.synonym.type", "synonym");
- builder.putArray("index.analysis.filter.synonym.synonyms", "ave => ave, avenue", "street => str, street");
+ builder.putList("index.analysis.filter.synonym.synonyms", "ave => ave, avenue", "street => str, street");
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
.startObject("field1").field("type", "text").field("analyzer", "whitespace").field("search_analyzer", "synonym")
@@ -237,9 +235,9 @@ public void testMoreDocs() throws Exception {
public void testSmallRescoreWindow() throws Exception {
Builder builder = Settings.builder();
builder.put("index.analysis.analyzer.synonym.tokenizer", "whitespace");
- builder.putArray("index.analysis.analyzer.synonym.filter", "synonym", "lowercase");
+ builder.putList("index.analysis.analyzer.synonym.filter", "synonym", "lowercase");
builder.put("index.analysis.filter.synonym.type", "synonym");
- builder.putArray("index.analysis.filter.synonym.synonyms", "ave => ave, avenue", "street => str, street");
+ builder.putList("index.analysis.filter.synonym.synonyms", "ave => ave, avenue", "street => str, street");
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
.startObject("field1").field("type", "text").field("analyzer", "whitespace").field("search_analyzer", "synonym")
@@ -309,9 +307,9 @@ public void testSmallRescoreWindow() throws Exception {
public void testRescorerMadeScoresWorse() throws Exception {
Builder builder = Settings.builder();
builder.put("index.analysis.analyzer.synonym.tokenizer", "whitespace");
- builder.putArray("index.analysis.analyzer.synonym.filter", "synonym", "lowercase");
+ builder.putList("index.analysis.analyzer.synonym.filter", "synonym", "lowercase");
builder.put("index.analysis.filter.synonym.type", "synonym");
- builder.putArray("index.analysis.filter.synonym.synonyms", "ave => ave, avenue", "street => str, street");
+ builder.putList("index.analysis.filter.synonym.synonyms", "ave => ave, avenue", "street => str, street");
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties")
.startObject("field1").field("type", "text").field("analyzer", "whitespace").field("search_analyzer", "synonym")
diff --git a/core/src/test/java/org/elasticsearch/search/query/QueryStringIT.java b/core/src/test/java/org/elasticsearch/search/query/QueryStringIT.java
index 04e260e9b0fbd..e976437ceb9e2 100644
--- a/core/src/test/java/org/elasticsearch/search/query/QueryStringIT.java
+++ b/core/src/test/java/org/elasticsearch/search/query/QueryStringIT.java
@@ -19,7 +19,6 @@
package org.elasticsearch.search.query;
-import org.apache.lucene.util.LuceneTestCase;
import org.elasticsearch.ExceptionsHelper;
import org.elasticsearch.action.admin.indices.create.CreateIndexRequestBuilder;
import org.elasticsearch.action.index.IndexRequestBuilder;
@@ -50,7 +49,6 @@
import static org.elasticsearch.test.StreamsUtils.copyToStringFromClasspath;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertAcked;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertHitCount;
-import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertNoSearchHits;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertSearchHits;
import static org.hamcrest.Matchers.containsInAnyOrder;
import static org.hamcrest.Matchers.containsString;
@@ -277,10 +275,10 @@ private void setupIndexWithGraph(String index) throws Exception {
Settings.builder()
.put(indexSettings())
.put("index.analysis.filter.graphsyns.type", "synonym_graph")
- .putArray("index.analysis.filter.graphsyns.synonyms", "wtf, what the fudge", "foo, bar baz")
+ .putList("index.analysis.filter.graphsyns.synonyms", "wtf, what the fudge", "foo, bar baz")
.put("index.analysis.analyzer.lower_graphsyns.type", "custom")
.put("index.analysis.analyzer.lower_graphsyns.tokenizer", "standard")
- .putArray("index.analysis.analyzer.lower_graphsyns.filter", "lowercase", "graphsyns")
+ .putList("index.analysis.analyzer.lower_graphsyns.filter", "lowercase", "graphsyns")
);
XContentBuilder mapping = XContentFactory.jsonBuilder().startObject().startObject(index).startObject("properties")
diff --git a/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java b/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java
index 9f7aaff4489d8..3ad7a83ef19db 100644
--- a/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java
+++ b/core/src/test/java/org/elasticsearch/search/query/SearchQueryIT.java
@@ -349,7 +349,7 @@ public void testCommonTermsQueryStackedTokens() throws Exception {
.put(indexSettings())
.put(SETTING_NUMBER_OF_SHARDS,1)
.put("index.analysis.filter.syns.type","synonym")
- .putArray("index.analysis.filter.syns.synonyms","quick,fast")
+ .putList("index.analysis.filter.syns.synonyms","quick,fast")
.put("index.analysis.analyzer.syns.tokenizer","whitespace")
.put("index.analysis.analyzer.syns.filter","syns")
)
@@ -1572,9 +1572,9 @@ public void testMatchQueryWithSynonyms() throws IOException {
.put("index.analysis.analyzer.index.filter", "lowercase")
.put("index.analysis.analyzer.search.type", "custom")
.put("index.analysis.analyzer.search.tokenizer", "standard")
- .putArray("index.analysis.analyzer.search.filter", "lowercase", "synonym")
+ .putList("index.analysis.analyzer.search.filter", "lowercase", "synonym")
.put("index.analysis.filter.synonym.type", "synonym")
- .putArray("index.analysis.filter.synonym.synonyms", "fast, quick"));
+ .putList("index.analysis.filter.synonym.synonyms", "fast, quick"));
assertAcked(builder.addMapping("test", "text", "type=text,analyzer=index,search_analyzer=search"));
client().prepareIndex("test", "test", "1").setSource("text", "quick brown fox").get();
@@ -1602,9 +1602,9 @@ public void testQueryStringWithSynonyms() throws IOException {
.put("index.analysis.analyzer.index.filter", "lowercase")
.put("index.analysis.analyzer.search.type", "custom")
.put("index.analysis.analyzer.search.tokenizer", "standard")
- .putArray("index.analysis.analyzer.search.filter", "lowercase", "synonym")
+ .putList("index.analysis.analyzer.search.filter", "lowercase", "synonym")
.put("index.analysis.filter.synonym.type", "synonym")
- .putArray("index.analysis.filter.synonym.synonyms", "fast, quick"));
+ .putList("index.analysis.filter.synonym.synonyms", "fast, quick"));
assertAcked(builder.addMapping("test", "text", "type=text,analyzer=index,search_analyzer=search"));
client().prepareIndex("test", "test", "1").setSource("text", "quick brown fox").get();
@@ -1807,7 +1807,7 @@ public void testNGramCopyField() {
.put("index.analysis.tokenizer.my_ngram_tokenizer.type", "nGram")
.put("index.analysis.tokenizer.my_ngram_tokenizer.min_gram", "1")
.put("index.analysis.tokenizer.my_ngram_tokenizer.max_gram", "10")
- .putArray("index.analysis.tokenizer.my_ngram_tokenizer.token_chars", new String[0]));
+ .putList("index.analysis.tokenizer.my_ngram_tokenizer.token_chars", new String[0]));
assertAcked(builder.addMapping("test", "origin", "type=text,copy_to=meta", "meta", "type=text,analyzer=my_ngram_analyzer"));
// we only have ngrams as the index analyzer so searches will get standard analyzer
diff --git a/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java b/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java
index 3cbee6adc4161..01b16bb9fb698 100644
--- a/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java
+++ b/core/src/test/java/org/elasticsearch/search/suggest/CompletionSuggestSearchIT.java
@@ -528,9 +528,9 @@ public void testThatSynonymsWork() throws Exception {
Settings.Builder settingsBuilder = Settings.builder()
.put("analysis.analyzer.suggest_analyzer_synonyms.type", "custom")
.put("analysis.analyzer.suggest_analyzer_synonyms.tokenizer", "standard")
- .putArray("analysis.analyzer.suggest_analyzer_synonyms.filter", "standard", "lowercase", "my_synonyms")
+ .putList("analysis.analyzer.suggest_analyzer_synonyms.filter", "standard", "lowercase", "my_synonyms")
.put("analysis.filter.my_synonyms.type", "synonym")
- .putArray("analysis.filter.my_synonyms.synonyms", "foo,renamed");
+ .putList("analysis.filter.my_synonyms.synonyms", "foo,renamed");
completionMappingBuilder.searchAnalyzer("suggest_analyzer_synonyms").indexAnalyzer("suggest_analyzer_synonyms");
createIndexAndMappingAndSettings(settingsBuilder.build(), completionMappingBuilder);
@@ -806,7 +806,7 @@ public void testThatSortingOnCompletionFieldReturnsUsefulException() throws Exce
public void testThatSuggestStopFilterWorks() throws Exception {
Settings.Builder settingsBuilder = Settings.builder()
.put("index.analysis.analyzer.stoptest.tokenizer", "standard")
- .putArray("index.analysis.analyzer.stoptest.filter", "standard", "suggest_stop_filter")
+ .putList("index.analysis.analyzer.stoptest.filter", "standard", "suggest_stop_filter")
.put("index.analysis.filter.suggest_stop_filter.type", "stop")
.put("index.analysis.filter.suggest_stop_filter.remove_trailing", false);
diff --git a/core/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java b/core/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java
index 541cafc7962b2..b0b655b0f8b2a 100644
--- a/core/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java
+++ b/core/src/test/java/org/elasticsearch/search/suggest/SuggestSearchIT.java
@@ -30,7 +30,6 @@
import org.elasticsearch.common.xcontent.XContentFactory;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.ScriptPlugin;
-import org.elasticsearch.script.ExecutableScript;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.ScriptEngine;
import org.elasticsearch.script.TemplateScript;
@@ -173,7 +172,7 @@ public void testSuggestModes() throws IOException {
.put(SETTING_NUMBER_OF_SHARDS, 1)
.put(SETTING_NUMBER_OF_REPLICAS, 0)
.put("index.analysis.analyzer.biword.tokenizer", "standard")
- .putArray("index.analysis.analyzer.biword.filter", "shingler", "lowercase")
+ .putList("index.analysis.analyzer.biword.filter", "shingler", "lowercase")
.put("index.analysis.filter.shingler.type", "shingle")
.put("index.analysis.filter.shingler.min_shingle_size", 2)
.put("index.analysis.filter.shingler.max_shingle_size", 3));
@@ -253,7 +252,7 @@ public void testUnmappedField() throws IOException, InterruptedException, Execut
CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(Settings.builder()
.put(indexSettings())
.put("index.analysis.analyzer.biword.tokenizer", "standard")
- .putArray("index.analysis.analyzer.biword.filter", "shingler", "lowercase")
+ .putList("index.analysis.analyzer.biword.filter", "shingler", "lowercase")
.put("index.analysis.filter.shingler.type", "shingle")
.put("index.analysis.filter.shingler.min_shingle_size", 2)
.put("index.analysis.filter.shingler.max_shingle_size", 3));
@@ -427,7 +426,7 @@ public void testStopwordsOnlyPhraseSuggest() throws IOException {
assertAcked(prepareCreate("test").addMapping("typ1", "body", "type=text,analyzer=stopwd").setSettings(
Settings.builder()
.put("index.analysis.analyzer.stopwd.tokenizer", "whitespace")
- .putArray("index.analysis.analyzer.stopwd.filter", "stop")
+ .putList("index.analysis.analyzer.stopwd.filter", "stop")
));
ensureGreen();
index("test", "typ1", "1", "body", "this is a test");
@@ -444,9 +443,9 @@ public void testPrefixLength() throws IOException {
CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(Settings.builder()
.put(SETTING_NUMBER_OF_SHARDS, 1)
.put("index.analysis.analyzer.body.tokenizer", "standard")
- .putArray("index.analysis.analyzer.body.filter", "lowercase")
+ .putList("index.analysis.analyzer.body.filter", "lowercase")
.put("index.analysis.analyzer.bigram.tokenizer", "standard")
- .putArray("index.analysis.analyzer.bigram.filter", "my_shingle", "lowercase")
+ .putList("index.analysis.analyzer.bigram.filter", "my_shingle", "lowercase")
.put("index.analysis.filter.my_shingle.type", "shingle")
.put("index.analysis.filter.my_shingle.output_unigrams", false)
.put("index.analysis.filter.my_shingle.min_shingle_size", 2)
@@ -482,9 +481,9 @@ public void testBasicPhraseSuggest() throws IOException, URISyntaxException {
CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(Settings.builder()
.put(indexSettings())
.put("index.analysis.analyzer.body.tokenizer", "standard")
- .putArray("index.analysis.analyzer.body.filter", "lowercase")
+ .putList("index.analysis.analyzer.body.filter", "lowercase")
.put("index.analysis.analyzer.bigram.tokenizer", "standard")
- .putArray("index.analysis.analyzer.bigram.filter", "my_shingle", "lowercase")
+ .putList("index.analysis.analyzer.bigram.filter", "my_shingle", "lowercase")
.put("index.analysis.filter.my_shingle.type", "shingle")
.put("index.analysis.filter.my_shingle.output_unigrams", false)
.put("index.analysis.filter.my_shingle.min_shingle_size", 2)
@@ -615,9 +614,9 @@ public void testSizeParam() throws IOException {
CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(Settings.builder()
.put(SETTING_NUMBER_OF_SHARDS, 1)
.put("index.analysis.analyzer.body.tokenizer", "standard")
- .putArray("index.analysis.analyzer.body.filter", "lowercase")
+ .putList("index.analysis.analyzer.body.filter", "lowercase")
.put("index.analysis.analyzer.bigram.tokenizer", "standard")
- .putArray("index.analysis.analyzer.bigram.filter", "my_shingle", "lowercase")
+ .putList("index.analysis.analyzer.bigram.filter", "my_shingle", "lowercase")
.put("index.analysis.filter.my_shingle.type", "shingle")
.put("index.analysis.filter.my_shingle.output_unigrams", false)
.put("index.analysis.filter.my_shingle.min_shingle_size", 2)
@@ -685,7 +684,7 @@ public void testShardFailures() throws IOException, InterruptedException {
CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(Settings.builder()
.put(indexSettings())
.put("index.analysis.analyzer.suggest.tokenizer", "standard")
- .putArray("index.analysis.analyzer.suggest.filter", "standard", "lowercase", "shingler")
+ .putList("index.analysis.analyzer.suggest.filter", "standard", "lowercase", "shingler")
.put("index.analysis.filter.shingler.type", "shingle")
.put("index.analysis.filter.shingler.min_shingle_size", 2)
.put("index.analysis.filter.shingler.max_shingle_size", 5)
@@ -745,7 +744,7 @@ public void testEmptyShards() throws IOException, InterruptedException {
assertAcked(prepareCreate("test").setSettings(Settings.builder()
.put(indexSettings())
.put("index.analysis.analyzer.suggest.tokenizer", "standard")
- .putArray("index.analysis.analyzer.suggest.filter", "standard", "lowercase", "shingler")
+ .putList("index.analysis.analyzer.suggest.filter", "standard", "lowercase", "shingler")
.put("index.analysis.filter.shingler.type", "shingle")
.put("index.analysis.filter.shingler.min_shingle_size", 2)
.put("index.analysis.filter.shingler.max_shingle_size", 5)
@@ -781,7 +780,7 @@ public void testSearchForRarePhrase() throws IOException {
CreateIndexRequestBuilder builder = prepareCreate("test").setSettings(Settings.builder()
.put(indexSettings())
.put("index.analysis.analyzer.body.tokenizer", "standard")
- .putArray("index.analysis.analyzer.body.filter", "lowercase", "my_shingle")
+ .putList("index.analysis.analyzer.body.filter", "lowercase", "my_shingle")
.put("index.analysis.filter.my_shingle.type", "shingle")
.put("index.analysis.filter.my_shingle.output_unigrams", true)
.put("index.analysis.filter.my_shingle.min_shingle_size", 2)
@@ -836,7 +835,7 @@ public void testSuggestWithManyCandidates() throws InterruptedException, Executi
.put(indexSettings())
.put(SETTING_NUMBER_OF_SHARDS, 1) // A single shard will help to keep the tests repeatable.
.put("index.analysis.analyzer.text.tokenizer", "standard")
- .putArray("index.analysis.analyzer.text.filter", "lowercase", "my_shingle")
+ .putList("index.analysis.analyzer.text.filter", "lowercase", "my_shingle")
.put("index.analysis.filter.my_shingle.type", "shingle")
.put("index.analysis.filter.my_shingle.output_unigrams", true)
.put("index.analysis.filter.my_shingle.min_shingle_size", 2)
@@ -1026,7 +1025,7 @@ public void testPhraseSuggesterCollate() throws InterruptedException, ExecutionE
.put(indexSettings())
.put(SETTING_NUMBER_OF_SHARDS, 1) // A single shard will help to keep the tests repeatable.
.put("index.analysis.analyzer.text.tokenizer", "standard")
- .putArray("index.analysis.analyzer.text.filter", "lowercase", "my_shingle")
+ .putList("index.analysis.analyzer.text.filter", "lowercase", "my_shingle")
.put("index.analysis.filter.my_shingle.type", "shingle")
.put("index.analysis.filter.my_shingle.output_unigrams", true)
.put("index.analysis.filter.my_shingle.min_shingle_size", 2)
diff --git a/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java b/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java
index 5883d3a5645cd..a5e92d89906cc 100644
--- a/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java
+++ b/core/src/test/java/org/elasticsearch/snapshots/SharedClusterSnapshotRestoreIT.java
@@ -1827,7 +1827,7 @@ public void testChangeSettingsOnRestore() throws Exception {
.put(INDEX_REFRESH_INTERVAL_SETTING.getKey(), "10s")
.put("index.analysis.analyzer.my_analyzer.type", "custom")
.put("index.analysis.analyzer.my_analyzer.tokenizer", "standard")
- .putArray("index.analysis.analyzer.my_analyzer.filter", "lowercase", "my_synonym")
+ .putList("index.analysis.analyzer.my_analyzer.filter", "lowercase", "my_synonym")
.put("index.analysis.filter.my_synonym.type", "synonym")
.put("index.analysis.filter.my_synonym.synonyms", "foo => bar");
diff --git a/core/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java b/core/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java
index aa4c7415a4c45..8e0c039176207 100644
--- a/core/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java
+++ b/core/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java
@@ -125,8 +125,8 @@ public void testGroupClusterIndices() throws IOException {
transportService.start();
transportService.acceptIncomingRequests();
Settings.Builder builder = Settings.builder();
- builder.putArray("search.remote.cluster_1.seeds", seedNode.getAddress().toString());
- builder.putArray("search.remote.cluster_2.seeds", otherSeedNode.getAddress().toString());
+ builder.putList("search.remote.cluster_1.seeds", seedNode.getAddress().toString());
+ builder.putList("search.remote.cluster_2.seeds", otherSeedNode.getAddress().toString());
try (RemoteClusterService service = new RemoteClusterService(builder.build(), transportService)) {
assertFalse(service.isCrossClusterSearchEnabled());
service.initializeRemoteClusters();
@@ -171,8 +171,8 @@ public void testIncrementallyAddClusters() throws IOException {
transportService.start();
transportService.acceptIncomingRequests();
Settings.Builder builder = Settings.builder();
- builder.putArray("search.remote.cluster_1.seeds", seedNode.getAddress().toString());
- builder.putArray("search.remote.cluster_2.seeds", otherSeedNode.getAddress().toString());
+ builder.putList("search.remote.cluster_1.seeds", seedNode.getAddress().toString());
+ builder.putList("search.remote.cluster_2.seeds", otherSeedNode.getAddress().toString());
try (RemoteClusterService service = new RemoteClusterService(Settings.EMPTY, transportService)) {
assertFalse(service.isCrossClusterSearchEnabled());
service.initializeRemoteClusters();
@@ -225,9 +225,9 @@ public void testRemoteNodeAttribute() throws IOException, InterruptedException {
transportService.start();
transportService.acceptIncomingRequests();
final Settings.Builder builder = Settings.builder();
- builder.putArray(
+ builder.putList(
"search.remote.cluster_1.seeds", c1N1Node.getAddress().toString());
- builder.putArray(
+ builder.putList(
"search.remote.cluster_2.seeds", c2N1Node.getAddress().toString());
try (RemoteClusterService service =
new RemoteClusterService(settings, transportService)) {
@@ -302,9 +302,9 @@ public void testCollectNodes() throws InterruptedException, IOException {
transportService.start();
transportService.acceptIncomingRequests();
final Settings.Builder builder = Settings.builder();
- builder.putArray(
+ builder.putList(
"search.remote.cluster_1.seeds", c1N1Node.getAddress().toString());
- builder.putArray(
+ builder.putList(
"search.remote.cluster_2.seeds", c2N1Node.getAddress().toString());
try (RemoteClusterService service =
new RemoteClusterService(settings, transportService)) {
diff --git a/core/src/test/java/org/elasticsearch/transport/TCPTransportTests.java b/core/src/test/java/org/elasticsearch/transport/TcpTransportTests.java
similarity index 98%
rename from core/src/test/java/org/elasticsearch/transport/TCPTransportTests.java
rename to core/src/test/java/org/elasticsearch/transport/TcpTransportTests.java
index 55457cc8ae431..e67324fffe868 100644
--- a/core/src/test/java/org/elasticsearch/transport/TCPTransportTests.java
+++ b/core/src/test/java/org/elasticsearch/transport/TcpTransportTests.java
@@ -46,7 +46,7 @@
import static org.hamcrest.Matchers.equalTo;
/** Unit tests for TCPTransport */
-public class TCPTransportTests extends ESTestCase {
+public class TcpTransportTests extends ESTestCase {
/** Test ipv4 host with a default port works */
public void testParseV4DefaultPort() throws Exception {
@@ -175,7 +175,7 @@ public void testCompressRequest() throws IOException {
final boolean compressed = randomBoolean();
final AtomicBoolean called = new AtomicBoolean(false);
Req request = new Req(randomRealisticUnicodeOfLengthBetween(10, 100));
- ThreadPool threadPool = new TestThreadPool(TCPTransportTests.class.getName());
+ ThreadPool threadPool = new TestThreadPool(TcpTransportTests.class.getName());
AtomicReference exceptionReference = new AtomicReference<>();
try {
TcpTransport transport = new TcpTransport("test", Settings.builder().put("transport.tcp.compress", compressed).build(),
@@ -224,8 +224,8 @@ protected void sendMessage(Object o, BytesReference reference, ActionListener li
}
@Override
- protected NodeChannels connectToChannels(DiscoveryNode node, ConnectionProfile profile,
- Consumer onChannelClose) throws IOException {
+ protected NodeChannels connectToChannels(
+ DiscoveryNode node, ConnectionProfile profile, Consumer onChannelClose) throws IOException {
return new NodeChannels(node, new Object[profile.getNumConnections()], profile);
}
diff --git a/core/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java b/core/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java
index 6f8cbb6a222d2..a87f428fec51e 100644
--- a/core/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java
+++ b/core/src/test/java/org/elasticsearch/validate/SimpleValidateQueryIT.java
@@ -180,9 +180,9 @@ public void testExplainMatchPhrasePrefix() {
assertAcked(prepareCreate("test").setSettings(
Settings.builder().put(indexSettings())
.put("index.analysis.filter.syns.type", "synonym")
- .putArray("index.analysis.filter.syns.synonyms", "one,two")
+ .putList("index.analysis.filter.syns.synonyms", "one,two")
.put("index.analysis.analyzer.syns.tokenizer", "standard")
- .putArray("index.analysis.analyzer.syns.filter", "syns")
+ .putList("index.analysis.analyzer.syns.filter", "syns")
).addMapping("test", "field","type=text,analyzer=syns"));
ensureGreen();
diff --git a/distribution/bwc/build.gradle b/distribution/bwc/build.gradle
index 01f5c04f076d7..3bd67b745da76 100644
--- a/distribution/bwc/build.gradle
+++ b/distribution/bwc/build.gradle
@@ -66,42 +66,44 @@ if (enabled) {
}
File checkoutDir = file("${buildDir}/bwc/checkout-${bwcBranch}")
+ final String remote = System.getProperty("tests.bwc.remote", "elastic")
+
task createClone(type: LoggedExec) {
onlyIf { checkoutDir.exists() == false }
commandLine = ['git', 'clone', rootDir, checkoutDir]
}
- task findUpstream(type: LoggedExec) {
+ task findRemote(type: LoggedExec) {
dependsOn createClone
workingDir = checkoutDir
commandLine = ['git', 'remote', '-v']
doLast {
- project.ext.upstreamExists = false
+ project.ext.remoteExists = false
output.toString('UTF-8').eachLine {
- if (it.contains("upstream")) {
- project.ext.upstreamExists = true
+ if (it.contains("${remote}\thttps://github.com/${remote}/elasticsearch.git")) {
+ project.ext.remoteExists = true
}
}
}
}
- task addUpstream(type: LoggedExec) {
- dependsOn findUpstream
- onlyIf { project.ext.upstreamExists == false }
+ task addRemote(type: LoggedExec) {
+ dependsOn findRemote
+ onlyIf { project.ext.remoteExists == false }
workingDir = checkoutDir
- commandLine = ['git', 'remote', 'add', 'upstream', 'https://github.com/elastic/elasticsearch.git']
+ commandLine = ['git', 'remote', 'add', "${remote}", "https://github.com/${remote}/elasticsearch.git"]
}
task fetchLatest(type: LoggedExec) {
onlyIf { project.gradle.startParameter.isOffline() == false }
- dependsOn addUpstream
+ dependsOn addRemote
workingDir = checkoutDir
commandLine = ['git', 'fetch', '--all']
}
String buildMetadataKey = "bwc_refspec_${project.path.substring(1)}"
task checkoutBwcBranch(type: LoggedExec) {
- String refspec = System.getProperty("tests.bwc.refspec", buildMetadata.get(buildMetadataKey, "upstream/${bwcBranch}"))
+ String refspec = System.getProperty("tests.bwc.refspec", buildMetadata.get(buildMetadataKey, "${remote}/${bwcBranch}"))
dependsOn fetchLatest
workingDir = checkoutDir
commandLine = ['git', 'checkout', refspec]
diff --git a/distribution/src/main/resources/bin/elasticsearch-service.bat b/distribution/src/main/resources/bin/elasticsearch-service.bat
index 8326c689b2877..dfb854a47087e 100644
--- a/distribution/src/main/resources/bin/elasticsearch-service.bat
+++ b/distribution/src/main/resources/bin/elasticsearch-service.bat
@@ -163,15 +163,15 @@ for %%a in ("%ES_JAVA_OPTS:;=","%") do (
@endlocal & set JVM_MS=%JVM_MS% & set JVM_MX=%JVM_MX% & set JVM_SS=%JVM_SS%
if "%JVM_MS%" == "" (
- echo minimum heap size not set; configure using -Xms via %ES_JVM_OPTIONS% or ES_JAVA_OPTS
+ echo minimum heap size not set; configure using -Xms via "%ES_JVM_OPTIONS%" or ES_JAVA_OPTS
goto:eof
)
if "%JVM_MX%" == "" (
- echo maximum heap size not set; configure using -Xmx via %ES_JVM_OPTIONS% or ES_JAVA_OPTS
+ echo maximum heap size not set; configure using -Xmx via "%ES_JVM_OPTIONS%" or ES_JAVA_OPTS
goto:eof
)
if "%JVM_SS%" == "" (
- echo thread stack size not set; configure using -Xss via %ES_JVM_OPTIONS% or ES_JAVA_OPTS
+ echo thread stack size not set; configure using -Xss via "%ES_JVM_OPTIONS%" or ES_JAVA_OPTS
goto:eof
)
diff --git a/docs/plugins/plugin-script.asciidoc b/docs/plugins/plugin-script.asciidoc
index e7f4d25802384..329a65afbc59a 100644
--- a/docs/plugins/plugin-script.asciidoc
+++ b/docs/plugins/plugin-script.asciidoc
@@ -64,21 +64,42 @@ sudo bin/elasticsearch-plugin install [url] <1>
-----------------------------------
<1> must be a valid URL, the plugin name is determined from its descriptor.
-For instance, to install a plugin from your local file system, you could run:
-
+--
+Unix::
+To install a plugin from your local file system at `/path/to/plugin.zip`, you could run:
++
[source,shell]
-----------------------------------
sudo bin/elasticsearch-plugin install file:///path/to/plugin.zip
-----------------------------------
+Windows::
+To install a plugin from your local file system at `C:\path\to\plugin.zip`, you could run:
++
+[source,shell]
+-----------------------------------
+bin\elasticsearch-plugin install file:///C:/path/to/plugin.zip
+-----------------------------------
++
+NOTE: Any path that contains spaces must be wrapped in quotes!
+
+HTTP::
+To install a plugin from an HTTP URL:
++
+[source,shell]
+-----------------------------------
+sudo bin/elasticsearch-plugin install http://some.domain/path/to/plugin.zip
+-----------------------------------
++
The plugin script will refuse to talk to an HTTPS URL with an untrusted
certificate. To use a self-signed HTTPS cert, you will need to add the CA cert
to a local Java truststore and pass the location to the script as follows:
-
++
[source,shell]
-----------------------------------
-sudo ES_JAVA_OPTS="-Djavax.net.ssl.trustStore=/path/to/trustStore.jks" bin/elasticsearch-plugin install https://....
+sudo ES_JAVA_OPTS="-Djavax.net.ssl.trustStore=/path/to/trustStore.jks" bin/elasticsearch-plugin install https://host/plugin.zip
-----------------------------------
+--
[[listing-removing-updating]]
=== Listing, Removing and Updating Installed Plugins
diff --git a/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc b/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc
index 11953cce537a2..ea72e07e337b8 100644
--- a/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc
+++ b/docs/reference/aggregations/bucket/datehistogram-aggregation.asciidoc
@@ -402,7 +402,7 @@ deprecated[6.0.0, Use `_key` instead of `_time` to order buckets by their dates/
There are some cases where date histogram can't help us, like for example, when we need
to aggregate the results by day of the week.
-In this case to overcame the problem, we can use a script that returns the day of the week:
+In this case to overcome the problem, we can use a script that returns the day of the week:
[source,js]
diff --git a/docs/reference/getting-started.asciidoc b/docs/reference/getting-started.asciidoc
index bb7908a80c7b8..00876365a27de 100755
--- a/docs/reference/getting-started.asciidoc
+++ b/docs/reference/getting-started.asciidoc
@@ -148,6 +148,16 @@ And now we are ready to start our node and single cluster:
./elasticsearch
--------------------------------------------------
+[float]
+=== Installation with Homebrew
+
+On macOS, Elasticsearch can also be installed via https://brew.sh[Homebrew]:
+
+["source","sh"]
+--------------------------------------------------
+brew install elasticsearch
+--------------------------------------------------
+
[float]
=== Installation example with MSI Windows Installer
diff --git a/docs/reference/search/request/preference.asciidoc b/docs/reference/search/request/preference.asciidoc
index d0f60d700a82c..325730f70a981 100644
--- a/docs/reference/search/request/preference.asciidoc
+++ b/docs/reference/search/request/preference.asciidoc
@@ -9,18 +9,21 @@ The `preference` is a query string parameter which can be set to:
[horizontal]
`_primary`::
The operation will go and be executed only on the primary
- shards.
+ shards. deprecated[6.1.0, will be removed in 7.0, use `_only_nodes` or `_prefer_nodes`]
`_primary_first`::
The operation will go and be executed on the primary
shard, and if not available (failover), will execute on other shards.
+ deprecated[6.1.0, will be removed in 7.0, use `_only_nodes` or `_prefer_nodes`]
`_replica`::
The operation will go and be executed only on a replica shard.
+ deprecated[6.1.0, will be removed in 7.0, use `_only_nodes` or `_prefer_nodes`]
`_replica_first`::
The operation will go and be executed only on a replica shard, and if
not available (failover), will execute on other shards.
+ deprecated[6.1.0, will be removed in 7.0, use `_only_nodes` or `_prefer_nodes`]
`_local`::
The operation will prefer to be executed on a local
@@ -33,7 +36,7 @@ The `preference` is a query string parameter which can be set to:
`_shards:2,3`::
Restricts the operation to the specified shards. (`2`
and `3` in this case). This preference can be combined with other
- preferences but it has to appear first: `_shards:2,3|_primary`
+ preferences but it has to appear first: `_shards:2,3|_local`
`_only_nodes`::
Restricts the operation to nodes specified in <>
diff --git a/docs/reference/setup/sysconfig/configuring.asciidoc b/docs/reference/setup/sysconfig/configuring.asciidoc
index 0473bed3a767d..d2cb534c57756 100644
--- a/docs/reference/setup/sysconfig/configuring.asciidoc
+++ b/docs/reference/setup/sysconfig/configuring.asciidoc
@@ -92,9 +92,11 @@ specified via systemd.
The systemd service file (`/usr/lib/systemd/system/elasticsearch.service`)
contains the limits that are applied by default.
-To override these, add a file called
-`/etc/systemd/system/elasticsearch.service.d/elasticsearch.conf` and specify
-any changes in that file, such as:
+To override them, add a file called
+`/etc/systemd/system/elasticsearch.service.d/override.conf` (alternatively,
+you may run `sudo systemctl edit elasticsearch` which opens the file
+automatically inside your default editor). Set any changes in this file,
+such as:
[source,sh]
---------------------------------
@@ -102,6 +104,13 @@ any changes in that file, such as:
LimitMEMLOCK=infinity
---------------------------------
+Once finished, run the following command to reload units:
+
+[source,sh]
+---------------------------------
+sudo systemctl daemon-reload
+---------------------------------
+
[[jvm-options]]
==== Setting JVM options
diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CJKBigramFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CJKBigramFilterFactory.java
index 75323eac10774..01ca2125faebd 100644
--- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CJKBigramFilterFactory.java
+++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CJKBigramFilterFactory.java
@@ -29,6 +29,7 @@
import java.util.Arrays;
import java.util.HashSet;
+import java.util.List;
import java.util.Set;
/**
@@ -54,10 +55,10 @@ public final class CJKBigramFilterFactory extends AbstractTokenFilterFactory {
super(indexSettings, name, settings);
outputUnigrams = settings.getAsBooleanLenientForPreEs6Indices(
indexSettings.getIndexVersionCreated(), "output_unigrams", false, deprecationLogger);
- final String[] asArray = settings.getAsArray("ignored_scripts");
+ final List asArray = settings.getAsList("ignored_scripts");
Set scripts = new HashSet<>(Arrays.asList("han", "hiragana", "katakana", "hangul"));
if (asArray != null) {
- scripts.removeAll(Arrays.asList(asArray));
+ scripts.removeAll(asArray);
}
int flags = 0;
for (String script : scripts) {
diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/HtmlStripCharFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/HtmlStripCharFilterFactory.java
index 9ee889e3af610..760c1c79ba4cd 100644
--- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/HtmlStripCharFilterFactory.java
+++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/HtmlStripCharFilterFactory.java
@@ -26,6 +26,7 @@
import org.elasticsearch.index.analysis.AbstractCharFilterFactory;
import java.io.Reader;
+import java.util.List;
import java.util.Set;
import static java.util.Collections.unmodifiableSet;
@@ -36,8 +37,8 @@ public class HtmlStripCharFilterFactory extends AbstractCharFilterFactory {
HtmlStripCharFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) {
super(indexSettings, name);
- String[] escapedTags = settings.getAsArray("escaped_tags");
- if (escapedTags.length > 0) {
+ List escapedTags = settings.getAsList("escaped_tags");
+ if (escapedTags.size() > 0) {
this.escapedTags = unmodifiableSet(newHashSet(escapedTags));
} else {
this.escapedTags = null;
diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/KeepTypesFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/KeepTypesFilterFactory.java
index 4da560836eb13..0f94b521e4b7d 100644
--- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/KeepTypesFilterFactory.java
+++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/KeepTypesFilterFactory.java
@@ -27,8 +27,8 @@
import org.elasticsearch.index.analysis.AbstractTokenFilterFactory;
import org.elasticsearch.index.analysis.TokenFilterFactory;
-import java.util.Arrays;
import java.util.HashSet;
+import java.util.List;
import java.util.Set;
/**
@@ -48,12 +48,12 @@ public class KeepTypesFilterFactory extends AbstractTokenFilterFactory {
KeepTypesFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) {
super(indexSettings, name, settings);
- final String[] arrayKeepTypes = settings.getAsArray(KEEP_TYPES_KEY, null);
+ final List arrayKeepTypes = settings.getAsList(KEEP_TYPES_KEY, null);
if ((arrayKeepTypes == null)) {
throw new IllegalArgumentException("keep_types requires `" + KEEP_TYPES_KEY + "` to be configured");
}
- this.keepTypes = new HashSet<>(Arrays.asList(arrayKeepTypes));
+ this.keepTypes = new HashSet<>(arrayKeepTypes);
}
@Override
diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/KeepWordFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/KeepWordFilterFactory.java
index f42797e0ff644..70fb18f888e8d 100644
--- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/KeepWordFilterFactory.java
+++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/KeepWordFilterFactory.java
@@ -22,7 +22,6 @@
import org.apache.lucene.analysis.CharArraySet;
import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.miscellaneous.KeepWordFilter;
-import org.apache.lucene.util.Version;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.env.Environment;
import org.elasticsearch.index.IndexSettings;
@@ -31,6 +30,8 @@
import org.elasticsearch.index.analysis.StopTokenFilterFactory;
import org.elasticsearch.index.analysis.TokenFilterFactory;
+import java.util.List;
+
/**
* A {@link TokenFilterFactory} for {@link KeepWordFilter}. This filter only
* keep tokens that are contained in the term set configured via
@@ -61,7 +62,7 @@ public class KeepWordFilterFactory extends AbstractTokenFilterFactory {
KeepWordFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) {
super(indexSettings, name, settings);
- final String[] arrayKeepWords = settings.getAsArray(KEEP_WORDS_KEY, null);
+ final List arrayKeepWords = settings.getAsList(KEEP_WORDS_KEY, null);
final String keepWordsPath = settings.get(KEEP_WORDS_PATH_KEY, null);
if ((arrayKeepWords == null && keepWordsPath == null) || (arrayKeepWords != null && keepWordsPath != null)) {
// we don't allow both or none
diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PatternCaptureGroupTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PatternCaptureGroupTokenFilterFactory.java
index 7c58bc1491ade..35e20942c3595 100644
--- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PatternCaptureGroupTokenFilterFactory.java
+++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/PatternCaptureGroupTokenFilterFactory.java
@@ -27,6 +27,7 @@
import org.elasticsearch.index.IndexSettings;
import org.elasticsearch.index.analysis.AbstractTokenFilterFactory;
+import java.util.List;
import java.util.regex.Pattern;
public class PatternCaptureGroupTokenFilterFactory extends AbstractTokenFilterFactory {
@@ -37,13 +38,13 @@ public class PatternCaptureGroupTokenFilterFactory extends AbstractTokenFilterFa
PatternCaptureGroupTokenFilterFactory(IndexSettings indexSettings, Environment environment, String name, Settings settings) {
super(indexSettings, name, settings);
- String[] regexes = settings.getAsArray(PATTERNS_KEY, null, false);
+ List<String> regexes = settings.getAsList(PATTERNS_KEY, null, false);
if (regexes == null) {
throw new IllegalArgumentException("required setting '" + PATTERNS_KEY + "' is missing for token filter [" + name + "]");
}
- patterns = new Pattern[regexes.length];
- for (int i = 0; i < regexes.length; i++) {
- patterns[i] = Pattern.compile(regexes[i]);
+ patterns = new Pattern[regexes.size()];
+ for (int i = 0; i < regexes.size(); i++) {
+ patterns[i] = Pattern.compile(regexes.get(i));
}
preserveOriginal = settings.getAsBooleanLenientForPreEs6Indices(
diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonGramsTokenFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonGramsTokenFilterFactoryTests.java
index da9ab1090c174..8efc0d5941f9e 100644
--- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonGramsTokenFilterFactoryTests.java
+++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonGramsTokenFilterFactoryTests.java
@@ -56,7 +56,7 @@ public void testDefault() throws IOException {
public void testWithoutCommonWordsMatch() throws IOException {
{
Settings settings = Settings.builder().put("index.analysis.filter.common_grams_default.type", "common_grams")
- .putArray("index.analysis.filter.common_grams_default.common_words", "chromosome", "protein")
+ .putList("index.analysis.filter.common_grams_default.common_words", "chromosome", "protein")
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
@@ -75,7 +75,7 @@ public void testWithoutCommonWordsMatch() throws IOException {
Settings settings = Settings.builder().put("index.analysis.filter.common_grams_default.type", "common_grams")
.put("index.analysis.filter.common_grams_default.query_mode", false)
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
- .putArray("index.analysis.filter.common_grams_default.common_words", "chromosome", "protein")
+ .putList("index.analysis.filter.common_grams_default.common_words", "chromosome", "protein")
.build();
ESTestCase.TestAnalysis analysis = createTestAnalysisFromSettings(settings);
{
@@ -94,7 +94,7 @@ public void testSettings() throws IOException {
Settings settings = Settings.builder().put("index.analysis.filter.common_grams_1.type", "common_grams")
.put("index.analysis.filter.common_grams_1.ignore_case", true)
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
- .putArray("index.analysis.filter.common_grams_1.common_words", "the", "Or", "Not", "a", "is", "an", "they", "are")
+ .putList("index.analysis.filter.common_grams_1.common_words", "the", "Or", "Not", "a", "is", "an", "they", "are")
.build();
ESTestCase.TestAnalysis analysis = createTestAnalysisFromSettings(settings);
TokenFilterFactory tokenFilter = analysis.tokenFilter.get("common_grams_1");
@@ -109,7 +109,7 @@ public void testSettings() throws IOException {
Settings settings = Settings.builder().put("index.analysis.filter.common_grams_2.type", "common_grams")
.put("index.analysis.filter.common_grams_2.ignore_case", false)
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
- .putArray("index.analysis.filter.common_grams_2.common_words", "the", "Or", "noT", "a", "is", "an", "they", "are")
+ .putList("index.analysis.filter.common_grams_2.common_words", "the", "Or", "noT", "a", "is", "an", "they", "are")
.build();
ESTestCase.TestAnalysis analysis = createTestAnalysisFromSettings(settings);
TokenFilterFactory tokenFilter = analysis.tokenFilter.get("common_grams_2");
@@ -122,7 +122,7 @@ public void testSettings() throws IOException {
}
{
Settings settings = Settings.builder().put("index.analysis.filter.common_grams_3.type", "common_grams")
- .putArray("index.analysis.filter.common_grams_3.common_words", "the", "or", "not", "a", "is", "an", "they", "are")
+ .putList("index.analysis.filter.common_grams_3.common_words", "the", "or", "not", "a", "is", "an", "they", "are")
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
ESTestCase.TestAnalysis analysis = createTestAnalysisFromSettings(settings);
@@ -166,7 +166,7 @@ public void testQueryModeSettings() throws IOException {
{
Settings settings = Settings.builder().put("index.analysis.filter.common_grams_1.type", "common_grams")
.put("index.analysis.filter.common_grams_1.query_mode", true)
- .putArray("index.analysis.filter.common_grams_1.common_words", "the", "Or", "Not", "a", "is", "an", "they", "are")
+ .putList("index.analysis.filter.common_grams_1.common_words", "the", "Or", "Not", "a", "is", "an", "they", "are")
.put("index.analysis.filter.common_grams_1.ignore_case", true)
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
@@ -181,7 +181,7 @@ public void testQueryModeSettings() throws IOException {
{
Settings settings = Settings.builder().put("index.analysis.filter.common_grams_2.type", "common_grams")
.put("index.analysis.filter.common_grams_2.query_mode", true)
- .putArray("index.analysis.filter.common_grams_2.common_words", "the", "Or", "noT", "a", "is", "an", "they", "are")
+ .putList("index.analysis.filter.common_grams_2.common_words", "the", "Or", "noT", "a", "is", "an", "they", "are")
.put("index.analysis.filter.common_grams_2.ignore_case", false)
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
@@ -196,7 +196,7 @@ public void testQueryModeSettings() throws IOException {
{
Settings settings = Settings.builder().put("index.analysis.filter.common_grams_3.type", "common_grams")
.put("index.analysis.filter.common_grams_3.query_mode", true)
- .putArray("index.analysis.filter.common_grams_3.common_words", "the", "Or", "noT", "a", "is", "an", "they", "are")
+ .putList("index.analysis.filter.common_grams_3.common_words", "the", "Or", "noT", "a", "is", "an", "they", "are")
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
ESTestCase.TestAnalysis analysis = createTestAnalysisFromSettings(settings);
@@ -210,7 +210,7 @@ public void testQueryModeSettings() throws IOException {
{
Settings settings = Settings.builder().put("index.analysis.filter.common_grams_4.type", "common_grams")
.put("index.analysis.filter.common_grams_4.query_mode", true)
- .putArray("index.analysis.filter.common_grams_4.common_words", "the", "or", "not", "a", "is", "an", "they", "are")
+ .putList("index.analysis.filter.common_grams_4.common_words", "the", "or", "not", "a", "is", "an", "they", "are")
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.build();
ESTestCase.TestAnalysis analysis = createTestAnalysisFromSettings(settings);
diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/HighlighterWithAnalyzersTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/HighlighterWithAnalyzersTests.java
index bb1f2a55f7cb4..6b4682d04a128 100644
--- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/HighlighterWithAnalyzersTests.java
+++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/HighlighterWithAnalyzersTests.java
@@ -71,7 +71,7 @@ public void testNgramHighlightingWithBrokenPositions() throws IOException {
.put("analysis.tokenizer.autocomplete.token_chars", "letter,digit")
.put("analysis.tokenizer.autocomplete.type", "nGram")
.put("analysis.filter.wordDelimiter.type", "word_delimiter")
- .putArray("analysis.filter.wordDelimiter.type_table",
+ .putList("analysis.filter.wordDelimiter.type_table",
"& => ALPHANUM", "| => ALPHANUM", "! => ALPHANUM",
"? => ALPHANUM", ". => ALPHANUM", "- => ALPHANUM",
"# => ALPHANUM", "% => ALPHANUM", "+ => ALPHANUM",
@@ -88,10 +88,10 @@ public void testNgramHighlightingWithBrokenPositions() throws IOException {
.put("analysis.filter.wordDelimiter.catenate_all", false)
.put("analysis.analyzer.autocomplete.tokenizer", "autocomplete")
- .putArray("analysis.analyzer.autocomplete.filter",
+ .putList("analysis.analyzer.autocomplete.filter",
"lowercase", "wordDelimiter")
.put("analysis.analyzer.search_autocomplete.tokenizer", "whitespace")
- .putArray("analysis.analyzer.search_autocomplete.filter",
+ .putList("analysis.analyzer.search_autocomplete.filter",
"lowercase", "wordDelimiter")));
client().prepareIndex("test", "test", "1")
.setSource("name", "ARCOTEL Hotels Deutschland").get();
@@ -121,7 +121,7 @@ public void testMultiPhraseCutoff() throws IOException {
.put("analysis.filter.wordDelimiter.catenate_numbers", true)
.put("analysis.filter.wordDelimiter.catenate_all", false)
.put("analysis.analyzer.custom_analyzer.tokenizer", "whitespace")
- .putArray("analysis.analyzer.custom_analyzer.filter",
+ .putList("analysis.analyzer.custom_analyzer.filter",
"lowercase", "wordDelimiter"))
);
diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/KeepFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/KeepFilterFactoryTests.java
index a7b3c6e61f006..e9248c3d21289 100644
--- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/KeepFilterFactoryTests.java
+++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/KeepFilterFactoryTests.java
@@ -76,7 +76,7 @@ public void testKeepWordsPathSettings() {
}
settings = Settings.builder().put(settings)
- .putArray("index.analysis.filter.non_broken_keep_filter.keep_words", "test")
+ .putList("index.analysis.filter.non_broken_keep_filter.keep_words", "test")
.build();
try {
// test our none existing setup is picked up
diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/KeepTypesFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/KeepTypesFilterFactoryTests.java
index 4df1fb780e932..a19882d6faa00 100644
--- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/KeepTypesFilterFactoryTests.java
+++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/KeepTypesFilterFactoryTests.java
@@ -38,7 +38,7 @@ public void testKeepTypes() throws IOException {
Settings settings = Settings.builder()
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString())
.put("index.analysis.filter.keep_numbers.type", "keep_types")
- .putArray("index.analysis.filter.keep_numbers.types", new String[] {"<NUM>", "<SOMETHING>"})
+ .putList("index.analysis.filter.keep_numbers.types", new String[] {"<NUM>", "<SOMETHING>"})
.build();
ESTestCase.TestAnalysis analysis = AnalysisTestsHelper.createTestAnalysisFromSettings(settings, new CommonAnalysisPlugin());
TokenFilterFactory tokenFilter = analysis.tokenFilter.get("keep_numbers");
diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/MassiveWordListTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/MassiveWordListTests.java
index 081580a6ae93a..f454e8c776c12 100644
--- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/MassiveWordListTests.java
+++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/MassiveWordListTests.java
@@ -42,9 +42,9 @@ public void testCreateIndexWithMassiveWordList() {
.put("index.number_of_shards", 1)
.put("analysis.analyzer.test_analyzer.type", "custom")
.put("analysis.analyzer.test_analyzer.tokenizer", "standard")
- .putArray("analysis.analyzer.test_analyzer.filter", "dictionary_decompounder", "lowercase")
+ .putList("analysis.analyzer.test_analyzer.filter", "dictionary_decompounder", "lowercase")
.put("analysis.filter.dictionary_decompounder.type", "dictionary_decompounder")
- .putArray("analysis.filter.dictionary_decompounder.word_list", wordList)
+ .putList("analysis.filter.dictionary_decompounder.word_list", wordList)
).get();
}
}
diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/NGramTokenizerFactoryTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/NGramTokenizerFactoryTests.java
index 24efd89b7e0c8..3f4641c7c189b 100644
--- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/NGramTokenizerFactoryTests.java
+++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/NGramTokenizerFactoryTests.java
@@ -78,7 +78,7 @@ public void testNoTokenChars() throws IOException {
final String name = "ngr";
final Settings indexSettings = newAnalysisSettingsBuilder().build();
final Settings settings = newAnalysisSettingsBuilder().put("min_gram", 2).put("max_gram", 4)
- .putArray("token_chars", new String[0]).build();
+ .putList("token_chars", new String[0]).build();
Tokenizer tokenizer = new NGramTokenizerFactory(IndexSettingsModule.newIndexSettings(index, indexSettings), null, name, settings)
.create();
tokenizer.setReader(new StringReader("1.34"));
diff --git a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DateIndexNameProcessor.java b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DateIndexNameProcessor.java
index 6ed5f0b66cf9b..e25891aca4e35 100644
--- a/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DateIndexNameProcessor.java
+++ b/modules/ingest-common/src/main/java/org/elasticsearch/ingest/common/DateIndexNameProcessor.java
@@ -25,6 +25,7 @@
import java.util.List;
import java.util.Locale;
import java.util.Map;
+import java.util.Objects;
import java.util.function.Function;
import org.elasticsearch.ExceptionsHelper;
@@ -61,7 +62,8 @@ public final class DateIndexNameProcessor extends AbstractProcessor {
@Override
public void execute(IngestDocument ingestDocument) throws Exception {
- String date = ingestDocument.getFieldValue(field, String.class);
+ // Date can be specified as a string or long:
+ String date = Objects.toString(ingestDocument.getFieldValue(field, Object.class));
DateTime dateTime = null;
Exception lastException = null;
diff --git a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateIndexNameProcessorTests.java b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateIndexNameProcessorTests.java
index 19d791dd8648c..6736594613954 100644
--- a/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateIndexNameProcessorTests.java
+++ b/modules/ingest-common/src/test/java/org/elasticsearch/ingest/common/DateIndexNameProcessorTests.java
@@ -62,6 +62,11 @@ public void testUnixMs()throws Exception {
Collections.singletonMap("_field", "1000500"));
dateProcessor.execute(document);
assertThat(document.getSourceAndMetadata().get("_index"), equalTo(""));
+
+ document = new IngestDocument("_index", "_type", "_id", null, null,
+ Collections.singletonMap("_field", 1000500L));
+ dateProcessor.execute(document);
+ assertThat(document.getSourceAndMetadata().get("_index"), equalTo(""));
}
public void testUnix()throws Exception {
diff --git a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java
index 17dc2740ee426..b50eb788c6f57 100644
--- a/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java
+++ b/modules/lang-expression/src/main/java/org/elasticsearch/script/expression/ExpressionScriptEngine.java
@@ -38,6 +38,7 @@
import org.elasticsearch.index.mapper.MapperService;
import org.elasticsearch.script.ClassPermission;
import org.elasticsearch.script.ExecutableScript;
+import org.elasticsearch.script.FilterScript;
import org.elasticsearch.script.ScriptContext;
import org.elasticsearch.script.ScriptEngine;
import org.elasticsearch.script.ScriptException;
@@ -107,6 +108,9 @@ protected Class<?> loadClass(String name, boolean resolve) throws ClassNotFoundE
} else if (context.instanceClazz.equals(ExecutableScript.class)) {
ExecutableScript.Factory factory = (p) -> new ExpressionExecutableScript(expr, p);
return context.factoryClazz.cast(factory);
+ } else if (context.instanceClazz.equals(FilterScript.class)) {
+ FilterScript.Factory factory = (p, lookup) -> newFilterScript(expr, lookup, p);
+ return context.factoryClazz.cast(factory);
}
throw new IllegalArgumentException("expression engine does not know how to handle script context [" + context.name + "]");
}
@@ -236,6 +240,27 @@ private SearchScript.LeafFactory newSearchScript(Expression expr, SearchLookup l
return new ExpressionSearchScript(expr, bindings, specialValue, needsScores);
}
+ /**
+ * This is a hack for filter scripts, which must return booleans instead of doubles as expression do.
+ * See https://github.com/elastic/elasticsearch/issues/26429.
+ */
+ private FilterScript.LeafFactory newFilterScript(Expression expr, SearchLookup lookup, @Nullable Map<String, Object> vars) {
+ SearchScript.LeafFactory searchLeafFactory = newSearchScript(expr, lookup, vars);
+ return ctx -> {
+ SearchScript script = searchLeafFactory.newInstance(ctx);
+ return new FilterScript(vars, lookup, ctx) {
+ @Override
+ public boolean execute() {
+ return script.runAsDouble() != 0.0;
+ }
+ @Override
+ public void setDocument(int docid) {
+ script.setDocument(docid);
+ }
+ };
+ };
+ }
+
/**
* converts a ParseException at compile-time or link-time to a ScriptException
*/
diff --git a/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/MoreExpressionTests.java b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/MoreExpressionTests.java
index d8d09ffba790a..9a91fccf4ad30 100644
--- a/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/MoreExpressionTests.java
+++ b/modules/lang-expression/src/test/java/org/elasticsearch/script/expression/MoreExpressionTests.java
@@ -700,4 +700,19 @@ public void testBoolean() throws Exception {
assertEquals(2.0D, rsp.getHits().getAt(1).field("foo").getValue(), 1.0D);
assertEquals(2.0D, rsp.getHits().getAt(2).field("foo").getValue(), 1.0D);
}
+
+ public void testFilterScript() throws Exception {
+ createIndex("test");
+ ensureGreen("test");
+ indexRandom(true,
+ client().prepareIndex("test", "doc", "1").setSource("foo", 1.0),
+ client().prepareIndex("test", "doc", "2").setSource("foo", 0.0));
+ SearchRequestBuilder builder = buildRequest("doc['foo'].value");
+ Script script = new Script(ScriptType.INLINE, "expression", "doc['foo'].value", Collections.emptyMap());
+ builder.setQuery(QueryBuilders.boolQuery().filter(QueryBuilders.scriptQuery(script)));
+ SearchResponse rsp = builder.get();
+ assertSearchResponse(rsp);
+ assertEquals(1, rsp.getHits().getTotalHits());
+ assertEquals(1.0D, rsp.getHits().getAt(0).field("foo").getValue(), 0.0D);
+ }
}
diff --git a/modules/lang-painless/build.gradle b/modules/lang-painless/build.gradle
index dddc98ae138ad..bc8a118ba1d64 100644
--- a/modules/lang-painless/build.gradle
+++ b/modules/lang-painless/build.gradle
@@ -146,7 +146,7 @@ task regen {
fileset(dir: outputPath, includes: 'Painless*.java')
}
// fix line endings
- ant.fixcrlf(srcdir: outputPath) {
+ ant.fixcrlf(srcdir: outputPath, eol: 'lf') {
patternset(includes: 'Painless*.java')
}
}
diff --git a/modules/lang-painless/src/test/java/org/elasticsearch/painless/EqualsTests.java b/modules/lang-painless/src/test/java/org/elasticsearch/painless/EqualsTests.java
index 9045a390f2ae3..30f098d28b800 100644
--- a/modules/lang-painless/src/test/java/org/elasticsearch/painless/EqualsTests.java
+++ b/modules/lang-painless/src/test/java/org/elasticsearch/painless/EqualsTests.java
@@ -23,7 +23,6 @@
import static java.util.Collections.singletonMap;
-// TODO: Figure out a way to test autobox caching properly from methods such as Integer.valueOf(int);
public class EqualsTests extends ScriptTestCase {
public void testTypesEquals() {
assertEquals(true, exec("return false === false;"));
@@ -133,7 +132,7 @@ public void testBranchEquals() {
assertEquals(0, exec("def a = 1; Object b = new HashMap(); if (a === (Object)b) return 1; else return 0;"));
}
- public void testBranchEqualsDefAndPrimitive() {
+ public void testEqualsDefAndPrimitive() {
/* This test needs an Integer that isn't cached by Integer.valueOf so we draw one randomly. We can't use any fixed integer because
* we can never be sure that the JVM hasn't configured itself to cache that Integer. It is sneaky like that. */
int uncachedAutoboxedInt = randomValueOtherThanMany(i -> Integer.valueOf(i) == Integer.valueOf(i), ESTestCase::randomInt);
@@ -141,6 +140,15 @@ public void testBranchEqualsDefAndPrimitive() {
assertEquals(false, exec("def x = params.i; int y = params.i; return x === y;", singletonMap("i", uncachedAutoboxedInt), true));
assertEquals(true, exec("def x = params.i; int y = params.i; return y == x;", singletonMap("i", uncachedAutoboxedInt), true));
assertEquals(false, exec("def x = params.i; int y = params.i; return y === x;", singletonMap("i", uncachedAutoboxedInt), true));
+
+ /* Now check that we use valueOf with the boxing used for comparing primitives to def. For this we need an
+ * integer that is cached by Integer.valueOf. The JLS says 0 should always be cached. */
+ int cachedAutoboxedInt = 0;
+ assertSame(Integer.valueOf(cachedAutoboxedInt), Integer.valueOf(cachedAutoboxedInt));
+ assertEquals(true, exec("def x = params.i; int y = params.i; return x == y;", singletonMap("i", cachedAutoboxedInt), true));
+ assertEquals(true, exec("def x = params.i; int y = params.i; return x === y;", singletonMap("i", cachedAutoboxedInt), true));
+ assertEquals(true, exec("def x = params.i; int y = params.i; return y == x;", singletonMap("i", cachedAutoboxedInt), true));
+ assertEquals(true, exec("def x = params.i; int y = params.i; return y === x;", singletonMap("i", cachedAutoboxedInt), true));
}
public void testBranchNotEquals() {
@@ -153,7 +161,7 @@ public void testBranchNotEquals() {
assertEquals(1, exec("def a = 1; Object b = new HashMap(); if (a !== (Object)b) return 1; else return 0;"));
}
- public void testBranchNotEqualsDefAndPrimitive() {
+ public void testNotEqualsDefAndPrimitive() {
/* This test needs an Integer that isn't cached by Integer.valueOf so we draw one randomly. We can't use any fixed integer because
* we can never be sure that the JVM hasn't configured itself to cache that Integer. It is sneaky like that. */
int uncachedAutoboxedInt = randomValueOtherThanMany(i -> Integer.valueOf(i) == Integer.valueOf(i), ESTestCase::randomInt);
@@ -161,6 +169,15 @@ public void testBranchNotEqualsDefAndPrimitive() {
assertEquals(true, exec("def x = params.i; int y = params.i; return x !== y;", singletonMap("i", uncachedAutoboxedInt), true));
assertEquals(false, exec("def x = params.i; int y = params.i; return y != x;", singletonMap("i", uncachedAutoboxedInt), true));
assertEquals(true, exec("def x = params.i; int y = params.i; return y !== x;", singletonMap("i", uncachedAutoboxedInt), true));
+
+ /* Now check that we use valueOf with the boxing used for comparing primitives to def. For this we need an
+ * integer that is cached by Integer.valueOf. The JLS says 0 should always be cached. */
+ int cachedAutoboxedInt = 0;
+ assertSame(Integer.valueOf(cachedAutoboxedInt), Integer.valueOf(cachedAutoboxedInt));
+ assertEquals(false, exec("def x = params.i; int y = params.i; return x != y;", singletonMap("i", cachedAutoboxedInt), true));
+ assertEquals(false, exec("def x = params.i; int y = params.i; return x !== y;", singletonMap("i", cachedAutoboxedInt), true));
+ assertEquals(false, exec("def x = params.i; int y = params.i; return y != x;", singletonMap("i", cachedAutoboxedInt), true));
+ assertEquals(false, exec("def x = params.i; int y = params.i; return y !== x;", singletonMap("i", cachedAutoboxedInt), true));
}
public void testRightHandNull() {
diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java
index c08069b1af775..54d6c69112571 100644
--- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java
+++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolatorQuerySearchIT.java
@@ -805,10 +805,10 @@ public void testPercolatorQueryViaMultiSearch() throws Exception {
jsonBuilder().startObject().field("field1", "b").endObject().bytes(), XContentType.JSON)))
.add(client().prepareSearch("test")
.setQuery(new PercolateQueryBuilder("query",
- yamlBuilder().startObject().field("field1", "c").endObject().bytes(), XContentType.JSON)))
+ yamlBuilder().startObject().field("field1", "c").endObject().bytes(), XContentType.YAML)))
.add(client().prepareSearch("test")
.setQuery(new PercolateQueryBuilder("query",
- smileBuilder().startObject().field("field1", "b c").endObject().bytes(), XContentType.JSON)))
+ smileBuilder().startObject().field("field1", "b c").endObject().bytes(), XContentType.SMILE)))
.add(client().prepareSearch("test")
.setQuery(new PercolateQueryBuilder("query",
jsonBuilder().startObject().field("field1", "d").endObject().bytes(), XContentType.JSON)))
diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestUpdateByQueryAction.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestUpdateByQueryAction.java
index 3ddd3618ab812..a3f7111053bb5 100644
--- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestUpdateByQueryAction.java
+++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/RestUpdateByQueryAction.java
@@ -67,7 +67,7 @@ protected UpdateByQueryRequest buildRequest(RestRequest request) throws IOExcept
Map<String, Consumer<Object>> consumers = new HashMap<>();
consumers.put("conflicts", o -> internal.setConflicts((String) o));
- consumers.put("script", o -> internal.setScript(parseScript((Map<String, Object>)o)));
+ consumers.put("script", o -> internal.setScript(parseScript(o)));
parseInternalRequest(internal, request, consumers);
@@ -76,49 +76,58 @@ protected UpdateByQueryRequest buildRequest(RestRequest request) throws IOExcept
}
@SuppressWarnings("unchecked")
- private static Script parseScript(Map<String, Object> config) {
- String script = null;
- ScriptType type = null;
- String lang = DEFAULT_SCRIPT_LANG;
- Map<String, Object> params = Collections.emptyMap();
- for (Iterator<Map.Entry<String, Object>> itr = config.entrySet().iterator(); itr.hasNext();) {
- Map.Entry<String, Object> entry = itr.next();
- String parameterName = entry.getKey();
- Object parameterValue = entry.getValue();
- if (Script.LANG_PARSE_FIELD.match(parameterName)) {
- if (parameterValue instanceof String || parameterValue == null) {
- lang = (String) parameterValue;
- } else {
- throw new ElasticsearchParseException("Value must be of type String: [" + parameterName + "]");
- }
- } else if (Script.PARAMS_PARSE_FIELD.match(parameterName)) {
- if (parameterValue instanceof Map || parameterValue == null) {
- params = (Map<String, Object>) parameterValue;
- } else {
- throw new ElasticsearchParseException("Value must be of type String: [" + parameterName + "]");
- }
- } else if (ScriptType.INLINE.getParseField().match(parameterName)) {
- if (parameterValue instanceof String || parameterValue == null) {
- script = (String) parameterValue;
- type = ScriptType.INLINE;
- } else {
- throw new ElasticsearchParseException("Value must be of type String: [" + parameterName + "]");
- }
- } else if (ScriptType.STORED.getParseField().match(parameterName)) {
- if (parameterValue instanceof String || parameterValue == null) {
- script = (String) parameterValue;
- type = ScriptType.STORED;
- } else {
- throw new ElasticsearchParseException("Value must be of type String: [" + parameterName + "]");
+ private static Script parseScript(Object config) {
+ assert config != null : "Script should not be null";
+
+ if (config instanceof String) {
+ return new Script((String) config);
+ } else if (config instanceof Map) {
+ Map<String, Object> configMap = (Map<String, Object>) config;
+ String script = null;
+ ScriptType type = null;
+ String lang = DEFAULT_SCRIPT_LANG;
+ Map<String, Object> params = Collections.emptyMap();
+ for (Iterator<Map.Entry<String, Object>> itr = configMap.entrySet().iterator(); itr.hasNext();) {
+ Map.Entry<String, Object> entry = itr.next();
+ String parameterName = entry.getKey();
+ Object parameterValue = entry.getValue();
+ if (Script.LANG_PARSE_FIELD.match(parameterName)) {
+ if (parameterValue instanceof String || parameterValue == null) {
+ lang = (String) parameterValue;
+ } else {
+ throw new ElasticsearchParseException("Value must be of type String: [" + parameterName + "]");
+ }
+ } else if (Script.PARAMS_PARSE_FIELD.match(parameterName)) {
+ if (parameterValue instanceof Map || parameterValue == null) {
+ params = (Map<String, Object>) parameterValue;
+ } else {
+ throw new ElasticsearchParseException("Value must be of type String: [" + parameterName + "]");
+ }
+ } else if (ScriptType.INLINE.getParseField().match(parameterName)) {
+ if (parameterValue instanceof String || parameterValue == null) {
+ script = (String) parameterValue;
+ type = ScriptType.INLINE;
+ } else {
+ throw new ElasticsearchParseException("Value must be of type String: [" + parameterName + "]");
+ }
+ } else if (ScriptType.STORED.getParseField().match(parameterName)) {
+ if (parameterValue instanceof String || parameterValue == null) {
+ script = (String) parameterValue;
+ type = ScriptType.STORED;
+ } else {
+ throw new ElasticsearchParseException("Value must be of type String: [" + parameterName + "]");
+ }
}
}
- }
- if (script == null) {
- throw new ElasticsearchParseException("expected one of [{}] or [{}] fields, but found none",
+ if (script == null) {
+ throw new ElasticsearchParseException("expected one of [{}] or [{}] fields, but found none",
ScriptType.INLINE.getParseField().getPreferredName(), ScriptType.STORED.getParseField().getPreferredName());
- }
- assert type != null : "if script is not null, type should definitely not be null";
+ }
+ assert type != null : "if script is not null, type should definitely not be null";
- return new Script(type, lang, script, params);
+ return new Script(type, lang, script, params);
+ } else {
+ throw new IllegalArgumentException("Script value should be a String or a Map");
+ }
}
}
diff --git a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java
index 5636833442d9f..b56225fb6fabe 100644
--- a/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java
+++ b/modules/transport-netty4/src/main/java/org/elasticsearch/http/netty4/Netty4HttpServerTransport.java
@@ -132,10 +132,22 @@ public class Netty4HttpServerTransport extends AbstractLifecycleComponent implem
public static final Setting<ByteSizeValue> SETTING_HTTP_NETTY_RECEIVE_PREDICTOR_SIZE =
Setting.byteSizeSetting("http.netty.receive_predictor_size", new ByteSizeValue(64, ByteSizeUnit.KB), Property.NodeScope);
+
+ /**
+ * @deprecated This (undocumented) setting is deprecated to reduce complexity and is removed in 7.0. See #26165 for details.
+ */
+ @Deprecated
public static final Setting<ByteSizeValue> SETTING_HTTP_NETTY_RECEIVE_PREDICTOR_MIN =
- byteSizeSetting("http.netty.receive_predictor_min", SETTING_HTTP_NETTY_RECEIVE_PREDICTOR_SIZE, Property.NodeScope);
+ byteSizeSetting("http.netty.receive_predictor_min", SETTING_HTTP_NETTY_RECEIVE_PREDICTOR_SIZE,
+ Property.NodeScope, Property.Deprecated);
+
+ /**
+ * @deprecated This (undocumented) setting is deprecated to reduce complexity and is removed in 7.0. See #26165 for details.
+ */
+ @Deprecated
public static final Setting<ByteSizeValue> SETTING_HTTP_NETTY_RECEIVE_PREDICTOR_MAX =
- byteSizeSetting("http.netty.receive_predictor_max", SETTING_HTTP_NETTY_RECEIVE_PREDICTOR_SIZE, Property.NodeScope);
+ byteSizeSetting("http.netty.receive_predictor_max", SETTING_HTTP_NETTY_RECEIVE_PREDICTOR_SIZE,
+ Property.NodeScope, Property.Deprecated);
protected final NetworkService networkService;
diff --git a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/SimpleNetty4TransportTests.java b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/SimpleNetty4TransportTests.java
index 92c21f942c292..bdf4adb5ea91c 100644
--- a/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/SimpleNetty4TransportTests.java
+++ b/modules/transport-netty4/src/test/java/org/elasticsearch/transport/netty4/SimpleNetty4TransportTests.java
@@ -52,7 +52,7 @@
public class SimpleNetty4TransportTests extends AbstractSimpleTransportTestCase {
public static MockTransportService nettyFromThreadPool(Settings settings, ThreadPool threadPool, final Version version,
- ClusterSettings clusterSettings, boolean doHandshake) {
+ ClusterSettings clusterSettings, boolean doHandshake) {
NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(Collections.emptyList());
Transport transport = new Netty4Transport(settings, threadPool, new NetworkService(Collections.emptyList()),
BigArrays.NON_RECYCLING_INSTANCE, namedWriteableRegistry, new NoneCircuitBreakerService()) {
@@ -86,6 +86,13 @@ protected MockTransportService build(Settings settings, Version version, Cluster
return transportService;
}
+ @Override
+ protected void closeConnectionChannel(Transport transport, Transport.Connection connection) throws IOException {
+ final Netty4Transport t = (Netty4Transport) transport;
+ @SuppressWarnings("unchecked") final TcpTransport.NodeChannels channels = (TcpTransport.NodeChannels) connection;
+ t.closeChannels(channels.getChannels().subList(0, randomIntBetween(1, channels.getChannels().size())), true, false);
+ }
+
public void testConnectException() throws UnknownHostException {
try {
serviceA.connectToNode(new DiscoveryNode("C", new TransportAddress(InetAddress.getByName("localhost"), 9876),
@@ -108,7 +115,8 @@ public void testBindUnavailableAddress() {
.build();
ClusterSettings clusterSettings = new ClusterSettings(settings, ClusterSettings.BUILT_IN_CLUSTER_SETTINGS);
BindTransportException bindTransportException = expectThrows(BindTransportException.class, () -> {
- MockTransportService transportService = nettyFromThreadPool(settings, threadPool, Version.CURRENT, clusterSettings, true);
+ MockTransportService transportService =
+ nettyFromThreadPool(settings, threadPool, Version.CURRENT, clusterSettings, true);
try {
transportService.start();
} finally {
diff --git a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuTokenizerFactory.java b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuTokenizerFactory.java
index 14fa5922c1d90..fa1999cf17e39 100644
--- a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuTokenizerFactory.java
+++ b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/analysis/IcuTokenizerFactory.java
@@ -37,6 +37,7 @@
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.HashMap;
+import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
@@ -63,7 +64,7 @@ private ICUTokenizerConfig getIcuConfig(Environment env, Settings settings) {
Map<Integer, String> tailored = new HashMap<>();
try {
- String[] ruleFiles = settings.getAsArray(RULE_FILES);
+ List<String> ruleFiles = settings.getAsList(RULE_FILES);
for (String scriptAndResourcePath : ruleFiles) {
int colonPos = scriptAndResourcePath.indexOf(":");
diff --git a/plugins/analysis-phonetic/src/main/java/org/elasticsearch/index/analysis/PhoneticTokenFilterFactory.java b/plugins/analysis-phonetic/src/main/java/org/elasticsearch/index/analysis/PhoneticTokenFilterFactory.java
index 5c9a6a463d3b1..4f5a97dd342f0 100644
--- a/plugins/analysis-phonetic/src/main/java/org/elasticsearch/index/analysis/PhoneticTokenFilterFactory.java
+++ b/plugins/analysis-phonetic/src/main/java/org/elasticsearch/index/analysis/PhoneticTokenFilterFactory.java
@@ -19,8 +19,8 @@
package org.elasticsearch.index.analysis;
-import java.util.Arrays;
import java.util.HashSet;
+import java.util.List;
import org.apache.commons.codec.Encoder;
import org.apache.commons.codec.language.Caverphone1;
@@ -50,7 +50,7 @@ public class PhoneticTokenFilterFactory extends AbstractTokenFilterFactory {
private final Encoder encoder;
private final boolean replace;
private int maxcodelength;
- private String[] languageset;
+ private List<String> languageset;
private NameType nametype;
private RuleType ruletype;
@@ -82,7 +82,7 @@ public PhoneticTokenFilterFactory(IndexSettings indexSettings, Environment envir
this.maxcodelength = settings.getAsInt("max_code_len", 4);
} else if ("bm".equalsIgnoreCase(encodername) || "beider_morse".equalsIgnoreCase(encodername) || "beidermorse".equalsIgnoreCase(encodername)) {
this.encoder = null;
- this.languageset = settings.getAsArray("languageset");
+ this.languageset = settings.getAsList("languageset");
String ruleType = settings.get("rule_type", "approx");
if ("approx".equalsIgnoreCase(ruleType)) {
ruletype = RuleType.APPROX;
@@ -117,7 +117,7 @@ public TokenStream create(TokenStream tokenStream) {
if (encoder == null) {
if (ruletype != null && nametype != null) {
if (languageset != null) {
- final LanguageSet languages = LanguageSet.from(new HashSet<>(Arrays.asList(languageset)));
+ final LanguageSet languages = LanguageSet.from(new HashSet<>(languageset));
return new BeiderMorseFilter(tokenStream, new PhoneticEngine(nametype, ruletype, true), languages);
}
return new BeiderMorseFilter(tokenStream, new PhoneticEngine(nametype, ruletype, true));
diff --git a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java
index f3685278dc6b9..e7986cb878e41 100644
--- a/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java
+++ b/plugins/discovery-ec2/src/test/java/org/elasticsearch/discovery/ec2/Ec2DiscoveryTests.java
@@ -229,7 +229,7 @@ public void testFilterByTags() throws InterruptedException {
public void testFilterByMultipleTags() throws InterruptedException {
int nodes = randomIntBetween(5, 10);
Settings nodeSettings = Settings.builder()
- .putArray(AwsEc2Service.TAG_SETTING.getKey() + "stage", "prod", "preprod")
+ .putList(AwsEc2Service.TAG_SETTING.getKey() + "stage", "prod", "preprod")
.build();
int prodInstances = 0;
diff --git a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoveryTests.java b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoveryTests.java
index 5ae30c74a3226..31ea9bdb1c21e 100644
--- a/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoveryTests.java
+++ b/plugins/discovery-gce/src/test/java/org/elasticsearch/discovery/gce/GceDiscoveryTests.java
@@ -128,7 +128,7 @@ public void testNodesWithDifferentTagsAndOneTagSet() {
Settings nodeSettings = Settings.builder()
.put(GceInstancesServiceImpl.PROJECT_SETTING.getKey(), projectName)
.put(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "europe-west1-b")
- .putArray(GceUnicastHostsProvider.TAGS_SETTING.getKey(), "elasticsearch")
+ .putList(GceUnicastHostsProvider.TAGS_SETTING.getKey(), "elasticsearch")
.build();
mock = new GceInstancesServiceMock(nodeSettings);
List<DiscoveryNode> discoveryNodes = buildDynamicNodes(mock, nodeSettings);
@@ -140,7 +140,7 @@ public void testNodesWithDifferentTagsAndTwoTagSet() {
Settings nodeSettings = Settings.builder()
.put(GceInstancesServiceImpl.PROJECT_SETTING.getKey(), projectName)
.put(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "europe-west1-b")
- .putArray(GceUnicastHostsProvider.TAGS_SETTING.getKey(), "elasticsearch", "dev")
+ .putList(GceUnicastHostsProvider.TAGS_SETTING.getKey(), "elasticsearch", "dev")
.build();
mock = new GceInstancesServiceMock(nodeSettings);
List<DiscoveryNode> discoveryNodes = buildDynamicNodes(mock, nodeSettings);
@@ -162,7 +162,7 @@ public void testNodesWithSameTagsAndOneTagSet() {
Settings nodeSettings = Settings.builder()
.put(GceInstancesServiceImpl.PROJECT_SETTING.getKey(), projectName)
.put(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "europe-west1-b")
- .putArray(GceUnicastHostsProvider.TAGS_SETTING.getKey(), "elasticsearch")
+ .putList(GceUnicastHostsProvider.TAGS_SETTING.getKey(), "elasticsearch")
.build();
mock = new GceInstancesServiceMock(nodeSettings);
List<DiscoveryNode> discoveryNodes = buildDynamicNodes(mock, nodeSettings);
@@ -173,7 +173,7 @@ public void testNodesWithSameTagsAndTwoTagsSet() {
Settings nodeSettings = Settings.builder()
.put(GceInstancesServiceImpl.PROJECT_SETTING.getKey(), projectName)
.put(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "europe-west1-b")
- .putArray(GceUnicastHostsProvider.TAGS_SETTING.getKey(), "elasticsearch", "dev")
+ .putList(GceUnicastHostsProvider.TAGS_SETTING.getKey(), "elasticsearch", "dev")
.build();
mock = new GceInstancesServiceMock(nodeSettings);
List<DiscoveryNode> discoveryNodes = buildDynamicNodes(mock, nodeSettings);
@@ -183,7 +183,7 @@ public void testNodesWithSameTagsAndTwoTagsSet() {
public void testMultipleZonesAndTwoNodesInSameZone() {
Settings nodeSettings = Settings.builder()
.put(GceInstancesServiceImpl.PROJECT_SETTING.getKey(), projectName)
- .putArray(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "us-central1-a", "europe-west1-b")
+ .putList(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "us-central1-a", "europe-west1-b")
.build();
mock = new GceInstancesServiceMock(nodeSettings);
List<DiscoveryNode> discoveryNodes = buildDynamicNodes(mock, nodeSettings);
@@ -193,7 +193,7 @@ public void testMultipleZonesAndTwoNodesInSameZone() {
public void testMultipleZonesAndTwoNodesInDifferentZones() {
Settings nodeSettings = Settings.builder()
.put(GceInstancesServiceImpl.PROJECT_SETTING.getKey(), projectName)
- .putArray(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "us-central1-a", "europe-west1-b")
+ .putList(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "us-central1-a", "europe-west1-b")
.build();
mock = new GceInstancesServiceMock(nodeSettings);
List<DiscoveryNode> discoveryNodes = buildDynamicNodes(mock, nodeSettings);
@@ -206,7 +206,7 @@ public void testMultipleZonesAndTwoNodesInDifferentZones() {
public void testZeroNode43() {
Settings nodeSettings = Settings.builder()
.put(GceInstancesServiceImpl.PROJECT_SETTING.getKey(), projectName)
- .putArray(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "us-central1-a", "us-central1-b")
+ .putList(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "us-central1-a", "us-central1-b")
.build();
mock = new GceInstancesServiceMock(nodeSettings);
List<DiscoveryNode> discoveryNodes = buildDynamicNodes(mock, nodeSettings);
@@ -226,7 +226,7 @@ public void testIllegalSettingsMissingAllRequired() {
public void testIllegalSettingsMissingProject() {
Settings nodeSettings = Settings.builder()
- .putArray(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "us-central1-a", "us-central1-b")
+ .putList(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "us-central1-a", "us-central1-b")
.build();
mock = new GceInstancesServiceMock(nodeSettings);
try {
@@ -258,7 +258,7 @@ public void testIllegalSettingsMissingZone() {
public void testNoRegionReturnsEmptyList() {
Settings nodeSettings = Settings.builder()
.put(GceInstancesServiceImpl.PROJECT_SETTING.getKey(), projectName)
- .putArray(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "europe-west1-b", "us-central1-a")
+ .putList(GceInstancesServiceImpl.ZONE_SETTING.getKey(), "europe-west1-b", "us-central1-a")
.build();
mock = new GceInstancesServiceMock(nodeSettings);
List<DiscoveryNode> discoveryNodes = buildDynamicNodes(mock, nodeSettings);
diff --git a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureRepositorySettingsTests.java b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureRepositorySettingsTests.java
index 75ef13d7d8745..a5b246fa53f20 100644
--- a/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureRepositorySettingsTests.java
+++ b/plugins/repository-azure/src/test/java/org/elasticsearch/repositories/azure/AzureRepositorySettingsTests.java
@@ -40,7 +40,7 @@ public class AzureRepositorySettingsTests extends ESTestCase {
private AzureRepository azureRepository(Settings settings) throws StorageException, IOException, URISyntaxException {
Settings internalSettings = Settings.builder()
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath())
- .putArray(Environment.PATH_DATA_SETTING.getKey(), tmpPaths())
+ .putList(Environment.PATH_DATA_SETTING.getKey(), tmpPaths())
.put(settings)
.build();
return new AzureRepository(new RepositoryMetaData("foo", "azure", internalSettings), new Environment(internalSettings),
diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilSecurityTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilSecurityTests.java
index 50a9f3426acbd..9ba59f8d49727 100644
--- a/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilSecurityTests.java
+++ b/qa/evil-tests/src/test/java/org/elasticsearch/bootstrap/EvilSecurityTests.java
@@ -80,7 +80,7 @@ public void testEnvironmentPaths() throws Exception {
Settings.Builder settingsBuilder = Settings.builder();
settingsBuilder.put(Environment.PATH_HOME_SETTING.getKey(), esHome.resolve("home").toString());
- settingsBuilder.putArray(Environment.PATH_DATA_SETTING.getKey(), esHome.resolve("data1").toString(),
+ settingsBuilder.putList(Environment.PATH_DATA_SETTING.getKey(), esHome.resolve("data1").toString(),
esHome.resolve("data2").toString());
settingsBuilder.put(Environment.PATH_SHARED_DATA_SETTING.getKey(), esHome.resolve("custom").toString());
settingsBuilder.put(Environment.PATH_LOGS_SETTING.getKey(), esHome.resolve("logs").toString());
@@ -153,7 +153,7 @@ public void testDuplicateDataPaths() throws IOException {
Settings
.builder()
.put(Environment.PATH_HOME_SETTING.getKey(), home.toString())
- .putArray(Environment.PATH_DATA_SETTING.getKey(), data.toString(), duplicate.toString())
+ .putList(Environment.PATH_DATA_SETTING.getKey(), data.toString(), duplicate.toString())
.build();
final Environment environment = new Environment(settings);
diff --git a/qa/evil-tests/src/test/java/org/elasticsearch/env/NodeEnvironmentEvilTests.java b/qa/evil-tests/src/test/java/org/elasticsearch/env/NodeEnvironmentEvilTests.java
index 3eebf4a2f6481..8192a8c8a29c5 100644
--- a/qa/evil-tests/src/test/java/org/elasticsearch/env/NodeEnvironmentEvilTests.java
+++ b/qa/evil-tests/src/test/java/org/elasticsearch/env/NodeEnvironmentEvilTests.java
@@ -50,7 +50,7 @@ public void testMissingWritePermission() throws IOException {
PosixFilePermission.OWNER_READ)));
Settings build = Settings.builder()
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath().toString())
- .putArray(Environment.PATH_DATA_SETTING.getKey(), tempPaths).build();
+ .putList(Environment.PATH_DATA_SETTING.getKey(), tempPaths).build();
IOException ioException = expectThrows(IOException.class, () -> {
new NodeEnvironment(build, new Environment(build));
});
@@ -70,7 +70,7 @@ public void testMissingWritePermissionOnIndex() throws IOException {
PosixFilePermission.OWNER_READ)));
Settings build = Settings.builder()
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath().toString())
- .putArray(Environment.PATH_DATA_SETTING.getKey(), tempPaths).build();
+ .putList(Environment.PATH_DATA_SETTING.getKey(), tempPaths).build();
IOException ioException = expectThrows(IOException.class, () -> {
new NodeEnvironment(build, new Environment(build));
});
@@ -95,7 +95,7 @@ public void testMissingWritePermissionOnShard() throws IOException {
PosixFilePermission.OWNER_READ)));
Settings build = Settings.builder()
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath().toString())
- .putArray(Environment.PATH_DATA_SETTING.getKey(), tempPaths).build();
+ .putList(Environment.PATH_DATA_SETTING.getKey(), tempPaths).build();
IOException ioException = expectThrows(IOException.class, () -> {
new NodeEnvironment(build, new Environment(build));
});
diff --git a/qa/full-cluster-restart/build.gradle b/qa/full-cluster-restart/build.gradle
index 95e62416cade3..f271dae5cfda1 100644
--- a/qa/full-cluster-restart/build.gradle
+++ b/qa/full-cluster-restart/build.gradle
@@ -52,6 +52,9 @@ for (Version version : indexCompatVersions) {
// some tests rely on the translog not being flushed
setting 'indices.memory.shard_inactive_time', '20m'
+ // debug logging for testRecovery
+ setting 'logger.level', 'DEBUG'
+
if (version.onOrAfter('5.3.0')) {
setting 'http.content_type.required', 'true'
}
@@ -72,6 +75,9 @@ for (Version version : indexCompatVersions) {
// some tests rely on the translog not being flushed
setting 'indices.memory.shard_inactive_time', '20m'
+ // debug logging for testRecovery
+ setting 'logger.level', 'DEBUG'
+
numNodes = 2
dataDir = { nodeNum -> oldClusterTest.nodes[nodeNum].dataDir }
cleanShared = false // We want to keep snapshots made by the old cluster!
@@ -81,6 +87,7 @@ for (Version version : indexCompatVersions) {
systemProperty 'tests.is_old_cluster', 'false'
systemProperty 'tests.old_cluster_version', version.toString().minus("-SNAPSHOT")
systemProperty 'tests.path.repo', new File(buildDir, "cluster/shared/repo")
+
}
Task versionBwcTest = tasks.create(name: "${baseName}#bwcTest") {
diff --git a/qa/rolling-upgrade/build.gradle b/qa/rolling-upgrade/build.gradle
index b5f841601308e..fc3cf88b272f1 100644
--- a/qa/rolling-upgrade/build.gradle
+++ b/qa/rolling-upgrade/build.gradle
@@ -61,6 +61,7 @@ for (Version version : wireCompatVersions) {
distribution = 'zip'
clusterName = 'rolling-upgrade'
unicastTransportUri = { seedNode, node, ant -> oldClusterTest.nodes.get(0).transportUri() }
+ minimumMasterNodes = { 2 }
/* Override the data directory so the new node always gets the node we
* just stopped's data directory. */
dataDir = { nodeNumber -> oldClusterTest.nodes[1].dataDir }
@@ -81,6 +82,7 @@ for (Version version : wireCompatVersions) {
distribution = 'zip'
clusterName = 'rolling-upgrade'
unicastTransportUri = { seedNode, node, ant -> mixedClusterTest.nodes.get(0).transportUri() }
+ minimumMasterNodes = { 2 }
/* Override the data directory so the new node always gets the node we
* just stopped's data directory. */
dataDir = { nodeNumber -> oldClusterTest.nodes[0].dataDir}
diff --git a/qa/smoke-test-reindex-with-all-modules/src/test/resources/rest-api-spec/test/update_by_query/10_script.yml b/qa/smoke-test-reindex-with-all-modules/src/test/resources/rest-api-spec/test/update_by_query/10_script.yml
index b43af1fc07e90..ea9fa33e6a9cf 100644
--- a/qa/smoke-test-reindex-with-all-modules/src/test/resources/rest-api-spec/test/update_by_query/10_script.yml
+++ b/qa/smoke-test-reindex-with-all-modules/src/test/resources/rest-api-spec/test/update_by_query/10_script.yml
@@ -29,6 +29,34 @@
user: notkimchy
- match: { hits.total: 1 }
+---
+"Update document using short `script` form":
+ - do:
+ index:
+ index: twitter
+ type: tweet
+ id: 1
+ body: { "user": "kimchy" }
+ - do:
+ indices.refresh: {}
+
+ - do:
+ update_by_query:
+ index: twitter
+ refresh: true
+ body: { "script": "ctx._source.user = \"not\" + ctx._source.user" }
+ - match: {updated: 1}
+ - match: {noops: 0}
+
+ - do:
+ search:
+ index: twitter
+ body:
+ query:
+ match:
+ user: notkimchy
+ - match: { hits.total: 1 }
+
---
"Noop one doc":
- do:
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.segments.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.segments.json
index 118f8b6bf9632..3306b2f753b2a 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.segments.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.segments.json
@@ -16,6 +16,11 @@
"type" : "string",
"description" : "a short version of the Accept header, e.g. json, yaml"
},
+ "bytes": {
+ "type": "enum",
+ "description" : "The unit in which to display byte values",
+ "options": [ "b", "k", "kb", "m", "mb", "g", "gb", "t", "tb", "p", "pb" ]
+ },
"h": {
"type": "list",
"description" : "Comma-separated list of column names to display"
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.shards.json b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.shards.json
index db46ce909ff6b..2ad714e7225d7 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/cat.shards.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/cat.shards.json
@@ -16,6 +16,11 @@
"type" : "string",
"description" : "a short version of the Accept header, e.g. json, yaml"
},
+ "bytes": {
+ "type": "enum",
+ "description" : "The unit in which to display byte values",
+ "options": [ "b", "k", "kb", "m", "mb", "g", "gb", "t", "tb", "p", "pb" ]
+ },
"local": {
"type" : "boolean",
"description" : "Return local information, do not retrieve the state from master node (default: false)"
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/api/count.json b/rest-api-spec/src/main/resources/rest-api-spec/api/count.json
index 0e2697cd524d2..1275983ef238f 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/api/count.json
+++ b/rest-api-spec/src/main/resources/rest-api-spec/api/count.json
@@ -67,6 +67,10 @@
"lenient": {
"type" : "boolean",
"description" : "Specify whether format-based query failures (such as providing text to a numeric field) should be ignored"
+ },
+ "terminate_after" : {
+ "type" : "number",
+ "description" : "The maximum count for each shard, upon reaching which the query execution will terminate early"
}
}
},
diff --git a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/20_list_of_strings.yml b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/20_list_of_strings.yml
index e25626cf3ae28..7dfbce13d7afc 100644
--- a/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/20_list_of_strings.yml
+++ b/rest-api-spec/src/main/resources/rest-api-spec/test/bulk/20_list_of_strings.yml
@@ -1,5 +1,10 @@
---
"List of strings":
+ - skip:
+ version: " - 6.0.99"
+ reason: '[_primary] shard preference deprecated in 6.1+'
+ features: "warnings"
+
- do:
bulk:
refresh: true
@@ -14,6 +19,9 @@
# we count through the primary in case there is a replica that has not yet fully recovered
preference: _primary
index: test_index
+ warnings:
+ - "[_primary] has been deprecated in 6.1+, and will be removed in 7.0; use [_only_nodes] or [_prefer_nodes]"
+
- match: {count: 2}
diff --git a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java
index a0777de5dc32e..243ab11e61fcb 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/ESTestCase.java
@@ -39,7 +39,6 @@
import org.apache.logging.log4j.status.StatusLogger;
import org.apache.lucene.util.LuceneTestCase;
import org.apache.lucene.util.LuceneTestCase.SuppressCodecs;
-import org.apache.lucene.util.SetOnce;
import org.apache.lucene.util.TestRuleMarkFailure;
import org.apache.lucene.util.TestUtil;
import org.apache.lucene.util.TimeUnits;
@@ -134,7 +133,6 @@
import java.util.Set;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.TimeUnit;
-import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.function.BooleanSupplier;
import java.util.function.Consumer;
@@ -812,7 +810,7 @@ public NodeEnvironment newNodeEnvironment(Settings settings) throws IOException
Settings build = Settings.builder()
.put(settings)
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toAbsolutePath())
- .putArray(Environment.PATH_DATA_SETTING.getKey(), tmpPaths()).build();
+ .putList(Environment.PATH_DATA_SETTING.getKey(), tmpPaths()).build();
return new NodeEnvironment(build, new Environment(build));
}
diff --git a/test/framework/src/main/java/org/elasticsearch/test/discovery/ClusterDiscoveryConfiguration.java b/test/framework/src/main/java/org/elasticsearch/test/discovery/ClusterDiscoveryConfiguration.java
index 7e3f9a21e4386..f873ec4fb933c 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/discovery/ClusterDiscoveryConfiguration.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/discovery/ClusterDiscoveryConfiguration.java
@@ -129,7 +129,7 @@ public Settings nodeSettings(int nodeOrdinal) {
unicastHosts[i] = IP_ADDR + ":" + (unicastHostPorts[unicastHostOrdinals[i]]);
}
}
- builder.putArray("discovery.zen.ping.unicast.hosts", unicastHosts);
+ builder.putList("discovery.zen.ping.unicast.hosts", unicastHosts);
return builder.put(super.nodeSettings(nodeOrdinal)).build();
}
diff --git a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java
index b82a9bd188453..bdb3e317bc22e 100644
--- a/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java
+++ b/test/framework/src/main/java/org/elasticsearch/test/transport/MockTransportService.java
@@ -72,6 +72,7 @@
import java.util.Set;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.CopyOnWriteArrayList;
+import java.util.concurrent.ExecutorService;
import java.util.concurrent.LinkedBlockingDeque;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Function;
@@ -167,6 +168,17 @@ protected TaskManager createTaskManager() {
}
}
+ private volatile String executorName;
+
+ public void setExecutorName(final String executorName) {
+ this.executorName = executorName;
+ }
+
+ @Override
+ protected ExecutorService getExecutorService() {
+ return executorName == null ? super.getExecutorService() : getThreadPool().executor(executorName);
+ }
+
/**
* Clears all the registered rules.
*/
diff --git a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java
index 0e5f599bfc5a3..abeff11d947b3 100644
--- a/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java
+++ b/test/framework/src/main/java/org/elasticsearch/transport/AbstractSimpleTransportTestCase.java
@@ -83,8 +83,10 @@
import static java.util.Collections.emptyMap;
import static java.util.Collections.emptySet;
+import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.equalTo;
+import static org.hamcrest.Matchers.hasToString;
import static org.hamcrest.Matchers.instanceOf;
import static org.hamcrest.Matchers.notNullValue;
import static org.hamcrest.Matchers.startsWith;
@@ -147,14 +149,14 @@ public void onNodeDisconnected(DiscoveryNode node) {
private MockTransportService buildService(final String name, final Version version, ClusterSettings clusterSettings,
Settings settings, boolean acceptRequests, boolean doHandshake) {
MockTransportService service = build(
- Settings.builder()
- .put(settings)
- .put(Node.NODE_NAME_SETTING.getKey(), name)
- .put(TransportService.TRACE_LOG_INCLUDE_SETTING.getKey(), "")
- .put(TransportService.TRACE_LOG_EXCLUDE_SETTING.getKey(), "NOTHING")
- .build(),
- version,
- clusterSettings, doHandshake);
+ Settings.builder()
+ .put(settings)
+ .put(Node.NODE_NAME_SETTING.getKey(), name)
+ .put(TransportService.TRACE_LOG_INCLUDE_SETTING.getKey(), "")
+ .put(TransportService.TRACE_LOG_EXCLUDE_SETTING.getKey(), "NOTHING")
+ .build(),
+ version,
+ clusterSettings, doHandshake);
if (acceptRequests) {
service.acceptIncomingRequests();
}
@@ -2455,8 +2457,8 @@ public void testTransportProfilesWithPortAndHost() {
.put("transport.profiles.some_profile.port", "8900-9000")
.put("transport.profiles.some_profile.bind_host", "_local:ipv4_")
.put("transport.profiles.some_other_profile.port", "8700-8800")
- .putArray("transport.profiles.some_other_profile.bind_host", hosts)
- .putArray("transport.profiles.some_other_profile.publish_host", "_local:ipv4_")
+ .putList("transport.profiles.some_other_profile.bind_host", hosts)
+ .putList("transport.profiles.some_other_profile.publish_host", "_local:ipv4_")
.build(), version0, null, true)) {
serviceC.start();
@@ -2612,4 +2614,33 @@ public void testProfilesIncludesDefault() {
assertEquals(new HashSet<>(Arrays.asList("default", "test")), profileSettings.stream().map(s -> s.profileName).collect(Collectors
.toSet()));
}
+
+ public void testChannelCloseWhileConnecting() throws IOException {
+ try (MockTransportService service = build(Settings.builder().put("name", "close").build(), version0, null, true)) {
+ service.setExecutorName(ThreadPool.Names.SAME); // make sure stuff is executed in a blocking fashion
+ service.addConnectionListener(new TransportConnectionListener() {
+ @Override
+ public void onConnectionOpened(final Transport.Connection connection) {
+ try {
+ closeConnectionChannel(service.getOriginalTransport(), connection);
+ } catch (final IOException e) {
+ throw new AssertionError(e);
+ }
+ }
+ });
+ final ConnectionProfile.Builder builder = new ConnectionProfile.Builder();
+ builder.addConnections(1,
+ TransportRequestOptions.Type.BULK,
+ TransportRequestOptions.Type.PING,
+ TransportRequestOptions.Type.RECOVERY,
+ TransportRequestOptions.Type.REG,
+ TransportRequestOptions.Type.STATE);
+ final ConnectTransportException e =
+ expectThrows(ConnectTransportException.class, () -> service.openConnection(nodeA, builder.build()));
+ assertThat(e, hasToString(containsString(("a channel closed while connecting"))));
+ }
+ }
+
+ protected abstract void closeConnectionChannel(Transport transport, Transport.Connection connection) throws IOException;
+
}
diff --git a/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java b/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java
index 29ff4219feecb..6229db7a9b62a 100644
--- a/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java
+++ b/test/framework/src/main/java/org/elasticsearch/transport/MockTcpTransport.java
@@ -176,7 +176,8 @@ private void readMessage(MockChannel mockChannel, StreamInput input) throws IOEx
}
@Override
- protected NodeChannels connectToChannels(DiscoveryNode node, ConnectionProfile profile,
+ protected NodeChannels connectToChannels(DiscoveryNode node,
+ ConnectionProfile profile,
Consumer<MockChannel> onChannelClose) throws IOException {
final MockChannel[] mockChannels = new MockChannel[1];
final NodeChannels nodeChannels = new NodeChannels(node, mockChannels, LIGHT_PROFILE); // we always use light here
diff --git a/test/framework/src/main/java/org/elasticsearch/transport/nio/NioClient.java b/test/framework/src/main/java/org/elasticsearch/transport/nio/NioClient.java
index 27ddca978786f..f877b7e9153a6 100644
--- a/test/framework/src/main/java/org/elasticsearch/transport/nio/NioClient.java
+++ b/test/framework/src/main/java/org/elasticsearch/transport/nio/NioClient.java
@@ -56,7 +56,9 @@ public NioClient(Logger logger, OpenChannels openChannels, Supplier closeListener) throws IOException {
boolean allowedToConnect = semaphore.tryAcquire();
if (allowedToConnect == false) {
diff --git a/test/framework/src/test/java/org/elasticsearch/transport/MockTcpTransportTests.java b/test/framework/src/test/java/org/elasticsearch/transport/MockTcpTransportTests.java
index b32680d9da466..b1a3a914be89e 100644
--- a/test/framework/src/test/java/org/elasticsearch/transport/MockTcpTransportTests.java
+++ b/test/framework/src/test/java/org/elasticsearch/transport/MockTcpTransportTests.java
@@ -33,6 +33,7 @@
import java.util.Collections;
public class MockTcpTransportTests extends AbstractSimpleTransportTestCase {
+
@Override
protected MockTransportService build(Settings settings, Version version, ClusterSettings clusterSettings, boolean doHandshake) {
NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(Collections.emptyList());
@@ -53,4 +54,13 @@ protected Version executeHandshake(DiscoveryNode node, MockChannel mockChannel,
mockTransportService.start();
return mockTransportService;
}
+
+ @Override
+ protected void closeConnectionChannel(Transport transport, Transport.Connection connection) throws IOException {
+ final MockTcpTransport t = (MockTcpTransport) transport;
+ @SuppressWarnings("unchecked") final TcpTransport.NodeChannels channels =
+ (TcpTransport.NodeChannels) connection;
+ t.closeChannels(channels.getChannels().subList(0, randomIntBetween(1, channels.getChannels().size())), true, false);
+ }
+
}
diff --git a/test/framework/src/test/java/org/elasticsearch/transport/nio/SimpleNioTransportTests.java b/test/framework/src/test/java/org/elasticsearch/transport/nio/SimpleNioTransportTests.java
index 2ba2e4cc02a85..f4e21f7093be1 100644
--- a/test/framework/src/test/java/org/elasticsearch/transport/nio/SimpleNioTransportTests.java
+++ b/test/framework/src/test/java/org/elasticsearch/transport/nio/SimpleNioTransportTests.java
@@ -53,7 +53,7 @@
public class SimpleNioTransportTests extends AbstractSimpleTransportTestCase {
public static MockTransportService nioFromThreadPool(Settings settings, ThreadPool threadPool, final Version version,
- ClusterSettings clusterSettings, boolean doHandshake) {
+ ClusterSettings clusterSettings, boolean doHandshake) {
NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(Collections.emptyList());
NetworkService networkService = new NetworkService(Collections.emptyList());
Transport transport = new NioTransport(settings, threadPool,
@@ -96,6 +96,13 @@ protected MockTransportService build(Settings settings, Version version, Cluster
return transportService;
}
+ @Override
+ protected void closeConnectionChannel(Transport transport, Transport.Connection connection) throws IOException {
+ final NioTransport t = (NioTransport) transport;
+ @SuppressWarnings("unchecked") TcpTransport.NodeChannels channels = (TcpTransport.NodeChannels) connection;
+ t.closeChannels(channels.getChannels().subList(0, randomIntBetween(1, channels.getChannels().size())), true, false);
+ }
+
public void testConnectException() throws UnknownHostException {
try {
serviceA.connectToNode(new DiscoveryNode("C", new TransportAddress(InetAddress.getByName("localhost"), 9876),