Upgrade to Lucene-8.10.0-snapshot-bf2fcb53079 #77801

Merged: 8 commits, Sep 16, 2021
Changes from 2 commits
build-tools-internal/version.properties (2 changes: 1 addition & 1 deletion)
@@ -1,5 +1,5 @@
 elasticsearch = 8.0.0
-lucene = 8.9.0
+lucene = 8.10.0-snapshot-75a5061d371

 bundled_jdk_vendor = adoptium
 bundled_jdk = 16.0.2+7
docs/Versions.asciidoc (4 changes: 2 additions & 2 deletions)
@@ -1,8 +1,8 @@

 include::{docs-root}/shared/versions/stack/{source_branch}.asciidoc[]

-:lucene_version: 8.9.0
-:lucene_version_path: 8_9_0
+:lucene_version: 8.10.0
+:lucene_version_path: 8_10_0
 :jdk: 11.0.2
 :jdk_major: 11
 :build_flavor: default
New one-line .sha1 file added (file name not shown in this view):
+639a558c89f6421ac36bfe7d3b0e9960fafe1ceb
A file was deleted (name not shown).
@@ -81,7 +81,7 @@ static CharacterRunAutomaton buildRemoteWhitelist(List<String> whitelist) {
         return new CharacterRunAutomaton(Automata.makeEmpty());
     }
     Automaton automaton = Regex.simpleMatchToAutomaton(whitelist.toArray(Strings.EMPTY_ARRAY));
-    automaton = MinimizationOperations.minimize(automaton, Operations.DEFAULT_MAX_DETERMINIZED_STATES);
+    automaton = MinimizationOperations.minimize(automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT);
     if (Operations.isTotal(automaton)) {
         throw new IllegalArgumentException("Refusing to start because whitelist " + whitelist + " accepts all addresses. "
             + "This would allow users to reindex-from-remote any URL they like effectively having Elasticsearch make HTTP GETs "
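Note: Lucene 8.10 renames Operations.DEFAULT_MAX_DETERMINIZED_STATES to Operations.DEFAULT_DETERMINIZE_WORK_LIMIT, and this PR substitutes the new name one-for-one at call sites like the one above. A minimal standalone sketch (not part of the PR; the pattern is invented) of the before and after:

import org.apache.lucene.util.automaton.Automaton;
import org.apache.lucene.util.automaton.CharacterRunAutomaton;
import org.apache.lucene.util.automaton.MinimizationOperations;
import org.apache.lucene.util.automaton.Operations;
import org.apache.lucene.util.automaton.RegExp;

public final class WorkLimitRenameSketch {
    public static void main(String[] args) {
        // Hypothetical whitelist-style pattern, compiled to an automaton.
        Automaton a = new RegExp("host-[0-9]+\\.example\\.com").toAutomaton();

        // Lucene 8.9 and earlier:
        //   a = MinimizationOperations.minimize(a, Operations.DEFAULT_MAX_DETERMINIZED_STATES);
        // Lucene 8.10 snapshot used by this PR (renamed constant):
        a = MinimizationOperations.minimize(a, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT);

        // The minimized automaton can then back a fast character-level matcher.
        CharacterRunAutomaton matcher = new CharacterRunAutomaton(a);
        System.out.println(matcher.run("host-42.example.com")); // prints: true
    }
}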
Seven more one-line .sha1 files were added (file names not shown in this view), each followed by a deleted file (names not shown):
+78b9e72a7c176e7279f18cbe75fc883abe1ffc66
+54b71fbf7fa4f2aeb71e257e9efcddf5230a0e3a
+6b1d8d793338eba731192b8f4fac97b8734e7c88
+d0aa6a87a13e96d7a794c03a067c34e885970b18
+72484aa38b5efe737c94a9a1fd0baefc5f02e7ab
+664db5089998ada43fe58118da83aec398f1754a
+9f44c087bf5ccd828549875e848e480adc65cb19
Updated Lucene checksum files under server/licenses. Each line below shows the new one-line .sha1 contents (new file name not shown in this view) and, in the order they appear, the 8.9.0 .sha1 file that was deleted:
+8277b71394c6a5d9fef83bce93d5616f4dace1ba   (deleted: server/licenses/lucene-analyzers-common-8.9.0.jar.sha1)
+217199dadaf8711c08343415b8d775caa8bdbdc9   (deleted: server/licenses/lucene-backward-codecs-8.9.0.jar.sha1)
+91129ced2b3003ac9733f70f22dc47e4cc5757ac   (deleted: server/licenses/lucene-core-8.9.0.jar.sha1)
+33faa8f620ff90585a709e3a864ae252463f2b55   (deleted: server/licenses/lucene-grouping-8.9.0.jar.sha1)
+03b1a9d0962b871b3d86bcaace55f727cff26d40   (deleted: server/licenses/lucene-highlighter-8.9.0.jar.sha1)
+a57536fa6f1430fe26ebfce3d52f525c8c6c07ac   (deleted: server/licenses/lucene-join-8.9.0.jar.sha1)
+ee299c167c0aec42373b572c114bd9fee8fc680c   (deleted: server/licenses/lucene-memory-8.9.0.jar.sha1)
+2a4d399ec8acb279593958254ac3f063402f5340   (deleted: server/licenses/lucene-misc-8.9.0.jar.sha1)
+ded48e4c368135941198e50f444ac06c44adbcd2   (deleted: server/licenses/lucene-queries-8.9.0.jar.sha1)
+c5345617ad0ed9e422b620c9c535fde99fdb6d8c   (deleted: server/licenses/lucene-queryparser-8.9.0.jar.sha1)
+a5fac34374f9689cda0827439a5ca5684ec04804   (deleted: server/licenses/lucene-sandbox-8.9.0.jar.sha1)
+824342c50ebfad3561f28da2b9b31cba08cf2dc3   (deleted: server/licenses/lucene-spatial-extras-8.9.0.jar.sha1)
+ab7d41ad539ab1d1910cb14bba2ae7a2973352ce   (deleted: server/licenses/lucene-spatial3d-8.9.0.jar.sha1)
+8898c7243e29c8b7a2e89563993b2ee814420db5   (deleted: server/licenses/lucene-suggest-8.9.0.jar.sha1)
server/src/main/java/org/elasticsearch/Version.java (2 changes: 1 addition & 1 deletion)
@@ -90,7 +90,7 @@ public class Version implements Comparable<Version>, ToXContentFragment {
     public static final Version V_7_14_2 = new Version(7140299, org.apache.lucene.util.Version.LUCENE_8_9_0);
     public static final Version V_7_15_0 = new Version(7150099, org.apache.lucene.util.Version.LUCENE_8_9_0);
     public static final Version V_7_16_0 = new Version(7160099, org.apache.lucene.util.Version.LUCENE_8_9_0);
-    public static final Version V_8_0_0 = new Version(8000099, org.apache.lucene.util.Version.LUCENE_8_9_0);
+    public static final Version V_8_0_0 = new Version(8000099, org.apache.lucene.util.Version.LUCENE_8_10_0);
     public static final Version CURRENT = V_8_0_0;

     private static final ImmutableOpenIntMap<Version> idToVersion;
@@ -47,7 +47,7 @@ public class QueryStringQueryBuilder extends AbstractQueryBuilder<QueryStringQue

     public static final String NAME = "query_string";

-    public static final int DEFAULT_MAX_DETERMINED_STATES = Operations.DEFAULT_MAX_DETERMINIZED_STATES;
+    public static final int DEFAULT_MAX_DETERMINED_STATES = Operations.DEFAULT_DETERMINIZE_WORK_LIMIT;
     public static final boolean DEFAULT_ENABLE_POSITION_INCREMENTS = true;
     public static final boolean DEFAULT_ESCAPE = false;
     public static final int DEFAULT_FUZZY_PREFIX_LENGTH = FuzzyQuery.defaultPrefixLength;

@@ -887,7 +887,7 @@ protected Query doToQuery(SearchExecutionContext context) throws IOException {
         queryParser.setFuzzyRewriteMethod(QueryParsers.parseRewriteMethod(this.fuzzyRewrite, LoggingDeprecationHandler.INSTANCE));
         queryParser.setMultiTermRewriteMethod(QueryParsers.parseRewriteMethod(this.rewrite, LoggingDeprecationHandler.INSTANCE));
         queryParser.setTimeZone(timeZone);
-        queryParser.setMaxDeterminizedStates(maxDeterminizedStates);
+        queryParser.setDeterminizeWorkLimit(maxDeterminizedStates);
         queryParser.setAutoGenerateMultiTermSynonymsPhraseQuery(autoGenerateSynonymsPhraseQuery);
         queryParser.setFuzzyTranspositions(fuzzyTranspositions);
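Note: the second hunk also swaps the parser setter. A minimal standalone sketch of the renamed setter on Lucene's classic query parser (not part of the PR; the field name and query string are invented, and the setter name is assumed to follow the same 8.10 rename used above):

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.queryparser.classic.QueryParser;
import org.apache.lucene.search.Query;

public final class ParserWorkLimitSketch {
    public static void main(String[] args) throws Exception {
        QueryParser parser = new QueryParser("message", new StandardAnalyzer());
        // Before Lucene 8.10: parser.setMaxDeterminizedStates(10000);
        parser.setDeterminizeWorkLimit(10000); // bounds the regexp/wildcard-to-automaton conversion
        Query q = parser.parse("message:/err.*/"); // regexp term parsed from the query string
        System.out.println(q);
    }
}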
@@ -38,7 +38,7 @@ public class RegexpQueryBuilder extends AbstractQueryBuilder<RegexpQueryBuilder>
     public static final String NAME = "regexp";

     public static final int DEFAULT_FLAGS_VALUE = RegexpFlag.ALL.value();
-    public static final int DEFAULT_MAX_DETERMINIZED_STATES = Operations.DEFAULT_MAX_DETERMINIZED_STATES;
+    public static final int DEFAULT_MAX_DETERMINIZED_STATES = Operations.DEFAULT_DETERMINIZE_WORK_LIMIT;
     public static final boolean DEFAULT_CASE_INSENSITIVITY = false;

     private static final ParseField FLAGS_VALUE_FIELD = new ParseField("flags_value");
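Note: this builder's default ultimately flows into Lucene's RegexpQuery. A minimal standalone sketch (not part of the PR; the field and pattern are invented) of constructing such a query with the renamed limit:

import org.apache.lucene.index.Term;
import org.apache.lucene.search.RegexpQuery;
import org.apache.lucene.util.automaton.Operations;
import org.apache.lucene.util.automaton.RegExp;

public final class RegexpLimitSketch {
    public static void main(String[] args) {
        // The third argument caps the effort spent turning the pattern into an automaton,
        // so overly complex patterns fail fast instead of consuming unbounded CPU and heap.
        RegexpQuery query = new RegexpQuery(
            new Term("message", "err(or)?[0-9]{1,3}"),
            RegExp.ALL,
            Operations.DEFAULT_DETERMINIZE_WORK_LIMIT);
        System.out.println(query);
    }
}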
@@ -731,7 +731,7 @@ private Query getRegexpQuerySingle(String field, String termStr) throws ParseExc
             setAnalyzer(forceAnalyzer);
             return super.getRegexpQuery(field, termStr);
         }
-        return currentFieldType.regexpQuery(termStr, RegExp.ALL, 0, getMaxDeterminizedStates(),
+        return currentFieldType.regexpQuery(termStr, RegExp.ALL, 0, getDeterminizeWorkLimit(),
             getMultiTermRewriteMethod(), context);
     } catch (RuntimeException e) {
         if (lenient) {
@@ -576,7 +576,7 @@ private Automaton toAutomaton() {
             a = Automata.makeAnyString();
         }
         if (exclude != null) {
-            a = Operations.minus(a, exclude.toAutomaton(), Operations.DEFAULT_MAX_DETERMINIZED_STATES);
+            a = Operations.minus(a, exclude.toAutomaton(), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT);
         }
         return a;
     }
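Note: this hunk is the include/exclude automaton used by terms aggregations; the rename applies to Operations.minus as well. A small standalone sketch (not part of the PR; the patterns are invented) of subtracting an exclude automaton from an include automaton under the work limit:

import org.apache.lucene.util.automaton.Automaton;
import org.apache.lucene.util.automaton.CharacterRunAutomaton;
import org.apache.lucene.util.automaton.Operations;
import org.apache.lucene.util.automaton.RegExp;

public final class IncludeExcludeSketch {
    public static void main(String[] args) {
        Automaton include = new RegExp("user_.*").toAutomaton();
        Automaton exclude = new RegExp("user_internal_.*").toAutomaton();

        // minus() determinizes internally; the third argument bounds that work.
        Automaton allowed = Operations.minus(include, exclude, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT);

        CharacterRunAutomaton matcher = new CharacterRunAutomaton(allowed);
        System.out.println(matcher.run("user_alice"));          // true
        System.out.println(matcher.run("user_internal_probe")); // false
    }
}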
@@ -197,7 +197,7 @@ public static class Builder {
         private int fuzzyMinLength = FuzzyCompletionQuery.DEFAULT_MIN_FUZZY_LENGTH;
         private int fuzzyPrefixLength = FuzzyCompletionQuery.DEFAULT_NON_FUZZY_PREFIX;
         private boolean unicodeAware = FuzzyCompletionQuery.DEFAULT_UNICODE_AWARE;
-        private int maxDeterminizedStates = Operations.DEFAULT_MAX_DETERMINIZED_STATES;
+        private int maxDeterminizedStates = Operations.DEFAULT_DETERMINIZE_WORK_LIMIT;

         public Builder() {
         }
@@ -131,7 +131,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws
     */
    public static class Builder {
        private int flagsValue = RegExp.ALL;
-       private int maxDeterminizedStates = Operations.DEFAULT_MAX_DETERMINIZED_STATES;
+       private int maxDeterminizedStates = Operations.DEFAULT_DETERMINIZE_WORK_LIMIT;

        public Builder() {
        }
@@ -28,7 +28,7 @@ public void testPrefixQueryIsError() {
     public void testRegexpQueryIsError() {
         assertQueryOnlyOnTextAndKeyword(
             "regexp",
-            () -> simpleMappedFieldType().regexpQuery("cat", 0, 0, Operations.DEFAULT_MAX_DETERMINIZED_STATES, null, mockContext())
+            () -> simpleMappedFieldType().regexpQuery("cat", 0, 0, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT, null, mockContext())
         );
     }
@@ -725,7 +725,7 @@ public void testFuzzyQueryType() throws Exception {
         CompletionFieldMapper completionFieldMapper = (CompletionFieldMapper) fieldMapper;
         Query prefixQuery = completionFieldMapper.fieldType().fuzzyQuery("co",
             Fuzziness.fromEdits(FuzzyCompletionQuery.DEFAULT_MAX_EDITS), FuzzyCompletionQuery.DEFAULT_NON_FUZZY_PREFIX,
-            FuzzyCompletionQuery.DEFAULT_MIN_FUZZY_LENGTH, Operations.DEFAULT_MAX_DETERMINIZED_STATES,
+            FuzzyCompletionQuery.DEFAULT_MIN_FUZZY_LENGTH, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT,
             FuzzyCompletionQuery.DEFAULT_TRANSPOSITIONS, FuzzyCompletionQuery.DEFAULT_UNICODE_AWARE);
         assertThat(prefixQuery, instanceOf(FuzzyCompletionQuery.class));
     }

@@ -735,7 +735,7 @@ public void testRegexQueryType() throws Exception {
         Mapper fieldMapper = defaultMapper.mappers().getMapper("field");
         CompletionFieldMapper completionFieldMapper = (CompletionFieldMapper) fieldMapper;
         Query prefixQuery = completionFieldMapper.fieldType()
-            .regexpQuery(new BytesRef("co"), RegExp.ALL, Operations.DEFAULT_MAX_DETERMINIZED_STATES);
+            .regexpQuery(new BytesRef("co"), RegExp.ALL, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT);
         assertThat(prefixQuery, instanceOf(RegexCompletionQuery.class));
     }
@@ -263,7 +263,7 @@ public void testRegexpQuery() throws IOException {
         IndexSearcher searcher = newSearcher(reader);
         assertThat(
             searcher.count(
-                simpleMappedFieldType().regexpQuery("ca.+", 0, 0, Operations.DEFAULT_MAX_DETERMINIZED_STATES, null, mockContext())
+                simpleMappedFieldType().regexpQuery("ca.+", 0, 0, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT, null, mockContext())
             ),
             equalTo(2)
         );
@@ -30,7 +30,7 @@ protected StringScriptFieldRegexpQuery createTestInstance() {
             randomAlphaOfLength(6),
             randomInt(RegExp.ALL),
             matchFlags,
-            Operations.DEFAULT_MAX_DETERMINIZED_STATES
+            Operations.DEFAULT_DETERMINIZE_WORK_LIMIT
         );
     }

@@ -43,7 +43,7 @@ protected StringScriptFieldRegexpQuery copy(StringScriptFieldRegexpQuery orig) {
             orig.pattern(),
             orig.syntaxFlags(),
             orig.matchFlags(),
-            Operations.DEFAULT_MAX_DETERMINIZED_STATES
+            Operations.DEFAULT_DETERMINIZE_WORK_LIMIT
         );
     }

@@ -80,7 +80,7 @@ protected StringScriptFieldRegexpQuery mutate(StringScriptFieldRegexpQuery orig) {
             pattern,
             syntaxFlags,
             matchFlags,
-            Operations.DEFAULT_MAX_DETERMINIZED_STATES
+            Operations.DEFAULT_DETERMINIZE_WORK_LIMIT
         );
     }

@@ -93,7 +93,7 @@ public void testMatches() {
             "a.+b",
             0,
             0,
-            Operations.DEFAULT_MAX_DETERMINIZED_STATES
+            Operations.DEFAULT_DETERMINIZE_WORK_LIMIT
         );
         assertTrue(query.matches(List.of("astuffb")));
         assertFalse(query.matches(List.of("astuffB")));

@@ -110,7 +110,7 @@ public void testMatches() {
             "a.+b",
             0,
             RegExp.ASCII_CASE_INSENSITIVE,
-            Operations.DEFAULT_MAX_DETERMINIZED_STATES
+            Operations.DEFAULT_DETERMINIZE_WORK_LIMIT
         );
         assertTrue(ciQuery.matches(List.of("astuffB")));
         assertTrue(ciQuery.matches(List.of("Astuffb", "fffff")));

@@ -131,7 +131,7 @@ public void testVisit() {
             "a.+b",
             0,
             0,
-            Operations.DEFAULT_MAX_DETERMINIZED_STATES
+            Operations.DEFAULT_DETERMINIZE_WORK_LIMIT
         );
         ByteRunAutomaton automaton = visitForSingleAutomata(query);
         BytesRef term = new BytesRef("astuffb");
@@ -169,8 +169,8 @@ public static Automaton buildPermittedFieldsAutomaton(final String[] grantedFiel
             deniedFieldsAutomaton = Automatons.patterns(deniedFields);
         }

-        grantedFieldsAutomaton = MinimizationOperations.minimize(grantedFieldsAutomaton, Operations.DEFAULT_MAX_DETERMINIZED_STATES);
-        deniedFieldsAutomaton = MinimizationOperations.minimize(deniedFieldsAutomaton, Operations.DEFAULT_MAX_DETERMINIZED_STATES);
+        grantedFieldsAutomaton = MinimizationOperations.minimize(grantedFieldsAutomaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT);
+        deniedFieldsAutomaton = MinimizationOperations.minimize(deniedFieldsAutomaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT);

         if (subsetOf(deniedFieldsAutomaton, grantedFieldsAutomaton) == false) {
             throw new ElasticsearchSecurityException("Exceptions for field permissions must be a subset of the " +
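Note: the code above minimizes both automata and then checks that the denied set is contained in the granted set. A standalone sketch of that containment check (not part of the PR; the field patterns are invented):

import org.apache.lucene.util.automaton.Automaton;
import org.apache.lucene.util.automaton.MinimizationOperations;
import org.apache.lucene.util.automaton.Operations;
import org.apache.lucene.util.automaton.RegExp;

public final class FieldPermissionSketch {
    public static void main(String[] args) {
        Automaton granted = new RegExp("event\\..*").toAutomaton();
        Automaton denied = new RegExp("event\\.secret\\..*").toAutomaton();

        // subsetOf expects deterministic automata, so minimize (which determinizes) first.
        granted = MinimizationOperations.minimize(granted, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT);
        denied = MinimizationOperations.minimize(denied, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT);

        // true: every denied field name is also a granted field name, so the exception is valid.
        System.out.println(Operations.subsetOf(denied, granted));
    }
}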
@@ -29,7 +29,7 @@
 import java.util.function.Function;
 import java.util.function.Predicate;

-import static org.apache.lucene.util.automaton.Operations.DEFAULT_MAX_DETERMINIZED_STATES;
+import static org.apache.lucene.util.automaton.Operations.DEFAULT_DETERMINIZE_WORK_LIMIT;
 import static org.apache.lucene.util.automaton.Operations.concatenate;
 import static org.apache.lucene.util.automaton.Operations.intersection;
 import static org.apache.lucene.util.automaton.Operations.minus;

@@ -39,7 +39,7 @@
 public final class Automatons {

     static final Setting<Integer> MAX_DETERMINIZED_STATES_SETTING =
-        Setting.intSetting("xpack.security.automata.max_determinized_states", 100000, DEFAULT_MAX_DETERMINIZED_STATES,
+        Setting.intSetting("xpack.security.automata.max_determinized_states", 100000, DEFAULT_DETERMINIZE_WORK_LIMIT,
             Setting.Property.NodeScope);

     static final Setting<Boolean> CACHE_ENABLED =
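Note: in Setting.intSetting(key, defaultValue, minValue, properties) the third argument is the minimum accepted value, so the renamed Lucene constant becomes the lower bound of this node setting while the Elasticsearch default stays 100000. A standalone sketch (not part of the PR; the override value is invented) of declaring and reading such a setting:

import org.apache.lucene.util.automaton.Operations;
import org.elasticsearch.common.settings.Setting;
import org.elasticsearch.common.settings.Settings;

public final class AutomataSettingSketch {
    static final Setting<Integer> MAX_DETERMINIZED_STATES_SETTING =
        Setting.intSetting("xpack.security.automata.max_determinized_states",
            100000,                                    // default
            Operations.DEFAULT_DETERMINIZE_WORK_LIMIT, // minimum accepted value
            Setting.Property.NodeScope);

    public static void main(String[] args) {
        Settings nodeSettings = Settings.builder()
            .put("xpack.security.automata.max_determinized_states", 150000) // hypothetical override
            .build();
        System.out.println(MAX_DETERMINIZED_STATES_SETTING.get(nodeSettings)); // 150000
    }
}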
@@ -645,7 +645,7 @@ public void testSourceFiltering() {
         // exclude on exact value
         include = new CharacterRunAutomaton(Operations.minus(
             Automata.makeAnyString(), Automatons.patterns("foo.bar"),
-            Operations.DEFAULT_MAX_DETERMINIZED_STATES));
+            Operations.DEFAULT_DETERMINIZE_WORK_LIMIT));
         filtered = FieldSubsetReader.filter(map, include, 0);
         expected = new HashMap<>();
         expected.put("bar", "baz");

@@ -656,7 +656,7 @@ public void testSourceFiltering() {
         // exclude on wildcard
         include = new CharacterRunAutomaton(Operations.minus(
             Automata.makeAnyString(), Automatons.patterns("foo.*"),
-            Operations.DEFAULT_MAX_DETERMINIZED_STATES));
+            Operations.DEFAULT_DETERMINIZE_WORK_LIMIT));
         filtered = FieldSubsetReader.filter(map, include, 0);
         expected = Collections.singletonMap("bar", "baz");

@@ -696,7 +696,7 @@ public void testSourceFiltering() {
         // exclude on inner array
         include = new CharacterRunAutomaton(Operations.minus(
             Automata.makeAnyString(), Automatons.patterns("foo.baz"),
-            Operations.DEFAULT_MAX_DETERMINIZED_STATES));
+            Operations.DEFAULT_DETERMINIZE_WORK_LIMIT));
         filtered = FieldSubsetReader.filter(map, include, 0);
         expected = new HashMap<>();
         subArray = new ArrayList<>();

@@ -711,7 +711,7 @@ public void testSourceFiltering() {
         // exclude on inner array 2
         include = new CharacterRunAutomaton(Operations.minus(
             Automata.makeAnyString(), Automatons.patterns("foo"),
-            Operations.DEFAULT_MAX_DETERMINIZED_STATES));
+            Operations.DEFAULT_DETERMINIZE_WORK_LIMIT));
         filtered = FieldSubsetReader.filter(map, include, 0);
         expected = new HashMap<>();
         subArray = new ArrayList<>();