Upgrade to Lucene-8.10.0-snapshot-bf2fcb53079 #77801

Merged: 8 commits, merged on Sep 16, 2021
2 changes: 1 addition & 1 deletion build-tools-internal/version.properties
@@ -1,5 +1,5 @@
elasticsearch = 8.0.0
lucene = 8.9.0
lucene = 8.10.0-snapshot-bf2fcb53079

bundled_jdk_vendor = adoptium
bundled_jdk = 16.0.2+7
4 changes: 2 additions & 2 deletions docs/Versions.asciidoc
@@ -1,8 +1,8 @@

include::{docs-root}/shared/versions/stack/{source_branch}.asciidoc[]

:lucene_version: 8.9.0
:lucene_version_path: 8_9_0
:lucene_version: 8.10.0
:lucene_version_path: 8_10_0
:jdk: 11.0.2
:jdk_major: 11
:build_flavor: default
@@ -13,6 +13,8 @@
import org.apache.lucene.analysis.miscellaneous.LimitTokenCountFilterFactory;
import org.apache.lucene.analysis.reverse.ReverseStringFilterFactory;
import org.apache.lucene.analysis.snowball.SnowballPorterFilterFactory;
import org.apache.lucene.analysis.te.TeluguNormalizationFilterFactory;
import org.apache.lucene.analysis.te.TeluguStemFilterFactory;
import org.elasticsearch.indices.analysis.AnalysisFactoryTestCase;

import java.util.List;
@@ -129,6 +131,8 @@ protected Map<String, Class<?>> getTokenFilters() {
filters.put("brazilianstem", BrazilianStemTokenFilterFactory.class);
filters.put("czechstem", CzechStemTokenFilterFactory.class);
filters.put("germanstem", GermanStemTokenFilterFactory.class);
filters.put("telugunormalization", TeluguNormalizationFilterFactory.class);
filters.put("telugustem", TeluguStemFilterFactory.class);
// this filter is not exposed and should only be used internally
filters.put("fixedshingle", Void.class);
return filters;
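The two new imports come from the Telugu analysis support that ships with the Lucene 8.10 snapshot, which is why the factory test above now registers telugunormalization and telugustem. Below is a minimal sketch of wiring the new factories together directly, assuming they follow Lucene's usual Map-based TokenFilterFactory constructor; the class and method names in the sketch are illustrative, not part of this PR.

import java.io.Reader;
import java.util.HashMap;

import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.standard.StandardTokenizer;
import org.apache.lucene.analysis.te.TeluguNormalizationFilterFactory;
import org.apache.lucene.analysis.te.TeluguStemFilterFactory;

final class TeluguFilterChainSketch {
    // Builds a normalization + stemming chain with the new Lucene 8.10 Telugu factories.
    // An empty, mutable settings map means "use the factory defaults".
    static TokenStream teluguChain(Reader input) {
        StandardTokenizer tokenizer = new StandardTokenizer();
        tokenizer.setReader(input);
        TokenStream ts = new TeluguNormalizationFilterFactory(new HashMap<>()).create(tokenizer);
        return new TeluguStemFilterFactory(new HashMap<>()).create(ts);
    }
}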
@@ -0,0 +1 @@
97f27306f1817475b30e52f48b1407bc5d696d59

This file was deleted.

@@ -81,7 +81,7 @@ static CharacterRunAutomaton buildRemoteWhitelist(List<String> whitelist) {
return new CharacterRunAutomaton(Automata.makeEmpty());
}
Automaton automaton = Regex.simpleMatchToAutomaton(whitelist.toArray(Strings.EMPTY_ARRAY));
automaton = MinimizationOperations.minimize(automaton, Operations.DEFAULT_MAX_DETERMINIZED_STATES);
automaton = MinimizationOperations.minimize(automaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT);
if (Operations.isTotal(automaton)) {
throw new IllegalArgumentException("Refusing to start because whitelist " + whitelist + " accepts all addresses. "
+ "This would allow users to reindex-from-remote any URL they like effectively having Elasticsearch make HTTP GETs "
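The only change in this hunk is the renamed Lucene constant: the determinization budget that 8.9 expressed as a maximum number of states (DEFAULT_MAX_DETERMINIZED_STATES) is expressed in the 8.10 snapshot as a generic work limit (DEFAULT_DETERMINIZE_WORK_LIMIT). A minimal sketch of the call shape, using only the Lucene automaton classes already shown in this diff; the helper class is illustrative.

import org.apache.lucene.util.automaton.Automaton;
import org.apache.lucene.util.automaton.MinimizationOperations;
import org.apache.lucene.util.automaton.Operations;
import org.apache.lucene.util.automaton.RegExp;

final class DeterminizeLimitSketch {
    // Minimizes the automaton for a regular expression, capping how much work
    // determinization may perform with the renamed 8.10 constant.
    static Automaton minimized(String pattern) {
        Automaton a = new RegExp(pattern).toAutomaton();
        return MinimizationOperations.minimize(a, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT);
    }
}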
@@ -0,0 +1 @@
97560884369643ba928b7e683e092699330c4f5b

This file was deleted.

@@ -0,0 +1 @@
3bc69fe29001399cfc98ddbd9f1d27864fa6a4a1

This file was deleted.

@@ -0,0 +1 @@
497cdb3e2b5dd63d735cd60140f7c18533f3c401

This file was deleted.

@@ -0,0 +1 @@
4fa14c42bf5864d28a7abfe197e79cf89b195c1b

This file was deleted.

@@ -0,0 +1 @@
18971c579487a7c55a65b055fec07d9286ed49ce

This file was deleted.

@@ -0,0 +1 @@
6fcf11017c3bce6e33d5cda04e3ef49560baa2a2

This file was deleted.

@@ -0,0 +1 @@
af5750b21d1d38352b49561c63b039723911a0d4

This file was deleted.

@@ -0,0 +1 @@
eb63f6ecd58a7e27a02b533b9c1e6cdb68f506fc
1 change: 0 additions & 1 deletion server/licenses/lucene-analyzers-common-8.9.0.jar.sha1

This file was deleted.

@@ -0,0 +1 @@
2482a84e5e26a3eaf0bd7c5a77efc60435c7f688
1 change: 0 additions & 1 deletion server/licenses/lucene-backward-codecs-8.9.0.jar.sha1

This file was deleted.

@@ -0,0 +1 @@
f33b45dbbce59e727e5a82dddab48c8c7681e25b
1 change: 0 additions & 1 deletion server/licenses/lucene-core-8.9.0.jar.sha1

This file was deleted.

@@ -0,0 +1 @@
4020eb4e53d759fa11819b8d6b6a49422f51abe8
1 change: 0 additions & 1 deletion server/licenses/lucene-grouping-8.9.0.jar.sha1

This file was deleted.

@@ -0,0 +1 @@
b2c9682be68699860aeeb9b048665ab6cf259c7c
1 change: 0 additions & 1 deletion server/licenses/lucene-highlighter-8.9.0.jar.sha1

This file was deleted.

@@ -0,0 +1 @@
be4f561159763de6a28d47b463438331a96c31c9
1 change: 0 additions & 1 deletion server/licenses/lucene-join-8.9.0.jar.sha1

This file was deleted.

@@ -0,0 +1 @@
8fc0745c4a589cdfbd56d156236fd91dbab0dacb
1 change: 0 additions & 1 deletion server/licenses/lucene-memory-8.9.0.jar.sha1

This file was deleted.

@@ -0,0 +1 @@
7089f903268271acf6eb668918ac51f0cba15213
1 change: 0 additions & 1 deletion server/licenses/lucene-misc-8.9.0.jar.sha1

This file was deleted.

@@ -0,0 +1 @@
35a4945ac05c2aeb0c9e244098827fd7aeea1858
1 change: 0 additions & 1 deletion server/licenses/lucene-queries-8.9.0.jar.sha1

This file was deleted.

@@ -0,0 +1 @@
3d0929b7a5a2ba7f83d0357553f240f6d8362446
1 change: 0 additions & 1 deletion server/licenses/lucene-queryparser-8.9.0.jar.sha1

This file was deleted.

@@ -0,0 +1 @@
d1d6696a4857bb580f6fc4a93bd3307effddd736
1 change: 0 additions & 1 deletion server/licenses/lucene-sandbox-8.9.0.jar.sha1

This file was deleted.

@@ -0,0 +1 @@
bc52ac3d5fed41fde8b7ad95c7d5ce703b90377f
1 change: 0 additions & 1 deletion server/licenses/lucene-spatial-extras-8.9.0.jar.sha1

This file was deleted.

@@ -0,0 +1 @@
57d8cc36815cf45eb16d43648c8d2a5b251b4e62
1 change: 0 additions & 1 deletion server/licenses/lucene-spatial3d-8.9.0.jar.sha1

This file was deleted.

@@ -0,0 +1 @@
17e5a74d57ecb861e93c3cfbf4feb356a0571bbf
1 change: 0 additions & 1 deletion server/licenses/lucene-suggest-8.9.0.jar.sha1

This file was deleted.

2 changes: 1 addition & 1 deletion server/src/main/java/org/elasticsearch/Version.java
@@ -90,7 +90,7 @@ public class Version implements Comparable<Version>, ToXContentFragment {
public static final Version V_7_14_2 = new Version(7140299, org.apache.lucene.util.Version.LUCENE_8_9_0);
public static final Version V_7_15_0 = new Version(7150099, org.apache.lucene.util.Version.LUCENE_8_9_0);
public static final Version V_7_16_0 = new Version(7160099, org.apache.lucene.util.Version.LUCENE_8_9_0);
public static final Version V_8_0_0 = new Version(8000099, org.apache.lucene.util.Version.LUCENE_8_9_0);
public static final Version V_8_0_0 = new Version(8000099, org.apache.lucene.util.Version.LUCENE_8_10_0);
public static final Version CURRENT = V_8_0_0;

private static final ImmutableOpenIntMap<Version> idToVersion;
@@ -47,7 +47,7 @@ public class QueryStringQueryBuilder extends AbstractQueryBuilder<QueryStringQueryBuilder> {

public static final String NAME = "query_string";

public static final int DEFAULT_MAX_DETERMINED_STATES = Operations.DEFAULT_MAX_DETERMINIZED_STATES;
public static final int DEFAULT_MAX_DETERMINED_STATES = Operations.DEFAULT_DETERMINIZE_WORK_LIMIT;
public static final boolean DEFAULT_ENABLE_POSITION_INCREMENTS = true;
public static final boolean DEFAULT_ESCAPE = false;
public static final int DEFAULT_FUZZY_PREFIX_LENGTH = FuzzyQuery.defaultPrefixLength;
@@ -887,7 +887,7 @@ protected Query doToQuery(SearchExecutionContext context) throws IOException {
queryParser.setFuzzyRewriteMethod(QueryParsers.parseRewriteMethod(this.fuzzyRewrite, LoggingDeprecationHandler.INSTANCE));
queryParser.setMultiTermRewriteMethod(QueryParsers.parseRewriteMethod(this.rewrite, LoggingDeprecationHandler.INSTANCE));
queryParser.setTimeZone(timeZone);
queryParser.setMaxDeterminizedStates(maxDeterminizedStates);
queryParser.setDeterminizeWorkLimit(maxDeterminizedStates);
queryParser.setAutoGenerateMultiTermSynonymsPhraseQuery(autoGenerateSynonymsPhraseQuery);
queryParser.setFuzzyTranspositions(fuzzyTranspositions);

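Alongside the constant, the query parser setter is renamed in 8.10: setMaxDeterminizedStates becomes setDeterminizeWorkLimit (with a matching getDeterminizeWorkLimit used further down). Elasticsearch keeps its own maxDeterminizedStates field and request parameter; only the call into Lucene changes. A small sketch against Lucene's classic QueryParser, assuming it exposes the same renamed setter as the parser used here; the field name and query text are placeholders.

import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.queryparser.classic.QueryParser;
import org.apache.lucene.search.Query;
import org.apache.lucene.util.automaton.Operations;

final class QueryParserWorkLimitSketch {
    // Configures the regexp/wildcard determinization budget through the renamed setter,
    // then parses the supplied query text against a placeholder field.
    static Query parse(String queryText) throws Exception {
        QueryParser parser = new QueryParser("field", new StandardAnalyzer());
        parser.setDeterminizeWorkLimit(Operations.DEFAULT_DETERMINIZE_WORK_LIMIT);
        return parser.parse(queryText);
    }
}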
@@ -38,7 +38,7 @@ public class RegexpQueryBuilder extends AbstractQueryBuilder<RegexpQueryBuilder>
public static final String NAME = "regexp";

public static final int DEFAULT_FLAGS_VALUE = RegexpFlag.ALL.value();
public static final int DEFAULT_MAX_DETERMINIZED_STATES = Operations.DEFAULT_MAX_DETERMINIZED_STATES;
public static final int DEFAULT_MAX_DETERMINIZED_STATES = Operations.DEFAULT_DETERMINIZE_WORK_LIMIT;
public static final boolean DEFAULT_CASE_INSENSITIVITY = false;

private static final ParseField FLAGS_VALUE_FIELD = new ParseField("flags_value");
@@ -731,7 +731,7 @@ private Query getRegexpQuerySingle(String field, String termStr) throws ParseException {
setAnalyzer(forceAnalyzer);
return super.getRegexpQuery(field, termStr);
}
return currentFieldType.regexpQuery(termStr, RegExp.ALL, 0, getMaxDeterminizedStates(),
return currentFieldType.regexpQuery(termStr, RegExp.ALL, 0, getDeterminizeWorkLimit(),
getMultiTermRewriteMethod(), context);
} catch (RuntimeException e) {
if (lenient) {
@@ -576,7 +576,7 @@ private Automaton toAutomaton() {
a = Automata.makeAnyString();
}
if (exclude != null) {
a = Operations.minus(a, exclude.toAutomaton(), Operations.DEFAULT_MAX_DETERMINIZED_STATES);
a = Operations.minus(a, exclude.toAutomaton(), Operations.DEFAULT_DETERMINIZE_WORK_LIMIT);
}
return a;
}
@@ -197,7 +197,7 @@ public static class Builder {
private int fuzzyMinLength = FuzzyCompletionQuery.DEFAULT_MIN_FUZZY_LENGTH;
private int fuzzyPrefixLength = FuzzyCompletionQuery.DEFAULT_NON_FUZZY_PREFIX;
private boolean unicodeAware = FuzzyCompletionQuery.DEFAULT_UNICODE_AWARE;
private int maxDeterminizedStates = Operations.DEFAULT_MAX_DETERMINIZED_STATES;
private int maxDeterminizedStates = Operations.DEFAULT_DETERMINIZE_WORK_LIMIT;

public Builder() {
}
@@ -131,7 +131,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws IOException {
*/
public static class Builder {
private int flagsValue = RegExp.ALL;
private int maxDeterminizedStates = Operations.DEFAULT_MAX_DETERMINIZED_STATES;
private int maxDeterminizedStates = Operations.DEFAULT_DETERMINIZE_WORK_LIMIT;

public Builder() {
}
@@ -28,7 +28,7 @@ public void testPrefixQueryIsError() {
public void testRegexpQueryIsError() {
assertQueryOnlyOnTextAndKeyword(
"regexp",
() -> simpleMappedFieldType().regexpQuery("cat", 0, 0, Operations.DEFAULT_MAX_DETERMINIZED_STATES, null, mockContext())
() -> simpleMappedFieldType().regexpQuery("cat", 0, 0, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT, null, mockContext())
);
}

@@ -725,7 +725,7 @@ public void testFuzzyQueryType() throws Exception {
CompletionFieldMapper completionFieldMapper = (CompletionFieldMapper) fieldMapper;
Query prefixQuery = completionFieldMapper.fieldType().fuzzyQuery("co",
Fuzziness.fromEdits(FuzzyCompletionQuery.DEFAULT_MAX_EDITS), FuzzyCompletionQuery.DEFAULT_NON_FUZZY_PREFIX,
FuzzyCompletionQuery.DEFAULT_MIN_FUZZY_LENGTH, Operations.DEFAULT_MAX_DETERMINIZED_STATES,
FuzzyCompletionQuery.DEFAULT_MIN_FUZZY_LENGTH, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT,
FuzzyCompletionQuery.DEFAULT_TRANSPOSITIONS, FuzzyCompletionQuery.DEFAULT_UNICODE_AWARE);
assertThat(prefixQuery, instanceOf(FuzzyCompletionQuery.class));
}
@@ -735,7 +735,7 @@ public void testRegexQueryType() throws Exception {
Mapper fieldMapper = defaultMapper.mappers().getMapper("field");
CompletionFieldMapper completionFieldMapper = (CompletionFieldMapper) fieldMapper;
Query prefixQuery = completionFieldMapper.fieldType()
.regexpQuery(new BytesRef("co"), RegExp.ALL, Operations.DEFAULT_MAX_DETERMINIZED_STATES);
.regexpQuery(new BytesRef("co"), RegExp.ALL, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT);
assertThat(prefixQuery, instanceOf(RegexCompletionQuery.class));
}

@@ -263,7 +263,7 @@ public void testRegexpQuery() throws IOException {
IndexSearcher searcher = newSearcher(reader);
assertThat(
searcher.count(
simpleMappedFieldType().regexpQuery("ca.+", 0, 0, Operations.DEFAULT_MAX_DETERMINIZED_STATES, null, mockContext())
simpleMappedFieldType().regexpQuery("ca.+", 0, 0, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT, null, mockContext())
),
equalTo(2)
);
@@ -738,12 +738,12 @@ public void testToQueryRegExpQuery() throws Exception {
}

public void testToQueryRegExpQueryTooComplex() throws Exception {
QueryStringQueryBuilder queryBuilder = queryStringQuery("/[ac]*a[ac]{50,200}/").defaultField(TEXT_FIELD_NAME);
QueryStringQueryBuilder queryBuilder = queryStringQuery("/[ac]*a[ac]{200,500}/").defaultField(TEXT_FIELD_NAME);

TooComplexToDeterminizeException e = expectThrows(TooComplexToDeterminizeException.class,
() -> queryBuilder.toQuery(createSearchExecutionContext()));
assertThat(e.getMessage(), containsString("Determinizing [ac]*"));
assertThat(e.getMessage(), containsString("would result in more than 10000 states"));
assertThat(e.getMessage(), containsString("would require more than 10000 effort."));
}

/**
@@ -765,7 +765,7 @@ public void testToQueryRegExpQueryMaxDeterminizedStatesParsing() throws Exception {
TooComplexToDeterminizeException e = expectThrows(TooComplexToDeterminizeException.class,
() -> queryBuilder.toQuery(createSearchExecutionContext()));
assertThat(e.getMessage(), containsString("Determinizing [ac]*"));
assertThat(e.getMessage(), containsString("would result in more than 10 states"));
assertThat(e.getMessage(), containsString("would require more than 10 effort."));
}

public void testToQueryFuzzyQueryAutoFuziness() throws Exception {
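The assertions above change because the 8.10 snapshot reports the exceeded determinization budget as "effort" rather than a state count, and the test pattern grows from [ac]{50,200} to [ac]{200,500} so that it still exceeds the default limit of 10000 under the new accounting. A rough sketch of triggering the same exception directly with a deliberately small budget; the pattern, field name, and limit below are arbitrary illustrative values.

import org.apache.lucene.index.Term;
import org.apache.lucene.search.RegexpQuery;
import org.apache.lucene.util.automaton.RegExp;
import org.apache.lucene.util.automaton.TooComplexToDeterminizeException;

final class TooComplexSketch {
    // Returns true if determinizing the pattern exceeds the given work limit.
    // RegexpQuery compiles the pattern's automaton up front, so an over-budget
    // pattern fails at construction time, which is what the tests above rely on.
    static boolean exceedsLimit(String field, String pattern, int workLimit) {
        try {
            new RegexpQuery(new Term(field, pattern), RegExp.ALL, workLimit);
            return false;
        } catch (TooComplexToDeterminizeException e) {
            return true;
        }
    }
}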
@@ -912,10 +912,9 @@ public void testTooManyUnmappedFieldWildcardPattern() throws IOException {

XContentBuilder source = XContentFactory.jsonBuilder().startObject().field("a", "foo").endObject();

List<FieldAndFormat> fieldAndFormatList = new ArrayList<>();
boolean includeUnmapped = true;
for (int i = 0; i < 1000; i++) {
fieldAndFormatList.add(new FieldAndFormat(randomAlphaOfLength(150) + "*", null, includeUnmapped));
List<FieldAndFormat> fieldAndFormatList = new ArrayList<>(10_000);
for (int i = 0; i < 8000; i++) {
fieldAndFormatList.add(new FieldAndFormat(randomAlphaOfLength(150) + "*", null, true));
}
expectThrows(TooComplexToDeterminizeException.class, () -> fetchFields(mapperService, source, fieldAndFormatList));
}
@@ -30,7 +30,7 @@ protected StringScriptFieldRegexpQuery createTestInstance() {
randomAlphaOfLength(6),
randomInt(RegExp.ALL),
matchFlags,
Operations.DEFAULT_MAX_DETERMINIZED_STATES
Operations.DEFAULT_DETERMINIZE_WORK_LIMIT
);
}

@@ -43,7 +43,7 @@ protected StringScriptFieldRegexpQuery copy(StringScriptFieldRegexpQuery orig) {
orig.pattern(),
orig.syntaxFlags(),
orig.matchFlags(),
Operations.DEFAULT_MAX_DETERMINIZED_STATES
Operations.DEFAULT_DETERMINIZE_WORK_LIMIT
);
}

@@ -80,7 +80,7 @@ protected StringScriptFieldRegexpQuery mutate(StringScriptFieldRegexpQuery orig)
pattern,
syntaxFlags,
matchFlags,
Operations.DEFAULT_MAX_DETERMINIZED_STATES
Operations.DEFAULT_DETERMINIZE_WORK_LIMIT
);
}

@@ -93,7 +93,7 @@ public void testMatches() {
"a.+b",
0,
0,
Operations.DEFAULT_MAX_DETERMINIZED_STATES
Operations.DEFAULT_DETERMINIZE_WORK_LIMIT
);
assertTrue(query.matches(List.of("astuffb")));
assertFalse(query.matches(List.of("astuffB")));
@@ -110,7 +110,7 @@ public void testMatches() {
"a.+b",
0,
RegExp.ASCII_CASE_INSENSITIVE,
Operations.DEFAULT_MAX_DETERMINIZED_STATES
Operations.DEFAULT_DETERMINIZE_WORK_LIMIT
);
assertTrue(ciQuery.matches(List.of("astuffB")));
assertTrue(ciQuery.matches(List.of("Astuffb", "fffff")));
@@ -131,7 +131,7 @@ public void testVisit() {
"a.+b",
0,
0,
Operations.DEFAULT_MAX_DETERMINIZED_STATES
Operations.DEFAULT_DETERMINIZE_WORK_LIMIT
);
ByteRunAutomaton automaton = visitForSingleAutomata(query);
BytesRef term = new BytesRef("astuffb");
@@ -187,7 +187,10 @@ public abstract class AnalysisFactoryTestCase extends ESTestCase {
// LUCENE-9574
entry("dropifflagged", Void.class),
// LUCENE-9575
entry("patterntyping", Void.class));
entry("patterntyping", Void.class),
entry("telugustem", Void.class),
entry("telugunormalization", Void.class));


static final Map<String, Class<?>> KNOWN_CHARFILTERS = Map.of(
"htmlstrip", MovedToAnalysisCommon.class,
@@ -169,8 +169,8 @@ public static Automaton buildPermittedFieldsAutomaton(final String[] grantedFields, final String[] deniedFields) {
deniedFieldsAutomaton = Automatons.patterns(deniedFields);
}

grantedFieldsAutomaton = MinimizationOperations.minimize(grantedFieldsAutomaton, Operations.DEFAULT_MAX_DETERMINIZED_STATES);
deniedFieldsAutomaton = MinimizationOperations.minimize(deniedFieldsAutomaton, Operations.DEFAULT_MAX_DETERMINIZED_STATES);
grantedFieldsAutomaton = MinimizationOperations.minimize(grantedFieldsAutomaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT);
deniedFieldsAutomaton = MinimizationOperations.minimize(deniedFieldsAutomaton, Operations.DEFAULT_DETERMINIZE_WORK_LIMIT);

if (subsetOf(deniedFieldsAutomaton, grantedFieldsAutomaton) == false) {
throw new ElasticsearchSecurityException("Exceptions for field permissions must be a subset of the " +