Skip to content

Commit

Permalink
Add Leak Tracking to the SearchContext implementations (elastic#102274)
Browse files Browse the repository at this point in the history
Another step towards ref counting search hits. This adds leak tracking to the search context. It required two fixes in the production code to keep tests passing: sub-aggregations need to be closed eventually, and it was easiest to tie their lifecycle to the parent context. Also, if we throw in the constructor of the context (we have tests for this case), we should still release/close it — it is impossible to fix the leak tracking otherwise, and this seems more correct anyway since resources are initialised in that constructor.
Other than that, just trivial test changes to make sure the contexts get closed everywhere.
  • Loading branch information
original-brownbear authored Nov 16, 2023
1 parent de0140c commit e03b0a5
Show file tree
Hide file tree
Showing 14 changed files with 906 additions and 851 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -84,6 +84,7 @@
import org.elasticsearch.index.query.SearchExecutionContext;
import org.elasticsearch.lucene.queries.BlendedTermQuery;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.search.internal.SearchContext;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.test.index.IndexVersionUtils;
import org.elasticsearch.xcontent.XContentFactory;
Expand Down Expand Up @@ -215,93 +216,95 @@ public void testDuel() throws Exception {
}
Collections.sort(intValues);

SearchExecutionContext context = createSearchContext(indexService).getSearchExecutionContext();
MappedFieldType intFieldType = mapperService.fieldType("int_field");

List<Supplier<Query>> queryFunctions = new ArrayList<>();
queryFunctions.add(MatchNoDocsQuery::new);
queryFunctions.add(MatchAllDocsQuery::new);
queryFunctions.add(() -> new TermQuery(new Term("unknown_field", "value")));
String field1 = randomFrom(stringFields);
queryFunctions.add(() -> new TermQuery(new Term(field1, randomFrom(stringContent.get(field1)))));
String field2 = randomFrom(stringFields);
queryFunctions.add(() -> new TermQuery(new Term(field2, randomFrom(stringContent.get(field2)))));
queryFunctions.add(() -> intFieldType.termQuery(randomFrom(intValues), context));
queryFunctions.add(() -> intFieldType.termsQuery(Arrays.asList(randomFrom(intValues), randomFrom(intValues)), context));
queryFunctions.add(
() -> intFieldType.rangeQuery(
intValues.get(4),
intValues.get(intValues.size() - 4),
true,
true,
ShapeRelation.WITHIN,
null,
null,
context
)
);
queryFunctions.add(
() -> new TermInSetQuery(
field1,
new BytesRef(randomFrom(stringContent.get(field1))),
new BytesRef(randomFrom(stringContent.get(field1)))
)
);
queryFunctions.add(
() -> new TermInSetQuery(
field2,
new BytesRef(randomFrom(stringContent.get(field1))),
new BytesRef(randomFrom(stringContent.get(field1)))
)
);
// many iterations with boolean queries, which are the most complex queries to deal with when nested
int numRandomBoolQueries = 1000;
for (int i = 0; i < numRandomBoolQueries; i++) {
queryFunctions.add(() -> createRandomBooleanQuery(1, stringFields, stringContent, intFieldType, intValues, context));
}
queryFunctions.add(() -> {
int numClauses = randomIntBetween(1, 1 << randomIntBetween(2, 4));
List<Query> clauses = new ArrayList<>();
for (int i = 0; i < numClauses; i++) {
String field = randomFrom(stringFields);
clauses.add(new TermQuery(new Term(field, randomFrom(stringContent.get(field)))));
}
return new DisjunctionMaxQuery(clauses, 0.01f);
});
queryFunctions.add(() -> {
Float minScore = randomBoolean() ? null : (float) randomIntBetween(1, 1000);
Query innerQuery;
if (randomBoolean()) {
innerQuery = new TermQuery(new Term(field1, randomFrom(stringContent.get(field1))));
} else {
innerQuery = new PhraseQuery(field1, randomFrom(stringContent.get(field1)), randomFrom(stringContent.get(field1)));
try (SearchContext searchContext = createSearchContext(indexService)) {
SearchExecutionContext context = searchContext.getSearchExecutionContext();
MappedFieldType intFieldType = mapperService.fieldType("int_field");

List<Supplier<Query>> queryFunctions = new ArrayList<>();
queryFunctions.add(MatchNoDocsQuery::new);
queryFunctions.add(MatchAllDocsQuery::new);
queryFunctions.add(() -> new TermQuery(new Term("unknown_field", "value")));
String field1 = randomFrom(stringFields);
queryFunctions.add(() -> new TermQuery(new Term(field1, randomFrom(stringContent.get(field1)))));
String field2 = randomFrom(stringFields);
queryFunctions.add(() -> new TermQuery(new Term(field2, randomFrom(stringContent.get(field2)))));
queryFunctions.add(() -> intFieldType.termQuery(randomFrom(intValues), context));
queryFunctions.add(() -> intFieldType.termsQuery(Arrays.asList(randomFrom(intValues), randomFrom(intValues)), context));
queryFunctions.add(
() -> intFieldType.rangeQuery(
intValues.get(4),
intValues.get(intValues.size() - 4),
true,
true,
ShapeRelation.WITHIN,
null,
null,
context
)
);
queryFunctions.add(
() -> new TermInSetQuery(
field1,
new BytesRef(randomFrom(stringContent.get(field1))),
new BytesRef(randomFrom(stringContent.get(field1)))
)
);
queryFunctions.add(
() -> new TermInSetQuery(
field2,
new BytesRef(randomFrom(stringContent.get(field1))),
new BytesRef(randomFrom(stringContent.get(field1)))
)
);
// many iterations with boolean queries, which are the most complex queries to deal with when nested
int numRandomBoolQueries = 1000;
for (int i = 0; i < numRandomBoolQueries; i++) {
queryFunctions.add(() -> createRandomBooleanQuery(1, stringFields, stringContent, intFieldType, intValues, context));
}
return new FunctionScoreQuery(innerQuery, minScore, 1f);
});

List<LuceneDocument> documents = new ArrayList<>();
for (Supplier<Query> queryFunction : queryFunctions) {
Query query = queryFunction.get();
addQuery(query, documents);
}
queryFunctions.add(() -> {
int numClauses = randomIntBetween(1, 1 << randomIntBetween(2, 4));
List<Query> clauses = new ArrayList<>();
for (int i = 0; i < numClauses; i++) {
String field = randomFrom(stringFields);
clauses.add(new TermQuery(new Term(field, randomFrom(stringContent.get(field)))));
}
return new DisjunctionMaxQuery(clauses, 0.01f);
});
queryFunctions.add(() -> {
Float minScore = randomBoolean() ? null : (float) randomIntBetween(1, 1000);
Query innerQuery;
if (randomBoolean()) {
innerQuery = new TermQuery(new Term(field1, randomFrom(stringContent.get(field1))));
} else {
innerQuery = new PhraseQuery(field1, randomFrom(stringContent.get(field1)), randomFrom(stringContent.get(field1)));
}
return new FunctionScoreQuery(innerQuery, minScore, 1f);
});

indexWriter.addDocuments(documents);
indexWriter.close();
directoryReader = DirectoryReader.open(directory);
IndexSearcher shardSearcher = newSearcher(directoryReader);
// Disable query cache, because ControlQuery cannot be cached...
shardSearcher.setQueryCache(null);
List<LuceneDocument> documents = new ArrayList<>();
for (Supplier<Query> queryFunction : queryFunctions) {
Query query = queryFunction.get();
addQuery(query, documents);
}

LuceneDocument document = new LuceneDocument();
for (Map.Entry<String, List<String>> entry : stringContent.entrySet()) {
String value = entry.getValue().stream().collect(Collectors.joining(" "));
document.add(new TextField(entry.getKey(), value, Field.Store.NO));
}
for (Integer intValue : intValues) {
NumberFieldMapper.NumberType.INTEGER.addFields(document, "int_field", intValue, true, true, false);
indexWriter.addDocuments(documents);
indexWriter.close();
directoryReader = DirectoryReader.open(directory);
IndexSearcher shardSearcher = newSearcher(directoryReader);
// Disable query cache, because ControlQuery cannot be cached...
shardSearcher.setQueryCache(null);

LuceneDocument document = new LuceneDocument();
for (Map.Entry<String, List<String>> entry : stringContent.entrySet()) {
String value = entry.getValue().stream().collect(Collectors.joining(" "));
document.add(new TextField(entry.getKey(), value, Field.Store.NO));
}
for (Integer intValue : intValues) {
NumberFieldMapper.NumberType.INTEGER.addFields(document, "int_field", intValue, true, true, false);
}
MemoryIndex memoryIndex = MemoryIndex.fromDocument(document, new WhitespaceAnalyzer());
duelRun(queryStore, memoryIndex, shardSearcher);
}
MemoryIndex memoryIndex = MemoryIndex.fromDocument(document, new WhitespaceAnalyzer());
duelRun(queryStore, memoryIndex, shardSearcher);
}

private BooleanQuery createRandomBooleanQuery(
Expand Down Expand Up @@ -376,53 +379,55 @@ public void testDuel2() throws Exception {
ranges.add(new int[] { 0, 10 });
ranges.add(new int[] { 15, 50 });

SearchExecutionContext context = createSearchContext(indexService).getSearchExecutionContext();
List<LuceneDocument> documents = new ArrayList<>();
{
addQuery(new TermQuery(new Term("string_field", randomFrom(stringValues))), documents);
}
{
addQuery(new PhraseQuery(0, "string_field", stringValues.toArray(new String[0])), documents);
}
{
int[] range = randomFrom(ranges);
Query rangeQuery = intFieldType.rangeQuery(range[0], range[1], true, true, null, null, null, context);
addQuery(rangeQuery, documents);
}
{
int numBooleanQueries = randomIntBetween(1, 5);
for (int i = 0; i < numBooleanQueries; i++) {
Query randomBQ = randomBQ(1, stringValues, ranges, intFieldType, context);
addQuery(randomBQ, documents);
try (SearchContext searchContext = createSearchContext(indexService)) {
SearchExecutionContext context = searchContext.getSearchExecutionContext();
List<LuceneDocument> documents = new ArrayList<>();
{
addQuery(new TermQuery(new Term("string_field", randomFrom(stringValues))), documents);
}
{
addQuery(new PhraseQuery(0, "string_field", stringValues.toArray(new String[0])), documents);
}
{
int[] range = randomFrom(ranges);
Query rangeQuery = intFieldType.rangeQuery(range[0], range[1], true, true, null, null, null, context);
addQuery(rangeQuery, documents);
}
{
int numBooleanQueries = randomIntBetween(1, 5);
for (int i = 0; i < numBooleanQueries; i++) {
Query randomBQ = randomBQ(1, stringValues, ranges, intFieldType, context);
addQuery(randomBQ, documents);
}
}
{
addQuery(new MatchNoDocsQuery(), documents);
}
{
addQuery(new MatchAllDocsQuery(), documents);
}
}
{
addQuery(new MatchNoDocsQuery(), documents);
}
{
addQuery(new MatchAllDocsQuery(), documents);
}

indexWriter.addDocuments(documents);
indexWriter.close();
directoryReader = DirectoryReader.open(directory);
IndexSearcher shardSearcher = newSearcher(directoryReader);
// Disable query cache, because ControlQuery cannot be cached...
shardSearcher.setQueryCache(null);

LuceneDocument document = new LuceneDocument();
for (String value : stringValues) {
document.add(new TextField("string_field", value, Field.Store.NO));
logger.info("Test with document: {}" + document);
MemoryIndex memoryIndex = MemoryIndex.fromDocument(document, new WhitespaceAnalyzer());
duelRun(queryStore, memoryIndex, shardSearcher);
}
indexWriter.addDocuments(documents);
indexWriter.close();
directoryReader = DirectoryReader.open(directory);
IndexSearcher shardSearcher = newSearcher(directoryReader);
// Disable query cache, because ControlQuery cannot be cached...
shardSearcher.setQueryCache(null);

LuceneDocument document = new LuceneDocument();
for (String value : stringValues) {
document.add(new TextField("string_field", value, Field.Store.NO));
logger.info("Test with document: {}" + document);
MemoryIndex memoryIndex = MemoryIndex.fromDocument(document, new WhitespaceAnalyzer());
duelRun(queryStore, memoryIndex, shardSearcher);
}

for (int[] range : ranges) {
NumberFieldMapper.NumberType.INTEGER.addFields(document, "int_field", between(range[0], range[1]), true, true, false);
logger.info("Test with document: {}" + document);
MemoryIndex memoryIndex = MemoryIndex.fromDocument(document, new WhitespaceAnalyzer());
duelRun(queryStore, memoryIndex, shardSearcher);
for (int[] range : ranges) {
NumberFieldMapper.NumberType.INTEGER.addFields(document, "int_field", between(range[0], range[1]), true, true, false);
logger.info("Test with document: {}" + document);
MemoryIndex memoryIndex = MemoryIndex.fromDocument(document, new WhitespaceAnalyzer());
duelRun(queryStore, memoryIndex, shardSearcher);
}
}
}

Expand Down
Loading

0 comments on commit e03b0a5

Please sign in to comment.