Percolator to support parsing script score query with params (#101051)
While dot expansion is disabled when parsing percolator queries at index
time, as it would interfere with query parsing, we still use a wrapper parser
that is conservative about which methods it supports, assuming that document
parsing needs nextToken and not much more. It turns out that when parsing
queries instead, we need to support all of the XContentParser methods,
including map, list, etc.
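
For context, dot expansion is the document-parsing step that rewrites dotted
field names into nested objects; a minimal illustration (the field names here
are made up for this example):

    { "user.name": "kimchy" }   is expanded to   { "user": { "name": "kimchy" } }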

This commit adds a test for script score query parsing through document
parsing via the percolator field mapper, and removes the limitations in the
wrapper parser when dot expansion is disabled.

Closes #97377
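
For reference, the user-facing scenario behind #97377 is indexing a percolator
query that holds a script score query with params. A rough, illustrative sketch
(the index name, field name, and script are assumptions, and the "query" field
is assumed to be mapped as type "percolator"; this is not copied verbatim from
the issue):

    PUT /my-index/_doc/1
    {
      "query": {
        "script_score": {
          "query": { "match_all": {} },
          "script": {
            "source": "_score * params.factor",
            "params": { "factor": 1.2 }
          }
        }
      }
    }

Before this change, parsing a map such as "params" through the wrapper parser
could fail at index time; with this change, map and list parsing delegate to
the underlying parser when dots expansion is disabled.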
javanna authored Oct 24, 2023
1 parent 1aaab9d commit 748446e
Showing 4 changed files with 308 additions and 2 deletions.
6 changes: 6 additions & 0 deletions docs/changelog/101051.yaml
@@ -0,0 +1,6 @@
pr: 101051
summary: Percolator to support parsing script score query with params
area: Mapping
type: bug
issues:
- 97377
@@ -30,6 +30,7 @@
import org.apache.lucene.search.join.ScoreMode;
import org.apache.lucene.util.BytesRef;
import org.elasticsearch.TransportVersion;
import org.elasticsearch.TransportVersions;
import org.elasticsearch.action.support.PlainActionFuture;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
@@ -40,6 +41,7 @@
import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
import org.elasticsearch.common.io.stream.StreamInput;
import org.elasticsearch.common.io.stream.StreamOutput;
import org.elasticsearch.common.network.InetAddresses;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.core.Tuple;
@@ -53,6 +55,7 @@
import org.elasticsearch.index.mapper.ParsedDocument;
import org.elasticsearch.index.mapper.SourceToParse;
import org.elasticsearch.index.mapper.TestDocumentParserContext;
import org.elasticsearch.index.query.AbstractQueryBuilder;
import org.elasticsearch.index.query.BoolQueryBuilder;
import org.elasticsearch.index.query.BoostingQueryBuilder;
import org.elasticsearch.index.query.ConstantScoreQueryBuilder;
@@ -67,13 +70,17 @@
import org.elasticsearch.index.query.functionscore.FunctionScoreQueryBuilder;
import org.elasticsearch.index.query.functionscore.RandomScoreFunctionBuilder;
import org.elasticsearch.index.query.functionscore.ScriptScoreFunctionBuilder;
import org.elasticsearch.index.query.functionscore.ScriptScoreQueryBuilder;
import org.elasticsearch.indices.TermsLookup;
import org.elasticsearch.join.ParentJoinPlugin;
import org.elasticsearch.join.query.HasChildQueryBuilder;
import org.elasticsearch.join.query.HasParentQueryBuilder;
import org.elasticsearch.plugins.Plugin;
import org.elasticsearch.plugins.SearchPlugin;
import org.elasticsearch.script.MockScriptPlugin;
import org.elasticsearch.script.Script;
import org.elasticsearch.script.ScriptType;
import org.elasticsearch.search.DummyQueryParserPlugin;
import org.elasticsearch.test.ESSingleNodeTestCase;
import org.elasticsearch.test.InternalSettingsPlugin;
import org.elasticsearch.xcontent.XContentBuilder;
@@ -92,6 +99,7 @@
import java.util.Collection;
import java.util.Collections;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.function.Function;
@@ -130,7 +138,13 @@ public class PercolatorFieldMapperTests extends ESSingleNodeTestCase {

@Override
protected Collection<Class<? extends Plugin>> getPlugins() {
return pluginList(InternalSettingsPlugin.class, PercolatorPlugin.class, FoolMeScriptPlugin.class, ParentJoinPlugin.class);
return pluginList(
InternalSettingsPlugin.class,
PercolatorPlugin.class,
FoolMeScriptPlugin.class,
ParentJoinPlugin.class,
CustomQueriesPlugin.class
);
}

@Override
@@ -540,6 +554,38 @@ public void testPercolatorFieldMapper() throws Exception {
assertThat(doc.rootDoc().getFields(fieldType.extractionResultField.name()).get(0).stringValue(), equalTo(EXTRACTION_FAILED));
}

public void testParseScriptScoreQueryWithParams() throws Exception {
addQueryFieldMappings();
ScriptScoreQueryBuilder scriptScoreQueryBuilder = new ScriptScoreQueryBuilder(
new MatchAllQueryBuilder(),
new Script(ScriptType.INLINE, Script.DEFAULT_SCRIPT_LANG, "score", Collections.singletonMap("param", "1"))
);
ParsedDocument doc = mapperService.documentMapper()
.parse(
new SourceToParse(
"1",
BytesReference.bytes(XContentFactory.jsonBuilder().startObject().field(fieldName, scriptScoreQueryBuilder).endObject()),
XContentType.JSON
)
);
assertNotNull(doc);
}

public void testParseCustomParserQuery() throws Exception {
addQueryFieldMappings();
ParsedDocument doc = mapperService.documentMapper()
.parse(
new SourceToParse(
"1",
BytesReference.bytes(
XContentFactory.jsonBuilder().startObject().field(fieldName, new CustomParserQueryBuilder()).endObject()
),
XContentType.JSON
)
);
assertNotNull(doc);
}

public void testStoringQueries() throws Exception {
addQueryFieldMappings();
QueryBuilder[] queries = new QueryBuilder[] {
@@ -1106,12 +1152,147 @@ public static class FoolMeScriptPlugin extends MockScriptPlugin {

@Override
protected Map<String, Function<Map<String, Object>, Object>> pluginScripts() {
return Collections.singletonMap("return true", (vars) -> true);
return Map.of("return true", (vars) -> true, "score", (vars) -> 0f);
}

@Override
public String pluginScriptLang() {
return Script.DEFAULT_SCRIPT_LANG;
}
}

public static class CustomQueriesPlugin extends Plugin implements SearchPlugin {
@Override
public List<QuerySpec<?>> getQueries() {
return Collections.singletonList(
new QuerySpec<QueryBuilder>(
CustomParserQueryBuilder.NAME,
CustomParserQueryBuilder::new,
CustomParserQueryBuilder::fromXContent
)
);
}
}

public static final class CustomParserQueryBuilder extends AbstractQueryBuilder<CustomParserQueryBuilder> {
private static final String NAME = "CUSTOM";

CustomParserQueryBuilder() {}

CustomParserQueryBuilder(StreamInput in) throws IOException {
super(in);
}

@Override
protected void doWriteTo(StreamOutput out) {
// only the superclass has state
}

@Override
protected Query doToQuery(SearchExecutionContext context) {
return new DummyQueryParserPlugin.DummyQuery();
}

@Override
protected int doHashCode() {
return 0;
}

@Override
protected boolean doEquals(CustomParserQueryBuilder other) {
return true;
}

@Override
public TransportVersion getMinimalSupportedVersion() {
return TransportVersions.ZERO;
}

@Override
public String getWriteableName() {
return NAME;
}

@Override
protected void doXContent(XContentBuilder builder, Params params) throws IOException {
builder.startObject(NAME);
builder.array("list", "value0", "value1", "value2");
builder.array("listOrdered", "value0", "value1", "value2");
builder.field("map");
builder.map(Map.of("key1", "value1", "key2", "value2"));
builder.field("mapOrdered");
builder.map(Map.of("key3", "value3", "key4", "value4"));
builder.field("mapStrings");
builder.map(Map.of("key5", "value5", "key6", "value6"));
builder.field("mapSupplier");
builder.map(Map.of("key7", "value7", "key8", "value8"));
builder.endObject();
}

public static CustomParserQueryBuilder fromXContent(XContentParser parser) throws IOException {
{
assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken());
assertEquals("list", parser.currentName());
List<Object> list = parser.list();
assertEquals(3, list.size());
for (int i = 0; i < 3; i++) {
assertEquals("value" + i, list.get(i).toString());
}
assertEquals(XContentParser.Token.END_ARRAY, parser.currentToken());
}
{
assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken());
assertEquals("listOrdered", parser.currentName());
List<Object> listOrdered = parser.listOrderedMap();
assertEquals(3, listOrdered.size());
for (int i = 0; i < 3; i++) {
assertEquals("value" + i, listOrdered.get(i).toString());
}
assertEquals(XContentParser.Token.END_ARRAY, parser.currentToken());
}
{
assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken());
assertEquals("map", parser.currentName());
assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
Map<String, Object> map = parser.map();
assertEquals(2, map.size());
assertEquals("value1", map.get("key1").toString());
assertEquals("value2", map.get("key2").toString());
assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken());
assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken());
}
{
assertEquals("mapOrdered", parser.currentName());
assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
Map<String, Object> mapOrdered = parser.mapOrdered();
assertEquals(2, mapOrdered.size());
assertEquals("value3", mapOrdered.get("key3").toString());
assertEquals("value4", mapOrdered.get("key4").toString());
assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken());
}
{
assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken());
assertEquals("mapStrings", parser.currentName());
assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
Map<String, Object> mapStrings = parser.map();
assertEquals(2, mapStrings.size());
assertEquals("value5", mapStrings.get("key5").toString());
assertEquals("value6", mapStrings.get("key6").toString());
assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken());
}
{
assertEquals(XContentParser.Token.FIELD_NAME, parser.nextToken());
assertEquals("mapSupplier", parser.currentName());
assertEquals(XContentParser.Token.START_OBJECT, parser.nextToken());
Map<String, Object> mapSupplier = parser.map(HashMap::new, XContentParser::text);
assertEquals(2, mapSupplier.size());
assertEquals("value7", mapSupplier.get("key7").toString());
assertEquals("value8", mapSupplier.get("key8").toString());
assertEquals(XContentParser.Token.END_OBJECT, parser.currentToken());
}

assertEquals(XContentParser.Token.END_OBJECT, parser.nextToken());
return new CustomParserQueryBuilder();
}
}
}
@@ -172,34 +172,60 @@ protected XContentParser delegate() {
return parsers.peek();
}

/*
The following methods (map* and list*) are known not to be called by DocumentParser when parsing documents, but we do support
indexing percolator queries, which are also parsed through DocumentParser, and their parsing code is entirely up to each query,
which is also pluggable. That means that this parser needs to fully support parsing arbitrary content when dots expansion is
turned off. We still throw UnsupportedOperationException when dots expansion is enabled, as we don't expect these methods to
ever be called in those circumstances.
*/

@Override
public Map<String, Object> map() throws IOException {
if (contentPath.isWithinLeafObject()) {
return super.map();
}
throw new UnsupportedOperationException();
}

@Override
public Map<String, Object> mapOrdered() throws IOException {
if (contentPath.isWithinLeafObject()) {
return super.mapOrdered();
}
throw new UnsupportedOperationException();
}

@Override
public Map<String, String> mapStrings() throws IOException {
if (contentPath.isWithinLeafObject()) {
return super.mapStrings();
}
throw new UnsupportedOperationException();
}

@Override
public <T> Map<String, T> map(Supplier<Map<String, T>> mapFactory, CheckedFunction<XContentParser, T, IOException> mapValueParser)
throws IOException {
if (contentPath.isWithinLeafObject()) {
return super.map(mapFactory, mapValueParser);
}
throw new UnsupportedOperationException();
}

@Override
public List<Object> list() throws IOException {
if (contentPath.isWithinLeafObject()) {
return super.list();
}
throw new UnsupportedOperationException();
}

@Override
public List<Object> listOrderedMap() throws IOException {
if (contentPath.isWithinLeafObject()) {
return super.listOrderedMap();
}
throw new UnsupportedOperationException();
}
}