Skip to content

Commit

Permalink
Fix test compilation errors
Browse files Browse the repository at this point in the history
  • Loading branch information
kderusso committed Dec 2, 2024
1 parent 6197541 commit c8a576c
Show file tree
Hide file tree
Showing 7 changed files with 28 additions and 24 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -42,6 +42,10 @@ public final class InferenceFieldMetadata implements SimpleDiffable<InferenceFie
private final String searchInferenceId;
private final String[] sourceFields;

/**
 * Convenience constructor for the common case where the same inference endpoint is used
 * at both index and search time: delegates to the four-argument constructor, passing
 * {@code inferenceId} as the {@code searchInferenceId} as well.
 *
 * @param name         the field name (must be non-null; see four-arg constructor)
 * @param inferenceId  the inference endpoint ID, reused as the search inference ID
 * @param sourceFields the source fields feeding this inference field
 */
public InferenceFieldMetadata(String name, String inferenceId, String[] sourceFields) {
this(name, inferenceId, inferenceId, sourceFields);
}

public InferenceFieldMetadata(String name, String inferenceId, String searchInferenceId, String[] sourceFields) {
this.name = Objects.requireNonNull(name);
this.inferenceId = Objects.requireNonNull(inferenceId);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -693,8 +693,8 @@ private static InferenceFieldMetadata randomInferenceFieldMetadata(String name)
return new InferenceFieldMetadata(
name,
randomIdentifier(),
randomSet(1, 5, ESTestCase::randomIdentifier).toArray(String[]::new),
randomIdentifier()
randomIdentifier(),
randomSet(1, 5, ESTestCase::randomIdentifier).toArray(String[]::new)
);
}

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -63,15 +63,13 @@ private static InferenceFieldMetadata createTestItem() {
String inferenceId = randomIdentifier();
String searchInferenceId = randomIdentifier();
String[] inputFields = generateRandomStringArray(5, 10, false, false);
String queryName = randomIdentifier();
return new InferenceFieldMetadata(name, inferenceId, searchInferenceId, inputFields, queryName);
return new InferenceFieldMetadata(name, inferenceId, searchInferenceId, inputFields);
}

// Verifies every constructor argument is null-checked (the visible constructor body
// uses Objects.requireNonNull for name and inferenceId; the remaining checks are
// presumably analogous — TODO confirm against the full class).
public void testNullCtorArgsThrowException() {
// NOTE(review): this span is scraped from a commit diff with removed and added lines
// interleaved. The next five assertions are the PRE-change lines (5-arg constructor
// with a trailing "queryName") that this commit deletes; only one set belongs in the
// real file — verify against the repository before treating this text as source.
assertThrows(NullPointerException.class, () -> new InferenceFieldMetadata(null, "inferenceId", "searchInferenceId", new String[0], "queryName"));
assertThrows(NullPointerException.class, () -> new InferenceFieldMetadata("name", null, "searchInferenceId", new String[0], "queryName"));
assertThrows(NullPointerException.class, () -> new InferenceFieldMetadata("name", "inferenceId", null, new String[0], "queryName"));
assertThrows(NullPointerException.class, () -> new InferenceFieldMetadata("name", "inferenceId", "searchInferenceId", null, "queryName"));
assertThrows(NullPointerException.class, () -> new InferenceFieldMetadata("name", "inferenceId", "searchInferenceId", new String[0], null));
// POST-change lines (4-arg constructor) added by this commit:
assertThrows(NullPointerException.class, () -> new InferenceFieldMetadata(null, "inferenceId", "searchInferenceId", new String[0]));
assertThrows(NullPointerException.class, () -> new InferenceFieldMetadata("name", null, "searchInferenceId", new String[0]));
assertThrows(NullPointerException.class, () -> new InferenceFieldMetadata("name", "inferenceId", null, new String[0]));
assertThrows(NullPointerException.class, () -> new InferenceFieldMetadata("name", "inferenceId", "searchInferenceId", null));
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,6 @@
import org.elasticsearch.plugins.MapperPlugin;
import org.elasticsearch.plugins.Plugin;

import java.io.IOException;
import java.util.Collection;
import java.util.List;
import java.util.Map;
Expand Down Expand Up @@ -47,12 +46,10 @@ public void testInferenceFieldMapper() throws Exception {
InferenceFieldMetadata inferenceFieldMetadata = inferenceFieldMetadataMap.get("inference_field");
assertThat(inferenceFieldMetadata.getInferenceId(), equalTo(TestInferenceFieldMapper.INFERENCE_ID));
assertThat(inferenceFieldMetadata.getSourceFields(), arrayContaining("inference_field"));
assertThat(inferenceFieldMetadata.getQueryName(), equalTo(TestInferenceFieldMapper.QUERY_NAME));

inferenceFieldMetadata = inferenceFieldMetadataMap.get("another_inference_field");
assertThat(inferenceFieldMetadata.getInferenceId(), equalTo(TestInferenceFieldMapper.INFERENCE_ID));
assertThat(inferenceFieldMetadata.getSourceFields(), arrayContaining("another_inference_field"));
assertThat(inferenceFieldMetadata.getQueryName(), equalTo(TestInferenceFieldMapper.QUERY_NAME));
}

public void testInferenceFieldMapperWithCopyTo() throws Exception {
Expand Down Expand Up @@ -82,7 +79,6 @@ public void testInferenceFieldMapperWithCopyTo() throws Exception {
inferenceFieldMetadata.getSourceFields(),
arrayContainingInAnyOrder("another_non_inference_field", "inference_field", "non_inference_field")
);
assertThat(inferenceFieldMetadata.getQueryName(), equalTo(TestInferenceFieldMapper.QUERY_NAME));
}

private static class TestInferenceFieldMapperPlugin extends Plugin implements MapperPlugin {
Expand All @@ -97,16 +93,16 @@ private static class TestInferenceFieldMapper extends FieldMapper implements Inf

public static final TypeParser PARSER = new TypeParser((n, c) -> new Builder(n));
public static final String INFERENCE_ID = "test_inference_id";
public static final String SEARCH_INFERENCE_ID = "test_search_inference_id";
public static final String CONTENT_TYPE = "test_inference_field";
public static final String QUERY_NAME = "test_query_name";

// Test mapper constructor: builds a field type named after the mapper itself and
// passes empty builder parameters to the FieldMapper superclass.
TestInferenceFieldMapper(String simpleName) {
super(simpleName, new TestInferenceFieldMapperFieldType(simpleName), BuilderParams.empty());
}

@Override
public InferenceFieldMetadata getMetadata(Set<String> sourcePaths) {
// NOTE(review): diff residue — the two return statements below are the pre-change
// (4-arg, trailing QUERY_NAME) and post-change (SEARCH_INFERENCE_ID as third arg)
// versions of the same line; only the second survives this commit. As written, the
// second return is unreachable — verify against the repository before using this
// text as source.
return new InferenceFieldMetadata(fullPath(), INFERENCE_ID, sourcePaths.toArray(new String[0]), QUERY_NAME);
return new InferenceFieldMetadata(fullPath(), INFERENCE_ID, SEARCH_INFERENCE_ID, sourcePaths.toArray(new String[0]));
}

@Override
Expand All @@ -115,7 +111,7 @@ public Object getOriginalValue(Map<String, Object> sourceAsMap) {
}

@Override
protected void parseCreateField(DocumentParserContext context) throws IOException {}
protected void parseCreateField(DocumentParserContext context) {}

@Override
public Builder getMergeBuilder() {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -52,6 +52,7 @@ public void testGetTierPreference() {
null,
null,
null,
null,
null
);

Expand Down Expand Up @@ -79,6 +80,7 @@ public void testGetTierPreference() {
null,
null,
null,
null,
null
);

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -102,7 +102,7 @@ public void testFilterNoop() throws Exception {
new BulkItemRequest[0]
);
request.setInferenceFieldMap(
Map.of("foo", new InferenceFieldMetadata("foo", "bar", generateRandomStringArray(5, 10, false, false), "baz"))
Map.of("foo", new InferenceFieldMetadata("foo", "bar", "baz", generateRandomStringArray(5, 10, false, false)))
);
filter.apply(task, TransportShardBulkAction.ACTION_NAME, request, actionListener, actionFilterChain);
awaitLatch(chainExecuted, 10, TimeUnit.SECONDS);
Expand Down Expand Up @@ -136,11 +136,11 @@ public void testInferenceNotFound() throws Exception {

Map<String, InferenceFieldMetadata> inferenceFieldMap = Map.of(
"field1",
new InferenceFieldMetadata("field1", model.getInferenceEntityId(), new String[] { "field1" }, "queryName"),
new InferenceFieldMetadata("field1", model.getInferenceEntityId(), new String[] { "field1" }),
"field2",
new InferenceFieldMetadata("field2", "inference_0", new String[] { "field2" }, "queryName"),
new InferenceFieldMetadata("field2", "inference_0", new String[] { "field2" }),
"field3",
new InferenceFieldMetadata("field3", "inference_0", new String[] { "field3" }, "queryName")
new InferenceFieldMetadata("field3", "inference_0", new String[] { "field3" })
);
BulkItemRequest[] items = new BulkItemRequest[10];
for (int i = 0; i < items.length; i++) {
Expand Down Expand Up @@ -194,7 +194,7 @@ public void testItemFailures() throws Exception {

Map<String, InferenceFieldMetadata> inferenceFieldMap = Map.of(
"field1",
new InferenceFieldMetadata("field1", model.getInferenceEntityId(), new String[] { "field1" }, "queryName")
new InferenceFieldMetadata("field1", model.getInferenceEntityId(), new String[] { "field1" })
);
BulkItemRequest[] items = new BulkItemRequest[3];
items[0] = new BulkItemRequest(0, new IndexRequest("index").source("field1", "I am a failure"));
Expand All @@ -220,7 +220,7 @@ public void testManyRandomDocs() throws Exception {
for (int i = 0; i < numInferenceFields; i++) {
String field = randomAlphaOfLengthBetween(5, 10);
String inferenceId = randomFrom(inferenceModelMap.keySet());
inferenceFieldMap.put(field, new InferenceFieldMetadata(field, inferenceId, new String[] { field }, "queryName"));
inferenceFieldMap.put(field, new InferenceFieldMetadata(field, inferenceId, new String[] { field }));
}

int numRequests = atLeast(100);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -54,7 +54,9 @@ public void testRetrieverExtractionErrors() throws IOException {
IllegalArgumentException iae = expectThrows(
IllegalArgumentException.class,
() -> ssb.parseXContent(parser, true, nf -> true)
.rewrite(new QueryRewriteContext(parserConfig(), null, null, null, new PointInTimeBuilder(new BytesArray("pitid"))))
.rewrite(
new QueryRewriteContext(parserConfig(), null, null, null, new PointInTimeBuilder(new BytesArray("pitid")), null)
)
);
assertEquals("[search_after] cannot be used in children of compound retrievers", iae.getMessage());
}
Expand All @@ -70,7 +72,9 @@ public void testRetrieverExtractionErrors() throws IOException {
IllegalArgumentException iae = expectThrows(
IllegalArgumentException.class,
() -> ssb.parseXContent(parser, true, nf -> true)
.rewrite(new QueryRewriteContext(parserConfig(), null, null, null, new PointInTimeBuilder(new BytesArray("pitid"))))
.rewrite(
new QueryRewriteContext(parserConfig(), null, null, null, new PointInTimeBuilder(new BytesArray("pitid")), null)
)
);
assertEquals("[terminate_after] cannot be used in children of compound retrievers", iae.getMessage());
}
Expand Down

0 comments on commit c8a576c

Please sign in to comment.