High-level client: fix clusterAlias parsing in SearchHit (#32465)
    When using cross-cluster search through the high-level REST client, the cluster alias from each search hit was not parsed correctly. It would initially be part of the index field, but then be overridden a few lines later when setting the shard target (in case we had enough info in the response to build it). In either case, getClusterAlias returned `null`, which is a bug.

    With this change we instead parse the cluster alias back out of the index name, set its corresponding field, and properly handle the two possible cases depending on whether or not we can rebuild the shard target object.

This also includes the test changes made in #32362 to make the backport to 6.3 easier.
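
For context: in a cross-cluster response each hit reports its index as `clusterAlias:indexName`. Below is a minimal, self-contained sketch of the parsing this change introduces; the class name and the literal ':' separator (standing in for RemoteClusterAware.REMOTE_CLUSTER_INDEX_SEPARATOR, which is ':') are assumptions for illustration, not the commit's actual code.

    public class ClusterAliasParsing {
        // Stand-in for RemoteClusterAware.REMOTE_CLUSTER_INDEX_SEPARATOR (the ':' character).
        private static final char SEPARATOR = ':';

        // Splits a possibly cluster-qualified index name into {clusterAlias, indexName}.
        static String[] split(String index) {
            int separatorIndex = index.indexOf(SEPARATOR);
            if (separatorIndex > 0) { // a separator at position 0 would mean an empty alias
                return new String[] { index.substring(0, separatorIndex), index.substring(separatorIndex + 1) };
            }
            return new String[] { null, index };
        }

        public static void main(String[] args) {
            // Remote hit: alias "cluster1", index "my-index".
            System.out.println(java.util.Arrays.toString(split("cluster1:my-index")));
            // Local hit: no alias, index unchanged.
            System.out.println(java.util.Arrays.toString(split("my-index")));
        }
    }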
javanna committed Jul 31, 2018
1 parent 0951b95 commit 8471cc4
Showing 8 changed files with 111 additions and 96 deletions.
12 changes: 8 additions & 4 deletions server/src/main/java/org/elasticsearch/index/get/GetResult.java
@@ -20,6 +20,7 @@
package org.elasticsearch.index.get;

import org.elasticsearch.ElasticsearchParseException;
+import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.compress.CompressorFactory;
import org.elasticsearch.common.document.DocumentField;
@@ -226,10 +227,8 @@ public XContentBuilder toXContentEmbedded(XContentBuilder builder, Params params
}
}
}

for (DocumentField field : metaFields) {
-Object value = field.getValue();
-builder.field(field.getName(), value);
+builder.field(field.getName(), field.<Object>getValue());
}

builder.field(FOUND, exists);
@@ -399,7 +398,12 @@ public boolean equals(Object o) {

@Override
public int hashCode() {
-return Objects.hash(index, type, id, version, exists, fields, sourceAsMap());
+return Objects.hash(version, exists, index, type, id, fields, sourceAsMap());
}

+@Override
+public String toString() {
+    return Strings.toString(this, true, true);
+}
}
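
The new toString delegates to Strings.toString(this, true, true), which serializes any ToXContent object to pretty-printed, human-readable JSON. A rough usage sketch, assuming Elasticsearch 6.x server classes on the classpath; the index/id/source values are made up:

    import org.elasticsearch.common.bytes.BytesArray;
    import org.elasticsearch.index.get.GetResult;

    public class GetResultToStringDemo {
        public static void main(String[] args) {
            // Hypothetical values: GetResult(index, type, id, version, exists, source, fields).
            GetResult result = new GetResult("my-index", "_doc", "1", 1L, true,
                    new BytesArray("{\"field\":\"value\"}"), null);
            // Prints the result as pretty JSON instead of the default Object#toString.
            System.out.println(result);
        }
    }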

75 changes: 48 additions & 27 deletions server/src/main/java/org/elasticsearch/search/SearchHit.java
@@ -19,6 +19,16 @@

package org.elasticsearch.search;

+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.Iterator;
+import java.util.List;
+import java.util.Map;
+import java.util.Objects;

import org.apache.lucene.search.Explanation;
import org.elasticsearch.ElasticsearchParseException;
import org.elasticsearch.action.OriginalIndices;
@@ -51,16 +61,6 @@
import org.elasticsearch.search.suggest.completion.CompletionSuggestion;
import org.elasticsearch.transport.RemoteClusterAware;

-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.Iterator;
-import java.util.List;
-import java.util.Map;
-import java.util.Objects;

import static java.util.Collections.emptyMap;
import static java.util.Collections.singletonMap;
import static java.util.Collections.unmodifiableMap;
@@ -107,6 +107,9 @@ public final class SearchHit implements Streamable, ToXContentObject, Iterable<D
@Nullable
private SearchShardTarget shard;

+//These two fields normally get set when setting the shard target, so they hold the same values as the target and thus
+//don't get serialized over the wire. When parsing hits back from xcontent though, in most cases (whenever explanation
+//is disabled) we can't rebuild the shard target object, so we need to set these manually for user retrieval.
private transient String index;
private transient String clusterAlias;

@@ -546,7 +549,26 @@ public static SearchHit createFromMap(Map<String, Object> values) {
Map<String, DocumentField> fields = get(Fields.FIELDS, values, Collections.emptyMap());

SearchHit searchHit = new SearchHit(-1, id, type, nestedIdentity, fields);
-searchHit.index = get(Fields._INDEX, values, null);
+String index = get(Fields._INDEX, values, null);
+String clusterAlias = null;
+if (index != null) {
+    int indexOf = index.indexOf(RemoteClusterAware.REMOTE_CLUSTER_INDEX_SEPARATOR);
+    if (indexOf > 0) {
+        clusterAlias = index.substring(0, indexOf);
+        index = index.substring(indexOf + 1);
+    }
+}
+ShardId shardId = get(Fields._SHARD, values, null);
+String nodeId = get(Fields._NODE, values, null);
+if (shardId != null && nodeId != null) {
+    assert shardId.getIndexName().equals(index);
+    searchHit.shard(new SearchShardTarget(nodeId, shardId, clusterAlias, OriginalIndices.NONE));
+} else {
+    //these fields get set anyway when setting the shard target,
+    //but we set them explicitly when we don't have enough info to rebuild the shard target
+    searchHit.index = index;
+    searchHit.clusterAlias = clusterAlias;
+}
searchHit.score(get(Fields._SCORE, values, DEFAULT_SCORE));
searchHit.version(get(Fields._VERSION, values, -1L));
searchHit.sortValues(get(Fields.SORT, values, SearchSortValues.EMPTY));
@@ -556,12 +578,7 @@ public static SearchHit createFromMap(Map<String, Object> values) {
searchHit.setInnerHits(get(Fields.INNER_HITS, values, null));
List<String> matchedQueries = get(Fields.MATCHED_QUERIES, values, null);
if (matchedQueries != null) {
-    searchHit.matchedQueries(matchedQueries.toArray(new String[matchedQueries.size()]));
-}
-ShardId shardId = get(Fields._SHARD, values, null);
-String nodeId = get(Fields._NODE, values, null);
-if (shardId != null && nodeId != null) {
-    searchHit.shard(new SearchShardTarget(nodeId, shardId, null, OriginalIndices.NONE));
+    searchHit.matchedQueries(matchedQueries.toArray(new String[0]));
}
return searchHit;
}
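
As an aside, the matchedQueries change above switches to the zero-sized-array idiom: on modern JVMs, `toArray(new String[0])` performs at least as well as pre-sizing the array and is simpler to read. A tiny self-contained illustration:

    import java.util.Arrays;
    import java.util.List;

    public class ToArrayIdiom {
        public static void main(String[] args) {
            List<String> matchedQueries = Arrays.asList("q1", "q2");
            // Both calls produce a String[2]; the zero-sized variant lets the
            // collection allocate a correctly-sized array internally.
            String[] presized = matchedQueries.toArray(new String[matchedQueries.size()]);
            String[] zeroSized = matchedQueries.toArray(new String[0]);
            System.out.println(Arrays.equals(presized, zeroSized)); // true
        }
    }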
@@ -598,15 +615,12 @@ private static void declareMetaDataFields(ObjectParser<Map<String, Object>, Void
if (metadatafield.equals(Fields._ID) == false && metadatafield.equals(Fields._INDEX) == false
&& metadatafield.equals(Fields._TYPE) == false) {
parser.declareField((map, field) -> {
-    @SuppressWarnings("unchecked")
-    Map<String, DocumentField> fieldMap = (Map<String, DocumentField>) map.computeIfAbsent(Fields.FIELDS,
+    @SuppressWarnings("unchecked")
+    Map<String, DocumentField> fieldMap = (Map<String, DocumentField>) map.computeIfAbsent(Fields.FIELDS,
        v -> new HashMap<String, DocumentField>());
-    fieldMap.put(field.getName(), field);
-}, (p, c) -> {
-    List<Object> values = new ArrayList<>();
-    values.add(parseFieldsValue(p));
-    return new DocumentField(metadatafield, values);
-}, new ParseField(metadatafield), ValueType.VALUE);
+    fieldMap.put(field.getName(), field);
+}, (p, c) -> new DocumentField(metadatafield, Collections.singletonList(parseFieldsValue(p))),
+    new ParseField(metadatafield), ValueType.VALUE);
}
}
}
@@ -829,13 +843,15 @@ public boolean equals(Object obj) {
&& Arrays.equals(matchedQueries, other.matchedQueries)
&& Objects.equals(explanation, other.explanation)
&& Objects.equals(shard, other.shard)
-    && Objects.equals(innerHits, other.innerHits);
+    && Objects.equals(innerHits, other.innerHits)
+    && Objects.equals(index, other.index)
+    && Objects.equals(clusterAlias, other.clusterAlias);
}

@Override
public int hashCode() {
return Objects.hash(id, type, nestedIdentity, version, source, fields, getHighlightFields(), Arrays.hashCode(matchedQueries),
-    explanation, shard, innerHits);
+    explanation, shard, innerHits, index, clusterAlias);
}

/**
@@ -953,4 +969,9 @@ public int hashCode() {
return Objects.hash(field, offset, child);
}
}

+@Override
+public String toString() {
+    return Strings.toString(this, true, true);
+}
}
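
Note that equals and hashCode were extended in lockstep: any field compared in equals (here the new index and clusterAlias) must also feed hashCode, or two equal hits could hash differently. A minimal sketch of that contract, using a hypothetical HitKey class rather than SearchHit itself:

    import java.util.Objects;

    public class HitKey {
        private final String index;
        private final String clusterAlias; // null for local-cluster hits

        HitKey(String index, String clusterAlias) {
            this.index = index;
            this.clusterAlias = clusterAlias;
        }

        @Override
        public boolean equals(Object obj) {
            if (this == obj) return true;
            if (obj == null || getClass() != obj.getClass()) return false;
            HitKey other = (HitKey) obj;
            return Objects.equals(index, other.index)
                    && Objects.equals(clusterAlias, other.clusterAlias);
        }

        @Override
        public int hashCode() {
            // Must cover exactly the fields compared in equals.
            return Objects.hash(index, clusterAlias);
        }
    }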
server/src/main/java/org/elasticsearch/search/SearchShardTarget.java
@@ -19,6 +19,8 @@

package org.elasticsearch.search;

+import java.io.IOException;

import org.elasticsearch.Version;
import org.elasticsearch.action.OriginalIndices;
import org.elasticsearch.common.Nullable;
@@ -30,16 +32,14 @@
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.transport.RemoteClusterAware;

-import java.io.IOException;

/**
* The target that the search request was executed on.
*/
public final class SearchShardTarget implements Writeable, Comparable<SearchShardTarget> {

private final Text nodeId;
private final ShardId shardId;
-//original indices and cluster alias are only needed in the coordinating node throughout the search request execution.
+//original indices are only needed in the coordinating node throughout the search request execution.
//no need to serialize them as part of SearchShardTarget.
private final transient OriginalIndices originalIndices;
private final String clusterAlias;
server/src/test/java/org/elasticsearch/common/document/DocumentFieldTests.java
@@ -26,9 +26,10 @@
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
-import org.elasticsearch.index.mapper.ParentFieldMapper;
-import org.elasticsearch.index.mapper.RoutingFieldMapper;
-import org.elasticsearch.index.mapper.UidFieldMapper;
+import org.elasticsearch.index.mapper.IdFieldMapper;
+import org.elasticsearch.index.mapper.IndexFieldMapper;
+import org.elasticsearch.index.mapper.MapperService;
+import org.elasticsearch.index.mapper.TypeFieldMapper;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.RandomObjects;

@@ -100,14 +101,17 @@ private static DocumentField mutateDocumentField(DocumentField documentField) {

public static Tuple<DocumentField, DocumentField> randomDocumentField(XContentType xContentType) {
if (randomBoolean()) {
-    String fieldName = randomFrom(ParentFieldMapper.NAME, RoutingFieldMapper.NAME, UidFieldMapper.NAME);
-    DocumentField documentField = new DocumentField(fieldName, Collections.singletonList(randomAlphaOfLengthBetween(3, 10)));
+    String metaField = randomValueOtherThanMany(field -> field.equals(TypeFieldMapper.NAME)
+            || field.equals(IndexFieldMapper.NAME) || field.equals(IdFieldMapper.NAME),
+            () -> randomFrom(MapperService.getAllMetaFields()));
+    DocumentField documentField = new DocumentField(metaField, Collections.singletonList(randomAlphaOfLengthBetween(3, 10)));
return Tuple.tuple(documentField, documentField);
+} else {
+    String fieldName = randomAlphaOfLengthBetween(3, 10);
+    Tuple<List<Object>, List<Object>> tuple = RandomObjects.randomStoredFieldValues(random(), xContentType);
+    DocumentField input = new DocumentField(fieldName, tuple.v1());
+    DocumentField expected = new DocumentField(fieldName, tuple.v2());
+    return Tuple.tuple(input, expected);
+}
-String fieldName = randomAlphaOfLengthBetween(3, 10);
-Tuple<List<Object>, List<Object>> tuple = RandomObjects.randomStoredFieldValues(random(), xContentType);
-DocumentField input = new DocumentField(fieldName, tuple.v1());
-DocumentField expected = new DocumentField(fieldName, tuple.v2());
-return Tuple.tuple(input, expected);
}
}
server/src/test/java/org/elasticsearch/index/get/GetResultTests.java
@@ -90,7 +90,6 @@ public void testToAndFromXContentEmbedded() throws Exception {
XContentType xContentType = randomFrom(XContentType.values());
Tuple<GetResult, GetResult> tuple = randomGetResult(xContentType);
GetResult getResult = tuple.v1();
-
// We don't expect to retrieve the index/type/id of the GetResult because they are not rendered
// by the toXContentEmbedded method.
GetResult expectedGetResult = new GetResult(null, null, null, -1,
@@ -106,7 +105,6 @@
parsedEmbeddedGetResult = GetResult.fromXContentEmbedded(parser);
assertNull(parser.nextToken());
}
-
assertEquals(expectedGetResult, parsedEmbeddedGetResult);
//print the parsed object out and test that the output is the same as the original output
BytesReference finalBytes = toXContentEmbedded(parsedEmbeddedGetResult, xContentType, humanReadable);
@@ -203,16 +201,17 @@ public static Tuple<GetResult, GetResult> randomGetResult(XContentType xContentT
return Tuple.tuple(getResult, expectedGetResult);
}

-private static Tuple<Map<String, DocumentField>,Map<String, DocumentField>> randomDocumentFields(XContentType xContentType) {
+public static Tuple<Map<String, DocumentField>,Map<String, DocumentField>> randomDocumentFields(XContentType xContentType) {
int numFields = randomIntBetween(2, 10);
Map<String, DocumentField> fields = new HashMap<>(numFields);
Map<String, DocumentField> expectedFields = new HashMap<>(numFields);
-for (int i = 0; i < numFields; i++) {
+while (fields.size() < numFields) {
Tuple<DocumentField, DocumentField> tuple = randomDocumentField(xContentType);
DocumentField getField = tuple.v1();
DocumentField expectedGetField = tuple.v2();
-    fields.put(getField.getName(), getField);
-    expectedFields.put(expectedGetField.getName(), expectedGetField);
+    if (fields.putIfAbsent(getField.getName(), getField) == null) {
+        assertNull(expectedFields.putIfAbsent(expectedGetField.getName(), expectedGetField));
+    }
}
return Tuple.tuple(fields, expectedFields);
}
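
The loop change above guards against the random generator producing duplicate field names: a plain for loop would overwrite colliding keys and leave fewer than numFields entries in the map. A standalone sketch of the pattern, with a hypothetical random-name helper deliberately prone to collisions:

    import java.util.HashMap;
    import java.util.Map;
    import java.util.Random;

    public class UniqueRandomKeys {
        private static final Random RANDOM = new Random();

        // Hypothetical stand-in for the test framework's random field-name generator.
        static String randomFieldName() {
            return "field" + RANDOM.nextInt(5); // small range, so collisions are likely
        }

        public static void main(String[] args) {
            int numFields = 3;
            Map<String, Integer> fields = new HashMap<>();
            // Keep drawing names until there really are numFields distinct entries;
            // putIfAbsent returns null only when the key was not present yet.
            while (fields.size() < numFields) {
                fields.putIfAbsent(randomFieldName(), fields.size());
            }
            System.out.println(fields);
        }
    }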
45 changes: 16 additions & 29 deletions server/src/test/java/org/elasticsearch/search/SearchHitTests.java
@@ -19,40 +19,38 @@

package org.elasticsearch.search;

+import java.io.IOException;
+import java.io.InputStream;
+import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+import java.util.function.Predicate;

import org.apache.lucene.search.Explanation;
import org.elasticsearch.action.OriginalIndices;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.bytes.BytesArray;
import org.elasticsearch.common.bytes.BytesReference;
import org.elasticsearch.common.collect.Tuple;
import org.elasticsearch.common.document.DocumentField;
import org.elasticsearch.common.io.stream.BytesStreamOutput;
import org.elasticsearch.common.io.stream.InputStreamStreamInput;
import org.elasticsearch.common.text.Text;
import org.elasticsearch.common.util.set.Sets;
import org.elasticsearch.common.xcontent.ToXContent;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.common.xcontent.XContentType;
import org.elasticsearch.common.xcontent.json.JsonXContent;
import org.elasticsearch.index.Index;
+import org.elasticsearch.index.get.GetResultTests;
import org.elasticsearch.index.shard.ShardId;
import org.elasticsearch.search.SearchHit.NestedIdentity;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightField;
import org.elasticsearch.search.fetch.subphase.highlight.HighlightFieldTests;
import org.elasticsearch.test.ESTestCase;
import org.elasticsearch.test.RandomObjects;

-import java.io.IOException;
-import java.io.InputStream;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-import java.util.Set;
-import java.util.function.Predicate;

import static org.elasticsearch.common.xcontent.XContentHelper.toXContent;
import static org.elasticsearch.test.XContentTestUtils.insertRandomFields;
import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent;
@@ -63,8 +61,6 @@

public class SearchHitTests extends ESTestCase {

-private static Set<String> META_FIELDS = Sets.newHashSet("_uid", "_all", "_parent", "_routing", "_size", "_timestamp", "_ttl");

public static SearchHit createTestItem(boolean withOptionalInnerHits) {
int internalId = randomInt();
String uid = randomAlphaOfLength(10);
@@ -75,18 +71,7 @@ public static SearchHit createTestItem(boolean withOptionalInnerHits) {
}
Map<String, DocumentField> fields = new HashMap<>();
if (randomBoolean()) {
-    int size = randomIntBetween(0, 10);
-    for (int i = 0; i < size; i++) {
-        Tuple<List<Object>, List<Object>> values = RandomObjects.randomStoredFieldValues(random(),
-                XContentType.JSON);
-        if (randomBoolean()) {
-            String metaField = randomFrom(META_FIELDS);
-            fields.put(metaField, new DocumentField(metaField, values.v1()));
-        } else {
-            String fieldName = randomAlphaOfLengthBetween(5, 10);
-            fields.put(fieldName, new DocumentField(fieldName, values.v1()));
-        }
-    }
+    fields = GetResultTests.randomDocumentFields(XContentType.JSON).v1();
}
SearchHit hit = new SearchHit(internalId, uid, type, nestedIdentity, fields);
if (frequently()) {
@@ -109,7 +94,8 @@ public static SearchHit createTestItem(boolean withOptionalInnerHits) {
int size = randomIntBetween(0, 5);
Map<String, HighlightField> highlightFields = new HashMap<>(size);
for (int i = 0; i < size; i++) {
-    highlightFields.put(randomAlphaOfLength(5), HighlightFieldTests.createTestItem());
+    HighlightField testItem = HighlightFieldTests.createTestItem();
+    highlightFields.put(testItem.getName(), testItem);
}
hit.highlightFields(highlightFields);
}
@@ -133,9 +119,10 @@ public static SearchHit createTestItem(boolean withOptionalInnerHits) {
hit.setInnerHits(innerHits);
}
if (randomBoolean()) {
+    String index = randomAlphaOfLengthBetween(5, 10);
+    String clusterAlias = randomBoolean() ? null : randomAlphaOfLengthBetween(5, 10);
    hit.shard(new SearchShardTarget(randomAlphaOfLengthBetween(5, 10),
-            new ShardId(new Index(randomAlphaOfLengthBetween(5, 10), randomAlphaOfLengthBetween(5, 10)), randomInt()), null,
-            OriginalIndices.NONE));
+            new ShardId(new Index(index, randomAlphaOfLengthBetween(5, 10)), randomInt()), clusterAlias, OriginalIndices.NONE));
}
return hit;
}
server/src/test/java/org/elasticsearch/search/SearchSortValuesTests.java
@@ -46,13 +46,13 @@ public static SearchSortValues createTestItem() {
List<Supplier<Object>> valueSuppliers = new ArrayList<>();
// this should reflect all values that are allowed to go through the transport layer
valueSuppliers.add(() -> null);
-valueSuppliers.add(() -> randomInt());
-valueSuppliers.add(() -> randomLong());
-valueSuppliers.add(() -> randomDouble());
-valueSuppliers.add(() -> randomFloat());
-valueSuppliers.add(() -> randomByte());
-valueSuppliers.add(() -> randomShort());
-valueSuppliers.add(() -> randomBoolean());
+valueSuppliers.add(ESTestCase::randomInt);
+valueSuppliers.add(ESTestCase::randomLong);
+valueSuppliers.add(ESTestCase::randomDouble);
+valueSuppliers.add(ESTestCase::randomFloat);
+valueSuppliers.add(ESTestCase::randomByte);
+valueSuppliers.add(ESTestCase::randomShort);
+valueSuppliers.add(ESTestCase::randomBoolean);
valueSuppliers.add(() -> frequently() ? randomAlphaOfLengthBetween(1, 30) : randomRealisticUnicodeOfCodepointLength(30));

int size = randomIntBetween(1, 20);
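
The supplier change above is purely stylistic: a method reference to a static method is interchangeable with a no-argument lambda that calls it. A small self-contained illustration, where randomInt is a made-up stand-in for ESTestCase.randomInt:

    import java.util.ArrayList;
    import java.util.List;
    import java.util.Random;
    import java.util.function.Supplier;

    public class MethodRefSuppliers {
        private static final Random RANDOM = new Random();

        static int randomInt() {
            return RANDOM.nextInt();
        }

        public static void main(String[] args) {
            List<Supplier<Object>> valueSuppliers = new ArrayList<>();
            valueSuppliers.add(() -> randomInt());             // lambda form
            valueSuppliers.add(MethodRefSuppliers::randomInt); // equivalent method reference
            valueSuppliers.forEach(s -> System.out.println(s.get()));
        }
    }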