MetaData Builder doesn't properly prevent an alias with the same name as an index #26804

Merged 2 commits on Sep 29, 2017
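What the title refers to: before this change, MetaData.Builder#build() caught an alias/index name clash only when the concrete index happened to be inserted into the alias-and-index lookup before the alias of the same name; in the opposite iteration order the old code silently overwrote the alias entry with the index entry, and the invalid metadata was built without complaint. The snippet below is purely illustrative and not part of this PR: the class name AliasIndexCollisionRepro and the index names are made up, and the builder calls mirror the ones used by the new test further down.

    import org.elasticsearch.Version;
    import org.elasticsearch.cluster.metadata.AliasMetaData;
    import org.elasticsearch.cluster.metadata.IndexMetaData;
    import org.elasticsearch.cluster.metadata.MetaData;
    import org.elasticsearch.common.settings.Settings;

    public class AliasIndexCollisionRepro {
        public static void main(String[] args) {
            MetaData.Builder metaDataBuilder = MetaData.builder();
            // a concrete index named "logs"
            metaDataBuilder.put(IndexMetaData.builder("logs")
                .settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT))
                .numberOfShards(1)
                .numberOfReplicas(0));
            // a second index that declares an alias reusing the name "logs"
            metaDataBuilder.put(IndexMetaData.builder("logs-2017-09")
                .settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT))
                .numberOfShards(1)
                .numberOfReplicas(0)
                .putAlias(AliasMetaData.builder("logs").build()));
            // With the pre-change builder the alias entry could be silently overwritten once
            // the index "logs" landed in the lookup after the alias; with this change,
            // build() always throws an IllegalStateException naming the duplicate.
            metaDataBuilder.build();
        }
    }

The diff below makes the check independent of iteration order by collecting all alias names first and intersecting them with the set of index names.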
83 changes: 50 additions & 33 deletions core/src/main/java/org/elasticsearch/cluster/metadata/MetaData.java
@@ -22,7 +22,6 @@
 import com.carrotsearch.hppc.ObjectHashSet;
 import com.carrotsearch.hppc.cursors.ObjectCursor;
 import com.carrotsearch.hppc.cursors.ObjectObjectCursor;
-
 import org.apache.logging.log4j.Logger;
 import org.apache.lucene.util.CollectionUtil;
 import org.elasticsearch.cluster.Diff;
@@ -33,6 +32,7 @@
 import org.elasticsearch.cluster.block.ClusterBlock;
 import org.elasticsearch.cluster.block.ClusterBlockLevel;
 import org.elasticsearch.common.Nullable;
+import org.elasticsearch.common.Strings;
 import org.elasticsearch.common.UUIDs;
 import org.elasticsearch.common.collect.HppcMaps;
 import org.elasticsearch.common.collect.ImmutableOpenMap;
@@ -62,9 +62,11 @@
 import java.util.Comparator;
 import java.util.EnumSet;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
 import java.util.SortedMap;
 import java.util.TreeMap;
 
@@ -914,55 +916,70 @@ public MetaData build() {
             // while these datastructures aren't even used.
             // 2) The aliasAndIndexLookup can be updated instead of rebuilding it all the time.
 
-            // build all concrete indices arrays:
-            // TODO: I think we can remove these arrays. it isn't worth the effort, for operations on all indices.
-            // When doing an operation across all indices, most of the time is spent on actually going to all shards and
-            // do the required operations, the bottleneck isn't resolving expressions into concrete indices.
-            List<String> allIndicesLst = new ArrayList<>();
+            final Set<String> allIndices = new HashSet<>(indices.size());
+            final List<String> allOpenIndices = new ArrayList<>();
+            final List<String> allClosedIndices = new ArrayList<>();
+            final Set<String> duplicateAliasesIndices = new HashSet<>();
             for (ObjectCursor<IndexMetaData> cursor : indices.values()) {
-                allIndicesLst.add(cursor.value.getIndex().getName());
-            }
-            String[] allIndices = allIndicesLst.toArray(new String[allIndicesLst.size()]);
-
-            List<String> allOpenIndicesLst = new ArrayList<>();
-            List<String> allClosedIndicesLst = new ArrayList<>();
-            for (ObjectCursor<IndexMetaData> cursor : indices.values()) {
-                IndexMetaData indexMetaData = cursor.value;
+                final IndexMetaData indexMetaData = cursor.value;
+                final String name = indexMetaData.getIndex().getName();
+                boolean added = allIndices.add(name);
+                assert added : "double index named [" + name + "]";
                 if (indexMetaData.getState() == IndexMetaData.State.OPEN) {
-                    allOpenIndicesLst.add(indexMetaData.getIndex().getName());
+                    allOpenIndices.add(indexMetaData.getIndex().getName());
                 } else if (indexMetaData.getState() == IndexMetaData.State.CLOSE) {
-                    allClosedIndicesLst.add(indexMetaData.getIndex().getName());
+                    allClosedIndices.add(indexMetaData.getIndex().getName());
                 }
+                indexMetaData.getAliases().keysIt().forEachRemaining(duplicateAliasesIndices::add);
             }
+            duplicateAliasesIndices.retainAll(allIndices);
+            if (duplicateAliasesIndices.isEmpty() == false) {
+                // iterate again and constructs a helpful message
+                ArrayList<String> duplicates = new ArrayList<>();
+                for (ObjectCursor<IndexMetaData> cursor : indices.values()) {
+                    for (String alias: duplicateAliasesIndices) {
+                        if (cursor.value.getAliases().containsKey(alias)) {
+                            duplicates.add(alias + " (alias of " + cursor.value.getIndex() + ")");
+                        }
+                    }
+                }
+                assert duplicates.size() > 0;
+                throw new IllegalStateException("index and alias names need to be unique, but the following duplicates were found ["
+                    + Strings.collectionToCommaDelimitedString(duplicates)+ "]");
+
+            }
-            String[] allOpenIndices = allOpenIndicesLst.toArray(new String[allOpenIndicesLst.size()]);
-            String[] allClosedIndices = allClosedIndicesLst.toArray(new String[allClosedIndicesLst.size()]);
 
             // build all indices map
             SortedMap<String, AliasOrIndex> aliasAndIndexLookup = new TreeMap<>();
             for (ObjectCursor<IndexMetaData> cursor : indices.values()) {
                 IndexMetaData indexMetaData = cursor.value;
-                aliasAndIndexLookup.put(indexMetaData.getIndex().getName(), new AliasOrIndex.Index(indexMetaData));
+                AliasOrIndex existing = aliasAndIndexLookup.put(indexMetaData.getIndex().getName(), new AliasOrIndex.Index(indexMetaData));
+                assert existing == null : "duplicate for " + indexMetaData.getIndex();
+
                 for (ObjectObjectCursor<String, AliasMetaData> aliasCursor : indexMetaData.getAliases()) {
                     AliasMetaData aliasMetaData = aliasCursor.value;
-                    AliasOrIndex aliasOrIndex = aliasAndIndexLookup.get(aliasMetaData.getAlias());
-                    if (aliasOrIndex == null) {
-                        aliasOrIndex = new AliasOrIndex.Alias(aliasMetaData, indexMetaData);
-                        aliasAndIndexLookup.put(aliasMetaData.getAlias(), aliasOrIndex);
-                    } else if (aliasOrIndex instanceof AliasOrIndex.Alias) {
-                        AliasOrIndex.Alias alias = (AliasOrIndex.Alias) aliasOrIndex;
-                        alias.addIndex(indexMetaData);
-                    } else if (aliasOrIndex instanceof AliasOrIndex.Index) {
-                        AliasOrIndex.Index index = (AliasOrIndex.Index) aliasOrIndex;
-                        throw new IllegalStateException("index and alias names need to be unique, but alias [" + aliasMetaData.getAlias() + "] and index " + index.getIndex().getIndex() + " have the same name");
-                    } else {
-                        throw new IllegalStateException("unexpected alias [" + aliasMetaData.getAlias() + "][" + aliasOrIndex + "]");
-                    }
+                    aliasAndIndexLookup.compute(aliasMetaData.getAlias(), (aliasName, alias) -> {
+                        if (alias == null) {
+                            return new AliasOrIndex.Alias(aliasMetaData, indexMetaData);
+                        } else {
+                            assert alias instanceof AliasOrIndex.Alias : alias.getClass().getName();
+                            ((AliasOrIndex.Alias) alias).addIndex(indexMetaData);
+                            return alias;
+                        }
+                    });
                 }
             }
             aliasAndIndexLookup = Collections.unmodifiableSortedMap(aliasAndIndexLookup);
+            // build all concrete indices arrays:
+            // TODO: I think we can remove these arrays. it isn't worth the effort, for operations on all indices.
+            // When doing an operation across all indices, most of the time is spent on actually going to all shards and
+            // do the required operations, the bottleneck isn't resolving expressions into concrete indices.
+            String[] allIndicesArray = allIndices.toArray(new String[allIndices.size()]);
+            String[] allOpenIndicesArray = allOpenIndices.toArray(new String[allOpenIndices.size()]);
+            String[] allClosedIndicesArray = allClosedIndices.toArray(new String[allClosedIndices.size()]);
 
             return new MetaData(clusterUUID, version, transientSettings, persistentSettings, indices.build(), templates.build(),
-                customs.build(), allIndices, allOpenIndices, allClosedIndices, aliasAndIndexLookup);
+                customs.build(), allIndicesArray, allOpenIndicesArray, allClosedIndicesArray, aliasAndIndexLookup);
         }
 
         public static String toXContent(MetaData metaData) throws IOException {
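The heart of the change above is that the duplicate check now runs before the lookup map is built: every alias name declared by any index is collected into a set, intersected with the set of concrete index names via retainAll, and the build fails with a message listing all duplicates if the intersection is non-empty. A standalone sketch of that idea in plain Java collections follows; the class and the sample names are illustrative only, not the builder's internals.

    import java.util.Arrays;
    import java.util.HashSet;
    import java.util.Set;

    public class DuplicateNameCheck {
        // Returns every name that is used both as a concrete index and as an alias.
        static Set<String> duplicateNames(Set<String> indexNames, Set<String> aliasNames) {
            Set<String> duplicates = new HashSet<>(aliasNames);
            duplicates.retainAll(indexNames); // keep only alias names that are also index names
            return duplicates;
        }

        public static void main(String[] args) {
            Set<String> indices = new HashSet<>(Arrays.asList("logs", "logs-2017-09"));
            Set<String> aliases = new HashSet<>(Arrays.asList("logs", "all-logs"));
            System.out.println(duplicateNames(indices, aliases)); // prints [logs]
        }
    }

Because collisions are rejected up front, the loop that fills the lookup can simply assert, inside the compute(...) lambda, that any pre-existing entry for an alias name is itself an alias.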
43 changes: 41 additions & 2 deletions core/src/test/java/org/elasticsearch/cluster/metadata/MetaDataTests.java
@@ -26,7 +26,6 @@
 import org.elasticsearch.common.io.stream.BytesStreamOutput;
 import org.elasticsearch.common.io.stream.NamedWriteableAwareStreamInput;
 import org.elasticsearch.common.io.stream.NamedWriteableRegistry;
-import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -36,9 +35,14 @@
 import org.elasticsearch.test.ESTestCase;
 
 import java.io.IOException;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.Map;
+import java.util.Set;
 
 import static org.hamcrest.Matchers.equalTo;
 import static org.hamcrest.Matchers.is;
+import static org.hamcrest.Matchers.startsWith;
 
 public class MetaDataTests extends ESTestCase {
 
@@ -52,7 +56,42 @@ public void testIndexAndAliasWithSameName() {
             MetaData.builder().put(builder).build();
             fail("exception should have been thrown");
         } catch (IllegalStateException e) {
-            assertThat(e.getMessage(), equalTo("index and alias names need to be unique, but alias [index] and index [index] have the same name"));
+            assertThat(e.getMessage(), equalTo("index and alias names need to be unique, but the following duplicates were found [index (alias of [index])]"));
         }
     }
 
+    public void testAliasCollidingWithAnExistingIndex() {
+        int indexCount = randomIntBetween(10, 100);
+        Set<String> indices = new HashSet<>(indexCount);
+        for (int i = 0; i < indexCount; i++) {
+            indices.add(randomAlphaOfLength(10));
+        }
+        Map<String, Set<String>> aliasToIndices = new HashMap<>();
+        for (String alias: randomSubsetOf(randomIntBetween(1, 10), indices)) {
+            aliasToIndices.put(alias, new HashSet<>(randomSubsetOf(randomIntBetween(1, 3), indices)));
+        }
+        int properAliases = randomIntBetween(0, 3);
+        for (int i = 0; i < properAliases; i++) {
+            aliasToIndices.put(randomAlphaOfLength(5), new HashSet<>(randomSubsetOf(randomIntBetween(1, 3), indices)));
+        }
+        MetaData.Builder metaDataBuilder = MetaData.builder();
+        for (String index : indices) {
+            IndexMetaData.Builder indexBuilder = IndexMetaData.builder(index)
+                .settings(Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT))
+                .numberOfShards(1)
+                .numberOfReplicas(0);
+            aliasToIndices.forEach((key, value) -> {
+                if (value.contains(index)) {
+                    indexBuilder.putAlias(AliasMetaData.builder(key).build());
+                }
+            });
+            metaDataBuilder.put(indexBuilder);
+        }
+        try {
+            metaDataBuilder.build();
+            fail("exception should have been thrown");
+        } catch (IllegalStateException e) {
+            assertThat(e.getMessage(), startsWith("index and alias names need to be unique"));
+        }
+    }
+
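As a side note on the test above, ESTestCase inherits expectThrows from Lucene's LuceneTestCase, so the try/fail/catch block could equivalently be written as the following sketch (same variables as in testAliasCollidingWithAnExistingIndex):

    // expectThrows runs the supplied code and returns the caught exception, failing if none is thrown
    IllegalStateException e = expectThrows(IllegalStateException.class, metaDataBuilder::build);
    assertThat(e.getMessage(), startsWith("index and alias names need to be unique"));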