Merge branch '6.x' into ccr-6.x
* 6.x:
  Update Tika version to 1.15
  Introduce templating support to timezone/locale in DateProcessor (#27089)
  Increase logging on qa:mixed-cluster tests
  Update to AWS SDK 1.11.223 (#27278)
  Improve error message for parse failures of completion fields (#27297)
  Remove optimisations to reuse objects when applying a new `ClusterState` (#27317)
  Decouple `ChannelFactory` from Tcp classes (#27286)
  Use PlainListenableActionFuture for CloseFuture (#26242)
  Fix find remote when building BWC
  Remove colons from task and configuration names
  Fix snapshot getting stuck in INIT state (#27214)
  Snapshot/Restore: better handle incorrect chunk_size settings in FS repo (#26844)
  Add unreleased 5.6.5 version number
  testCreateSplitIndexToN: do not set `routing_partition_size` to >= `number_of_routing_shards`
  Correct comment in index shard test
  Roll translog generation on primary promotion
  ObjectParser: Replace IllegalStateException with ParsingException (#27302)
  scripted_metric _agg parameter disappears if params are provided (#27159)
  Update discovery-ec2.asciidoc
jasontedor committed Nov 9, 2017
2 parents 2a151ff + 0a50fca commit 536c2cc
Showing 78 changed files with 987 additions and 300 deletions.
BuildPlugin.groovy

@@ -239,7 +239,7 @@ class BuildPlugin implements Plugin<Project> {

     /** Return the configuration name used for finding transitive deps of the given dependency. */
     private static String transitiveDepConfigName(String groupId, String artifactId, String version) {
-        return "_transitive_${groupId}:${artifactId}:${version}"
+        return "_transitive_${groupId}_${artifactId}_${version}"
     }

     /**
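This is the build-side counterpart of "Remove colons from task and configuration names": ':' is Gradle's path separator, so it no longer appears inside configuration names. A minimal sketch of the renaming, with made-up coordinates:

```java
// Illustrative coordinates; the naming scheme is the one in the hunk above.
String groupId = "org.elasticsearch";
String artifactId = "elasticsearch";
String version = "5.6.5";
String configName = "_transitive_" + groupId + "_" + artifactId + "_" + version;
// before this change: "_transitive_org.elasticsearch:elasticsearch:5.6.5"
// after:              "_transitive_org.elasticsearch_elasticsearch_5.6.5"
```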
ClusterFormationTasks.groovy

@@ -429,7 +429,7 @@ class ClusterFormationTasks {

             Project pluginProject = plugin.getValue()
             verifyProjectHasBuildPlugin(name, node.nodeVersion, project, pluginProject)
-            String configurationName = "_plugin_${prefix}_${pluginProject.path}"
+            String configurationName = pluginConfigurationName(prefix, pluginProject)
             Configuration configuration = project.configurations.findByName(configurationName)
             if (configuration == null) {
                 configuration = project.configurations.create(configurationName)
@@ -458,13 +458,21 @@ class ClusterFormationTasks {
         return copyPlugins
     }

+    private static String pluginConfigurationName(final String prefix, final Project project) {
+        return "_plugin_${prefix}_${project.path}".replace(':', '_')
+    }
+
+    private static String pluginBwcConfigurationName(final String prefix, final Project project) {
+        return "_plugin_bwc_${prefix}_${project.path}".replace(':', '_')
+    }
+
     /** Configures task to copy a plugin based on a zip file resolved using dependencies for an older version */
     static Task configureCopyBwcPluginsTask(String name, Project project, Task setup, NodeInfo node, String prefix) {
         Configuration bwcPlugins = project.configurations.getByName("${prefix}_elasticsearchBwcPlugins")
         for (Map.Entry<String, Project> plugin : node.config.plugins.entrySet()) {
             Project pluginProject = plugin.getValue()
             verifyProjectHasBuildPlugin(name, node.nodeVersion, project, pluginProject)
-            String configurationName = "_plugin_bwc_${prefix}_${pluginProject.path}"
+            String configurationName = pluginBwcConfigurationName(prefix, pluginProject)
             Configuration configuration = project.configurations.findByName(configurationName)
             if (configuration == null) {
                 configuration = project.configurations.create(configurationName)
@@ -503,9 +511,9 @@ class ClusterFormationTasks {
     static Task configureInstallPluginTask(String name, Project project, Task setup, NodeInfo node, Project plugin, String prefix) {
         final FileCollection pluginZip;
         if (node.nodeVersion != VersionProperties.elasticsearch) {
-            pluginZip = project.configurations.getByName("_plugin_bwc_${prefix}_${plugin.path}")
+            pluginZip = project.configurations.getByName(pluginBwcConfigurationName(prefix, plugin))
         } else {
-            pluginZip = project.configurations.getByName("_plugin_${prefix}_${plugin.path}")
+            pluginZip = project.configurations.getByName(pluginConfigurationName(prefix, plugin))
         }
         // delay reading the file location until execution time by wrapping in a closure within a GString
         final Object file = "${-> new File(node.pluginsTmpDir, pluginZip.singleFile.getName()).toURI().toURL().toString()}"
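The two new helpers give every call site one definition of the plugin configuration name and sanitize the Gradle project path the same way. A rough sketch of the output, with a hypothetical prefix and project path:

```java
// Hypothetical inputs; mirrors pluginConfigurationName(...) above.
String prefix = "integTest";
String projectPath = ":plugins:analysis-icu";
String configurationName = ("_plugin_" + prefix + "_" + projectPath).replace(':', '_');
// -> "_plugin_integTest__plugins_analysis-icu"
```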
4 changes: 4 additions & 0 deletions core/src/main/java/org/elasticsearch/Version.java
@@ -100,6 +100,8 @@ public class Version implements Comparable<Version> {
     public static final Version V_5_6_3 = new Version(V_5_6_3_ID, org.apache.lucene.util.Version.LUCENE_6_6_1);
     public static final int V_5_6_4_ID = 5060499;
     public static final Version V_5_6_4 = new Version(V_5_6_4_ID, org.apache.lucene.util.Version.LUCENE_6_6_1);
+    public static final int V_5_6_5_ID = 5060599;
+    public static final Version V_5_6_5 = new Version(V_5_6_5_ID, org.apache.lucene.util.Version.LUCENE_6_6_1);
     public static final int V_6_0_0_alpha1_ID = 6000001;
     public static final Version V_6_0_0_alpha1 = new Version(V_6_0_0_alpha1_ID, org.apache.lucene.util.Version.LUCENE_7_0_0);
     public static final int V_6_0_0_alpha2_ID = 6000002;
@@ -148,6 +150,8 @@ public static Version fromId(int id) {
                 return V_6_0_0_alpha2;
             case V_6_0_0_alpha1_ID:
                 return V_6_0_0_alpha1;
+            case V_5_6_5_ID:
+                return V_5_6_5;
             case V_5_6_4_ID:
                 return V_5_6_4;
             case V_5_6_3_ID:
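The added constant follows the id convention visible in the surrounding constants: two decimal digits per component, with 99 marking a release build, so 5060599 is 5.6.5. A decoding sketch (the convention is inferred from the constants above, not quoted from Version.java):

```java
int id = 5060599;
int major = id / 1000000;        // 5
int minor = (id / 10000) % 100;  // 6
int revision = (id / 100) % 100; // 5
int build = id % 100;            // 99 marks a release
```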
ActionListener.java

@@ -45,7 +45,7 @@ public interface ActionListener<Response> {
      * Creates a listener that listens for a response (or failure) and executes the
      * corresponding consumer when the response (or failure) is received.
      *
-     * @param onResponse the consumer of the response, when the listener receives one
+     * @param onResponse the checked consumer of the response, when the listener receives one
      * @param onFailure the consumer of the failure, when the listener receives one
      * @param <Response> the type of the response
      * @return a listener that listens for responses and invokes the consumer when received
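The corrected javadoc reflects that onResponse is a checked consumer, so the handler may throw. A hedged usage sketch, assuming this documents ActionListener.wrap (the handler bodies are illustrative):

```java
ActionListener<SearchResponse> listener = ActionListener.wrap(
    response -> writeToDisk(response),      // checked consumer: may throw IOException
    e -> logger.error("search failed", e)); // failure path
// writeToDisk and logger are hypothetical stand-ins for application code.
```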
PlainListenableActionFuture.java

@@ -33,7 +33,7 @@ public class PlainListenableActionFuture<T> extends AdapterActionFuture<T, T> im
     volatile Object listeners;
     boolean executedListeners = false;

-    private PlainListenableActionFuture() {}
+    protected PlainListenableActionFuture() {}

    /**
     * This method returns a listenable future. The listeners will be called on completion of the future.
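Loosening the constructor from private to protected is what lets the merged "Use PlainListenableActionFuture for CloseFuture (#26242)" change subclass this future. A minimal illustrative subclass:

```java
// Illustrative only: compiles now that the no-arg constructor is protected.
class CloseFuture extends PlainListenableActionFuture<Void> {
    CloseFuture() {
        super(); // was inaccessible to subclasses before this change
    }
}
```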
FsBlobContainer.java

@@ -140,7 +140,9 @@ public void move(String source, String target) throws IOException {
         Path targetPath = path.resolve(target);
         // If the target file exists then Files.move() behaviour is implementation specific
         // the existing file might be replaced or this method fails by throwing an IOException.
-        assert !Files.exists(targetPath);
+        if (Files.exists(targetPath)) {
+            throw new FileAlreadyExistsException("blob [" + targetPath + "] already exists, cannot overwrite");
+        }
         Files.move(sourcePath, targetPath, StandardCopyOption.ATOMIC_MOVE);
         IOUtils.fsync(path, true);
     }
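The replaced assert only runs with -ea enabled, so production code previously fell through to Files.move(), whose handling of an existing target is implementation specific (as the comment notes). Throwing makes the failure deterministic. A sketch of the caller-visible contract (container and blob names are illustrative):

```java
try {
    container.move("pending-snapshot", "snapshot-1"); // target blob already exists
} catch (FileAlreadyExistsException e) {
    // now fails the same way on every platform, with or without assertions enabled
}
```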
ObjectParser.java

@@ -147,7 +147,7 @@ public Value parse(XContentParser parser, Value value, Context context) throws I
         } else {
             token = parser.nextToken();
             if (token != XContentParser.Token.START_OBJECT) {
-                throw new IllegalStateException("[" + name + "] Expected START_OBJECT but was: " + token);
+                throw new ParsingException(parser.getTokenLocation(), "[" + name + "] Expected START_OBJECT but was: " + token);
             }
         }

@@ -159,13 +159,13 @@ public Value parse(XContentParser parser, Value value, Context context) throws I
                 fieldParser = getParser(currentFieldName);
             } else {
                 if (currentFieldName == null) {
-                    throw new IllegalStateException("[" + name + "] no field found");
+                    throw new ParsingException(parser.getTokenLocation(), "[" + name + "] no field found");
                 }
                 if (fieldParser == null) {
                     assert ignoreUnknownFields : "this should only be possible if configured to ignore known fields";
                     parser.skipChildren(); // noop if parser points to a value, skips children if parser is start object or start array
                 } else {
-                    fieldParser.assertSupports(name, token, currentFieldName);
+                    fieldParser.assertSupports(name, token, currentFieldName, parser.getTokenLocation());
                     parseSub(parser, fieldParser, currentFieldName, value, context);
                 }
                 fieldParser = null;
@@ -330,7 +330,7 @@ private void parseSub(XContentParser parser, FieldParser fieldParser, String cur
             case END_OBJECT:
             case END_ARRAY:
             case FIELD_NAME:
-                throw new IllegalStateException("[" + name + "]" + token + " is unexpected");
+                throw new ParsingException(parser.getTokenLocation(), "[" + name + "]" + token + " is unexpected");
             case VALUE_STRING:
             case VALUE_NUMBER:
             case VALUE_BOOLEAN:
@@ -361,12 +361,12 @@ private class FieldParser {
             this.type = type;
         }

-        void assertSupports(String parserName, XContentParser.Token token, String currentFieldName) {
+        void assertSupports(String parserName, XContentParser.Token token, String currentFieldName, XContentLocation location) {
             if (parseField.match(currentFieldName) == false) {
-                throw new IllegalStateException("[" + parserName + "] parsefield doesn't accept: " + currentFieldName);
+                throw new ParsingException(location, "[" + parserName + "] parsefield doesn't accept: " + currentFieldName);
             }
             if (supportedTokens.contains(token) == false) {
-                throw new IllegalArgumentException(
+                throw new ParsingException(location,
                     "[" + parserName + "] " + currentFieldName + " doesn't support values of type: " + token);
             }
         }
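Per "ObjectParser: Replace IllegalStateException with ParsingException (#27302)", failures now carry the token location captured from parser.getTokenLocation(). A hedged sketch of what callers gain (the parser constant and value object are illustrative):

```java
try {
    MyObject parsed = PARSER.parse(parser, new MyObject(), context);
} catch (ParsingException e) {
    // the location comes from parser.getTokenLocation() in the hunks above
    logger.warn("malformed input at line {}, column {}: {}",
        e.getLineNumber(), e.getColumnNumber(), e.getMessage());
}
```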
ZenDiscovery.java

@@ -735,7 +735,6 @@ boolean processNextCommittedClusterState(String reason) {

         final ClusterState newClusterState = pendingStatesQueue.getNextClusterStateToProcess();
         final ClusterState currentState = committedState.get();
-        final ClusterState adaptedNewClusterState;
         // all pending states have been processed
         if (newClusterState == null) {
             return false;
@@ -773,54 +772,23 @@ boolean processNextCommittedClusterState(String reason) {
         if (currentState.blocks().hasGlobalBlock(discoverySettings.getNoMasterBlock())) {
             // its a fresh update from the master as we transition from a start of not having a master to having one
             logger.debug("got first state from fresh master [{}]", newClusterState.nodes().getMasterNodeId());
-            adaptedNewClusterState = newClusterState;
-        } else if (newClusterState.nodes().isLocalNodeElectedMaster() == false) {
-            // some optimizations to make sure we keep old objects where possible
-            ClusterState.Builder builder = ClusterState.builder(newClusterState);
-
-            // if the routing table did not change, use the original one
-            if (newClusterState.routingTable().version() == currentState.routingTable().version()) {
-                builder.routingTable(currentState.routingTable());
-            }
-            // same for metadata
-            if (newClusterState.metaData().version() == currentState.metaData().version()) {
-                builder.metaData(currentState.metaData());
-            } else {
-                // if its not the same version, only copy over new indices or ones that changed the version
-                MetaData.Builder metaDataBuilder = MetaData.builder(newClusterState.metaData()).removeAllIndices();
-                for (IndexMetaData indexMetaData : newClusterState.metaData()) {
-                    IndexMetaData currentIndexMetaData = currentState.metaData().index(indexMetaData.getIndex());
-                    if (currentIndexMetaData != null && currentIndexMetaData.isSameUUID(indexMetaData.getIndexUUID()) &&
-                            currentIndexMetaData.getVersion() == indexMetaData.getVersion()) {
-                        // safe to reuse
-                        metaDataBuilder.put(currentIndexMetaData, false);
-                    } else {
-                        metaDataBuilder.put(indexMetaData, false);
-                    }
-                }
-                builder.metaData(metaDataBuilder);
-            }
-
-            adaptedNewClusterState = builder.build();
-        } else {
-            adaptedNewClusterState = newClusterState;
         }

-        if (currentState == adaptedNewClusterState) {
+        if (currentState == newClusterState) {
             return false;
         }

-        committedState.set(adaptedNewClusterState);
+        committedState.set(newClusterState);

         // update failure detection only after the state has been updated to prevent race condition with handleLeaveRequest
         // and handleNodeFailure as those check the current state to determine whether the failure is to be handled by this node
-        if (adaptedNewClusterState.nodes().isLocalNodeElectedMaster()) {
+        if (newClusterState.nodes().isLocalNodeElectedMaster()) {
             // update the set of nodes to ping
-            nodesFD.updateNodesAndPing(adaptedNewClusterState);
+            nodesFD.updateNodesAndPing(newClusterState);
         } else {
             // check to see that we monitor the correct master of the cluster
-            if (masterFD.masterNode() == null || !masterFD.masterNode().equals(adaptedNewClusterState.nodes().getMasterNode())) {
-                masterFD.restart(adaptedNewClusterState.nodes().getMasterNode(),
+            if (masterFD.masterNode() == null || !masterFD.masterNode().equals(newClusterState.nodes().getMasterNode())) {
+                masterFD.restart(newClusterState.nodes().getMasterNode(),
                     "new cluster state received and we are monitoring the wrong master [" + masterFD.masterNode() + "]");
             }
         }
CompletionFieldMapper.java

@@ -30,9 +30,9 @@
 import org.apache.lucene.search.suggest.document.PrefixCompletionQuery;
 import org.apache.lucene.search.suggest.document.RegexCompletionQuery;
 import org.apache.lucene.search.suggest.document.SuggestField;
-import org.elasticsearch.ElasticsearchParseException;
 import org.elasticsearch.Version;
 import org.elasticsearch.common.ParseField;
+import org.elasticsearch.common.ParsingException;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.common.unit.Fuzziness;
 import org.elasticsearch.common.util.set.Sets;
@@ -560,7 +560,7 @@ private void parse(ParseContext parseContext, Token token, XContentParser parser
                 }
             }
         } else {
-            throw new ElasticsearchParseException("failed to parse expected text or object got" + token.name());
+            throw new ParsingException(parser.getTokenLocation(), "failed to parse [" + parser.currentName() + "]: expected text or object, but got " + token.name());
         }
     }
Expand Down
IndexShard.java

@@ -471,8 +471,12 @@ public void updateShardState(final ShardRouting newRouting,
                      * subsequently fails before the primary/replica re-sync completes successfully and we are now being
                      * promoted, the local checkpoint tracker here could be left in a state where it would re-issue sequence
                      * numbers. To ensure that this is not the case, we restore the state of the local checkpoint tracker by
-                     * replaying the translog and marking any operations there are completed.
+                     * replaying the translog and marking any operations there are completed. Rolling the translog generation is
+                     * not strictly needed here (as we will never have collisions between sequence numbers in a translog
+                     * generation in a new primary as it takes the last known sequence number as a starting point), but it
+                     * simplifies reasoning about the relationship between primary terms and translog generations.
                      */
+                    getEngine().rollTranslogGeneration();
                     getEngine().restoreLocalCheckpointFromTranslog();
                     getEngine().fillSeqNoGaps(newPrimaryTerm);
                     getEngine().seqNoService().updateLocalCheckpointForShard(currentRouting.allocationId().getId(),
BlobStoreIndexShardSnapshot.java (FileInfo)

@@ -66,7 +66,7 @@ public FileInfo(String name, StoreFileMetaData metaData, ByteSizeValue partSize)
             this.metadata = metaData;

             long partBytes = Long.MAX_VALUE;
-            if (partSize != null) {
+            if (partSize != null && partSize.getBytes() > 0) {
                 partBytes = partSize.getBytes();
             }

BlobStoreRepository.java

@@ -241,6 +241,10 @@ protected BlobStoreRepository(RepositoryMetaData metadata, Settings globalSettin
             BlobStoreIndexShardSnapshot::fromXContent, namedXContentRegistry, isCompress());
         indexShardSnapshotsFormat = new ChecksumBlobStoreFormat<>(SNAPSHOT_INDEX_CODEC, SNAPSHOT_INDEX_NAME_FORMAT,
             BlobStoreIndexShardSnapshots::fromXContent, namedXContentRegistry, isCompress());
+        ByteSizeValue chunkSize = chunkSize();
+        if (chunkSize != null && chunkSize.getBytes() <= 0) {
+            throw new IllegalArgumentException("the chunk size cannot be negative: [" + chunkSize + "]");
+        }
     }

     @Override
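This guard backs "better handle incorrect chunk_size settings in FS repo (#26844)": a repository whose chunkSize() is non-positive now fails at construction instead of producing broken chunking. Note that the check also rejects zero, not only negative values, despite the message's wording. A boundary sketch:

```java
boolean accepted   = new ByteSizeValue(1).getBytes() > 0;
boolean zeroFails  = new ByteSizeValue(0).getBytes() <= 0;  // rejected by the new check
boolean minusFails = new ByteSizeValue(-1).getBytes() <= 0; // rejected by the new check
```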
FsRepository.java

@@ -54,10 +54,10 @@ public class FsRepository extends BlobStoreRepository {
         new Setting<>("location", "", Function.identity(), Property.NodeScope);
     public static final Setting<String> REPOSITORIES_LOCATION_SETTING =
         new Setting<>("repositories.fs.location", LOCATION_SETTING, Function.identity(), Property.NodeScope);
-    public static final Setting<ByteSizeValue> CHUNK_SIZE_SETTING =
-        Setting.byteSizeSetting("chunk_size", new ByteSizeValue(-1), Property.NodeScope);
-    public static final Setting<ByteSizeValue> REPOSITORIES_CHUNK_SIZE_SETTING =
-        Setting.byteSizeSetting("repositories.fs.chunk_size", new ByteSizeValue(-1), Property.NodeScope);
+    public static final Setting<ByteSizeValue> CHUNK_SIZE_SETTING = Setting.byteSizeSetting("chunk_size",
+        new ByteSizeValue(Long.MAX_VALUE), new ByteSizeValue(5), new ByteSizeValue(Long.MAX_VALUE), Property.NodeScope);
+    public static final Setting<ByteSizeValue> REPOSITORIES_CHUNK_SIZE_SETTING = Setting.byteSizeSetting("repositories.fs.chunk_size",
+        new ByteSizeValue(Long.MAX_VALUE), new ByteSizeValue(5), new ByteSizeValue(Long.MAX_VALUE), Property.NodeScope);
     public static final Setting<Boolean> COMPRESS_SETTING = Setting.boolSetting("compress", false, Property.NodeScope);
     public static final Setting<Boolean> REPOSITORIES_COMPRESS_SETTING =
         Setting.boolSetting("repositories.fs.compress", false, Property.NodeScope);
@@ -95,10 +95,8 @@ public FsRepository(RepositoryMetaData metadata, Environment environment,
         blobStore = new FsBlobStore(settings, locationFile);
         if (CHUNK_SIZE_SETTING.exists(metadata.settings())) {
             this.chunkSize = CHUNK_SIZE_SETTING.get(metadata.settings());
-        } else if (REPOSITORIES_CHUNK_SIZE_SETTING.exists(settings)) {
-            this.chunkSize = REPOSITORIES_CHUNK_SIZE_SETTING.get(settings);
         } else {
-            this.chunkSize = null;
+            this.chunkSize = REPOSITORIES_CHUNK_SIZE_SETTING.get(settings);
         }
         this.compress = COMPRESS_SETTING.exists(metadata.settings()) ? COMPRESS_SETTING.get(metadata.settings()) : REPOSITORIES_COMPRESS_SETTING.get(settings);
         this.basePath = BlobPath.cleanPath();
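The old -1 sentinel meaning "no chunking" is replaced by an explicit default of Long.MAX_VALUE with validated bounds (minimum 5 bytes), and the constructor now always resolves a non-null chunk size by falling back to the node-level setting. A hedged sketch of the new validation (value illustrative):

```java
Settings s = Settings.builder().put("chunk_size", "3b").build();
// below the 5-byte minimum: the bounded setting now rejects this at parse time
// (an IllegalArgumentException) rather than accepting a broken value
ByteSizeValue chunk = FsRepository.CHUNK_SIZE_SETTING.get(s);
```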
ScriptedMetricAggregatorFactory.java

@@ -70,6 +70,8 @@ public Aggregator createInternal(Aggregator parent, boolean collectsFromSingleBu
             params = deepCopyParams(params, context);
         } else {
             params = new HashMap<>();
+        }
+        if (params.containsKey("_agg") == false) {
             params.put("_agg", new HashMap<String, Object>());
         }

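Previously the `_agg` map was seeded only in the branch where the request supplied no params, so user-provided params made `_agg` disappear inside the scripts (#27159). The fix seeds it whenever the caller did not provide one. A small sketch of the repaired flow (the user param is illustrative):

```java
Map<String, Object> params = new HashMap<>();
params.put("threshold", 42);                           // user-supplied param
if (params.containsKey("_agg") == false) {
    params.put("_agg", new HashMap<String, Object>()); // shared state map for the scripts
}
// map/combine/reduce scripts now see both "threshold" and "_agg"
```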
(The remaining files of the 78 changed are not rendered in this capture.)