Merge branch 'main' into replace-master-internal
Signed-off-by: Tianli Feng <ftianli@amazon.com>

# Conflicts:
#	buildSrc/src/main/groovy/org/opensearch/gradle/test/ClusterFormationTasks.groovy
#	client/rest/src/main/java/org/opensearch/client/Node.java
#	server/src/main/java/org/opensearch/action/admin/cluster/health/ClusterHealthResponse.java
#	server/src/main/java/org/opensearch/cluster/coordination/ClusterBootstrapService.java
#	server/src/main/java/org/opensearch/cluster/coordination/NoMasterBlockService.java
#	server/src/test/java/org/opensearch/cluster/coordination/NoMasterBlockServiceTests.java
#	server/src/test/java/org/opensearch/env/NodeRepurposeCommandTests.java
#	test/framework/src/main/java/org/opensearch/test/InternalTestCluster.java
Tianli Feng committed Mar 19, 2022
2 parents 36a5bbe + e0f7706 commit 0817155
Showing 240 changed files with 2,972 additions and 999 deletions.
@@ -111,7 +111,7 @@ public static DiscoveryNode newNode(String nodeId, Map<String, String> attribute
nodeId,
new TransportAddress(TransportAddress.META_ADDRESS, portGenerator.incrementAndGet()),
attributes,
- Sets.newHashSet(DiscoveryNodeRole.MASTER_ROLE, DiscoveryNodeRole.DATA_ROLE),
+ Sets.newHashSet(DiscoveryNodeRole.CLUSTER_MANAGER_ROLE, DiscoveryNodeRole.DATA_ROLE),
Version.CURRENT
);
}
2 changes: 1 addition & 1 deletion buildSrc/build.gradle
@@ -115,7 +115,7 @@ dependencies {
api 'de.thetaphi:forbiddenapis:3.2'
api 'com.avast.gradle:gradle-docker-compose-plugin:0.14.12'
api 'org.apache.maven:maven-model:3.6.2'
- api 'com.networknt:json-schema-validator:1.0.36'
+ api 'com.networknt:json-schema-validator:1.0.67'
api "com.fasterxml.jackson.core:jackson-databind:${props.getProperty('jackson')}"

testFixturesApi "junit:junit:${props.getProperty('junit')}"
@@ -153,7 +153,8 @@ class ClusterFormationTasks {
}
boolean supportsInitialClusterManagerNodes = hasBwcNodes == false || config.bwcVersion.onOrAfter("7.0.0")
if (esConfig['discovery.type'] == null && config.getAutoSetClusterManagerNodes() && supportsInitialClusterManagerNodes) {
- esConfig['cluster.initial_master_nodes'] = nodes.stream().map({ n ->
+ // To promote inclusive language, the old setting name is deprecated in 2.0.0
+ esConfig[node.nodeVersion.onOrAfter("2.0.0") ? 'cluster.initial_cluster_manager_nodes' : 'cluster.initial_master_nodes'] = nodes.stream().map({ n ->
if (n.config.settings['node.name'] == null) {
return "node-" + n.nodeNum
} else {
@@ -361,7 +361,12 @@ private void commonNodeConfig(OpenSearchNode node, String nodeNames, OpenSearchN
.collect(Collectors.toList())
.forEach(node.defaultConfig::remove);
if (nodeNames != null && node.settings.getOrDefault("discovery.type", "anything").equals("single-node") == false) {
node.defaultConfig.put("cluster.initial_master_nodes", "[" + nodeNames + "]");
// To promote inclusive language, the old setting name is deprecated n 2.0.0
if (node.getVersion().onOrAfter("2.0.0")) {
node.defaultConfig.put("cluster.initial_cluster_manager_nodes", "[" + nodeNames + "]");
} else {
node.defaultConfig.put("cluster.initial_master_nodes", "[" + nodeNames + "]");
}
}
node.defaultConfig.put("discovery.seed_providers", "file");
node.defaultConfig.put("discovery.seed_hosts", "[]");
2 changes: 1 addition & 1 deletion buildSrc/version.properties
@@ -11,7 +11,7 @@ spatial4j = 0.7
jts = 1.15.0
jackson = 2.12.6
snakeyaml = 1.26
- icu4j = 68.2
+ icu4j = 70.1
supercsv = 2.4.0
log4j = 2.17.1
slf4j = 1.6.2
8 changes: 4 additions & 4 deletions client/rest/src/main/java/org/opensearch/client/Node.java
@@ -210,21 +210,21 @@ public Roles(final Set<String> roles) {
}

/**
- * Returns whether or not the node <strong>could</strong> be elected cluster_manager.
+ * Returns whether or not the node <strong>could</strong> be elected cluster-manager.
*/
public boolean isMasterEligible() {
return roles.contains("master");
return roles.contains("master") || roles.contains("cluster_manager");
}

/**
- * Teturns whether or not the node stores data.
+ * Returns whether or not the node stores data.
*/
public boolean isData() {
return roles.contains("data");
}

/**
- * Teturns whether or not the node runs ingest pipelines.
+ * Returns whether or not the node runs ingest pipelines.
*/
public boolean isIngest() {
return roles.contains("ingest");
4 changes: 2 additions & 2 deletions distribution/docker/docker-compose.yml
@@ -5,7 +5,7 @@ services:
image: opensearch:test
environment:
- node.name=opensearch-1
-   - cluster.initial_master_nodes=opensearch-1,opensearch-2
+   - cluster.initial_cluster_manager_nodes=opensearch-1,opensearch-2
- discovery.seed_hosts=opensearch-2:9300
- cluster.name=opensearch
- bootstrap.memory_lock=true
@@ -29,7 +29,7 @@ services:
image: opensearch:test
environment:
- node.name=opensearch-2
-   - cluster.initial_master_nodes=opensearch-1,opensearch-2
+   - cluster.initial_cluster_manager_nodes=opensearch-1,opensearch-2
- discovery.seed_hosts=opensearch-1:9300
- cluster.name=opensearch
- bootstrap.memory_lock=true
@@ -1,22 +1,40 @@
---
"Test cat nodes output - before 2.0.0":
- skip:
version: "2.0.0 - "
reason: "master is replaced by cluster_manager in 2.0.0"

- do:
cat.nodes:
v: true

- match:
$body: |
/^ ip \s+ heap\.percent \s+ ram\.percent \s+ cpu \s+ load_1m \s+ load_5m \s+ load_15m \s+ node\.role \s+ master \s+ name \n
((\d{1,3}\.){3}\d{1,3} \s+ \d+ \s+ \d* \s+ (-)?\d* \s+ ((-)?\d*(\.\d+)?)? \s+ ((-)?\d*(\.\d+)?)? \s+ ((-)?\d*(\.\d+)?)? \s+ (-|[cdhilmrstvw]{1,11}) \s+ [-*x] \s+ (\S+\s?)+ \n)+ $/
---
"Test cat nodes output":
- skip:
version: " - 1.4.99"
reason: "cluster_manager is introduced in 2.0.0"

- do:
cat.nodes: {}

- match:
$body: |
/ #ip heap.percent ram.percent cpu load_1m load_5m load_15m node.role master name
^ ((\d{1,3}\.){3}\d{1,3} \s+ \d+ \s+ \d* \s+ (-)?\d* \s+ ((-)?\d*(\.\d+)?)? \s+ ((-)?\d*(\.\d+)?)?\s+ ((-)?\d*(\.\d+)?)? \s+ (-|[cdhilmrstvw]{1,11}) \s+ [-*x] \s+ (\S+\s?)+ \n)+ $/
/ #ip heap.percent ram.percent cpu load_1m load_5m load_15m node.role cluster_manager name
^ ((\d{1,3}\.){3}\d{1,3} \s+ \d+ \s+ \d* \s+ (-)?\d* \s+ ((-)?\d*(\.\d+)?)? \s+ ((-)?\d*(\.\d+)?)?\s+ ((-)?\d*(\.\d+)?)? \s+ (-|[cdhilmrstvw]{1,11}) \s+ [-*x] \s+ (\S+\s?)+ \n)+ $/
- do:
cat.nodes:
v: true

- match:
$body: |
/^ ip \s+ heap\.percent \s+ ram\.percent \s+ cpu \s+ load_1m \s+ load_5m \s+ load_15m \s+ node\.role \s+ master \s+ name \n
((\d{1,3}\.){3}\d{1,3} \s+ \d+ \s+ \d* \s+ (-)?\d* \s+ ((-)?\d*(\.\d+)?)? \s+ ((-)?\d*(\.\d+)?)? \s+ ((-)?\d*(\.\d+)?)? \s+ (-|[cdhilmrstvw]{1,11}) \s+ [-*x] \s+ (\S+\s?)+ \n)+ $/
/^ ip \s+ heap\.percent \s+ ram\.percent \s+ cpu \s+ load_1m \s+ load_5m \s+ load_15m \s+ node\.role \s+ cluster_manager \s+ name \n
((\d{1,3}\.){3}\d{1,3} \s+ \d+ \s+ \d* \s+ (-)?\d* \s+ ((-)?\d*(\.\d+)?)? \s+ ((-)?\d*(\.\d+)?)? \s+ ((-)?\d*(\.\d+)?)? \s+ (-|[cdhilmrstvw]{1,11}) \s+ [-*x] \s+ (\S+\s?)+ \n)+ $/
- do:
cat.nodes:
4 changes: 2 additions & 2 deletions distribution/src/config/opensearch.yml
@@ -67,9 +67,9 @@ ${path.logs}
#
#discovery.seed_hosts: ["host1", "host2"]
#
- # Bootstrap the cluster using an initial set of master-eligible nodes:
+ # Bootstrap the cluster using an initial set of cluster-manager-eligible nodes:
#
- #cluster.initial_master_nodes: ["node-1", "node-2"]
+ #cluster.initial_cluster_manager_nodes: ["node-1", "node-2"]
#
# For more information, consult the discovery and cluster formation module documentation.
#
@@ -73,7 +73,7 @@ public void testCustomWordDelimiterQueryString() {
.put("analysis.filter.custom_word_delimiter.split_on_numerics", "false")
.put("analysis.filter.custom_word_delimiter.stem_english_possessive", "false")
)
.addMapping("type1", "field1", "type=text,analyzer=my_analyzer", "field2", "type=text,analyzer=my_analyzer")
.setMapping("field1", "type=text,analyzer=my_analyzer", "field2", "type=text,analyzer=my_analyzer")
);

client().prepareIndex("test").setId("1").setSource("field1", "foo bar baz", "field2", "not needed").get();
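The test changes in this and the following hunks all follow one pattern: the deprecated, type-qualified addMapping("<type>", ...) calls are replaced by the type-less setMapping(...) variant, with the field definitions left untouched. A condensed before/after sketch, assumed to live inside an OpenSearchIntegTestCase subclass (index and field names are illustrative):

// before 2.0: mapping keyed by the legacy type name
assertAcked(prepareCreate("test").addMapping("type1", "field1", "type=text,analyzer=my_analyzer"));

// after: type-less mapping, same field definitions
assertAcked(prepareCreate("test").setMapping("field1", "type=text,analyzer=my_analyzer"));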
@@ -149,8 +149,7 @@ public void testMultiPhraseCutoff() throws IOException {
* query. We cut off and extract terms if there are more than 16 terms in the query
*/
assertAcked(
prepareCreate("test").addMapping(
"test",
prepareCreate("test").setMapping(
"body",
"type=text,analyzer=custom_analyzer," + "search_analyzer=custom_analyzer,term_vector=with_positions_offsets"
)
@@ -225,8 +224,7 @@ public void testSynonyms() throws IOException {

assertAcked(
prepareCreate("test").setSettings(builder.build())
- .addMapping(
-     "type1",
+ .setMapping(
"field1",
"type=text,term_vector=with_positions_offsets,search_analyzer=synonym," + "analyzer=standard,index_options=offsets"
)
@@ -335,8 +333,7 @@ public void testPhrasePrefix() throws IOException {

assertAcked(
prepareCreate("second_test_index").setSettings(builder.build())
- .addMapping(
-     "doc",
+ .setMapping(
"field4",
"type=text,term_vector=with_positions_offsets,analyzer=synonym",
"field3",
@@ -147,7 +147,7 @@ public void testConvertScalarToList() throws Exception {
public void testAppendMetadataExceptVersion() throws Exception {
// here any metadata field value becomes a list, which won't make sense in most of the cases,
// but support for append is streamlined like for set so we test it
- Metadata randomMetadata = randomFrom(Metadata.INDEX, Metadata.TYPE, Metadata.ID, Metadata.ROUTING);
+ Metadata randomMetadata = randomFrom(Metadata.INDEX, Metadata.ID, Metadata.ROUTING);
List<String> values = new ArrayList<>();
Processor appendProcessor;
if (randomBoolean()) {
@@ -116,7 +116,7 @@ public void testSetExistingNullFieldWithOverrideDisabled() throws Exception {
}

public void testSetMetadataExceptVersion() throws Exception {
- Metadata randomMetadata = randomFrom(Metadata.INDEX, Metadata.TYPE, Metadata.ID, Metadata.ROUTING);
+ Metadata randomMetadata = randomFrom(Metadata.INDEX, Metadata.ID, Metadata.ROUTING);
Processor processor = createSetProcessor(randomMetadata.getFieldName(), "_value", true, false);
IngestDocument ingestDocument = RandomDocumentPicks.randomIngestDocument(random());
processor.execute(ingestDocument);
@@ -158,7 +158,7 @@ public void testScore() throws Exception {
}

public void testDateMethods() throws Exception {
OpenSearchAssertions.assertAcked(prepareCreate("test").addMapping("doc", "date0", "type=date", "date1", "type=date"));
OpenSearchAssertions.assertAcked(prepareCreate("test").setMapping("date0", "type=date", "date1", "type=date"));
ensureGreen("test");
indexRandom(
true,
@@ -188,7 +188,7 @@ public void testDateMethods() throws Exception {
}

public void testDateObjectMethods() throws Exception {
- OpenSearchAssertions.assertAcked(prepareCreate("test").addMapping("doc", "date0", "type=date", "date1", "type=date"));
+ OpenSearchAssertions.assertAcked(prepareCreate("test").setMapping("date0", "type=date", "date1", "type=date"));
ensureGreen("test");
indexRandom(
true,
@@ -219,7 +219,7 @@ public void testDateObjectMethods() throws Exception {

public void testMultiValueMethods() throws Exception {
OpenSearchAssertions.assertAcked(
prepareCreate("test").addMapping("doc", "double0", "type=double", "double1", "type=double", "double2", "type=double")
prepareCreate("test").setMapping("double0", "type=double", "double1", "type=double", "double2", "type=double")
);
ensureGreen("test");

@@ -322,7 +322,7 @@ public void testMultiValueMethods() throws Exception {
}

public void testInvalidDateMethodCall() throws Exception {
OpenSearchAssertions.assertAcked(prepareCreate("test").addMapping("doc", "double", "type=double"));
OpenSearchAssertions.assertAcked(prepareCreate("test").setMapping("double", "type=double"));
ensureGreen("test");
indexRandom(true, client().prepareIndex("test").setId("1").setSource("double", "178000000.0"));
try {
@@ -343,7 +343,7 @@ public void testInvalidDateMethodCall() throws Exception {
}

public void testSparseField() throws Exception {
OpenSearchAssertions.assertAcked(prepareCreate("test").addMapping("doc", "x", "type=long", "y", "type=long"));
OpenSearchAssertions.assertAcked(prepareCreate("test").setMapping("x", "type=long", "y", "type=long"));
ensureGreen("test");
indexRandom(
true,
@@ -528,7 +528,7 @@ public void testSpecialValueVariable() throws Exception {

public void testStringSpecialValueVariable() throws Exception {
// i.e. expression script for term aggregations, which is not allowed
- assertAcked(client().admin().indices().prepareCreate("test").addMapping("doc", "text", "type=keyword").get());
+ assertAcked(client().admin().indices().prepareCreate("test").setMapping("text", "type=keyword").get());
ensureGreen("test");
indexRandom(
true,
@@ -62,7 +62,7 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws
"_doc",
new CompressedXContent(
Strings.toString(
- PutMappingRequest.buildFromSimplifiedDef(
+ PutMappingRequest.simpleMapping(
"my_feature_field",
"type=rank_feature",
"my_negative_feature_field",