
Commit

Merge remote-tracking branch 'upstream/main' into cluster-manager-cluster-health

Signed-off-by: Tianli Feng <ftianli@amazon.com>
Tianli Feng committed Mar 15, 2022
2 parents 0148e0e + 10b9986 commit c05d14c
Showing 435 changed files with 4,181 additions and 2,993 deletions.
12 changes: 12 additions & 0 deletions DEVELOPER_GUIDE.md
@@ -33,6 +33,8 @@
- [runtimeOnly](#runtimeonly)
- [compileOnly](#compileonly)
- [testImplementation](#testimplementation)
- [Gradle Plugins](#gradle-plugins)
- [Distribution Download Plugin](#distribution-download-plugin)
- [Misc](#misc)
- [git-secrets](#git-secrets)
- [Installation](#installation)
@@ -361,6 +363,16 @@ somehow. OpenSearch plugins use this configuration to include dependencies that
Code that is on the classpath for compiling tests that are part of this project but not production code. The canonical example
of this is `junit`.

### Gradle Plugins

#### Distribution Download Plugin

The Distribution Download plugin downloads the latest version of OpenSearch by default. To test against a different distribution, override this behavior by passing a download URL in the `customDistributionUrl` Gradle property:
```
./gradlew integTest -PcustomDistributionUrl="https://ci.opensearch.org/ci/dbc/bundle-build/1.2.0/1127/linux/x64/dist/opensearch-1.2.0-linux-x64.tar.gz"
```
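
For reference, here is a minimal sketch (editorial, not part of the guide; the class and method names are illustrative) of how a Gradle plugin can honor such a property. The real logic lives in `DistributionDownloadPlugin`, whose change appears later in this diff:

```
import org.gradle.api.Project;

final class CustomDistributionUrlSketch {
    private static final String DEFAULT_REPO_URL = "https://artifacts.opensearch.org";

    // Resolve the base URL for distribution downloads: use the value of
    // -PcustomDistributionUrl when the user supplied one, otherwise fall
    // back to the official OpenSearch artifact repository.
    static String resolveDistributionBaseUrl(Project project) {
        Object customDistributionUrl = project.findProperty("customDistributionUrl");
        return customDistributionUrl != null ? customDistributionUrl.toString() : DEFAULT_REPO_URL;
    }
}
```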


## Misc

### git-secrets
6 changes: 6 additions & 0 deletions README.md
@@ -39,3 +39,9 @@ This project is licensed under the [Apache v2.0 License](LICENSE.txt).
## Copyright

Copyright OpenSearch Contributors. See [NOTICE](NOTICE.txt) for details.

## Trademark

OpenSearch is a registered trademark of Amazon Web Services.

OpenSearch includes certain Apache-licensed Elasticsearch code from Elasticsearch B.V. and other source code. Elasticsearch B.V. is not the source of that other source code. ELASTICSEARCH is a registered trademark of Elasticsearch B.V.
5 changes: 4 additions & 1 deletion build.gradle
@@ -230,7 +230,10 @@ tasks.register("branchConsistency") {
allprojects {
// configure compiler options
tasks.withType(JavaCompile).configureEach { JavaCompile compile ->
compile.options.compilerArgs << '-Werror'
// See https://bugs.openjdk.java.net/browse/JDK-8209058
if (BuildParams.runtimeJavaVersion > JavaVersion.VERSION_11) {
compile.options.compilerArgs << '-Werror'
}
compile.options.compilerArgs << '-Xlint:auxiliaryclass'
compile.options.compilerArgs << '-Xlint:cast'
compile.options.compilerArgs << '-Xlint:classfile'
@@ -195,15 +195,22 @@ private static void setupDownloadServiceRepo(Project project) {
if (project.getRepositories().findByName(DOWNLOAD_REPO_NAME) != null) {
return;
}
addIvyRepo(
project,
DOWNLOAD_REPO_NAME,
"https://artifacts.opensearch.org",
FAKE_IVY_GROUP,
"/releases" + RELEASE_PATTERN_LAYOUT,
"/release-candidates" + RELEASE_PATTERN_LAYOUT
);
addIvyRepo(project, SNAPSHOT_REPO_NAME, "https://artifacts.opensearch.org", FAKE_SNAPSHOT_IVY_GROUP, SNAPSHOT_PATTERN_LAYOUT);
Object customDistributionUrl = project.findProperty("customDistributionUrl");
// If the user supplied a custom distribution URL, point the download and snapshot repositories at it
if (customDistributionUrl != null) {
addIvyRepo(project, DOWNLOAD_REPO_NAME, customDistributionUrl.toString(), FAKE_IVY_GROUP, "");
addIvyRepo(project, SNAPSHOT_REPO_NAME, customDistributionUrl.toString(), FAKE_SNAPSHOT_IVY_GROUP, "");
} else {
addIvyRepo(
project,
DOWNLOAD_REPO_NAME,
"https://artifacts.opensearch.org",
FAKE_IVY_GROUP,
"/releases" + RELEASE_PATTERN_LAYOUT,
"/release-candidates" + RELEASE_PATTERN_LAYOUT
);
addIvyRepo(project, SNAPSHOT_REPO_NAME, "https://artifacts.opensearch.org", FAKE_SNAPSHOT_IVY_GROUP, SNAPSHOT_PATTERN_LAYOUT);
}

addIvyRepo2(project, DOWNLOAD_REPO_NAME_ES, "https://artifacts-no-kpi.elastic.co", FAKE_IVY_GROUP_ES);
addIvyRepo2(project, SNAPSHOT_REPO_NAME_ES, "https://snapshots-no-kpi.elastic.co", FAKE_SNAPSHOT_IVY_GROUP_ES);
2 changes: 1 addition & 1 deletion buildSrc/src/main/resources/minimumRuntimeVersion
@@ -1 +1 @@
11
11
@@ -32,6 +32,7 @@

package org.opensearch.gradle;

import org.gradle.api.internal.artifacts.repositories.DefaultIvyArtifactRepository;
import org.opensearch.gradle.OpenSearchDistribution.Platform;
import org.opensearch.gradle.OpenSearchDistribution.Type;
import org.opensearch.gradle.info.BuildParams;
@@ -79,6 +80,59 @@ public void testVersionDefault() {
assertEquals(distro.getVersion(), VersionProperties.getOpenSearch());
}

public void testCustomDistributionUrlWithUrl() {
Project project = ProjectBuilder.builder().build();
String customUrl = "https://artifacts.opensearch.org/custom";
project.getExtensions().getExtraProperties().set("customDistributionUrl", customUrl);
DistributionDownloadPlugin plugin = new DistributionDownloadPlugin();
plugin.apply(project);
assertEquals(4, project.getRepositories().size());
assertEquals(
((DefaultIvyArtifactRepository) project.getRepositories().getAt("opensearch-downloads")).getUrl().toString(),
customUrl
);
assertEquals(
((DefaultIvyArtifactRepository) project.getRepositories().getAt("opensearch-snapshots")).getUrl().toString(),
customUrl
);
assertEquals(
((DefaultIvyArtifactRepository) project.getRepositories().getAt("elasticsearch-downloads")).getUrl().toString(),
"https://artifacts-no-kpi.elastic.co"
);
assertEquals(
((DefaultIvyArtifactRepository) project.getRepositories().getAt("elasticsearch-snapshots")).getUrl().toString(),
"https://snapshots-no-kpi.elastic.co"
);

}

public void testCustomDistributionUrlWithoutUrl() {
Project project = ProjectBuilder.builder().build();
DistributionDownloadPlugin plugin = new DistributionDownloadPlugin();
plugin.apply(project);
assertEquals(5, project.getRepositories().size());
assertEquals(
((DefaultIvyArtifactRepository) project.getRepositories().getAt("opensearch-downloads")).getUrl().toString(),
"https://artifacts.opensearch.org"
);
assertEquals(
((DefaultIvyArtifactRepository) project.getRepositories().getAt("opensearch-downloads2")).getUrl().toString(),
"https://artifacts.opensearch.org"
);
assertEquals(
((DefaultIvyArtifactRepository) project.getRepositories().getAt("opensearch-snapshots")).getUrl().toString(),
"https://artifacts.opensearch.org"
);
assertEquals(
((DefaultIvyArtifactRepository) project.getRepositories().getAt("elasticsearch-downloads")).getUrl().toString(),
"https://artifacts-no-kpi.elastic.co"
);
assertEquals(
((DefaultIvyArtifactRepository) project.getRepositories().getAt("elasticsearch-snapshots")).getUrl().toString(),
"https://snapshots-no-kpi.elastic.co"
);
}

public void testBadVersionFormat() {
assertDistroError(
createProject(null, false),
4 changes: 2 additions & 2 deletions buildSrc/version.properties
@@ -1,5 +1,5 @@
opensearch = 2.0.0
lucene = 8.10.1
lucene = 9.0.0

bundled_jdk_vendor = adoptium
bundled_jdk = 17.0.2+8
@@ -11,7 +11,7 @@ spatial4j = 0.7
jts = 1.15.0
jackson = 2.12.6
snakeyaml = 1.26
icu4j = 62.1
icu4j = 68.2
supercsv = 2.4.0
log4j = 2.17.1
slf4j = 1.6.2
@@ -32,7 +32,9 @@

package org.opensearch.common.settings;

import org.apache.lucene.backward_codecs.store.EndiannessReverserUtil;
import org.apache.lucene.codecs.CodecUtil;
import org.apache.lucene.store.DataOutput;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexOutput;
import org.apache.lucene.store.NIOFSDirectory;
@@ -328,13 +330,14 @@ private void possiblyAlterEncryptedBytes(
byte[] encryptedBytes,
int truncEncryptedDataLength
) throws Exception {
indexOutput.writeInt(4 + salt.length + 4 + iv.length + 4 + encryptedBytes.length);
indexOutput.writeInt(salt.length);
indexOutput.writeBytes(salt, salt.length);
indexOutput.writeInt(iv.length);
indexOutput.writeBytes(iv, iv.length);
indexOutput.writeInt(encryptedBytes.length - truncEncryptedDataLength);
indexOutput.writeBytes(encryptedBytes, encryptedBytes.length);
DataOutput io = EndiannessReverserUtil.wrapDataOutput(indexOutput);
io.writeInt(4 + salt.length + 4 + iv.length + 4 + encryptedBytes.length);
io.writeInt(salt.length);
io.writeBytes(salt, salt.length);
io.writeInt(iv.length);
io.writeBytes(iv, iv.length);
io.writeInt(encryptedBytes.length - truncEncryptedDataLength);
io.writeBytes(encryptedBytes, encryptedBytes.length);
}

public void testUpgradeAddsSeed() throws Exception {
@@ -363,7 +366,7 @@ public void testBackcompatV1() throws Exception {
assumeFalse("Can't run in a FIPS JVM as PBE is not available", inFipsJvm());
Path configDir = env.configFile();
NIOFSDirectory directory = new NIOFSDirectory(configDir);
try (IndexOutput output = directory.createOutput("opensearch.keystore", IOContext.DEFAULT)) {
try (IndexOutput output = EndiannessReverserUtil.createOutput(directory, "opensearch.keystore", IOContext.DEFAULT)) {
CodecUtil.writeHeader(output, "opensearch.keystore", 1);
output.writeByte((byte) 0); // hasPassword = false
output.writeString("PKCS12");
@@ -396,7 +399,7 @@ public void testBackcompatV2() throws Exception {
NIOFSDirectory directory = new NIOFSDirectory(configDir);
byte[] fileBytes = new byte[20];
random().nextBytes(fileBytes);
try (IndexOutput output = directory.createOutput("opensearch.keystore", IOContext.DEFAULT)) {
try (IndexOutput output = EndiannessReverserUtil.createOutput(directory, "opensearch.keystore", IOContext.DEFAULT)) {

CodecUtil.writeHeader(output, "opensearch.keystore", 2);
output.writeByte((byte) 0); // hasPassword = false
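Context for the keystore test changes above (an editorial sketch, not part of the diff): Lucene 9 writes index files in little-endian byte order, so these backward-compatibility fixtures wrap their outputs with `EndiannessReverserUtil` from the `lucene-backward-codecs` module to keep producing the legacy Lucene 8 (big-endian) layout. A minimal illustration of the pattern, assuming an in-memory directory and a hypothetical file name:

```
import java.io.IOException;

import org.apache.lucene.backward_codecs.store.EndiannessReverserUtil;
import org.apache.lucene.store.ByteBuffersDirectory;
import org.apache.lucene.store.DataOutput;
import org.apache.lucene.store.Directory;
import org.apache.lucene.store.IOContext;
import org.apache.lucene.store.IndexOutput;

final class LegacyByteOrderSketch {
    // Write an int in the legacy (big-endian) order that pre-Lucene-9 files used.
    static void writeLegacyInt(Directory directory, String fileName, int value) throws IOException {
        try (IndexOutput output = directory.createOutput(fileName, IOContext.DEFAULT)) {
            DataOutput legacyOrder = EndiannessReverserUtil.wrapDataOutput(output);
            legacyOrder.writeInt(value);
        }
    }

    public static void main(String[] args) throws IOException {
        try (Directory directory = new ByteBuffersDirectory()) {
            writeLegacyInt(directory, "legacy.bin", 42);
        }
    }
}
```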
@@ -32,7 +32,7 @@
package org.opensearch.analysis.common;

import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.standard.ClassicFilter;
import org.apache.lucene.analysis.classic.ClassicFilter;
import org.opensearch.common.settings.Settings;
import org.opensearch.env.Environment;
import org.opensearch.index.IndexSettings;
@@ -33,7 +33,7 @@
package org.opensearch.analysis.common;

import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.standard.ClassicTokenizer;
import org.apache.lucene.analysis.classic.ClassicTokenizer;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.opensearch.common.settings.Settings;
import org.opensearch.env.Environment;
@@ -51,6 +51,8 @@
import org.apache.lucene.analysis.cjk.CJKWidthFilter;
import org.apache.lucene.analysis.ckb.SoraniAnalyzer;
import org.apache.lucene.analysis.ckb.SoraniNormalizationFilter;
import org.apache.lucene.analysis.classic.ClassicFilter;
import org.apache.lucene.analysis.classic.ClassicTokenizer;
import org.apache.lucene.analysis.commongrams.CommonGramsFilter;
import org.apache.lucene.analysis.core.DecimalDigitFilter;
import org.apache.lucene.analysis.core.KeywordTokenizer;
@@ -64,6 +66,7 @@
import org.apache.lucene.analysis.de.GermanNormalizationFilter;
import org.apache.lucene.analysis.de.GermanStemFilter;
import org.apache.lucene.analysis.el.GreekAnalyzer;
import org.apache.lucene.analysis.email.UAX29URLEmailTokenizer;
import org.apache.lucene.analysis.en.EnglishAnalyzer;
import org.apache.lucene.analysis.en.KStemFilter;
import org.apache.lucene.analysis.en.PorterStemFilter;
@@ -113,10 +116,7 @@
import org.apache.lucene.analysis.ru.RussianAnalyzer;
import org.apache.lucene.analysis.shingle.ShingleFilter;
import org.apache.lucene.analysis.snowball.SnowballFilter;
import org.apache.lucene.analysis.standard.ClassicFilter;
import org.apache.lucene.analysis.standard.ClassicTokenizer;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.analysis.standard.UAX29URLEmailTokenizer;
import org.apache.lucene.analysis.sv.SwedishAnalyzer;
import org.apache.lucene.analysis.th.ThaiAnalyzer;
import org.apache.lucene.analysis.th.ThaiTokenizer;
@@ -62,10 +62,18 @@ public TokenStream create(TokenStream tokenStream) {

private Map<String, String> convertSettings(Settings settings) {
Map<String, String> settingMap = new HashMap<>();
settingMap.put("hashCount", settings.get("hash_count"));
settingMap.put("bucketCount", settings.get("bucket_count"));
settingMap.put("hashSetSize", settings.get("hash_set_size"));
settingMap.put("withRotation", settings.get("with_rotation"));
if (settings.hasValue("hash_count")) {
settingMap.put("hashCount", settings.get("hash_count"));
}
if (settings.hasValue("bucket_count")) {
settingMap.put("bucketCount", settings.get("bucket_count"));
}
if (settings.hasValue("hash_set_size")) {
settingMap.put("hashSetSize", settings.get("hash_set_size"));
}
if (settings.hasValue("with_rotation")) {
settingMap.put("withRotation", settings.get("with_rotation"));
}
return settingMap;
}
}
@@ -34,7 +34,7 @@

import org.apache.lucene.analysis.Tokenizer;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.analysis.standard.UAX29URLEmailTokenizer;
import org.apache.lucene.analysis.email.UAX29URLEmailTokenizer;
import org.opensearch.common.settings.Settings;
import org.opensearch.env.Environment;
import org.opensearch.index.IndexSettings;
@@ -110,13 +110,15 @@ protected Map<String, Class<?>> getTokenFilters() {
filters.put("latvianstem", StemmerTokenFilterFactory.class);
filters.put("norwegianlightstem", StemmerTokenFilterFactory.class);
filters.put("norwegianminimalstem", StemmerTokenFilterFactory.class);
filters.put("norwegiannormalization", Void.class);
filters.put("portuguesestem", StemmerTokenFilterFactory.class);
filters.put("portugueselightstem", StemmerTokenFilterFactory.class);
filters.put("portugueseminimalstem", StemmerTokenFilterFactory.class);
filters.put("russianlightstem", StemmerTokenFilterFactory.class);
filters.put("soranistem", StemmerTokenFilterFactory.class);
filters.put("spanishlightstem", StemmerTokenFilterFactory.class);
filters.put("swedishlightstem", StemmerTokenFilterFactory.class);
filters.put("swedishminimalstem", Void.class);
filters.put("stemmeroverride", StemmerOverrideTokenFilterFactory.class);
filters.put("kstem", KStemTokenFilterFactory.class);
filters.put("synonym", SynonymTokenFilterFactory.class);
@@ -242,7 +244,7 @@ protected Map<String, Class<?>> getPreConfiguredTokenizers() {
tokenizers.put("keyword", null);
tokenizers.put("lowercase", Void.class);
tokenizers.put("classic", null);
tokenizers.put("uax_url_email", org.apache.lucene.analysis.standard.UAX29URLEmailTokenizerFactory.class);
tokenizers.put("uax_url_email", org.apache.lucene.analysis.email.UAX29URLEmailTokenizerFactory.class);
tokenizers.put("path_hierarchy", null);
tokenizers.put("letter", null);
tokenizers.put("whitespace", null);
@@ -107,11 +107,15 @@ public void setup() {
// parsed queries for "text_shingle_unigram:(foo bar baz)" with query parsers
// that ignore the position length attribute
expectedQueryWithUnigram = new BooleanQuery.Builder().add(
new SynonymQuery(new Term("text_shingle_unigram", "foo"), new Term("text_shingle_unigram", "foo bar")),
new SynonymQuery.Builder("text_shingle_unigram").addTerm(new Term("text_shingle_unigram", "foo"))
.addTerm(new Term("text_shingle_unigram", "foo bar"))
.build(),
BooleanClause.Occur.SHOULD
)
.add(
new SynonymQuery(new Term("text_shingle_unigram", "bar"), new Term("text_shingle_unigram", "bar baz")),
new SynonymQuery.Builder("text_shingle_unigram").addTerm(new Term("text_shingle_unigram", "bar"))
.addTerm(new Term("text_shingle_unigram", "bar baz"))
.build(),
BooleanClause.Occur.SHOULD
)
.add(new TermQuery(new Term("text_shingle_unigram", "baz")), BooleanClause.Occur.SHOULD)
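Editorial note on the change above (a sketch, not part of the diff; the field and terms are illustrative): Lucene 9 no longer exposes the varargs `SynonymQuery` constructor, so synonym queries are assembled through `SynonymQuery.Builder`:

```
import org.apache.lucene.index.Term;
import org.apache.lucene.search.SynonymQuery;

final class SynonymQuerySketch {
    // Treat "foo" and the shingle "foo bar" as synonyms on a single field.
    static SynonymQuery fooSynonyms(String field) {
        return new SynonymQuery.Builder(field)
            .addTerm(new Term(field, "foo"))
            .addTerm(new Term(field, "foo bar"))
            .build();
    }
}
```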

This file was deleted.

@@ -0,0 +1 @@
0a3d818d6f6fb113831ed34553b24763fbda1e84
@@ -37,7 +37,6 @@
import org.apache.lucene.expressions.js.JavascriptCompiler;
import org.apache.lucene.expressions.js.VariableContext;
import org.apache.lucene.search.DoubleValuesSource;
import org.apache.lucene.search.SortField;
import org.opensearch.SpecialPermission;
import org.opensearch.common.Nullable;
import org.opensearch.index.fielddata.IndexFieldData;
@@ -263,7 +262,7 @@ private static NumberSortScript.LeafFactory newSortScript(Expression expr, Searc
for (String variable : expr.variables) {
try {
if (variable.equals("_score")) {
bindings.add(new SortField("_score", SortField.Type.SCORE));
bindings.add("_score", DoubleValuesSource.SCORES);
needsScores = true;
} else if (vars != null && vars.containsKey(variable)) {
bindFromParams(vars, bindings, variable);
@@ -320,7 +319,7 @@ private static AggregationScript.LeafFactory newAggregationScript(
for (String variable : expr.variables) {
try {
if (variable.equals("_score")) {
bindings.add(new SortField("_score", SortField.Type.SCORE));
bindings.add("_score", DoubleValuesSource.SCORES);
needsScores = true;
} else if (variable.equals("_value")) {
specialValue = new ReplaceableConstDoubleValueSource();
@@ -393,7 +392,7 @@ private static ScoreScript.LeafFactory newScoreScript(Expression expr, SearchLoo
for (String variable : expr.variables) {
try {
if (variable.equals("_score")) {
bindings.add(new SortField("_score", SortField.Type.SCORE));
bindings.add("_score", DoubleValuesSource.SCORES);
needsScores = true;
} else if (variable.equals("_value")) {
specialValue = new ReplaceableConstDoubleValueSource();
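Editorial note on the expressions changes above (a sketch, not part of the diff; the expression string is illustrative): with Lucene 9, `SimpleBindings` no longer accepts a `SortField`, so variables such as `_score` are bound directly to a `DoubleValuesSource`:

```
import java.text.ParseException;

import org.apache.lucene.expressions.Expression;
import org.apache.lucene.expressions.SimpleBindings;
import org.apache.lucene.expressions.js.JavascriptCompiler;
import org.apache.lucene.search.DoubleValuesSource;

final class ScoreBindingSketch {
    // Compile "_score * 2" and bind _score to the documents' relevance scores.
    static DoubleValuesSource doubledScore() throws ParseException {
        SimpleBindings bindings = new SimpleBindings();
        bindings.add("_score", DoubleValuesSource.SCORES);
        Expression expression = JavascriptCompiler.compile("_score * 2");
        return expression.getDoubleValuesSource(bindings);
    }
}
```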
@@ -42,4 +42,5 @@ grant {
permission org.opensearch.script.ClassPermission "java.lang.Math";
permission org.opensearch.script.ClassPermission "org.apache.lucene.util.MathUtil";
permission org.opensearch.script.ClassPermission "org.apache.lucene.util.SloppyMath";
permission org.opensearch.script.ClassPermission "org.apache.lucene.expressions.js.ExpressionMath";
};
@@ -75,10 +75,7 @@ public List<Route> routes() {
new Route(GET, "/_msearch/template"),
new Route(POST, "/_msearch/template"),
new Route(GET, "/{index}/_msearch/template"),
new Route(POST, "/{index}/_msearch/template"),
// Deprecated typed endpoints.
new Route(GET, "/{index}/{type}/_msearch/template"),
new Route(POST, "/{index}/{type}/_msearch/template")
new Route(POST, "/{index}/_msearch/template")
)
);
}
