Merge remote-tracking branch 'upstream/main' into decommission/it
Signed-off-by: Rishab Nahata <rnnahata@amazon.com>
imRishN committed Oct 8, 2022
2 parents 8fc8440 + 77cff55 commit 4ea0ce6
Showing 218 changed files with 917 additions and 3,083 deletions.
12 changes: 12 additions & 0 deletions CHANGELOG.md
@@ -3,6 +3,8 @@
Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/)

## [Unreleased]
### Dependencies
- Bumps `gson` from 2.9.0 to 2.9.1

### Added

@@ -29,6 +31,8 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/)
- Added release notes for 1.3.6 ([#4681](https://github.com/opensearch-project/OpenSearch/pull/4681))
- Added precommit support for MacOS ([#4682](https://github.com/opensearch-project/OpenSearch/pull/4682))
- Recommission API changes for service layer ([#4320](https://github.com/opensearch-project/OpenSearch/pull/4320))
- Update GeoGrid base class access modifier to support extensibility ([#4572](https://github.com/opensearch-project/OpenSearch/pull/4572))

### Dependencies
- Bumps `log4j-core` from 2.18.0 to 2.19.0
- Bumps `reactor-netty-http` from 1.0.18 to 1.0.23
@@ -80,6 +84,10 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/)
### Removed
- Remove deprecated code to add node name into log pattern of log4j property file ([#4568](https://github.com/opensearch-project/OpenSearch/pull/4568))
- Unused object and import within TransportClusterAllocationExplainAction ([#4639](https://github.com/opensearch-project/OpenSearch/pull/4639))
- Remove LegacyESVersion.V_7_0_* and V_7_1_* Constants ([#2768](https://github.com/opensearch-project/OpenSearch/pull/2768))
- Remove LegacyESVersion.V_7_2_* and V_7_3_* Constants ([#4702](https://github.com/opensearch-project/OpenSearch/pull/4702))
- Always auto release the flood stage block ([#4703](https://github.com/opensearch-project/OpenSearch/pull/4703))
- Remove LegacyESVersion.V_7_4_* and V_7_5_* Constants ([#4704](https://github.com/opensearch-project/OpenSearch/pull/4704))

### Fixed

@@ -117,8 +125,12 @@ Inspired from [Keep a Changelog](https://keepachangelog.com/en/1.0.0/)
- [Segment Replication] Adding check to make sure checkpoint is not processed when a shard's shard routing is primary ([#4630](https://github.com/opensearch-project/OpenSearch/pull/4630))
- [Bug]: Fixed invalid location of JDK dependency for arm64 architecture ([#4613](https://github.com/opensearch-project/OpenSearch/pull/4613))
- [Bug]: Alias filter lost after rollover ([#4499](https://github.com/opensearch-project/OpenSearch/pull/4499))
- Fixed the misleading "No OpenSearchException found" message shown when detailed_error is disabled ([#4669](https://github.com/opensearch-project/OpenSearch/pull/4669))
- Attempt to fix the GitHub workflow for the Gradle Check job ([#4679](https://github.com/opensearch-project/OpenSearch/pull/4679))
- Fix flaky DecommissionControllerTests.testTimesOut ([#4683](https://github.com/opensearch-project/OpenSearch/pull/4683))
- Fix a newly introduced race condition in DecommissionControllerTests ([#4688](https://github.com/opensearch-project/OpenSearch/pull/4688))
- Fix SearchStats (de)serialization (caused by https://github.com/opensearch-project/OpenSearch/pull/4616) ([#4697](https://github.com/opensearch-project/OpenSearch/pull/4697))
- Fix Gradle warnings associated with publishPluginZipPublicationToXxx tasks ([#4696](https://github.com/opensearch-project/OpenSearch/pull/4696))

### Security

@@ -92,7 +92,7 @@ public String call() throws Exception {
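// The pluginZip publication names its POM after the project itself; all other
// publications keep the archivesBaseName-derived file name.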
return String.format(
"%s/distributions/%s-%s.pom",
project.getBuildDir(),
getArchivesBaseName(project),
pomTask.getName().toLowerCase().contains("zip") ? project.getName() : getArchivesBaseName(project),
project.getVersion()
);
}
@@ -130,7 +130,6 @@ public String call() throws Exception {
publication.getPom().withXml(PublishPlugin::addScmInfo);

if (!publication.getName().toLowerCase().contains("zip")) {

// have to defer this until archivesBaseName is set
project.afterEvaluate(p -> publication.setArtifactId(getArchivesBaseName(project)));

@@ -139,6 +138,8 @@
publication.artifact(project.getTasks().getByName("sourcesJar"));
publication.artifact(project.getTasks().getByName("javadocJar"));
}
} else {
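// the pluginZip publication takes its artifactId from the project name instead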
project.afterEvaluate(p -> publication.setArtifactId(project.getName()));
}

generatePomTask.configure(
@@ -13,12 +13,14 @@
import org.gradle.api.publish.maven.MavenPublication;

import java.nio.file.Path;
import java.util.Set;
import java.util.stream.Collectors;

import org.gradle.api.Task;
import org.gradle.api.publish.maven.plugins.MavenPublishPlugin;

public class Publish implements Plugin<Project> {

public final static String PUBLICATION_NAME = "pluginZip";
public final static String STAGING_REPO = "zipStaging";
public final static String LOCAL_STAGING_REPO_PATH = "/build/local-staging-repo";
@@ -67,10 +69,15 @@ public void apply(Project project) {
if (validatePluginZipPom != null) {
validatePluginZipPom.dependsOn("generatePomFileForNebulaPublication");
}
Task publishPluginZipPublicationToZipStagingRepository = project.getTasks()
.findByName("publishPluginZipPublicationToZipStagingRepository");
if (publishPluginZipPublicationToZipStagingRepository != null) {
publishPluginZipPublicationToZipStagingRepository.dependsOn("generatePomFileForNebulaPublication");

// There are a number of tasks prefixed by 'publishPluginZipPublicationTo', e.g.:
// publishPluginZipPublicationToZipStagingRepository, publishPluginZipPublicationToMavenLocal
final Set<Task> publishPluginZipPublicationToTasks = project.getTasks()
.stream()
.filter(t -> t.getName().startsWith("publishPluginZipPublicationTo"))
.collect(Collectors.toSet());
if (!publishPluginZipPublicationToTasks.isEmpty()) {
publishPluginZipPublicationToTasks.forEach(t -> t.dependsOn("generatePomFileForNebulaPublication"));
}
} else {
project.getLogger()
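Design note on the hunk above: collecting the matching tasks into a Set wires up only tasks that already exist when the plugin runs. A lazier variant (a sketch using the standard Gradle TaskCollection API; not what this commit ships) would also pick up matching tasks registered later:

// Sketch: lazily wire the POM-generation dependency into every matching task,
// including tasks registered after this plugin runs.
project.getTasks()
    .matching(t -> t.getName().startsWith("publishPluginZipPublicationTo"))
    .configureEach(t -> t.dependsOn("generatePomFileForNebulaPublication"));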
@@ -485,19 +485,10 @@ public List<PreConfiguredTokenFilter> getPreConfiguredTokenFilters() {
filters.add(PreConfiguredTokenFilter.singleton("dutch_stem", false, input -> new SnowballFilter(input, new DutchStemmer())));
filters.add(PreConfiguredTokenFilter.singleton("edge_ngram", false, false, input -> new EdgeNGramTokenFilter(input, 1)));
filters.add(PreConfiguredTokenFilter.openSearchVersion("edgeNGram", false, false, (reader, version) -> {
if (version.onOrAfter(LegacyESVersion.V_7_0_0)) {
throw new IllegalArgumentException(
"The [edgeNGram] token filter name was deprecated in 6.4 and cannot be used in new indices. "
+ "Please change the filter name to [edge_ngram] instead."
);
} else {
deprecationLogger.deprecate(
"edgeNGram_deprecation",
"The [edgeNGram] token filter name is deprecated and will be removed in a future version. "
+ "Please change the filter name to [edge_ngram] instead."
);
}
return new EdgeNGramTokenFilter(reader, 1);
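// Version check removed: the deprecated "edgeNGram" name is now always an error.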
throw new IllegalArgumentException(
"The [edgeNGram] token filter name was deprecated in 6.4 and cannot be used in new indices. "
+ "Please change the filter name to [edge_ngram] instead."
);
}));
filters.add(
PreConfiguredTokenFilter.singleton("elision", true, input -> new ElisionFilter(input, FrenchAnalyzer.DEFAULT_ARTICLES))
@@ -524,19 +515,10 @@ public List<PreConfiguredTokenFilter> getPreConfiguredTokenFilters() {
);
filters.add(PreConfiguredTokenFilter.singleton("ngram", false, false, reader -> new NGramTokenFilter(reader, 1, 2, false)));
filters.add(PreConfiguredTokenFilter.openSearchVersion("nGram", false, false, (reader, version) -> {
if (version.onOrAfter(LegacyESVersion.V_7_0_0)) {
throw new IllegalArgumentException(
"The [nGram] token filter name was deprecated in 6.4 and cannot be used in new indices. "
+ "Please change the filter name to [ngram] instead."
);
} else {
deprecationLogger.deprecate(
"nGram_deprecation",
"The [nGram] token filter name is deprecated and will be removed in a future version. "
+ "Please change the filter name to [ngram] instead."
);
}
return new NGramTokenFilter(reader, 1, 2, false);
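// As with "edgeNGram" above, the deprecated "nGram" name is now rejected unconditionally.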
throw new IllegalArgumentException(
"The [nGram] token filter name was deprecated in 6.4 and cannot be used in new indices. "
+ "Please change the filter name to [ngram] instead."
);
}));
filters.add(PreConfiguredTokenFilter.singleton("persian_normalization", true, PersianNormalizationFilter::new));
filters.add(PreConfiguredTokenFilter.singleton("porter_stem", false, PorterStemFilter::new));
@@ -581,18 +563,22 @@ public List<PreConfiguredTokenFilter> getPreConfiguredTokenFilters() {
)
)
);
filters.add(PreConfiguredTokenFilter.openSearchVersion("word_delimiter_graph", false, false, (input, version) -> {
boolean adjustOffsets = version.onOrAfter(LegacyESVersion.V_7_3_0);
return new WordDelimiterGraphFilter(
input,
adjustOffsets,
WordDelimiterIterator.DEFAULT_WORD_DELIM_TABLE,
WordDelimiterGraphFilter.GENERATE_WORD_PARTS | WordDelimiterGraphFilter.GENERATE_NUMBER_PARTS
| WordDelimiterGraphFilter.SPLIT_ON_CASE_CHANGE | WordDelimiterGraphFilter.SPLIT_ON_NUMERICS
| WordDelimiterGraphFilter.STEM_ENGLISH_POSSESSIVE,
null
);
}));
filters.add(
PreConfiguredTokenFilter.openSearchVersion(
"word_delimiter_graph",
false,
false,
(input, version) -> new WordDelimiterGraphFilter(
input,
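// offsets are always adjusted now; the pre-7.3 adjustOffsets=false path was removed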
true,
WordDelimiterIterator.DEFAULT_WORD_DELIM_TABLE,
WordDelimiterGraphFilter.GENERATE_WORD_PARTS | WordDelimiterGraphFilter.GENERATE_NUMBER_PARTS
| WordDelimiterGraphFilter.SPLIT_ON_CASE_CHANGE | WordDelimiterGraphFilter.SPLIT_ON_NUMERICS
| WordDelimiterGraphFilter.STEM_ENGLISH_POSSESSIVE,
null
)
)
);
return filters;
}

@@ -606,12 +592,12 @@ public List<PreConfiguredTokenizer> getPreConfiguredTokenizers() {
tokenizers.add(PreConfiguredTokenizer.singleton("letter", LetterTokenizer::new));
tokenizers.add(PreConfiguredTokenizer.singleton("whitespace", WhitespaceTokenizer::new));
tokenizers.add(PreConfiguredTokenizer.singleton("ngram", NGramTokenizer::new));
tokenizers.add(PreConfiguredTokenizer.openSearchVersion("edge_ngram", (version) -> {
if (version.onOrAfter(LegacyESVersion.V_7_3_0)) {
return new EdgeNGramTokenizer(NGramTokenizer.DEFAULT_MIN_NGRAM_SIZE, NGramTokenizer.DEFAULT_MAX_NGRAM_SIZE);
}
return new EdgeNGramTokenizer(EdgeNGramTokenizer.DEFAULT_MIN_GRAM_SIZE, EdgeNGramTokenizer.DEFAULT_MAX_GRAM_SIZE);
}));
tokenizers.add(
PreConfiguredTokenizer.openSearchVersion(
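// the pre-7.3 fallback to EdgeNGramTokenizer defaults was removed; NGramTokenizer defaults apply to all versions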
"edge_ngram",
(version) -> new EdgeNGramTokenizer(NGramTokenizer.DEFAULT_MIN_NGRAM_SIZE, NGramTokenizer.DEFAULT_MAX_NGRAM_SIZE)
)
);
tokenizers.add(PreConfiguredTokenizer.singleton("pattern", () -> new PatternTokenizer(Regex.compile("\\W+", null), -1)));
tokenizers.add(PreConfiguredTokenizer.singleton("thai", ThaiTokenizer::new));
// TODO deprecate and remove in API
@@ -637,10 +623,7 @@ public List<PreConfiguredTokenizer> getPreConfiguredTokenizers() {
+ "Please change the tokenizer name to [edge_ngram] instead."
);
}
if (version.onOrAfter(LegacyESVersion.V_7_3_0)) {
return new EdgeNGramTokenizer(NGramTokenizer.DEFAULT_MIN_NGRAM_SIZE, NGramTokenizer.DEFAULT_MAX_NGRAM_SIZE);
}
return new EdgeNGramTokenizer(EdgeNGramTokenizer.DEFAULT_MIN_GRAM_SIZE, EdgeNGramTokenizer.DEFAULT_MAX_GRAM_SIZE);
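// version-independent now: the NGramTokenizer defaults (min 1, max 2) are always used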
return new EdgeNGramTokenizer(NGramTokenizer.DEFAULT_MIN_NGRAM_SIZE, NGramTokenizer.DEFAULT_MAX_NGRAM_SIZE);
}));
tokenizers.add(PreConfiguredTokenizer.singleton("PathHierarchy", PathHierarchyTokenizer::new));

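Taken together, the analysis-module hunks above drop every pre-7.x compatibility branch: the deprecated "edgeNGram"/"nGram" names now throw for all index versions, and "word_delimiter_graph"/"edge_ngram" always use the modern defaults. A sketch of what a regression test could assert (expectThrows is the usual OpenSearch test-framework helper; buildAnalyzerWithFilter is a hypothetical stand-in for the analysis test scaffolding):

// Sketch: the deprecated filter name is rejected regardless of index version.
IllegalArgumentException e = expectThrows(
    IllegalArgumentException.class,
    () -> buildAnalyzerWithFilter("edgeNGram")   // hypothetical helper
);
assertTrue(e.getMessage().contains("Please change the filter name to [edge_ngram]"));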
@@ -34,7 +34,6 @@

import org.apache.lucene.analysis.TokenStream;
import org.apache.lucene.analysis.ngram.NGramTokenFilter;
import org.opensearch.LegacyESVersion;
import org.opensearch.common.settings.Settings;
import org.opensearch.env.Environment;
import org.opensearch.index.IndexSettings;
@@ -54,25 +53,15 @@ public class NGramTokenFilterFactory extends AbstractTokenFilterFactory {
this.maxGram = settings.getAsInt("max_gram", 2);
int ngramDiff = maxGram - minGram;
if (ngramDiff > maxAllowedNgramDiff) {
if (indexSettings.getIndexVersionCreated().onOrAfter(LegacyESVersion.V_7_0_0)) {
throw new IllegalArgumentException(
"The difference between max_gram and min_gram in NGram Tokenizer must be less than or equal to: ["
+ maxAllowedNgramDiff
+ "] but was ["
+ ngramDiff
+ "]. This limit can be set by changing the ["
+ IndexSettings.MAX_NGRAM_DIFF_SETTING.getKey()
+ "] index level setting."
);
} else {
deprecationLogger.deprecate(
"ngram_big_difference",
"Deprecated big difference between max_gram and min_gram in NGram Tokenizer,"
+ "expected difference must be less than or equal to: ["
+ maxAllowedNgramDiff
+ "]"
);
}
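// Pre-7.0 leniency removed: an oversized gram diff now always throws.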
throw new IllegalArgumentException(
"The difference between max_gram and min_gram in NGram Tokenizer must be less than or equal to: ["
+ maxAllowedNgramDiff
+ "] but was ["
+ ngramDiff
+ "]. This limit can be set by changing the ["
+ IndexSettings.MAX_NGRAM_DIFF_SETTING.getKey()
+ "] index level setting."
);
}
preserveOriginal = settings.getAsBoolean(PRESERVE_ORIG_KEY, false);
}
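For readers tracking the behavior change in this file: the deprecation-only path for pre-7.0 indices is gone, so the gram-size constraint is enforced unconditionally. A self-contained sketch of just the arithmetic (plain Java; the class name is illustrative, and index.max_ngram_diff defaults to 1):

public final class NGramDiffCheck {
    // Mirrors the factory's validation: max_gram - min_gram must not exceed
    // the index.max_ngram_diff limit (default 1).
    static void validate(int minGram, int maxGram, int maxAllowedNgramDiff) {
        int ngramDiff = maxGram - minGram;
        if (ngramDiff > maxAllowedNgramDiff) {
            throw new IllegalArgumentException(
                "gram diff [" + ngramDiff + "] exceeds the allowed maximum [" + maxAllowedNgramDiff + "]"
            );
        }
    }

    public static void main(String[] args) {
        validate(1, 2, 1); // ok: diff of 1 is within the default limit
        validate(1, 4, 1); // throws: diff of 3 exceeds the default limit of 1
    }
}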
@@ -35,9 +35,8 @@
import org.opensearch.geo.search.aggregations.bucket.composite.GeoTileGridValuesSourceBuilder;
import org.opensearch.geo.search.aggregations.bucket.geogrid.GeoHashGridAggregationBuilder;
import org.opensearch.geo.search.aggregations.bucket.geogrid.GeoTileGridAggregationBuilder;
import org.opensearch.geo.search.aggregations.bucket.geogrid.GeoTileGridAggregator;
import org.opensearch.geo.search.aggregations.bucket.geogrid.InternalGeoHashGrid;
import org.opensearch.geo.search.aggregations.bucket.geogrid.InternalGeoTileGrid;
import org.opensearch.geo.search.aggregations.bucket.geogrid.GeoHashGrid;
import org.opensearch.geo.search.aggregations.bucket.geogrid.GeoTileGrid;
import org.opensearch.geo.search.aggregations.metrics.GeoBounds;
import org.opensearch.geo.search.aggregations.metrics.GeoBoundsAggregationBuilder;
import org.opensearch.geo.search.aggregations.metrics.GeoBoundsGeoShapeAggregator;
@@ -78,18 +77,18 @@ public List<AggregationSpec> getAggregations() {
GeoHashGridAggregationBuilder.NAME,
GeoHashGridAggregationBuilder::new,
GeoHashGridAggregationBuilder.PARSER
).addResultReader(InternalGeoHashGrid::new).setAggregatorRegistrar(GeoHashGridAggregationBuilder::registerAggregators);
).addResultReader(GeoHashGrid::new).setAggregatorRegistrar(GeoHashGridAggregationBuilder::registerAggregators);

final AggregationSpec geoTileGrid = new AggregationSpec(
GeoTileGridAggregationBuilder.NAME,
GeoTileGridAggregationBuilder::new,
GeoTileGridAggregationBuilder.PARSER
).addResultReader(InternalGeoTileGrid::new).setAggregatorRegistrar(GeoTileGridAggregationBuilder::registerAggregators);
).addResultReader(GeoTileGrid::new).setAggregatorRegistrar(GeoTileGridAggregationBuilder::registerAggregators);
return List.of(geoBounds, geoHashGrid, geoTileGrid);
}

/**
* Registering the {@link GeoTileGridAggregator} in the {@link CompositeAggregation}.
* Registers the geotile grid in the {@link CompositeAggregation}.
*
* @return a {@link List} of {@link CompositeAggregationSpec}
*/