Remove 6.0.* version constants (#1658)
This PR removes the LegacyESVersion.V_6_0_* constants, including all pre-release and bugfix versions.

Signed-off-by: Nicholas Walter Knize <nknize@apache.org>
nknize committed Dec 8, 2021
1 parent e983fac commit b74d71f
Showing 113 changed files with 207 additions and 1,819 deletions.
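
Most of the diff follows one mechanical pattern: with 7.0.0 now the oldest version the code can encounter, any branch guarded by a 6.x check is either always taken or unreachable, so the guard and the constant it references can be dropped. Below is a minimal sketch of that pattern (placeholder Version stub and class name, not code from this commit); the first hunk that follows is a real instance of it.

// Illustrative sketch only: placeholder types, not code from this commit.
import java.util.HashMap;
import java.util.Map;

class VersionGateExample {
    /** Minimal stand-in for the real Version type used by the build and server code. */
    interface Version {
        boolean onOrAfter(String other);
    }

    // Before: the flood_stage watermark was applied only on 6.0.0+ clusters.
    static Map<String, String> baseConfigBefore(Version version) {
        Map<String, String> baseConfig = new HashMap<>();
        if (version.onOrAfter("6.0.0")) {
            baseConfig.put("cluster.routing.allocation.disk.watermark.flood_stage", "1b");
        }
        return baseConfig;
    }

    // After: with 7.0.0 as the floor, every supported version passes the check,
    // so the guard collapses and the setting is applied unconditionally.
    static Map<String, String> baseConfigAfter() {
        Map<String, String> baseConfig = new HashMap<>();
        baseConfig.put("cluster.routing.allocation.disk.watermark.flood_stage", "1b");
        return baseConfig;
    }
}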
@@ -1250,9 +1250,7 @@ private void createConfiguration() {
} else {
baseConfig.put("script.max_compilations_rate", "2048/1m");
}
if (getVersion().onOrAfter("6.0.0")) {
baseConfig.put("cluster.routing.allocation.disk.watermark.flood_stage", "1b");
}
baseConfig.put("cluster.routing.allocation.disk.watermark.flood_stage", "1b");
// Temporarily disable the real memory usage circuit breaker. It depends on real memory usage which we have no full control
// over and the REST client will not retry on circuit breaking exceptions yet (see #31986 for details). Once the REST client
// can retry on circuit breaking exceptions, we can revert again to the default configuration.
@@ -53,7 +53,7 @@ protected org.opensearch.action.main.MainResponse createServerTestInstance(XCont
ClusterName clusterName = new ClusterName(randomAlphaOfLength(10));
String nodeName = randomAlphaOfLength(10);
final String date = new Date(randomNonNegativeLong()).toString();
Version version = VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_6_0_1, Version.CURRENT);
Version version = VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_7_0_0, Version.CURRENT);
Build build = new Build(
Build.Type.UNKNOWN,
randomAlphaOfLength(8),
@@ -32,16 +32,13 @@

package org.opensearch.analysis.common;

import org.apache.lucene.analysis.Analyzer;
import org.apache.lucene.analysis.Tokenizer;
import org.opensearch.LegacyESVersion;
import org.opensearch.Version;
import org.opensearch.cluster.metadata.IndexMetadata;
import org.opensearch.common.settings.Settings;
import org.opensearch.env.Environment;
import org.opensearch.index.IndexSettings;
import org.opensearch.index.analysis.IndexAnalyzers;
import org.opensearch.index.analysis.NamedAnalyzer;
import org.opensearch.index.analysis.TokenizerFactory;
import org.opensearch.test.OpenSearchTestCase;
import org.opensearch.test.IndexSettingsModule;
@@ -52,35 +49,6 @@

public class CommonAnalysisPluginTests extends OpenSearchTestCase {

/**
* Check that the deprecated name "nGram" issues a deprecation warning for indices created since 6.0.0
*/
public void testNGramDeprecationWarning() throws IOException {
Settings settings = Settings.builder()
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
.put(
IndexMetadata.SETTING_VERSION_CREATED,
VersionUtils.randomVersionBetween(
random(),
LegacyESVersion.V_6_0_0,
VersionUtils.getPreviousVersion(LegacyESVersion.V_7_0_0)
)
)
.put("index.analysis.analyzer.custom_analyzer.type", "custom")
.put("index.analysis.analyzer.custom_analyzer.tokenizer", "standard")
.putList("index.analysis.analyzer.custom_analyzer.filter", "nGram")
.build();

try (CommonAnalysisPlugin commonAnalysisPlugin = new CommonAnalysisPlugin()) {
createTestAnalysis(IndexSettingsModule.newIndexSettings("index", settings), settings, commonAnalysisPlugin);
}

assertWarnings(
"The [nGram] token filter name is deprecated and will be removed in a future version. "
+ "Please change the filter name to [ngram] instead."
);
}

/**
* Check that the deprecated name "nGram" throws an error since 7.0.0
*/
@@ -107,45 +75,20 @@ public void testNGramDeprecationError() throws IOException {
}

/**
* Check that the deprecated name "edgeNGram" issues a deprecation warning for indices created since 6.0.0
* Check that the deprecated name "edgeNGram" throws an error for indices created since 7.0.0
*/
public void testEdgeNGramDeprecationWarning() throws IOException {
public void testEdgeNGramDeprecationError() throws IOException {
Settings settings = Settings.builder()
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
.put(
IndexMetadata.SETTING_VERSION_CREATED,
VersionUtils.randomVersionBetween(
random(),
LegacyESVersion.V_6_4_0,
VersionUtils.getPreviousVersion(LegacyESVersion.V_7_0_0)
)
VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_7_0_0, Version.CURRENT)
)
.put("index.analysis.analyzer.custom_analyzer.type", "custom")
.put("index.analysis.analyzer.custom_analyzer.tokenizer", "standard")
.putList("index.analysis.analyzer.custom_analyzer.filter", "edgeNGram")
.build();

try (CommonAnalysisPlugin commonAnalysisPlugin = new CommonAnalysisPlugin()) {
createTestAnalysis(IndexSettingsModule.newIndexSettings("index", settings), settings, commonAnalysisPlugin);
}
assertWarnings(
"The [edgeNGram] token filter name is deprecated and will be removed in a future version. "
+ "Please change the filter name to [edge_ngram] instead."
);
}

/**
* Check that the deprecated name "edgeNGram" throws an error for indices created since 7.0.0
*/
public void testEdgeNGramDeprecationError() throws IOException {
Settings settings = Settings.builder()
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
.put(IndexMetadata.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_7_0_0, null))
.put("index.analysis.analyzer.custom_analyzer.type", "custom")
.put("index.analysis.analyzer.custom_analyzer.tokenizer", "standard")
.putList("index.analysis.analyzer.custom_analyzer.filter", "edgeNGram")
.build();

try (CommonAnalysisPlugin commonAnalysisPlugin = new CommonAnalysisPlugin()) {
IllegalArgumentException ex = expectThrows(
IllegalArgumentException.class,
@@ -186,36 +129,6 @@ public void testStandardHtmlStripAnalyzerDeprecationError() throws IOException {
);
}

/**
* Check that the deprecated analyzer name "standard_html_strip" issues a deprecation warning for indices created since 6.5.0 until 7
*/
public void testStandardHtmlStripAnalyzerDeprecationWarning() throws IOException {
Settings settings = Settings.builder()
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
.put(
IndexMetadata.SETTING_VERSION_CREATED,
VersionUtils.randomVersionBetween(
random(),
LegacyESVersion.V_6_0_0,
VersionUtils.getPreviousVersion(LegacyESVersion.V_7_0_0)
)
)
.put("index.analysis.analyzer.custom_analyzer.type", "standard_html_strip")
.putList("index.analysis.analyzer.custom_analyzer.stopwords", "a", "b")
.build();

IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
try (CommonAnalysisPlugin commonAnalysisPlugin = new CommonAnalysisPlugin()) {
IndexAnalyzers analyzers = createTestAnalysis(idxSettings, settings, commonAnalysisPlugin).indexAnalyzers;
Analyzer analyzer = analyzers.get("custom_analyzer");
assertNotNull(((NamedAnalyzer) analyzer).analyzer());
assertWarnings(
"Deprecated analyzer [standard_html_strip] used, "
+ "replace it with a custom analyzer using [standard] tokenizer and [html_strip] char_filter, plus [lowercase] filter"
);
}
}

/**
* Check that the deprecated "nGram" filter logs a warning when the filter is used as a custom filter
*/
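The hunk above drops the 6.x deprecation-warning tests and keeps only the 7.0+ error-path tests; the body of testNGramDeprecationError is collapsed in this view. As a rough guide, a minimal sketch of that surviving shape follows, modeled on the edgeNGram variant shown above. The method name and the assertion on the exception message are assumptions, and the test-framework helpers (random(), createTempDir(), createTestAnalysis(), expectThrows()) come from the OpenSearch test base class this file extends.

// Sketch of the surviving 7.0+ error-path test shape (assumed, not the literal collapsed body).
public void testNGramDeprecationErrorSketch() throws IOException {
    Settings settings = Settings.builder()
        .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
        .put(
            IndexMetadata.SETTING_VERSION_CREATED,
            VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_7_0_0, Version.CURRENT)
        )
        .put("index.analysis.analyzer.custom_analyzer.type", "custom")
        .put("index.analysis.analyzer.custom_analyzer.tokenizer", "standard")
        .putList("index.analysis.analyzer.custom_analyzer.filter", "nGram")
        .build();

    try (CommonAnalysisPlugin commonAnalysisPlugin = new CommonAnalysisPlugin()) {
        IllegalArgumentException ex = expectThrows(
            IllegalArgumentException.class,
            () -> createTestAnalysis(IndexSettingsModule.newIndexSettings("index", settings), settings, commonAnalysisPlugin)
        );
        // Assumed assertion: the real test pins the [nGram] -> [ngram] rename message exactly.
        assertTrue(ex.getMessage().contains("[nGram]"));
    }
}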
@@ -72,24 +72,4 @@ public void testDeprecationWarning() throws IOException {
);
}
}

/**
* Check that the deprecated name "htmlStrip" does NOT issues a deprecation warning for indices created before 6.3.0
*/
public void testNoDeprecationWarningPre6_3() throws IOException {
Settings settings = Settings.builder()
.put(Environment.PATH_HOME_SETTING.getKey(), createTempDir())
.put(
IndexMetadata.SETTING_VERSION_CREATED,
VersionUtils.randomVersionBetween(random(), LegacyESVersion.V_6_0_0, LegacyESVersion.V_6_2_4)
)
.build();

IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
try (CommonAnalysisPlugin commonAnalysisPlugin = new CommonAnalysisPlugin()) {
Map<String, CharFilterFactory> charFilters = createTestAnalysis(idxSettings, settings, commonAnalysisPlugin).charFilter;
CharFilterFactory charFilterFactory = charFilters.get("htmlStrip");
assertNotNull(charFilterFactory.create(new StringReader("")));
}
}
}
@@ -56,11 +56,9 @@
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

import static org.hamcrest.Matchers.equalTo;
@@ -317,32 +315,6 @@ public void testPreconfiguredTokenFilters() throws IOException {
tf.get(idxSettings, null, tf.getName(), settings).getSynonymFilter();
}
}

Settings settings2 = Settings.builder()
.put(
IndexMetadata.SETTING_VERSION_CREATED,
VersionUtils.randomVersionBetween(
random(),
LegacyESVersion.V_6_0_0,
VersionUtils.getPreviousVersion(LegacyESVersion.V_7_0_0)
)
)
.put("path.home", createTempDir().toString())
.putList("common_words", "a", "b")
.put("output_unigrams", "true")
.build();
IndexSettings idxSettings2 = IndexSettingsModule.newIndexSettings("index", settings2);

List<String> expectedWarnings = new ArrayList<>();
for (PreConfiguredTokenFilter tf : plugin.getPreConfiguredTokenFilters()) {
if (disallowedFilters.contains(tf.getName())) {
tf.get(idxSettings2, null, tf.getName(), settings2).getSynonymFilter();
expectedWarnings.add("Token filter [" + tf.getName() + "] will not be usable to parse synonyms after v7.0");
} else {
tf.get(idxSettings2, null, tf.getName(), settings2).getSynonymFilter();
}
}
assertWarnings(expectedWarnings.toArray(new String[0]));
}

public void testDisallowedTokenFilters() throws IOException {
@@ -382,58 +354,6 @@ public void testDisallowedTokenFilters() throws IOException {

assertEquals(factory, "Token filter [" + factory + "] cannot be used to parse synonyms", e.getMessage());
}

settings = Settings.builder()
.put(
IndexMetadata.SETTING_VERSION_CREATED,
VersionUtils.randomVersionBetween(
random(),
LegacyESVersion.V_6_0_0,
VersionUtils.getPreviousVersion(LegacyESVersion.V_7_0_0)
)
)
.put("path.home", createTempDir().toString())
.putList("common_words", "a", "b")
.put("output_unigrams", "true")
.build();
idxSettings = IndexSettingsModule.newIndexSettings("index", settings);

List<String> expectedWarnings = new ArrayList<>();
for (String factory : disallowedFactories) {
TokenFilterFactory tff = plugin.getTokenFilters().get(factory).get(idxSettings, null, factory, settings);
TokenizerFactory tok = new KeywordTokenizerFactory(idxSettings, null, "keyword", settings);
SynonymTokenFilterFactory stff = new SynonymTokenFilterFactory(idxSettings, null, "synonym", settings);

stff.buildSynonymAnalyzer(tok, Collections.emptyList(), Collections.singletonList(tff), null);
expectedWarnings.add("Token filter [" + factory + "] will not be usable to parse synonyms after v7.0");
}

assertWarnings(expectedWarnings.toArray(new String[0]));

settings = Settings.builder()
.put(
IndexMetadata.SETTING_VERSION_CREATED,
VersionUtils.randomVersionBetween(
random(),
LegacyESVersion.V_6_0_0,
VersionUtils.getPreviousVersion(LegacyESVersion.V_7_0_0)
)
)
.put("path.home", createTempDir().toString())
.put("preserve_original", "false")
.build();
idxSettings = IndexSettingsModule.newIndexSettings("index", settings);
TokenFilterFactory tff = plugin.getTokenFilters().get("multiplexer").get(idxSettings, null, "multiplexer", settings);
TokenizerFactory tok = new KeywordTokenizerFactory(idxSettings, null, "keyword", settings);
SynonymTokenFilterFactory stff = new SynonymTokenFilterFactory(idxSettings, null, "synonym", settings);

IllegalArgumentException e = expectThrows(
IllegalArgumentException.class,
() -> stff.buildSynonymAnalyzer(tok, Collections.emptyList(), Collections.singletonList(tff), null)
);

assertEquals("Token filter [multiplexer] cannot be used to parse synonyms unless [preserve_original] is [true]", e.getMessage());

}

private void match(String analyzerName, String source, String target) throws IOException {