diff --git a/core/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java b/core/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java
index 039aaba5a2490..9d3cfdf045a5f 100644
--- a/core/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java
+++ b/core/src/main/java/org/elasticsearch/index/analysis/AnalysisRegistry.java
@@ -300,6 +300,7 @@ public String toString() {
         };
     }
 
+    @SuppressWarnings("unchecked")
     private <T> Map<String, T> buildMapping(Component component, IndexSettings settings, Map<String, Settings> settingsMap,
                     Map<String, ? extends AnalysisModule.AnalysisProvider<T>> providerMap,
                     Map<String, ? extends AnalysisModule.AnalysisProvider<T>> defaultInstance) throws IOException {
diff --git a/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsTests.java b/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsTests.java
index 5e81949402055..3ce7dc3cd2a46 100644
--- a/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsTests.java
+++ b/core/src/test/java/org/elasticsearch/action/termvectors/GetTermVectorsTests.java
@@ -152,10 +152,10 @@ public void testRandomPayloadWithDelimitedPayloadTokenFilter() throws IOExceptio
                 .field("analyzer", "payload_test").endObject().endObject().endObject().endObject();
         Settings setting =  Settings.builder()
             .put("index.analysis.analyzer.payload_test.tokenizer", "whitespace")
-            .putList("index.analysis.analyzer.payload_test.filter", "my_delimited_payload_filter")
-            .put("index.analysis.filter.my_delimited_payload_filter.delimiter", delimiter)
-            .put("index.analysis.filter.my_delimited_payload_filter.encoding", encodingString)
-            .put("index.analysis.filter.my_delimited_payload_filter.type", "mock_payload_filter").build();
+            .putList("index.analysis.analyzer.payload_test.filter", "my_delimited_payload")
+            .put("index.analysis.filter.my_delimited_payload.delimiter", delimiter)
+            .put("index.analysis.filter.my_delimited_payload.encoding", encodingString)
+            .put("index.analysis.filter.my_delimited_payload.type", "mock_payload_filter").build();
         createIndex("test", setting, "type1", mapping);
 
         client().prepareIndex("test", "type1", Integer.toString(1))
diff --git a/docs/reference/analysis/tokenfilters/delimited-payload-tokenfilter.asciidoc b/docs/reference/analysis/tokenfilters/delimited-payload-tokenfilter.asciidoc
index b64f5edbeb94c..f50eb2fdd05cc 100644
--- a/docs/reference/analysis/tokenfilters/delimited-payload-tokenfilter.asciidoc
+++ b/docs/reference/analysis/tokenfilters/delimited-payload-tokenfilter.asciidoc
@@ -1,7 +1,7 @@
 [[analysis-delimited-payload-tokenfilter]]
 === Delimited Payload Token Filter
 
-Named `delimited_payload_filter`. Splits tokens into tokens and payload whenever a delimiter character is found.
+Named `delimited_payload` (the old name `delimited_payload_filter` is deprecated). Splits tokens into tokens and payload whenever a delimiter character is found.
 
 Example: "the|1 quick|2 fox|3" is split by default into tokens `the`, `quick`, and `fox` with payloads `1`, `2`, and `3` respectively.
 
diff --git a/docs/reference/migration/migrate_7_0.asciidoc b/docs/reference/migration/migrate_7_0.asciidoc
index 53dc8ef9ec824..75a5131f8fb19 100644
--- a/docs/reference/migration/migrate_7_0.asciidoc
+++ b/docs/reference/migration/migrate_7_0.asciidoc
@@ -30,6 +30,7 @@ way to reindex old indices is to use the `reindex` API.
 * <<breaking_70_mappings_changes>>
 * <<breaking_70_search_changes>>
 * <<breaking_70_plugins_changes>>
+* <<breaking_70_analysis_changes>>
 * <<breaking_70_api_changes>>
 
 
diff --git a/docs/reference/migration/migrate_7_0/analysis.asciidoc b/docs/reference/migration/migrate_7_0/analysis.asciidoc
new file mode 100644
index 0000000000000..9a1b0aecd599d
--- /dev/null
+++ b/docs/reference/migration/migrate_7_0/analysis.asciidoc
@@ -0,0 +1,8 @@
+[[breaking_70_analysis_changes]]
+=== Analysis changes
+
+==== The `delimited_payload_filter` is renamed
+
+The `delimited_payload_filter` is renamed to `delimited_payload`. The old name is
+deprecated and will be removed in a future version, so it should be replaced by
+`delimited_payload`.
diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java
index 813075fa73f06..6f47ad5f77096 100644
--- a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java
+++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/CommonAnalysisPlugin.java
@@ -103,7 +103,8 @@ public Map<String, AnalysisProvider<TokenFilterFactory>> getTokenFilters() {
         filters.put("czech_stem", CzechStemTokenFilterFactory::new);
         filters.put("common_grams", requriesAnalysisSettings(CommonGramsTokenFilterFactory::new));
         filters.put("decimal_digit", DecimalDigitFilterFactory::new);
-        filters.put("delimited_payload_filter", DelimitedPayloadTokenFilterFactory::new);
+        filters.put("delimited_payload_filter", LegacyDelimitedPayloadTokenFilterFactory::new);
+        filters.put("delimited_payload", DelimitedPayloadTokenFilterFactory::new);
         filters.put("dictionary_decompounder", requriesAnalysisSettings(DictionaryCompoundWordTokenFilterFactory::new));
         filters.put("dutch_stem", DutchStemTokenFilterFactory::new);
         filters.put("edge_ngram", EdgeNGramTokenFilterFactory::new);
@@ -195,6 +196,10 @@ public List<PreConfiguredTokenFilter> getPreConfiguredTokenFilters() {
                 new DelimitedPayloadTokenFilter(input,
                         DelimitedPayloadTokenFilterFactory.DEFAULT_DELIMITER,
                         DelimitedPayloadTokenFilterFactory.DEFAULT_ENCODER)));
+        filters.add(PreConfiguredTokenFilter.singleton("delimited_payload", false, input ->
+                new DelimitedPayloadTokenFilter(input,
+                        DelimitedPayloadTokenFilterFactory.DEFAULT_DELIMITER,
+                        DelimitedPayloadTokenFilterFactory.DEFAULT_ENCODER)));
         filters.add(PreConfiguredTokenFilter.singleton("dutch_stem", false, input -> new SnowballFilter(input, new DutchStemmer())));
         filters.add(PreConfiguredTokenFilter.singleton("edge_ngram", false, input ->
                 new EdgeNGramTokenFilter(input, EdgeNGramTokenFilter.DEFAULT_MIN_GRAM_SIZE, EdgeNGramTokenFilter.DEFAULT_MAX_GRAM_SIZE)));
diff --git a/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LegacyDelimitedPayloadTokenFilterFactory.java b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LegacyDelimitedPayloadTokenFilterFactory.java
new file mode 100644
index 0000000000000..d4ecee4b90b01
--- /dev/null
+++ b/modules/analysis-common/src/main/java/org/elasticsearch/analysis/common/LegacyDelimitedPayloadTokenFilterFactory.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to Elasticsearch under one or more contributor
+ * license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright
+ * ownership. Elasticsearch licenses this file to you under
+ * the Apache License, Version 2.0 (the "License"); you may
+ * not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *    http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.elasticsearch.analysis.common;
+
+import org.elasticsearch.Version;
+import org.elasticsearch.common.logging.DeprecationLogger;
+import org.elasticsearch.common.logging.Loggers;
+import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.env.Environment;
+import org.elasticsearch.index.IndexSettings;
+
+public class LegacyDelimitedPayloadTokenFilterFactory extends DelimitedPayloadTokenFilterFactory {
+    private static final DeprecationLogger DEPRECATION_LOGGER =
+        new DeprecationLogger(Loggers.getLogger(LegacyDelimitedPayloadTokenFilterFactory.class));
+
+    LegacyDelimitedPayloadTokenFilterFactory(IndexSettings indexSettings, Environment env, String name, Settings settings) {
+        super(indexSettings, env, name, settings);
+        if (indexSettings.getIndexVersionCreated().onOrAfter(Version.V_7_0_0_alpha1)) {
+            DEPRECATION_LOGGER.deprecated("Deprecated [delimited_payload_filter] used, replaced by [delimited_payload]");
+        }
+    }
+}
diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonAnalysisFactoryTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonAnalysisFactoryTests.java
index 707930277e7a2..5113f8a6eb492 100644
--- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonAnalysisFactoryTests.java
+++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonAnalysisFactoryTests.java
@@ -170,6 +170,7 @@ protected Map<String, Class<?>> getPreConfiguredTokenFilters() {
         filters.put("czech_stem", null);
         filters.put("decimal_digit", null);
         filters.put("delimited_payload_filter", org.apache.lucene.analysis.payloads.DelimitedPayloadTokenFilterFactory.class);
+        filters.put("delimited_payload", org.apache.lucene.analysis.payloads.DelimitedPayloadTokenFilterFactory.class);
         filters.put("dutch_stem", SnowballPorterFilterFactory.class);
         filters.put("edge_ngram", null);
         filters.put("edgeNGram", null);
diff --git a/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/40_token_filters.yml b/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/40_token_filters.yml
index 47eb436788abf..3738e316b71d0 100644
--- a/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/40_token_filters.yml
+++ b/modules/analysis-common/src/test/resources/rest-api-spec/test/analysis-common/40_token_filters.yml
@@ -1027,7 +1027,14 @@
 
 ---
 "delimited_payload_filter":
+    - skip:
+        version: " - 6.99.99"
+        reason:  delimited_payload_filter deprecated in 7.0, replaced by delimited_payload
+        features: "warnings"
+
     - do:
+        warnings:
+          - "Deprecated [delimited_payload_filter] used, replaced by [delimited_payload]"
         indices.create:
           index: test
           body:
@@ -1039,6 +1046,8 @@
                     delimiter: ^
                     encoding: identity
     - do:
+        warnings:
+          - "Deprecated [delimited_payload_filter] used, replaced by [delimited_payload]"
         indices.analyze:
           index: test
           body:
@@ -1050,6 +1059,8 @@
 
     # Test pre-configured token filter too:
     - do:
+        warnings:
+          - "Deprecated [delimited_payload_filter] used, replaced by [delimited_payload]"
         indices.analyze:
           body:
             text:      foo|5
@@ -1058,6 +1069,39 @@
     - length: { tokens: 1 }
     - match:  { tokens.0.token: foo }
 
+---
+"delimited_payload":
+    - do:
+        indices.create:
+          index: test
+          body:
+            settings:
+              analysis:
+                filter:
+                  my_delimited_payload:
+                    type: delimited_payload
+                    delimiter: ^
+                    encoding: identity
+    - do:
+        indices.analyze:
+          index: test
+          body:
+            text:      foo^bar
+            tokenizer: keyword
+            filter:    [my_delimited_payload]
+    - length: { tokens: 1 }
+    - match:  { tokens.0.token: foo }
+
+    # Test pre-configured token filter too:
+    - do:
+        indices.analyze:
+          body:
+            text:      foo|5
+            tokenizer: keyword
+            filter:    [delimited_payload]
+    - length: { tokens: 1 }
+    - match:  { tokens.0.token: foo }
+
 ---
 "keep_filter":
     - do: