From f4e9729d647add3d42b5bdd091278dcb3af727af Mon Sep 17 00:00:00 2001
From: Jim Ferenczi
Date: Fri, 24 Aug 2018 09:51:21 +0200
Subject: [PATCH 1/6] Remove unsupported Version.V_5_* (#32937)

This change removes the es 5x version constants and their usages.
---
 .../gradle/VersionCollection.groovy | 8 +-
 .../gradle/VersionCollectionTests.groovy | 4 +-
 .../common/CommonAnalysisPluginTests.java | 4 +-
 .../HtmlStripCharFilterFactoryTests.java | 2 +-
 .../mustache/MultiSearchTemplateRequest.java | 19 +-
 .../join/query/HasChildQueryBuilder.java | 11 +-
 .../join/query/HasParentQueryBuilder.java | 11 +-
 .../join/query/HasChildQueryBuilderTests.java | 4 -
 .../query/HasParentQueryBuilderTests.java | 4 -
 .../percolator/PercolateQueryBuilder.java | 8 +-
 .../PercolateQueryBuilderTests.java | 23 ---
 .../percolator/QueryBuilderStoreTests.java | 2 +-
 .../reindex/remote/RemoteRequestBuilders.java | 9 +-
 .../index/reindex/RoundTripTests.java | 9 +-
 .../remote/RemoteRequestBuildersTests.java | 13 +-
 .../RemoteScrollableHitSourceTests.java | 6 +-
 .../ICUCollationKeywordFieldMapper.java | 11 +-
 .../ICUCollationKeywordFieldMapperTests.java | 46 -----
 .../mapper/murmur3/Murmur3FieldMapper.java | 5 -
 .../index/mapper/size/SizeFieldMapper.java | 4 -
 .../upgrades/FullClusterRestartIT.java | 3 -
 .../elasticsearch/ElasticsearchException.java | 30 +---
 .../main/java/org/elasticsearch/Version.java | 168 +-----------------
 .../ClusterAllocationExplainRequest.java | 10 --
 .../shards/ClusterSearchShardsRequest.java | 9 -
 .../shards/ClusterSearchShardsResponse.java | 25 ++-
 .../snapshots/get/GetSnapshotsRequest.java | 9 +-
 .../storedscripts/PutStoredScriptRequest.java | 10 +-
 .../indices/analyze/AnalyzeResponse.java | 18 +-
 .../indices/create/CreateIndexResponse.java | 9 +-
 .../mapping/put/PutMappingRequest.java | 4 -
 .../template/put/PutIndexTemplateRequest.java | 5 -
 .../validate/query/QueryExplanation.java | 10 +-
 .../validate/query/ValidateQueryRequest.java | 9 +-
 .../action/bulk/BulkItemResponse.java | 16 +-
 .../fieldcaps/FieldCapabilitiesRequest.java | 19 +-
 .../fieldcaps/FieldCapabilitiesResponse.java | 12 +-
 .../action/ingest/PutPipelineRequest.java | 11 +-
 .../ingest/SimulatePipelineRequest.java | 11 +-
 .../action/search/SearchRequest.java | 12 +-
 .../action/search/SearchResponse.java | 8 +-
 .../action/search/SearchTransportService.java | 14 +-
 .../termvectors/TermVectorsRequest.java | 12 +-
 .../cluster/SnapshotDeletionsInProgress.java | 4 +-
 .../cluster/SnapshotsInProgress.java | 15 +-
 .../cluster/block/ClusterBlock.java | 11 +-
 .../allocation/NodeAllocationResult.java | 17 +-
 .../index/query/InnerHitBuilder.java | 97 +---------
 .../index/query/MoreLikeThisQueryBuilder.java | 12 +-
 .../index/query/NestedQueryBuilder.java | 11 +-
 .../index/query/QueryStringQueryBuilder.java | 38 +---
 .../index/query/RangeQueryBuilder.java | 25 ++-
 .../index/query/SimpleQueryStringBuilder.java | 41 +----
 .../index/reindex/BulkByScrollTask.java | 25 +--
 .../index/reindex/RemoteInfo.java | 15 +-
 .../indices/flush/SyncedFlushService.java | 3 -
 .../ingest/PipelineConfiguration.java | 13 +-
 .../org/elasticsearch/monitor/os/OsStats.java | 10 +-
 .../PersistentTasksCustomMetaData.java | 2 +-
 .../org/elasticsearch/plugins/PluginInfo.java | 10 +-
 .../blobstore/BlobStoreRepository.java | 3 +-
 .../java/org/elasticsearch/script/Script.java | 138 ++------------
 .../elasticsearch/script/ScriptMetaData.java | 51 +-----
 .../search/SearchShardTarget.java | 11 +-
 .../bucket/terms/IncludeExclude.java | 16 +-
 .../search/builder/SearchSourceBuilder.java | 8 +-
 .../search/collapse/CollapseBuilder.java | 22 +--
 .../highlight/AbstractHighlighterBuilder.java | 25 +--
 .../internal/ShardSearchLocalRequest.java | 28 +--
 .../elasticsearch/snapshots/SnapshotInfo.java | 46 ++---
 .../ExceptionSerializationTests.java | 83 +--------
 .../java/org/elasticsearch/VersionTests.java | 83 +++++----
 .../cluster/node/stats/NodeStatsTests.java | 1 -
 .../ClusterSearchShardsRequestTests.java | 2 +-
 .../ClusterSearchShardsResponseTests.java | 8 +-
 .../create/CreateIndexResponseTests.java | 22 ---
 .../mapping/put/PutMappingRequestTests.java | 27 ---
 .../put/PutIndexTemplateRequestTests.java | 84 ---------
 .../ingest/SimulatePipelineRequestTests.java | 20 ---
 .../CanMatchPreFilterSearchPhaseTests.java | 12 --
 .../action/search/SearchResponseTests.java | 26 ---
 .../termvectors/TermVectorsUnitTests.java | 32 ----
 .../metadata/IndexTemplateMetaDataTests.java | 50 ------
 .../MetaDataIndexUpgradeServiceTests.java | 2 +-
 .../allocation/FailedNodeRoutingTests.java | 2 +-
 .../allocation/FailedShardsRoutingTests.java | 4 +-
 .../ResizeAllocationDeciderTests.java | 44 -----
 .../common/unit/ByteSizeValueTests.java | 6 -
 .../common/util/IndexFolderUpgraderTests.java | 8 +-
 .../discovery/zen/MembershipActionTests.java | 6 +-
 .../org/elasticsearch/get/GetActionIT.java | 2 +-
 .../index/IndexSortSettingsTests.java | 11 --
 .../index/analysis/AnalysisRegistryTests.java | 2 +-
 .../index/analysis/PreBuiltAnalyzerTests.java | 14 +-
 .../index/mapper/DynamicTemplateTests.java | 21 +--
 .../mapper/ExternalFieldMapperTests.java | 6 +-
 .../index/mapper/TypeFieldMapperTests.java | 2 +-
 .../index/query/MatchQueryBuilderTests.java | 4 -
 .../query/MoreLikeThisQueryBuilderTests.java | 24 ---
 .../index/query/NestedQueryBuilderTests.java | 4 -
 .../reindex/BulkByScrollTaskStatusTests.java | 39 ++--
 .../index/shard/ShardGetServiceTests.java | 44 -----
 .../indices/analysis/AnalysisModuleTests.java | 8 +-
 .../indices/stats/IndexStatsIT.java | 2 +-
 .../plugins/PluginsServiceTests.java | 4 +-
 .../aggregations/bucket/GeoDistanceIT.java | 2 +-
 .../aggregations/bucket/GeoHashGridIT.java | 2 +-
 .../functionscore/DecayFunctionScoreIT.java | 2 +-
 .../search/geo/GeoBoundingBoxIT.java | 6 +-
 .../search/geo/GeoDistanceIT.java | 2 +-
 .../elasticsearch/search/geo/GeoFilterIT.java | 2 +-
 .../search/geo/GeoPolygonIT.java | 2 +-
 .../search/sort/GeoDistanceIT.java | 8 +-
 .../search/sort/GeoDistanceSortBuilderIT.java | 6 +-
 .../transport/RemoteClusterServiceTests.java | 5 -
 .../transport/TcpTransportTests.java | 21 ++-
 .../org/elasticsearch/test/OldIndexUtils.java | 31 ++--
 .../section/ClientYamlTestSectionTests.java | 6 +-
 .../section/ClientYamlTestSuiteTests.java | 10 +-
 .../rest/yaml/section/SetupSectionTests.java | 6 +-
 .../rest/yaml/section/SkipSectionTests.java | 12 +-
 .../yaml/section/TeardownSectionTests.java | 6 +-
 .../protocol/xpack/XPackInfoResponse.java | 8 +-
 .../protocol/xpack/security/User.java | 13 +-
 .../xpack/core/ml/MlMetadata.java | 2 +-
 .../core/ml/action/DeleteDatafeedAction.java | 9 +-
 .../xpack/core/ml/action/DeleteJobAction.java | 9 +-
 .../xpack/core/ml/action/FlushJobAction.java | 17 +-
 .../core/ml/action/GetBucketsAction.java | 15 +-
 .../xpack/core/ml/action/OpenJobAction.java | 8 -
 .../core/ml/datafeed/DatafeedConfig.java | 8 -
 .../xpack/core/ml/datafeed/DatafeedState.java | 9 -
 .../core/ml/datafeed/DatafeedUpdate.java | 9 -
 .../xpack/core/ml/job/config/Detector.java | 12 +-
 .../xpack/core/ml/job/config/Job.java | 38 ++--
 .../xpack/core/ml/job/config/JobState.java | 5 -
 .../output/FlushAcknowledgement.java | 9 +-
 .../autodetect/state/ModelSnapshot.java | 7 +-
 .../core/ml/job/results/AnomalyRecord.java | 9 -
 .../xpack/core/ml/job/results/Bucket.java | 16 --
 .../core/ml/job/results/BucketInfluencer.java | 9 -
 .../xpack/core/ml/job/results/Influencer.java | 9 -
 .../xpack/core/ml/job/results/ModelPlot.java | 40 +----
 .../core/security/authz/RoleDescriptor.java | 11 +-
 .../security/user/LogstashSystemUser.java | 3 -
 .../license/XPackLicenseStateTests.java | 2 +-
 .../xpack/core/ml/job/config/JobTests.java | 14 --
 .../action/role/PutRoleRequestTests.java | 2 +-
 .../IndexDeprecationChecksTests.java | 151 +---------------
 .../TransportIsolateDatafeedAction.java | 7 -
 .../ml/action/TransportKillProcessAction.java | 8 -
 .../ml/action/TransportOpenJobAction.java | 12 --
 .../action/TransportOpenJobActionTests.java | 48 -----
 .../xpack/ml/datafeed/DatafeedStateTests.java | 42 -----
 .../xpack/ml/job/config/JobStateTests.java | 42 -----
 .../action/MonitoringBulkDocTests.java | 20 ---
 .../action/MonitoringBulkRequestTests.java | 48 -----
 .../monitoring/collector/CollectorTests.java | 28 ---
 .../exporter/BaseMonitoringDocTestCase.java | 22 ---
 .../authc/esnative/ReservedRealm.java | 4 +-
 .../transport/ServerTransportFilter.java | 53 ++----
 .../filter/SecurityActionFilterTests.java | 2 +-
 .../authc/esnative/ReservedRealmTests.java | 8 -
 .../accesscontrol/IndicesPermissionTests.java | 4 +-
 .../support/SecurityIndexManagerTests.java | 6 +-
 .../transport/ServerTransportFilterTests.java | 44 -----
 .../security/user/UserSerializationTests.java | 42 -----
 .../xpack/upgrade/IndexUpgradeService.java | 2 +-
 .../elasticsearch/xpack/upgrade/Upgrade.java | 2 +-
 .../upgrade/IndexUpgradeServiceTests.java | 2 +-
 .../upgrade/InternalIndexReindexerIT.java | 4 +-
 .../protocol/xpack/XPackInfoResponse.java | 8 +-
 .../xpack/ml/job/process/ModelSnapshot.java | 6 +-
 .../protocol/xpack/security/User.java | 13 +-
 174 files changed, 444 insertions(+), 2649 deletions(-)

diff --git a/buildSrc/src/main/groovy/org/elasticsearch/gradle/VersionCollection.groovy b/buildSrc/src/main/groovy/org/elasticsearch/gradle/VersionCollection.groovy
index 7d5b793254fe4..daab0efc8c69a 100644
--- a/buildSrc/src/main/groovy/org/elasticsearch/gradle/VersionCollection.groovy
+++ b/buildSrc/src/main/groovy/org/elasticsearch/gradle/VersionCollection.groovy
@@ -138,9 +138,8 @@ class VersionCollection {
                     break
                 }
             }
-            // caveat 0 - now dip back 2 versions to get the last supported snapshot version of the line
-            Version highestMinor = getHighestPreviousMinor(currentVersion.major - 1)
-            maintenanceBugfixSnapshot = replaceAsSnapshot(highestMinor)
+            // caveat 0 - the last supported snapshot of the line is on a version that we don't support (N-2)
+            maintenanceBugfixSnapshot = null
         } else {
             // caveat 3 did not apply. version is not a X.0.0, so we are somewhere on a X.Y line
             // only check till minor == 0 of the major
@@ -293,7 +292,8 @@ class VersionCollection {
      * If you have a list [5.0.2, 5.1.2, 6.0.1, 6.1.1] and pass in 6 for the nextMajorVersion, it will return you 5.1.2
      */
     private Version getHighestPreviousMinor(Integer nextMajorVersion) {
-        return versionSet.headSet(Version.fromString("${nextMajorVersion}.0.0")).last()
+        SortedSet result = versionSet.headSet(Version.fromString("${nextMajorVersion}.0.0"))
+        return result.isEmpty() ?
null : result.last() } /** diff --git a/buildSrc/src/test/groovy/org/elasticsearch/gradle/VersionCollectionTests.groovy b/buildSrc/src/test/groovy/org/elasticsearch/gradle/VersionCollectionTests.groovy index ad36c84078398..f6b9cb5fc95bf 100644 --- a/buildSrc/src/test/groovy/org/elasticsearch/gradle/VersionCollectionTests.groovy +++ b/buildSrc/src/test/groovy/org/elasticsearch/gradle/VersionCollectionTests.groovy @@ -26,7 +26,7 @@ class VersionCollectionTests extends GradleUnitTestCase { assertEquals(vc.nextMinorSnapshot, Version.fromString("6.3.0-SNAPSHOT")) assertEquals(vc.stagedMinorSnapshot, Version.fromString("6.2.0-SNAPSHOT")) assertEquals(vc.nextBugfixSnapshot, Version.fromString("6.1.1-SNAPSHOT")) - assertEquals(vc.maintenanceBugfixSnapshot, Version.fromString("5.2.1-SNAPSHOT")) + assertNull(vc.maintenanceBugfixSnapshot) vc.indexCompatible.containsAll(vc.versions) @@ -65,7 +65,7 @@ class VersionCollectionTests extends GradleUnitTestCase { assertEquals(vc.nextMinorSnapshot, Version.fromString("6.3.0-SNAPSHOT")) assertEquals(vc.stagedMinorSnapshot, null) assertEquals(vc.nextBugfixSnapshot, Version.fromString("6.2.1-SNAPSHOT")) - assertEquals(vc.maintenanceBugfixSnapshot, Version.fromString("5.2.1-SNAPSHOT")) + assertNull(vc.maintenanceBugfixSnapshot) vc.indexCompatible.containsAll(vc.versions) diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonAnalysisPluginTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonAnalysisPluginTests.java index 1d2b8a36810eb..b5dc23fbdb893 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonAnalysisPluginTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/CommonAnalysisPluginTests.java @@ -64,7 +64,7 @@ public void testNGramDeprecationWarning() throws IOException { public void testNGramNoDeprecationWarningPre6_4() throws IOException { Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .put(IndexMetaData.SETTING_VERSION_CREATED, - VersionUtils.randomVersionBetween(random(), Version.V_5_0_0, Version.V_6_3_0)) + VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, Version.V_6_3_0)) .build(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); @@ -104,7 +104,7 @@ public void testEdgeNGramDeprecationWarning() throws IOException { public void testEdgeNGramNoDeprecationWarningPre6_4() throws IOException { Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .put(IndexMetaData.SETTING_VERSION_CREATED, - VersionUtils.randomVersionBetween(random(), Version.V_5_0_0, Version.V_6_3_0)) + VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, Version.V_6_3_0)) .build(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); diff --git a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/HtmlStripCharFilterFactoryTests.java b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/HtmlStripCharFilterFactoryTests.java index 0d5389a6d6594..e284877978851 100644 --- a/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/HtmlStripCharFilterFactoryTests.java +++ b/modules/analysis-common/src/test/java/org/elasticsearch/analysis/common/HtmlStripCharFilterFactoryTests.java @@ -60,7 +60,7 @@ public void testDeprecationWarning() throws IOException { public void testNoDeprecationWarningPre6_3() throws 
IOException { Settings settings = Settings.builder().put(Environment.PATH_HOME_SETTING.getKey(), createTempDir()) .put(IndexMetaData.SETTING_VERSION_CREATED, - VersionUtils.randomVersionBetween(random(), Version.V_5_0_0, Version.V_6_2_4)) + VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, Version.V_6_2_4)) .build(); IndexSettings idxSettings = IndexSettingsModule.newIndexSettings("index", settings); diff --git a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateRequest.java b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateRequest.java index caa9fa4831add..eea9e31d4a79d 100644 --- a/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateRequest.java +++ b/modules/lang-mustache/src/main/java/org/elasticsearch/script/mustache/MultiSearchTemplateRequest.java @@ -19,7 +19,6 @@ package org.elasticsearch.script.mustache; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.CompositeIndicesRequest; @@ -120,21 +119,17 @@ public MultiSearchTemplateRequest indicesOptions(IndicesOptions indicesOptions) @Override public void readFrom(StreamInput in) throws IOException { super.readFrom(in); - if (in.getVersion().onOrAfter(Version.V_5_5_0)) { - maxConcurrentSearchRequests = in.readVInt(); - } + maxConcurrentSearchRequests = in.readVInt(); requests = in.readStreamableList(SearchTemplateRequest::new); } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); - if (out.getVersion().onOrAfter(Version.V_5_5_0)) { - out.writeVInt(maxConcurrentSearchRequests); - } + out.writeVInt(maxConcurrentSearchRequests); out.writeStreamableList(requests); } - + @Override public boolean equals(Object o) { if (this == o) return true; @@ -148,9 +143,9 @@ public boolean equals(Object o) { @Override public int hashCode() { return Objects.hash(maxConcurrentSearchRequests, requests, indicesOptions); - } - - public static byte[] writeMultiLineFormat(MultiSearchTemplateRequest multiSearchTemplateRequest, + } + + public static byte[] writeMultiLineFormat(MultiSearchTemplateRequest multiSearchTemplateRequest, XContent xContent) throws IOException { ByteArrayOutputStream output = new ByteArrayOutputStream(); for (SearchTemplateRequest templateRequest : multiSearchTemplateRequest.requests()) { @@ -168,5 +163,5 @@ public static byte[] writeMultiLineFormat(MultiSearchTemplateRequest multiSearch } return output.toByteArray(); } - + } diff --git a/modules/parent-join/src/main/java/org/elasticsearch/join/query/HasChildQueryBuilder.java b/modules/parent-join/src/main/java/org/elasticsearch/join/query/HasChildQueryBuilder.java index 3381356da4171..e37a796009137 100644 --- a/modules/parent-join/src/main/java/org/elasticsearch/join/query/HasChildQueryBuilder.java +++ b/modules/parent-join/src/main/java/org/elasticsearch/join/query/HasChildQueryBuilder.java @@ -27,7 +27,6 @@ import org.apache.lucene.search.join.JoinUtil; import org.apache.lucene.search.join.ScoreMode; import org.apache.lucene.search.similarities.Similarity; -import org.elasticsearch.Version; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; @@ -125,15 +124,7 @@ protected void doWriteTo(StreamOutput out) throws IOException { out.writeInt(maxChildren); out.writeVInt(scoreMode.ordinal()); 
out.writeNamedWriteable(query); - if (out.getVersion().before(Version.V_5_5_0)) { - final boolean hasInnerHit = innerHitBuilder != null; - out.writeBoolean(hasInnerHit); - if (hasInnerHit) { - innerHitBuilder.writeToParentChildBWC(out, query, type); - } - } else { - out.writeOptionalWriteable(innerHitBuilder); - } + out.writeOptionalWriteable(innerHitBuilder); out.writeBoolean(ignoreUnmapped); } diff --git a/modules/parent-join/src/main/java/org/elasticsearch/join/query/HasParentQueryBuilder.java b/modules/parent-join/src/main/java/org/elasticsearch/join/query/HasParentQueryBuilder.java index 4e328ea2c984e..e98fdb9e9699d 100644 --- a/modules/parent-join/src/main/java/org/elasticsearch/join/query/HasParentQueryBuilder.java +++ b/modules/parent-join/src/main/java/org/elasticsearch/join/query/HasParentQueryBuilder.java @@ -21,7 +21,6 @@ import org.apache.lucene.search.MatchNoDocsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.join.ScoreMode; -import org.elasticsearch.Version; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; @@ -97,15 +96,7 @@ protected void doWriteTo(StreamOutput out) throws IOException { out.writeString(type); out.writeBoolean(score); out.writeNamedWriteable(query); - if (out.getVersion().before(Version.V_5_5_0)) { - final boolean hasInnerHit = innerHitBuilder != null; - out.writeBoolean(hasInnerHit); - if (hasInnerHit) { - innerHitBuilder.writeToParentChildBWC(out, query, type); - } - } else { - out.writeOptionalWriteable(innerHitBuilder); - } + out.writeOptionalWriteable(innerHitBuilder); out.writeBoolean(ignoreUnmapped); } diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasChildQueryBuilderTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasChildQueryBuilderTests.java index 546677a2be4f4..6e4e79d16e5a5 100644 --- a/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasChildQueryBuilderTests.java +++ b/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasChildQueryBuilderTests.java @@ -196,10 +196,6 @@ protected void doAssertLuceneQuery(HasChildQueryBuilder queryBuilder, Query quer public void testSerializationBWC() throws IOException { for (Version version : VersionUtils.allReleasedVersions()) { HasChildQueryBuilder testQuery = createTestQueryBuilder(); - if (version.before(Version.V_5_2_0) && testQuery.innerHit() != null) { - // ignore unmapped for inner_hits has been added on 5.2 - testQuery.innerHit().setIgnoreUnmapped(false); - } assertSerialization(testQuery, version); } } diff --git a/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasParentQueryBuilderTests.java b/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasParentQueryBuilderTests.java index 6d6822007eee3..164405f653444 100644 --- a/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasParentQueryBuilderTests.java +++ b/modules/parent-join/src/test/java/org/elasticsearch/join/query/HasParentQueryBuilderTests.java @@ -171,10 +171,6 @@ protected void doAssertLuceneQuery(HasParentQueryBuilder queryBuilder, Query que public void testSerializationBWC() throws IOException { for (Version version : VersionUtils.allReleasedVersions()) { HasParentQueryBuilder testQuery = createTestQueryBuilder(); - if (version.before(Version.V_5_2_0) && testQuery.innerHit() != null) { - // ignore unmapped for inner_hits has been added on 5.2 - 
testQuery.innerHit().setIgnoreUnmapped(false); - } assertSerialization(testQuery, version); } } diff --git a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java index f18efe4585bc9..445076b8eba07 100644 --- a/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java +++ b/modules/percolator/src/main/java/org/elasticsearch/percolator/PercolateQueryBuilder.java @@ -272,11 +272,7 @@ public PercolateQueryBuilder(String field, String documentType, String indexedDo documents = document != null ? Collections.singletonList(document) : Collections.emptyList(); } if (documents.isEmpty() == false) { - if (in.getVersion().onOrAfter(Version.V_5_3_0)) { - documentXContentType = in.readEnum(XContentType.class); - } else { - documentXContentType = XContentHelper.xContentType(documents.iterator().next()); - } + documentXContentType = in.readEnum(XContentType.class); } else { documentXContentType = null; } @@ -329,7 +325,7 @@ protected void doWriteTo(StreamOutput out) throws IOException { BytesReference doc = documents.isEmpty() ? null : documents.iterator().next(); out.writeOptionalBytesReference(doc); } - if (documents.isEmpty() == false && out.getVersion().onOrAfter(Version.V_5_3_0)) { + if (documents.isEmpty() == false) { out.writeEnum(documentXContentType); } } diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java index e7163edef94c9..eb7af5f30d061 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/PercolateQueryBuilderTests.java @@ -27,7 +27,6 @@ import org.apache.lucene.search.MatchAllDocsQuery; import org.apache.lucene.search.Query; import org.elasticsearch.ResourceNotFoundException; -import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.action.get.GetRequest; import org.elasticsearch.action.get.GetResponse; @@ -36,7 +35,6 @@ import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; @@ -57,7 +55,6 @@ import java.io.UncheckedIOException; import java.util.ArrayList; import java.util.Arrays; -import java.util.Base64; import java.util.Collection; import java.util.Collections; import java.util.HashSet; @@ -294,26 +291,6 @@ public void testCreateMultiDocumentSearcher() throws Exception { assertThat(result.clauses().get(1).getOccur(), equalTo(BooleanClause.Occur.MUST_NOT)); } - public void testSerializationBwc() throws IOException { - final byte[] data = Base64.getDecoder().decode("P4AAAAAFZmllbGQEdHlwZQAAAAAAAA57ImZvbyI6ImJhciJ9AAAAAA=="); - final Version version = randomFrom(Version.V_5_0_0, Version.V_5_0_1, Version.V_5_0_2, - Version.V_5_1_1, Version.V_5_1_2, Version.V_5_2_0); - try (StreamInput in = StreamInput.wrap(data)) { - in.setVersion(version); - PercolateQueryBuilder queryBuilder = new PercolateQueryBuilder(in); - assertEquals("type", 
queryBuilder.getDocumentType()); - assertEquals("field", queryBuilder.getField()); - assertEquals("{\"foo\":\"bar\"}", queryBuilder.getDocuments().iterator().next().utf8ToString()); - assertEquals(XContentType.JSON, queryBuilder.getXContentType()); - - try (BytesStreamOutput out = new BytesStreamOutput()) { - out.setVersion(version); - queryBuilder.writeTo(out); - assertArrayEquals(data, out.bytes().toBytesRef().bytes); - } - } - } - private static BytesReference randomSource(Set usedFields) { try { // If we create two source that have the same field, but these fields have different kind of values (str vs. lng) then diff --git a/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryBuilderStoreTests.java b/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryBuilderStoreTests.java index 5e97eadae83e7..1c7ae3681ac63 100644 --- a/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryBuilderStoreTests.java +++ b/modules/percolator/src/test/java/org/elasticsearch/percolator/QueryBuilderStoreTests.java @@ -74,7 +74,7 @@ public void testStoringQueryBuilders() throws IOException { BinaryFieldMapper fieldMapper = PercolatorFieldMapper.Builder.createQueryBuilderFieldBuilder( new Mapper.BuilderContext(settings, new ContentPath(0))); - Version version = randomBoolean() ? Version.V_5_6_0 : Version.V_6_0_0_beta2; + Version version = Version.V_6_0_0_beta2; try (IndexWriter indexWriter = new IndexWriter(directory, config)) { for (int i = 0; i < queryBuilders.length; i++) { queryBuilders[i] = new TermQueryBuilder(randomAlphaOfLength(4), randomAlphaOfLength(8)); diff --git a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteRequestBuilders.java b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteRequestBuilders.java index e8e3760882eea..d20be74798066 100644 --- a/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteRequestBuilders.java +++ b/modules/reindex/src/main/java/org/elasticsearch/index/reindex/remote/RemoteRequestBuilders.java @@ -61,7 +61,8 @@ static Request initialSearch(SearchRequest searchRequest, BytesReference query, if (searchRequest.scroll() != null) { TimeValue keepAlive = searchRequest.scroll().keepAlive(); - if (remoteVersion.before(Version.V_5_0_0)) { + // V_5_0_0 + if (remoteVersion.before(Version.fromId(5000099))) { /* Versions of Elasticsearch before 5.0 couldn't parse nanos or micros * so we toss out that resolution, rounding up because more scroll * timeout seems safer than less. */ @@ -117,7 +118,8 @@ static Request initialSearch(SearchRequest searchRequest, BytesReference query, for (int i = 1; i < searchRequest.source().storedFields().fieldNames().size(); i++) { fields.append(',').append(searchRequest.source().storedFields().fieldNames().get(i)); } - String storedFieldsParamName = remoteVersion.before(Version.V_5_0_0_alpha4) ? "fields" : "stored_fields"; + // V_5_0_0 + String storedFieldsParamName = remoteVersion.before(Version.fromId(5000099)) ? 
"fields" : "stored_fields"; request.addParameter(storedFieldsParamName, fields.toString()); } @@ -186,7 +188,8 @@ private static String sortToUri(SortBuilder sort) { static Request scroll(String scroll, TimeValue keepAlive, Version remoteVersion) { Request request = new Request("POST", "/_search/scroll"); - if (remoteVersion.before(Version.V_5_0_0)) { + // V_5_0_0 + if (remoteVersion.before(Version.fromId(5000099))) { /* Versions of Elasticsearch before 5.0 couldn't parse nanos or micros * so we toss out that resolution, rounding up so we shouldn't end up * with 0s. */ diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java index 97809c9bc8dc3..0efedf449b562 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/RoundTripTests.java @@ -155,13 +155,8 @@ private void assertRequestEquals(Version version, ReindexRequest request, Reinde assertEquals(request.getRemoteInfo().getUsername(), tripped.getRemoteInfo().getUsername()); assertEquals(request.getRemoteInfo().getPassword(), tripped.getRemoteInfo().getPassword()); assertEquals(request.getRemoteInfo().getHeaders(), tripped.getRemoteInfo().getHeaders()); - if (version.onOrAfter(Version.V_5_2_0)) { - assertEquals(request.getRemoteInfo().getSocketTimeout(), tripped.getRemoteInfo().getSocketTimeout()); - assertEquals(request.getRemoteInfo().getConnectTimeout(), tripped.getRemoteInfo().getConnectTimeout()); - } else { - assertEquals(RemoteInfo.DEFAULT_SOCKET_TIMEOUT, tripped.getRemoteInfo().getSocketTimeout()); - assertEquals(RemoteInfo.DEFAULT_CONNECT_TIMEOUT, tripped.getRemoteInfo().getConnectTimeout()); - } + assertEquals(request.getRemoteInfo().getSocketTimeout(), tripped.getRemoteInfo().getSocketTimeout()); + assertEquals(request.getRemoteInfo().getConnectTimeout(), tripped.getRemoteInfo().getConnectTimeout()); } } diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteRequestBuildersTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteRequestBuildersTests.java index b51525f20e3c2..2f801811327b8 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteRequestBuildersTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteRequestBuildersTests.java @@ -136,13 +136,15 @@ public void testInitialSearchParamsFields() { // Test stored_fields for versions that support it searchRequest = new SearchRequest().source(new SearchSourceBuilder()); searchRequest.source().storedField("_source").storedField("_id"); - remoteVersion = Version.fromId(between(Version.V_5_0_0_alpha4_ID, Version.CURRENT.id)); + // V_5_0_0_alpha4 => current + remoteVersion = Version.fromId(between(5000004, Version.CURRENT.id)); assertThat(initialSearch(searchRequest, query, remoteVersion).getParameters(), hasEntry("stored_fields", "_source,_id")); // Test fields for versions that support it searchRequest = new SearchRequest().source(new SearchSourceBuilder()); searchRequest.source().storedField("_source").storedField("_id"); - remoteVersion = Version.fromId(between(2000099, Version.V_5_0_0_alpha4_ID - 1)); + // V_2_0_0 => V_5_0_0_alpha3 + remoteVersion = Version.fromId(between(2000099, 5000003)); assertThat(initialSearch(searchRequest, query, remoteVersion).getParameters(), hasEntry("fields", "_source,_id")); // Test extra 
fields for versions that need it @@ -190,7 +192,8 @@ public void testInitialSearchParamsMisc() { } private void assertScroll(Version remoteVersion, Map params, TimeValue requested) { - if (remoteVersion.before(Version.V_5_0_0)) { + // V_5_0_0 + if (remoteVersion.before(Version.fromId(5000099))) { // Versions of Elasticsearch prior to 5.0 can't parse nanos or micros in TimeValue. assertThat(params.get("scroll"), not(either(endsWith("nanos")).or(endsWith("micros")))); if (requested.getStringRep().endsWith("nanos") || requested.getStringRep().endsWith("micros")) { @@ -242,7 +245,7 @@ public void testScrollParams() { public void testScrollEntity() throws IOException { String scroll = randomAlphaOfLength(30); - HttpEntity entity = scroll(scroll, timeValueMillis(between(1, 1000)), Version.V_5_0_0).getEntity(); + HttpEntity entity = scroll(scroll, timeValueMillis(between(1, 1000)), Version.fromString("5.0.0")).getEntity(); assertEquals(ContentType.APPLICATION_JSON.toString(), entity.getContentType().getValue()); assertThat(Streams.copyToString(new InputStreamReader(entity.getContent(), StandardCharsets.UTF_8)), containsString("\"" + scroll + "\"")); @@ -255,7 +258,7 @@ public void testScrollEntity() throws IOException { public void testClearScroll() throws IOException { String scroll = randomAlphaOfLength(30); - Request request = clearScroll(scroll, Version.V_5_0_0); + Request request = clearScroll(scroll, Version.fromString("5.0.0")); assertEquals(ContentType.APPLICATION_JSON.toString(), request.getEntity().getContentType().getValue()); assertThat(Streams.copyToString(new InputStreamReader(request.getEntity().getContent(), StandardCharsets.UTF_8)), containsString("\"" + scroll + "\"")); diff --git a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSourceTests.java b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSourceTests.java index 92f370f8f6364..d3d3cefea45e1 100644 --- a/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSourceTests.java +++ b/modules/reindex/src/test/java/org/elasticsearch/index/reindex/remote/RemoteScrollableHitSourceTests.java @@ -150,13 +150,15 @@ public void testLookupRemoteVersion() throws Exception { assertTrue(called.get()); called.set(false); sourceWithMockedRemoteCall(false, ContentType.APPLICATION_JSON, "main/5_0_0_alpha_3.json").lookupRemoteVersion(v -> { - assertEquals(Version.V_5_0_0_alpha3, v); + // V_5_0_0_alpha3 + assertEquals(Version.fromId(5000003), v); called.set(true); }); assertTrue(called.get()); called.set(false); sourceWithMockedRemoteCall(false, ContentType.APPLICATION_JSON, "main/with_unknown_fields.json").lookupRemoteVersion(v -> { - assertEquals(Version.V_5_0_0_alpha3, v); + // V_5_0_0_alpha3 + assertEquals(Version.fromId(5000003), v); called.set(true); }); assertTrue(called.get()); diff --git a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapper.java b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapper.java index c4c44222f470e..0235e6e81368f 100644 --- a/plugins/analysis-icu/src/main/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapper.java +++ b/plugins/analysis-icu/src/main/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapper.java @@ -25,7 +25,6 @@ import com.ibm.icu.util.ULocale; import org.apache.lucene.document.Field; -import org.apache.lucene.document.SortedDocValuesField; import 
org.apache.lucene.document.SortedSetDocValuesField; import org.apache.lucene.index.IndexOptions; import org.apache.lucene.index.IndexableField; @@ -35,7 +34,6 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.lucene.Lucene; import org.elasticsearch.common.settings.Settings; @@ -56,7 +54,6 @@ import java.util.List; import java.util.Map; import java.util.Objects; -import java.util.function.BiFunction; import java.util.function.LongSupplier; public class ICUCollationKeywordFieldMapper extends FieldMapper { @@ -571,7 +568,6 @@ public static class TypeParser implements Mapper.TypeParser { private final String variableTop; private final boolean hiraganaQuaternaryMode; private final Collator collator; - private final BiFunction getDVField; protected ICUCollationKeywordFieldMapper(String simpleName, MappedFieldType fieldType, MappedFieldType defaultFieldType, Settings indexSettings, MultiFields multiFields, CopyTo copyTo, String rules, String language, @@ -593,11 +589,6 @@ protected ICUCollationKeywordFieldMapper(String simpleName, MappedFieldType fiel this.variableTop = variableTop; this.hiraganaQuaternaryMode = hiraganaQuaternaryMode; this.collator = collator; - if (indexCreatedVersion.onOrAfter(Version.V_5_6_0)) { - getDVField = SortedSetDocValuesField::new; - } else { - getDVField = SortedDocValuesField::new; - } } @Override @@ -754,7 +745,7 @@ protected void parseCreateField(ParseContext context, List field } if (fieldType().hasDocValues()) { - fields.add(getDVField.apply(fieldType().name(), binaryValue)); + fields.add(new SortedSetDocValuesField(fieldType().name(), binaryValue)); } else if (fieldType().indexOptions() != IndexOptions.NONE || fieldType().stored()) { createFieldNamesField(context, fields); } diff --git a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapperTests.java b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapperTests.java index fff255970113d..f39ae886dc45b 100644 --- a/plugins/analysis-icu/src/test/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapperTests.java +++ b/plugins/analysis-icu/src/test/java/org/elasticsearch/index/mapper/ICUCollationKeywordFieldMapperTests.java @@ -28,11 +28,9 @@ import org.apache.lucene.index.IndexableField; import org.apache.lucene.index.IndexableFieldType; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.Version; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.compress.CompressedXContent; -import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.IndexService; @@ -106,50 +104,6 @@ public void testDefaults() throws Exception { assertEquals(DocValuesType.SORTED_SET, fieldType.docValuesType()); } - public void testBackCompat() throws Exception { - indexService = createIndex("oldindex", Settings.builder().put("index.version.created", Version.V_5_5_0).build()); - parser = indexService.mapperService().documentMapperParser(); - - String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") - .startObject("properties").startObject("field").field("type", FIELD_TYPE).endObject().endObject() 
- .endObject().endObject()); - - DocumentMapper mapper = parser.parse("type", new CompressedXContent(mapping)); - - assertEquals(mapping, mapper.mappingSource().toString()); - - ParsedDocument doc = mapper.parse(SourceToParse.source("oldindex", "type", "1", BytesReference - .bytes(XContentFactory.jsonBuilder() - .startObject() - .field("field", "1234") - .endObject()), - XContentType.JSON)); - - IndexableField[] fields = doc.rootDoc().getFields("field"); - assertEquals(2, fields.length); - - Collator collator = Collator.getInstance(ULocale.ROOT); - RawCollationKey key = collator.getRawCollationKey("1234", null); - BytesRef expected = new BytesRef(key.bytes, 0, key.size); - - assertEquals(expected, fields[0].binaryValue()); - IndexableFieldType fieldType = fields[0].fieldType(); - assertThat(fieldType.omitNorms(), equalTo(true)); - assertFalse(fieldType.tokenized()); - assertFalse(fieldType.stored()); - assertThat(fieldType.indexOptions(), equalTo(IndexOptions.DOCS)); - assertThat(fieldType.storeTermVectors(), equalTo(false)); - assertThat(fieldType.storeTermVectorOffsets(), equalTo(false)); - assertThat(fieldType.storeTermVectorPositions(), equalTo(false)); - assertThat(fieldType.storeTermVectorPayloads(), equalTo(false)); - assertEquals(DocValuesType.NONE, fieldType.docValuesType()); - - assertEquals(expected, fields[1].binaryValue()); - fieldType = fields[1].fieldType(); - assertThat(fieldType.indexOptions(), equalTo(IndexOptions.NONE)); - assertEquals(DocValuesType.SORTED, fieldType.docValuesType()); - } - public void testNullValue() throws IOException { String mapping = Strings.toString(XContentFactory.jsonBuilder().startObject().startObject("type") .startObject("properties").startObject("field").field("type", FIELD_TYPE).endObject().endObject() diff --git a/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java b/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java index a6dc27b1f8a1c..50af824fae9bd 100644 --- a/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java +++ b/plugins/mapper-murmur3/src/main/java/org/elasticsearch/index/mapper/murmur3/Murmur3FieldMapper.java @@ -26,7 +26,6 @@ import org.apache.lucene.search.DocValuesFieldExistsQuery; import org.apache.lucene.search.Query; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.Version; import org.elasticsearch.common.hash.MurmurHash3; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.fielddata.IndexFieldData; @@ -93,10 +92,6 @@ public static class TypeParser implements Mapper.TypeParser { throw new MapperParsingException("Setting [index] cannot be modified for field [" + name + "]"); } - if (parserContext.indexVersionCreated().before(Version.V_5_0_0_alpha2)) { - node.remove("precision_step"); - } - TypeParsers.parseField(builder, name, node, parserContext); return builder; diff --git a/plugins/mapper-size/src/main/java/org/elasticsearch/index/mapper/size/SizeFieldMapper.java b/plugins/mapper-size/src/main/java/org/elasticsearch/index/mapper/size/SizeFieldMapper.java index 04ab7ecd245f6..ac5afeb3a1094 100644 --- a/plugins/mapper-size/src/main/java/org/elasticsearch/index/mapper/size/SizeFieldMapper.java +++ b/plugins/mapper-size/src/main/java/org/elasticsearch/index/mapper/size/SizeFieldMapper.java @@ -82,10 +82,6 @@ public Builder enabled(EnabledAttributeMapper enabled) { @Override public SizeFieldMapper build(BuilderContext context) { 
setupFieldType(context); - if (context.indexCreatedVersion().onOrBefore(Version.V_5_0_0_alpha4)) { - // Make sure that the doc_values are disabled on indices created before V_5_0_0_alpha4 - fieldType.setHasDocValues(false); - } return new SizeFieldMapper(enabledState, fieldType, context.indexSettings()); } } diff --git a/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java b/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java index 0b936e44e5beb..d7111f64a1baf 100644 --- a/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java +++ b/qa/full-cluster-restart/src/test/java/org/elasticsearch/upgrades/FullClusterRestartIT.java @@ -908,9 +908,6 @@ public void testHistoryUUIDIsAdded() throws Exception { private void checkSnapshot(String snapshotName, int count, Version tookOnVersion) throws IOException { // Check the snapshot metadata, especially the version Request listSnapshotRequest = new Request("GET", "/_snapshot/repo/" + snapshotName); - if (false == (runningAgainstOldCluster && oldClusterVersion.before(Version.V_5_5_0))) { - listSnapshotRequest.addParameter("verbose", "true"); - } Map listSnapshotResponse = entityAsMap(client().performRequest(listSnapshotRequest)); assertEquals(singletonList(snapshotName), XContentMapValues.extractValue("snapshots.snapshot", listSnapshotResponse)); assertEquals(singletonList("SUCCESS"), XContentMapValues.extractValue("snapshots.state", listSnapshotResponse)); diff --git a/server/src/main/java/org/elasticsearch/ElasticsearchException.java b/server/src/main/java/org/elasticsearch/ElasticsearchException.java index 9a02b76b3e038..c009bb3818cc8 100644 --- a/server/src/main/java/org/elasticsearch/ElasticsearchException.java +++ b/server/src/main/java/org/elasticsearch/ElasticsearchException.java @@ -44,7 +44,6 @@ import java.util.Arrays; import java.util.Collections; import java.util.HashMap; -import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Set; @@ -137,17 +136,7 @@ public ElasticsearchException(StreamInput in) throws IOException { super(in.readOptionalString(), in.readException()); readStackTrace(this, in); headers.putAll(in.readMapOfLists(StreamInput::readString, StreamInput::readString)); - if (in.getVersion().onOrAfter(Version.V_5_3_0)) { - metadata.putAll(in.readMapOfLists(StreamInput::readString, StreamInput::readString)); - } else { - for (Iterator>> iterator = headers.entrySet().iterator(); iterator.hasNext(); ) { - Map.Entry> header = iterator.next(); - if (header.getKey().startsWith("es.")) { - metadata.put(header.getKey(), header.getValue()); - iterator.remove(); - } - } - } + metadata.putAll(in.readMapOfLists(StreamInput::readString, StreamInput::readString)); } /** @@ -287,15 +276,8 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalString(this.getMessage()); out.writeException(this.getCause()); writeStackTraces(this, out); - if (out.getVersion().onOrAfter(Version.V_5_3_0)) { - out.writeMapOfLists(headers, StreamOutput::writeString, StreamOutput::writeString); - out.writeMapOfLists(metadata, StreamOutput::writeString, StreamOutput::writeString); - } else { - Map> finalHeaders = new HashMap<>(headers.size() + metadata.size()); - finalHeaders.putAll(headers); - finalHeaders.putAll(metadata); - out.writeMapOfLists(finalHeaders, StreamOutput::writeString, StreamOutput::writeString); - } + out.writeMapOfLists(headers, StreamOutput::writeString, StreamOutput::writeString); + 
out.writeMapOfLists(metadata, StreamOutput::writeString, StreamOutput::writeString); } public static ElasticsearchException readException(StreamInput input, int id) throws IOException { @@ -1018,11 +1000,11 @@ private enum ElasticsearchExceptionHandle { STATUS_EXCEPTION(org.elasticsearch.ElasticsearchStatusException.class, org.elasticsearch.ElasticsearchStatusException::new, 145, UNKNOWN_VERSION_ADDED), TASK_CANCELLED_EXCEPTION(org.elasticsearch.tasks.TaskCancelledException.class, - org.elasticsearch.tasks.TaskCancelledException::new, 146, Version.V_5_1_1), + org.elasticsearch.tasks.TaskCancelledException::new, 146, UNKNOWN_VERSION_ADDED), SHARD_LOCK_OBTAIN_FAILED_EXCEPTION(org.elasticsearch.env.ShardLockObtainFailedException.class, - org.elasticsearch.env.ShardLockObtainFailedException::new, 147, Version.V_5_0_2), + org.elasticsearch.env.ShardLockObtainFailedException::new, 147, UNKNOWN_VERSION_ADDED), UNKNOWN_NAMED_OBJECT_EXCEPTION(org.elasticsearch.common.xcontent.UnknownNamedObjectException.class, - org.elasticsearch.common.xcontent.UnknownNamedObjectException::new, 148, Version.V_5_2_0), + org.elasticsearch.common.xcontent.UnknownNamedObjectException::new, 148, UNKNOWN_VERSION_ADDED), TOO_MANY_BUCKETS_EXCEPTION(MultiBucketConsumerService.TooManyBucketsException.class, MultiBucketConsumerService.TooManyBucketsException::new, 149, Version.V_7_0_0_alpha1); diff --git a/server/src/main/java/org/elasticsearch/Version.java b/server/src/main/java/org/elasticsearch/Version.java index 1afe88f8d43ef..7303e8d34c907 100644 --- a/server/src/main/java/org/elasticsearch/Version.java +++ b/server/src/main/java/org/elasticsearch/Version.java @@ -43,87 +43,6 @@ public class Version implements Comparable, ToXContentFragment { * values below 25 are for alpha builder (since 5.0), and above 25 and below 50 are beta builds, and below 99 are RC builds, with 99 * indicating a release the (internal) format of the id is there so we can easily do after/before checks on the id */ - public static final int V_5_0_0_alpha1_ID = 5000001; - public static final Version V_5_0_0_alpha1 = new Version(V_5_0_0_alpha1_ID, org.apache.lucene.util.Version.LUCENE_6_0_0); - public static final int V_5_0_0_alpha2_ID = 5000002; - public static final Version V_5_0_0_alpha2 = new Version(V_5_0_0_alpha2_ID, org.apache.lucene.util.Version.LUCENE_6_0_0); - public static final int V_5_0_0_alpha3_ID = 5000003; - public static final Version V_5_0_0_alpha3 = new Version(V_5_0_0_alpha3_ID, org.apache.lucene.util.Version.LUCENE_6_0_0); - public static final int V_5_0_0_alpha4_ID = 5000004; - public static final Version V_5_0_0_alpha4 = new Version(V_5_0_0_alpha4_ID, org.apache.lucene.util.Version.LUCENE_6_1_0); - public static final int V_5_0_0_alpha5_ID = 5000005; - public static final Version V_5_0_0_alpha5 = new Version(V_5_0_0_alpha5_ID, org.apache.lucene.util.Version.LUCENE_6_1_0); - public static final int V_5_0_0_beta1_ID = 5000026; - public static final Version V_5_0_0_beta1 = new Version(V_5_0_0_beta1_ID, org.apache.lucene.util.Version.LUCENE_6_2_0); - public static final int V_5_0_0_rc1_ID = 5000051; - public static final Version V_5_0_0_rc1 = new Version(V_5_0_0_rc1_ID, org.apache.lucene.util.Version.LUCENE_6_2_0); - public static final int V_5_0_0_ID = 5000099; - public static final Version V_5_0_0 = new Version(V_5_0_0_ID, org.apache.lucene.util.Version.LUCENE_6_2_0); - public static final int V_5_0_1_ID = 5000199; - public static final Version V_5_0_1 = new Version(V_5_0_1_ID, org.apache.lucene.util.Version.LUCENE_6_2_1); - 
public static final int V_5_0_2_ID = 5000299; - public static final Version V_5_0_2 = new Version(V_5_0_2_ID, org.apache.lucene.util.Version.LUCENE_6_2_1); - // no version constant for 5.1.0 due to inadvertent release - public static final int V_5_1_1_ID = 5010199; - public static final Version V_5_1_1 = new Version(V_5_1_1_ID, org.apache.lucene.util.Version.LUCENE_6_3_0); - public static final int V_5_1_2_ID = 5010299; - public static final Version V_5_1_2 = new Version(V_5_1_2_ID, org.apache.lucene.util.Version.LUCENE_6_3_0); - public static final int V_5_2_0_ID = 5020099; - public static final Version V_5_2_0 = new Version(V_5_2_0_ID, org.apache.lucene.util.Version.LUCENE_6_4_0); - public static final int V_5_2_1_ID = 5020199; - public static final Version V_5_2_1 = new Version(V_5_2_1_ID, org.apache.lucene.util.Version.LUCENE_6_4_1); - public static final int V_5_2_2_ID = 5020299; - public static final Version V_5_2_2 = new Version(V_5_2_2_ID, org.apache.lucene.util.Version.LUCENE_6_4_1); - public static final int V_5_3_0_ID = 5030099; - public static final Version V_5_3_0 = new Version(V_5_3_0_ID, org.apache.lucene.util.Version.LUCENE_6_4_1); - public static final int V_5_3_1_ID = 5030199; - public static final Version V_5_3_1 = new Version(V_5_3_1_ID, org.apache.lucene.util.Version.LUCENE_6_4_2); - public static final int V_5_3_2_ID = 5030299; - public static final Version V_5_3_2 = new Version(V_5_3_2_ID, org.apache.lucene.util.Version.LUCENE_6_4_2); - public static final int V_5_3_3_ID = 5030399; - public static final Version V_5_3_3 = new Version(V_5_3_3_ID, org.apache.lucene.util.Version.LUCENE_6_4_2); - public static final int V_5_4_0_ID = 5040099; - public static final Version V_5_4_0 = new Version(V_5_4_0_ID, org.apache.lucene.util.Version.LUCENE_6_5_0); - public static final int V_5_4_1_ID = 5040199; - public static final Version V_5_4_1 = new Version(V_5_4_1_ID, org.apache.lucene.util.Version.LUCENE_6_5_1); - public static final int V_5_4_2_ID = 5040299; - public static final Version V_5_4_2 = new Version(V_5_4_2_ID, org.apache.lucene.util.Version.LUCENE_6_5_1); - public static final int V_5_4_3_ID = 5040399; - public static final Version V_5_4_3 = new Version(V_5_4_3_ID, org.apache.lucene.util.Version.LUCENE_6_5_1); - public static final int V_5_5_0_ID = 5050099; - public static final Version V_5_5_0 = new Version(V_5_5_0_ID, org.apache.lucene.util.Version.LUCENE_6_6_0); - public static final int V_5_5_1_ID = 5050199; - public static final Version V_5_5_1 = new Version(V_5_5_1_ID, org.apache.lucene.util.Version.LUCENE_6_6_0); - public static final int V_5_5_2_ID = 5050299; - public static final Version V_5_5_2 = new Version(V_5_5_2_ID, org.apache.lucene.util.Version.LUCENE_6_6_0); - public static final int V_5_5_3_ID = 5050399; - public static final Version V_5_5_3 = new Version(V_5_5_3_ID, org.apache.lucene.util.Version.LUCENE_6_6_0); - public static final int V_5_6_0_ID = 5060099; - public static final Version V_5_6_0 = new Version(V_5_6_0_ID, org.apache.lucene.util.Version.LUCENE_6_6_0); - public static final int V_5_6_1_ID = 5060199; - public static final Version V_5_6_1 = new Version(V_5_6_1_ID, org.apache.lucene.util.Version.LUCENE_6_6_1); - public static final int V_5_6_2_ID = 5060299; - public static final Version V_5_6_2 = new Version(V_5_6_2_ID, org.apache.lucene.util.Version.LUCENE_6_6_1); - public static final int V_5_6_3_ID = 5060399; - public static final Version V_5_6_3 = new Version(V_5_6_3_ID, org.apache.lucene.util.Version.LUCENE_6_6_1); - public static 
final int V_5_6_4_ID = 5060499; - public static final Version V_5_6_4 = new Version(V_5_6_4_ID, org.apache.lucene.util.Version.LUCENE_6_6_1); - public static final int V_5_6_5_ID = 5060599; - public static final Version V_5_6_5 = new Version(V_5_6_5_ID, org.apache.lucene.util.Version.LUCENE_6_6_1); - public static final int V_5_6_6_ID = 5060699; - public static final Version V_5_6_6 = new Version(V_5_6_6_ID, org.apache.lucene.util.Version.LUCENE_6_6_1); - public static final int V_5_6_7_ID = 5060799; - public static final Version V_5_6_7 = new Version(V_5_6_7_ID, org.apache.lucene.util.Version.LUCENE_6_6_1); - public static final int V_5_6_8_ID = 5060899; - public static final Version V_5_6_8 = new Version(V_5_6_8_ID, org.apache.lucene.util.Version.LUCENE_6_6_1); - public static final int V_5_6_9_ID = 5060999; - public static final Version V_5_6_9 = new Version(V_5_6_9_ID, org.apache.lucene.util.Version.LUCENE_6_6_1); - public static final int V_5_6_10_ID = 5061099; - public static final Version V_5_6_10 = new Version(V_5_6_10_ID, org.apache.lucene.util.Version.LUCENE_6_6_1); - public static final int V_5_6_11_ID = 5061199; - public static final Version V_5_6_11 = new Version(V_5_6_11_ID, org.apache.lucene.util.Version.LUCENE_6_6_1); - public static final int V_5_6_12_ID = 5061299; - public static final Version V_5_6_12 = new Version(V_5_6_12_ID, org.apache.lucene.util.Version.LUCENE_6_6_1); public static final int V_6_0_0_alpha1_ID = 6000001; public static final Version V_6_0_0_alpha1 = new Version(V_6_0_0_alpha1_ID, org.apache.lucene.util.Version.LUCENE_7_0_0); @@ -248,86 +167,6 @@ public static Version fromId(int id) { return V_6_0_0_alpha2; case V_6_0_0_alpha1_ID: return V_6_0_0_alpha1; - case V_5_6_12_ID: - return V_5_6_12; - case V_5_6_11_ID: - return V_5_6_11; - case V_5_6_10_ID: - return V_5_6_10; - case V_5_6_9_ID: - return V_5_6_9; - case V_5_6_8_ID: - return V_5_6_8; - case V_5_6_7_ID: - return V_5_6_7; - case V_5_6_6_ID: - return V_5_6_6; - case V_5_6_5_ID: - return V_5_6_5; - case V_5_6_4_ID: - return V_5_6_4; - case V_5_6_3_ID: - return V_5_6_3; - case V_5_6_2_ID: - return V_5_6_2; - case V_5_6_1_ID: - return V_5_6_1; - case V_5_6_0_ID: - return V_5_6_0; - case V_5_5_3_ID: - return V_5_5_3; - case V_5_5_2_ID: - return V_5_5_2; - case V_5_5_1_ID: - return V_5_5_1; - case V_5_5_0_ID: - return V_5_5_0; - case V_5_4_3_ID: - return V_5_4_3; - case V_5_4_2_ID: - return V_5_4_2; - case V_5_4_1_ID: - return V_5_4_1; - case V_5_4_0_ID: - return V_5_4_0; - case V_5_3_3_ID: - return V_5_3_3; - case V_5_3_2_ID: - return V_5_3_2; - case V_5_3_1_ID: - return V_5_3_1; - case V_5_3_0_ID: - return V_5_3_0; - case V_5_2_2_ID: - return V_5_2_2; - case V_5_2_1_ID: - return V_5_2_1; - case V_5_2_0_ID: - return V_5_2_0; - case V_5_1_2_ID: - return V_5_1_2; - case V_5_1_1_ID: - return V_5_1_1; - case V_5_0_2_ID: - return V_5_0_2; - case V_5_0_1_ID: - return V_5_0_1; - case V_5_0_0_ID: - return V_5_0_0; - case V_5_0_0_rc1_ID: - return V_5_0_0_rc1; - case V_5_0_0_beta1_ID: - return V_5_0_0_beta1; - case V_5_0_0_alpha5_ID: - return V_5_0_0_alpha5; - case V_5_0_0_alpha4_ID: - return V_5_0_0_alpha4; - case V_5_0_0_alpha3_ID: - return V_5_0_0_alpha3; - case V_5_0_0_alpha2_ID: - return V_5_0_0_alpha2; - case V_5_0_0_alpha1_ID: - return V_5_0_0_alpha1; default: return new Version(id, org.apache.lucene.util.Version.LATEST); } @@ -477,8 +316,11 @@ private static class DeclaredVersionsHolder { * is a beta or RC release then the version itself is returned. 
*/ public Version minimumCompatibilityVersion() { - if (major >= 6) { - // all major versions from 6 onwards are compatible with last minor series of the previous major + if (major == 6) { + // force the minimum compatibility for version 6 to 5.6 since we don't reference version 5 anymore + return Version.fromId(5060099); + } else if (major >= 7) { + // all major versions from 7 onwards are compatible with last minor series of the previous major Version bwcVersion = null; for (int i = DeclaredVersionsHolder.DECLARED_VERSIONS.size() - 1; i >= 0; i--) { diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainRequest.java index 40960c3362086..b6959afba5d89 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/allocation/ClusterAllocationExplainRequest.java @@ -19,7 +19,6 @@ package org.elasticsearch.action.admin.cluster.allocation; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.support.master.MasterNodeRequest; import org.elasticsearch.common.Nullable; @@ -69,7 +68,6 @@ public ClusterAllocationExplainRequest() { public ClusterAllocationExplainRequest(StreamInput in) throws IOException { super(in); - checkVersion(in.getVersion()); this.index = in.readOptionalString(); this.shard = in.readOptionalVInt(); this.primary = in.readOptionalBoolean(); @@ -94,7 +92,6 @@ public ClusterAllocationExplainRequest(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { - checkVersion(out.getVersion()); super.writeTo(out); out.writeOptionalString(index); out.writeOptionalVInt(shard); @@ -251,11 +248,4 @@ public static ClusterAllocationExplainRequest parse(XContentParser parser) throw public void readFrom(StreamInput in) throws IOException { throw new UnsupportedOperationException("usage of Streamable is to be replaced by Writeable"); } - - private void checkVersion(Version version) { - if (version.before(Version.V_5_2_0)) { - throw new IllegalArgumentException("cannot explain shards in a mixed-cluster with pre-" + Version.V_5_2_0 + - " nodes, node version [" + version + "]"); - } - } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequest.java index 3ae5c2d683a27..4798aeb67c199 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequest.java @@ -19,7 +19,6 @@ package org.elasticsearch.action.admin.cluster.shards; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.IndicesRequest; import org.elasticsearch.action.support.IndicesOptions; @@ -59,10 +58,6 @@ public ClusterSearchShardsRequest(StreamInput in) throws IOException { routing = in.readOptionalString(); preference = in.readOptionalString(); - if (in.getVersion().onOrBefore(Version.V_5_1_1)) { - //types - in.readStringArray(); - } indicesOptions = IndicesOptions.readIndicesOptions(in); } @@ -78,10 +73,6 @@ public void writeTo(StreamOutput out) throws 
IOException { out.writeOptionalString(routing); out.writeOptionalString(preference); - if (out.getVersion().onOrBefore(Version.V_5_1_1)) { - //types - out.writeStringArray(Strings.EMPTY_ARRAY); - } indicesOptions.writeIndicesOptions(out); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsResponse.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsResponse.java index 28c7903efde81..f8d448d0fe11c 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsResponse.java @@ -19,7 +19,6 @@ package org.elasticsearch.action.admin.cluster.shards; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.common.io.stream.StreamInput; @@ -77,14 +76,12 @@ public void readFrom(StreamInput in) throws IOException { for (int i = 0; i < nodes.length; i++) { nodes[i] = new DiscoveryNode(in); } - if (in.getVersion().onOrAfter(Version.V_5_1_1)) { - int size = in.readVInt(); - indicesAndFilters = new HashMap<>(); - for (int i = 0; i < size; i++) { - String index = in.readString(); - AliasFilter aliasFilter = new AliasFilter(in); - indicesAndFilters.put(index, aliasFilter); - } + int size = in.readVInt(); + indicesAndFilters = new HashMap<>(); + for (int i = 0; i < size; i++) { + String index = in.readString(); + AliasFilter aliasFilter = new AliasFilter(in); + indicesAndFilters.put(index, aliasFilter); } } @@ -99,12 +96,10 @@ public void writeTo(StreamOutput out) throws IOException { for (DiscoveryNode node : nodes) { node.writeTo(out); } - if (out.getVersion().onOrAfter(Version.V_5_1_1)) { - out.writeVInt(indicesAndFilters.size()); - for (Map.Entry entry : indicesAndFilters.entrySet()) { - out.writeString(entry.getKey()); - entry.getValue().writeTo(out); - } + out.writeVInt(indicesAndFilters.size()); + for (Map.Entry entry : indicesAndFilters.entrySet()) { + out.writeString(entry.getKey()); + entry.getValue().writeTo(out); } } diff --git a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsRequest.java index b3b24b570eeda..41ae57031d320 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/snapshots/get/GetSnapshotsRequest.java @@ -28,7 +28,6 @@ import java.io.IOException; import static org.elasticsearch.action.ValidateActions.addValidationError; -import static org.elasticsearch.snapshots.SnapshotInfo.VERBOSE_INTRODUCED; /** * Get snapshot request @@ -75,9 +74,7 @@ public GetSnapshotsRequest(StreamInput in) throws IOException { repository = in.readString(); snapshots = in.readStringArray(); ignoreUnavailable = in.readBoolean(); - if (in.getVersion().onOrAfter(VERBOSE_INTRODUCED)) { - verbose = in.readBoolean(); - } + verbose = in.readBoolean(); } @Override @@ -86,9 +83,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeString(repository); out.writeStringArray(snapshots); out.writeBoolean(ignoreUnavailable); - if (out.getVersion().onOrAfter(VERBOSE_INTRODUCED)) { - out.writeBoolean(verbose); - } + out.writeBoolean(verbose); } @Override diff --git 
a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/PutStoredScriptRequest.java b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/PutStoredScriptRequest.java index 6f702cbbe7c0a..d02d6272c9514 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/PutStoredScriptRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/cluster/storedscripts/PutStoredScriptRequest.java @@ -121,11 +121,7 @@ public void readFrom(StreamInput in) throws IOException { } id = in.readOptionalString(); content = in.readBytesReference(); - if (in.getVersion().onOrAfter(Version.V_5_3_0)) { - xContentType = in.readEnum(XContentType.class); - } else { - xContentType = XContentHelper.xContentType(content); - } + xContentType = in.readEnum(XContentType.class); if (in.getVersion().onOrAfter(Version.V_6_0_0_alpha2)) { context = in.readOptionalString(); source = new StoredScriptSource(in); @@ -143,9 +139,7 @@ public void writeTo(StreamOutput out) throws IOException { } out.writeOptionalString(id); out.writeBytesReference(content); - if (out.getVersion().onOrAfter(Version.V_5_3_0)) { - out.writeEnum(xContentType); - } + out.writeEnum(xContentType); if (out.getVersion().onOrAfter(Version.V_6_0_0_alpha2)) { out.writeOptionalString(context); source.writeTo(out); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeResponse.java index d45ab2682a5ec..e571db951cbc1 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/analyze/AnalyzeResponse.java @@ -18,7 +18,6 @@ */ package org.elasticsearch.action.admin.indices.analyze; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; @@ -191,15 +190,10 @@ public void readFrom(StreamInput in) throws IOException { startOffset = in.readInt(); endOffset = in.readInt(); position = in.readVInt(); - if (in.getVersion().onOrAfter(Version.V_5_2_0)) { - Integer len = in.readOptionalVInt(); - if (len != null) { - positionLength = len; - } else { - positionLength = 1; - } - } - else { + Integer len = in.readOptionalVInt(); + if (len != null) { + positionLength = len; + } else { positionLength = 1; } type = in.readOptionalString(); @@ -212,9 +206,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeInt(startOffset); out.writeInt(endOffset); out.writeVInt(position); - if (out.getVersion().onOrAfter(Version.V_5_2_0)) { - out.writeOptionalVInt(positionLength > 1 ? positionLength : null); - } + out.writeOptionalVInt(positionLength > 1 ? 
positionLength : null); out.writeOptionalString(type); out.writeMapWithConsistentOrder(attributes); } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexResponse.java b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexResponse.java index c858d0bb10651..79192693620dd 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexResponse.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/create/CreateIndexResponse.java @@ -19,7 +19,6 @@ package org.elasticsearch.action.admin.indices.create; -import org.elasticsearch.Version; import org.elasticsearch.action.support.master.ShardsAcknowledgedResponse; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; @@ -66,18 +65,14 @@ protected CreateIndexResponse(boolean acknowledged, boolean shardsAcknowledged, public void readFrom(StreamInput in) throws IOException { super.readFrom(in); readShardsAcknowledged(in); - if (in.getVersion().onOrAfter(Version.V_5_6_0)) { - index = in.readString(); - } + index = in.readString(); } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); writeShardsAcknowledged(out); - if (out.getVersion().onOrAfter(Version.V_5_6_0)) { - out.writeString(index); - } + out.writeString(index); } public String index() { diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequest.java index 1556ee2341d27..a827444acb8c5 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequest.java @@ -297,10 +297,6 @@ public void readFrom(StreamInput in) throws IOException { indicesOptions = IndicesOptions.readIndicesOptions(in); type = in.readOptionalString(); source = in.readString(); - if (in.getVersion().before(Version.V_5_3_0)) { - // we do not know the format from earlier versions so convert if necessary - source = XContentHelper.convertToJson(new BytesArray(source), false, false, XContentFactory.xContentType(source)); - } if (in.getVersion().before(Version.V_7_0_0_alpha1)) { in.readBoolean(); // updateAllTypes } diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java index d194b9acd1b7f..f9431a3ad02b0 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequest.java @@ -492,11 +492,6 @@ public void readFrom(StreamInput in) throws IOException { for (int i = 0; i < size; i++) { final String type = in.readString(); String mappingSource = in.readString(); - if (in.getVersion().before(Version.V_5_3_0)) { - // we do not know the incoming type so convert it if needed - mappingSource = - XContentHelper.convertToJson(new BytesArray(mappingSource), false, false, XContentFactory.xContentType(mappingSource)); - } mappings.put(type, mappingSource); } int customSize = in.readVInt(); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/QueryExplanation.java 
b/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/QueryExplanation.java index d0a62fe771d1f..b60bc407ce70c 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/QueryExplanation.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/QueryExplanation.java @@ -120,11 +120,7 @@ public void readFrom(StreamInput in) throws IOException { } else { index = in.readString(); } - if (in.getVersion().onOrAfter(Version.V_5_4_0)) { - shard = in.readInt(); - } else { - shard = RANDOM_SHARD; - } + shard = in.readInt(); valid = in.readBoolean(); explanation = in.readOptionalString(); error = in.readOptionalString(); @@ -137,9 +133,7 @@ public void writeTo(StreamOutput out) throws IOException { } else { out.writeString(index); } - if (out.getVersion().onOrAfter(Version.V_5_4_0)) { - out.writeInt(shard); - } + out.writeInt(shard); out.writeBoolean(valid); out.writeOptionalString(explanation); out.writeOptionalString(error); diff --git a/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryRequest.java b/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryRequest.java index 7694e7583c898..a30c9ba846107 100644 --- a/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryRequest.java +++ b/server/src/main/java/org/elasticsearch/action/admin/indices/validate/query/ValidateQueryRequest.java @@ -19,7 +19,6 @@ package org.elasticsearch.action.admin.indices.validate.query; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.ValidateActions; import org.elasticsearch.action.support.IndicesOptions; @@ -156,9 +155,7 @@ public void readFrom(StreamInput in) throws IOException { } explain = in.readBoolean(); rewrite = in.readBoolean(); - if (in.getVersion().onOrAfter(Version.V_5_4_0)) { - allShards = in.readBoolean(); - } + allShards = in.readBoolean(); } @Override @@ -171,9 +168,7 @@ public void writeTo(StreamOutput out) throws IOException { } out.writeBoolean(explain); out.writeBoolean(rewrite); - if (out.getVersion().onOrAfter(Version.V_5_4_0)) { - out.writeBoolean(allShards); - } + out.writeBoolean(allShards); } @Override diff --git a/server/src/main/java/org/elasticsearch/action/bulk/BulkItemResponse.java b/server/src/main/java/org/elasticsearch/action/bulk/BulkItemResponse.java index fb535d312cf65..9b9be3a41476e 100644 --- a/server/src/main/java/org/elasticsearch/action/bulk/BulkItemResponse.java +++ b/server/src/main/java/org/elasticsearch/action/bulk/BulkItemResponse.java @@ -244,8 +244,8 @@ public void writeTo(StreamOutput out) throws IOException { } private static boolean supportsAbortedFlag(Version version) { - // The "aborted" flag was added for 5.5.3 and 5.6.0, but was not in 6.0.0-beta2 - return version.after(Version.V_6_0_0_beta2) || (version.major == 5 && version.onOrAfter(Version.V_5_5_3)); + // The "aborted" flag was not in 6.0.0-beta2 + return version.after(Version.V_6_0_0_beta2); } /** @@ -447,11 +447,7 @@ public static BulkItemResponse readBulkItem(StreamInput in) throws IOException { @Override public void readFrom(StreamInput in) throws IOException { id = in.readVInt(); - if (in.getVersion().onOrAfter(Version.V_5_3_0)) { - opType = OpType.fromId(in.readByte()); - } else { - opType = OpType.fromString(in.readString()); - } + opType = OpType.fromId(in.readByte()); byte type = in.readByte(); if (type == 0) { @@ -474,11 +470,7 @@ 
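The supportsAbortedFlag() change above is one place where a predicate is simplified rather than a gate deleted: with no 5.x peer reachable, the (major == 5 && onOrAfter(5.5.3)) branch can never fire, so only the 6.0.0-beta2 comparison survives. A small self-check of that equivalence using the numeric id scheme; the beta2 and 5.5.3 ids below are illustrative placeholders, not the real constants:

// Sketch of why dropping the 5.x branch of supportsAbortedFlag is safe once no 5.x
// peer can appear on the wire. Placeholder ids, not the real V_6_0_0_beta2 / V_5_5_3 values.
final class AbortedFlagSketch {
    static final int V_6_0_0_BETA2  = 6_000_027; // placeholder
    static final int V_5_5_3        = 5_050_399; // placeholder
    static final int V_6_0_0_ALPHA1 = 6_000_001;

    static boolean oldPredicate(int id) {
        return id > V_6_0_0_BETA2 || (id / 1_000_000 == 5 && id >= V_5_5_3);
    }

    static boolean newPredicate(int id) {
        return id > V_6_0_0_BETA2;
    }

    public static void main(String[] args) {
        // For every id a 6.x node can still receive (>= 6.0.0-alpha1) the two predicates agree.
        for (int id = V_6_0_0_ALPHA1; id <= 7_000_099; id += 100) {
            if (oldPredicate(id) != newPredicate(id)) {
                throw new AssertionError("predicates diverge at id " + id);
            }
        }
        System.out.println("old and new aborted-flag checks agree for all 6.x+ ids");
    }
}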
public void readFrom(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { out.writeVInt(id); - if (out.getVersion().onOrAfter(Version.V_5_3_0)) { - out.writeByte(opType.getId()); - } else { - out.writeString(opType.getLowercase()); - } + out.writeByte(opType.getId()); if (response == null) { out.writeByte((byte) 2); diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequest.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequest.java index 636af6101ae0e..22d231d3711be 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequest.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesRequest.java @@ -19,7 +19,6 @@ package org.elasticsearch.action.fieldcaps; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.IndicesRequest; @@ -81,24 +80,18 @@ void setMergeResults(boolean mergeResults) { public void readFrom(StreamInput in) throws IOException { super.readFrom(in); fields = in.readStringArray(); - if (in.getVersion().onOrAfter(Version.V_5_5_0)) { - indices = in.readStringArray(); - indicesOptions = IndicesOptions.readIndicesOptions(in); - mergeResults = in.readBoolean(); - } else { - mergeResults = true; - } + indices = in.readStringArray(); + indicesOptions = IndicesOptions.readIndicesOptions(in); + mergeResults = in.readBoolean(); } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeStringArray(fields); - if (out.getVersion().onOrAfter(Version.V_5_5_0)) { - out.writeStringArray(indices); - indicesOptions.writeIndicesOptions(out); - out.writeBoolean(mergeResults); - } + out.writeStringArray(indices); + indicesOptions.writeIndicesOptions(out); + out.writeBoolean(mergeResults); } /** diff --git a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesResponse.java b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesResponse.java index 959b4e572b714..178639bd4348f 100644 --- a/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesResponse.java +++ b/server/src/main/java/org/elasticsearch/action/fieldcaps/FieldCapabilitiesResponse.java @@ -19,7 +19,6 @@ package org.elasticsearch.action.fieldcaps; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.collect.Tuple; @@ -95,11 +94,7 @@ public void readFrom(StreamInput in) throws IOException { super.readFrom(in); this.responseMap = in.readMap(StreamInput::readString, FieldCapabilitiesResponse::readField); - if (in.getVersion().onOrAfter(Version.V_5_5_0)) { - indexResponses = in.readList(FieldCapabilitiesIndexResponse::new); - } else { - indexResponses = Collections.emptyList(); - } + indexResponses = in.readList(FieldCapabilitiesIndexResponse::new); } private static Map readField(StreamInput in) throws IOException { @@ -110,10 +105,7 @@ private static Map readField(StreamInput in) throws I public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeMap(responseMap, StreamOutput::writeString, FieldCapabilitiesResponse::writeField); - if (out.getVersion().onOrAfter(Version.V_5_5_0)) { - out.writeList(indexResponses); - } - + out.writeList(indexResponses); } private static void writeField(StreamOutput out, diff 
--git a/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineRequest.java b/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineRequest.java index 6447b0557db0c..abff28bcf553c 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineRequest.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/PutPipelineRequest.java @@ -19,7 +19,6 @@ package org.elasticsearch.action.ingest; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.support.master.AcknowledgedRequest; import org.elasticsearch.common.bytes.BytesReference; @@ -82,11 +81,7 @@ public void readFrom(StreamInput in) throws IOException { super.readFrom(in); id = in.readString(); source = in.readBytesReference(); - if (in.getVersion().onOrAfter(Version.V_5_3_0)) { - xContentType = in.readEnum(XContentType.class); - } else { - xContentType = XContentHelper.xContentType(source); - } + xContentType = in.readEnum(XContentType.class); } @Override @@ -94,9 +89,7 @@ public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeString(id); out.writeBytesReference(source); - if (out.getVersion().onOrAfter(Version.V_5_3_0)) { - out.writeEnum(xContentType); - } + out.writeEnum(xContentType); } @Override diff --git a/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequest.java b/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequest.java index 8405bb85b4b11..fecee5f265fe9 100644 --- a/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequest.java +++ b/server/src/main/java/org/elasticsearch/action/ingest/SimulatePipelineRequest.java @@ -19,7 +19,6 @@ package org.elasticsearch.action.ingest; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.common.bytes.BytesReference; @@ -76,11 +75,7 @@ public SimulatePipelineRequest(BytesReference source, XContentType xContentType) id = in.readOptionalString(); verbose = in.readBoolean(); source = in.readBytesReference(); - if (in.getVersion().onOrAfter(Version.V_5_3_0)) { - xContentType = in.readEnum(XContentType.class); - } else { - xContentType = XContentHelper.xContentType(source); - } + xContentType = in.readEnum(XContentType.class); } @Override @@ -123,9 +118,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalString(id); out.writeBoolean(verbose); out.writeBytesReference(source); - if (out.getVersion().onOrAfter(Version.V_5_3_0)) { - out.writeEnum(xContentType); - } + out.writeEnum(xContentType); } @Override diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchRequest.java b/server/src/main/java/org/elasticsearch/action/search/SearchRequest.java index e67517c4852b8..e560e53ed7b64 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchRequest.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchRequest.java @@ -135,10 +135,8 @@ public SearchRequest(StreamInput in) throws IOException { indicesOptions = IndicesOptions.readIndicesOptions(in); requestCache = in.readOptionalBoolean(); batchedReduceSize = in.readVInt(); - if (in.getVersion().onOrAfter(Version.V_5_6_0)) { - maxConcurrentShardRequests = in.readVInt(); - preFilterShardSize = in.readVInt(); - } + maxConcurrentShardRequests = in.readVInt(); + preFilterShardSize = in.readVInt(); if (in.getVersion().onOrAfter(Version.V_6_3_0)) { 
allowPartialSearchResults = in.readOptionalBoolean(); } @@ -160,10 +158,8 @@ public void writeTo(StreamOutput out) throws IOException { indicesOptions.writeIndicesOptions(out); out.writeOptionalBoolean(requestCache); out.writeVInt(batchedReduceSize); - if (out.getVersion().onOrAfter(Version.V_5_6_0)) { - out.writeVInt(maxConcurrentShardRequests); - out.writeVInt(preFilterShardSize); - } + out.writeVInt(maxConcurrentShardRequests); + out.writeVInt(preFilterShardSize); if (out.getVersion().onOrAfter(Version.V_6_3_0)) { out.writeOptionalBoolean(allowPartialSearchResults); } diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java b/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java index 2a97798764e59..0273d5e58219a 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchResponse.java @@ -374,9 +374,7 @@ public void readFrom(StreamInput in) throws IOException { } scrollId = in.readOptionalString(); tookInMillis = in.readVLong(); - if (in.getVersion().onOrAfter(Version.V_5_6_0)) { - skippedShards = in.readVInt(); - } + skippedShards = in.readVInt(); } @Override @@ -395,9 +393,7 @@ public void writeTo(StreamOutput out) throws IOException { } out.writeOptionalString(scrollId); out.writeVLong(tookInMillis); - if(out.getVersion().onOrAfter(Version.V_5_6_0)) { - out.writeVInt(skippedShards); - } + out.writeVInt(skippedShards); } @Override diff --git a/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java b/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java index 133d0291df597..a4ea2616e0a21 100644 --- a/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java +++ b/server/src/main/java/org/elasticsearch/action/search/SearchTransportService.java @@ -19,7 +19,6 @@ package org.elasticsearch.action.search; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.ActionListenerResponseHandler; import org.elasticsearch.action.IndicesRequest; @@ -113,17 +112,8 @@ public void sendFreeContext(Transport.Connection connection, long contextId, fin public void sendCanMatch(Transport.Connection connection, final ShardSearchTransportRequest request, SearchTask task, final ActionListener listener) { - if (connection.getNode().getVersion().onOrAfter(Version.V_5_6_0)) { - transportService.sendChildRequest(connection, QUERY_CAN_MATCH_NAME, request, task, - TransportRequestOptions.EMPTY, new ActionListenerResponseHandler<>(listener, CanMatchResponse::new)); - } else { - // this might look weird but if we are in a CrossClusterSearch environment we can get a connection - // to a pre 5.latest node which is proxied by a 5.latest node under the hood since we are only compatible with 5.latest - // instead of sending the request we shortcut it here and let the caller deal with this -- see #25704 - // also failing the request instead of returning a fake answer might trigger a retry on a replica which might be on a - // compatible node - throw new IllegalArgumentException("can_match is not supported on pre 5.6 nodes"); - } + transportService.sendChildRequest(connection, QUERY_CAN_MATCH_NAME, request, task, + TransportRequestOptions.EMPTY, new ActionListenerResponseHandler<>(listener, CanMatchResponse::new)); } public void sendClearAllScrollContexts(Transport.Connection connection, final ActionListener listener) { diff --git 
a/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequest.java b/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequest.java index f416627c1e088..d6bf911e572c3 100644 --- a/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequest.java +++ b/server/src/main/java/org/elasticsearch/action/termvectors/TermVectorsRequest.java @@ -498,14 +498,10 @@ public void readFrom(StreamInput in) throws IOException { if (in.readBoolean()) { doc = in.readBytesReference(); - if (in.getVersion().onOrAfter(Version.V_5_3_0)) { - xContentType = in.readEnum(XContentType.class); - } else { - xContentType = XContentHelper.xContentType(doc); - } + xContentType = in.readEnum(XContentType.class); } routing = in.readOptionalString(); - + if (in.getVersion().before(Version.V_7_0_0_alpha1)) { in.readOptionalString(); // _parent } @@ -546,9 +542,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeBoolean(doc != null); if (doc != null) { out.writeBytesReference(doc); - if (out.getVersion().onOrAfter(Version.V_5_3_0)) { - out.writeEnum(xContentType); - } + out.writeEnum(xContentType); } out.writeOptionalString(routing); if (out.getVersion().before(Version.V_7_0_0_alpha1)) { diff --git a/server/src/main/java/org/elasticsearch/cluster/SnapshotDeletionsInProgress.java b/server/src/main/java/org/elasticsearch/cluster/SnapshotDeletionsInProgress.java index 234d1ef9f17fd..0134b798c72fd 100644 --- a/server/src/main/java/org/elasticsearch/cluster/SnapshotDeletionsInProgress.java +++ b/server/src/main/java/org/elasticsearch/cluster/SnapshotDeletionsInProgress.java @@ -40,8 +40,6 @@ public class SnapshotDeletionsInProgress extends AbstractNamedDiffable implements Custom { public static final String TYPE = "snapshot_deletions"; - // the version where SnapshotDeletionsInProgress was introduced - public static final Version VERSION_INTRODUCED = Version.V_5_2_0; // the list of snapshot deletion request entries private final List entries; @@ -135,7 +133,7 @@ public static NamedDiff readDiffFrom(StreamInput in) throws IOException @Override public Version getMinimalSupportedVersion() { - return VERSION_INTRODUCED; + return Version.CURRENT.minimumCompatibilityVersion(); } @Override diff --git a/server/src/main/java/org/elasticsearch/cluster/SnapshotsInProgress.java b/server/src/main/java/org/elasticsearch/cluster/SnapshotsInProgress.java index 87563c968af17..565c5134d1b38 100644 --- a/server/src/main/java/org/elasticsearch/cluster/SnapshotsInProgress.java +++ b/server/src/main/java/org/elasticsearch/cluster/SnapshotsInProgress.java @@ -48,12 +48,6 @@ public class SnapshotsInProgress extends AbstractNamedDiffable implements Custom { public static final String TYPE = "snapshots"; - // denotes an undefined repository state id, which will happen when receiving a cluster state with - // a snapshot in progress from a pre 5.2.x node - public static final long UNDEFINED_REPOSITORY_STATE_ID = -2L; - // the version where repository state ids were introduced - private static final Version REPOSITORY_ID_INTRODUCED_VERSION = Version.V_5_2_0; - @Override public boolean equals(Object o) { if (this == o) return true; @@ -432,10 +426,7 @@ public SnapshotsInProgress(StreamInput in) throws IOException { builder.put(shardId, new ShardSnapshotStatus(nodeId, shardState, reason)); } } - long repositoryStateId = UNDEFINED_REPOSITORY_STATE_ID; - if (in.getVersion().onOrAfter(REPOSITORY_ID_INTRODUCED_VERSION)) { - repositoryStateId = in.readLong(); - } + long repositoryStateId = 
in.readLong(); entries[i] = new Entry(snapshot, includeGlobalState, partial, @@ -471,9 +462,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeByte(shardEntry.value.state().value()); } } - if (out.getVersion().onOrAfter(REPOSITORY_ID_INTRODUCED_VERSION)) { - out.writeLong(entry.repositoryStateId); - } + out.writeLong(entry.repositoryStateId); } } diff --git a/server/src/main/java/org/elasticsearch/cluster/block/ClusterBlock.java b/server/src/main/java/org/elasticsearch/cluster/block/ClusterBlock.java index efbd262b16dda..fc09741f4d9c2 100644 --- a/server/src/main/java/org/elasticsearch/cluster/block/ClusterBlock.java +++ b/server/src/main/java/org/elasticsearch/cluster/block/ClusterBlock.java @@ -19,7 +19,6 @@ package org.elasticsearch.cluster.block; -import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Streamable; @@ -138,11 +137,7 @@ public void readFrom(StreamInput in) throws IOException { retryable = in.readBoolean(); disableStatePersistence = in.readBoolean(); status = RestStatus.readFrom(in); - if (in.getVersion().onOrAfter(Version.V_5_5_0)) { - allowReleaseResources = in.readBoolean(); - } else { - allowReleaseResources = false; - } + allowReleaseResources = in.readBoolean(); } @Override @@ -156,9 +151,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeBoolean(retryable); out.writeBoolean(disableStatePersistence); RestStatus.writeTo(out, status); - if (out.getVersion().onOrAfter(Version.V_5_5_0)) { - out.writeBoolean(allowReleaseResources); - } + out.writeBoolean(allowReleaseResources); } @Override diff --git a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/NodeAllocationResult.java b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/NodeAllocationResult.java index 153fc2cbe3e7d..8b97f1357fa00 100644 --- a/server/src/main/java/org/elasticsearch/cluster/routing/allocation/NodeAllocationResult.java +++ b/server/src/main/java/org/elasticsearch/cluster/routing/allocation/NodeAllocationResult.java @@ -20,7 +20,6 @@ package org.elasticsearch.cluster.routing.allocation; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.Version; import org.elasticsearch.cluster.node.DiscoveryNode; import org.elasticsearch.cluster.routing.allocation.decider.Decision; import org.elasticsearch.common.Nullable; @@ -82,11 +81,7 @@ public NodeAllocationResult(DiscoveryNode node, Decision decision, int weightRan public NodeAllocationResult(StreamInput in) throws IOException { node = new DiscoveryNode(in); shardStoreInfo = in.readOptionalWriteable(ShardStoreInfo::new); - if (in.getVersion().before(Version.V_5_2_1)) { - canAllocateDecision = Decision.readFrom(in); - } else { - canAllocateDecision = in.readOptionalWriteable(Decision::readFrom); - } + canAllocateDecision = in.readOptionalWriteable(Decision::readFrom); nodeDecision = AllocationDecision.readFrom(in); weightRanking = in.readVInt(); } @@ -95,15 +90,7 @@ public NodeAllocationResult(StreamInput in) throws IOException { public void writeTo(StreamOutput out) throws IOException { node.writeTo(out); out.writeOptionalWriteable(shardStoreInfo); - if (out.getVersion().before(Version.V_5_2_1)) { - if (canAllocateDecision == null) { - Decision.NO.writeTo(out); - } else { - canAllocateDecision.writeTo(out); - } - } else { - out.writeOptionalWriteable(canAllocateDecision); - } + out.writeOptionalWriteable(canAllocateDecision); 
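Most hunks in this patch repeat one shape, visible again in the ClusterBlock and NodeAllocationResult changes above: a field that used to be read or written only for peers on a late-enough 5.x version is now serialized unconditionally, and the default that stood in for older peers disappears. A minimal standalone sketch of that before/after, written against plain java.io streams rather than Elasticsearch's StreamInput/StreamOutput:

// Generic shape of the change repeated throughout this patch. The version gate and the
// old-peer default both go away because a 6.x node can no longer be wired to a 5.x node.
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.IOException;

final class GateRemovalSketch {
    static final int V_5_5_0 = 5_050_099;

    // Before: only read the flag from peers that actually send it, default otherwise.
    static boolean readBefore(DataInput in, int peerVersionId) throws IOException {
        if (peerVersionId >= V_5_5_0) {
            return in.readBoolean();
        }
        return false;
    }

    // After: the flag is always on the wire, so it is always read.
    static boolean readAfter(DataInput in) throws IOException {
        return in.readBoolean();
    }

    // The write side loses the mirror-image guard in the same way.
    static void writeAfter(DataOutput out, boolean flag) throws IOException {
        out.writeBoolean(flag);
    }

    public static void main(String[] args) throws IOException {
        ByteArrayOutputStream bytes = new ByteArrayOutputStream();
        writeAfter(new DataOutputStream(bytes), true);
        boolean flag = readAfter(new DataInputStream(new ByteArrayInputStream(bytes.toByteArray())));
        System.out.println("round-tripped flag = " + flag);
    }
}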
nodeDecision.writeTo(out); out.writeVInt(weightRanking); } diff --git a/server/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java b/server/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java index 8b2db374c8da9..894a886182d35 100644 --- a/server/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/InnerHitBuilder.java @@ -150,13 +150,7 @@ public InnerHitBuilder(String name) { */ public InnerHitBuilder(StreamInput in) throws IOException { name = in.readOptionalString(); - if (in.getVersion().before(Version.V_5_5_0)) { - in.readOptionalString(); - in.readOptionalString(); - } - if (in.getVersion().onOrAfter(Version.V_5_2_0)) { - ignoreUnmapped = in.readBoolean(); - } + ignoreUnmapped = in.readBoolean(); from = in.readVInt(); size = in.readVInt(); explain = in.readBoolean(); @@ -191,14 +185,6 @@ public InnerHitBuilder(StreamInput in) throws IOException { } } highlightBuilder = in.readOptionalWriteable(HighlightBuilder::new); - if (in.getVersion().before(Version.V_5_5_0)) { - /** - * this is needed for BWC with nodes pre 5.5 - */ - in.readNamedWriteable(QueryBuilder.class); - boolean hasChildren = in.readBoolean(); - assert hasChildren == false; - } if (in.getVersion().onOrAfter(Version.V_6_4_0)) { this.innerCollapseBuilder = in.readOptionalWriteable(CollapseBuilder::new); } @@ -206,9 +192,6 @@ public InnerHitBuilder(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { - if (out.getVersion().before(Version.V_5_5_0)) { - throw new IOException("Invalid output version, must >= " + Version.V_5_5_0.toString()); - } out.writeOptionalString(name); out.writeBoolean(ignoreUnmapped); out.writeVInt(from); @@ -252,84 +235,6 @@ public void writeTo(StreamOutput out) throws IOException { } } - /** - * BWC serialization for nested {@link InnerHitBuilder}. - * Should only be used to send nested inner hits to nodes pre 5.5. - */ - protected void writeToNestedBWC(StreamOutput out, QueryBuilder query, String nestedPath) throws IOException { - assert out.getVersion().before(Version.V_5_5_0) : - "invalid output version, must be < " + Version.V_5_5_0.toString(); - writeToBWC(out, query, nestedPath, null); - } - - /** - * BWC serialization for collapsing {@link InnerHitBuilder}. - * Should only be used to send collapsing inner hits to nodes pre 5.5. - */ - public void writeToCollapseBWC(StreamOutput out) throws IOException { - assert out.getVersion().before(Version.V_5_5_0) : - "invalid output version, must be < " + Version.V_5_5_0.toString(); - writeToBWC(out, new MatchAllQueryBuilder(), null, null); - } - - /** - * BWC serialization for parent/child {@link InnerHitBuilder}. - * Should only be used to send hasParent or hasChild inner hits to nodes pre 5.5. 
- */ - public void writeToParentChildBWC(StreamOutput out, QueryBuilder query, String parentChildPath) throws IOException { - assert(out.getVersion().before(Version.V_5_5_0)) : - "invalid output version, must be < " + Version.V_5_5_0.toString(); - writeToBWC(out, query, null, parentChildPath); - } - - private void writeToBWC(StreamOutput out, - QueryBuilder query, - String nestedPath, - String parentChildPath) throws IOException { - out.writeOptionalString(name); - if (nestedPath != null) { - out.writeOptionalString(nestedPath); - out.writeOptionalString(null); - } else { - out.writeOptionalString(null); - out.writeOptionalString(parentChildPath); - } - if (out.getVersion().onOrAfter(Version.V_5_2_0)) { - out.writeBoolean(ignoreUnmapped); - } - out.writeVInt(from); - out.writeVInt(size); - out.writeBoolean(explain); - out.writeBoolean(version); - out.writeBoolean(trackScores); - out.writeOptionalWriteable(storedFieldsContext); - out.writeGenericValue(docValueFields == null - ? null - : docValueFields.stream().map(ff -> ff.field).collect(Collectors.toList())); - boolean hasScriptFields = scriptFields != null; - out.writeBoolean(hasScriptFields); - if (hasScriptFields) { - out.writeVInt(scriptFields.size()); - Iterator iterator = scriptFields.stream() - .sorted(Comparator.comparing(ScriptField::fieldName)).iterator(); - while (iterator.hasNext()) { - iterator.next().writeTo(out); - } - } - out.writeOptionalWriteable(fetchSourceContext); - boolean hasSorts = sorts != null; - out.writeBoolean(hasSorts); - if (hasSorts) { - out.writeVInt(sorts.size()); - for (SortBuilder sort : sorts) { - out.writeNamedWriteable(sort); - } - } - out.writeOptionalWriteable(highlightBuilder); - out.writeNamedWriteable(query); - out.writeBoolean(false); - } - public String getName() { return name; } diff --git a/server/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java index 0de474f8b9901..950c9e052adae 100644 --- a/server/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilder.java @@ -26,7 +26,6 @@ import org.apache.lucene.search.Query; import org.elasticsearch.ElasticsearchParseException; import org.elasticsearch.ExceptionsHelper; -import org.elasticsearch.Version; import org.elasticsearch.action.termvectors.MultiTermVectorsItemResponse; import org.elasticsearch.action.termvectors.MultiTermVectorsRequest; import org.elasticsearch.action.termvectors.MultiTermVectorsResponse; @@ -47,7 +46,6 @@ import org.elasticsearch.common.xcontent.ToXContentObject; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.index.VersionType; @@ -220,11 +218,7 @@ public Item(@Nullable String index, @Nullable String type, XContentBuilder doc) type = in.readOptionalString(); if (in.readBoolean()) { doc = (BytesReference) in.readGenericValue(); - if (in.getVersion().onOrAfter(Version.V_5_3_0)) { - xContentType = in.readEnum(XContentType.class); - } else { - xContentType = XContentHelper.xContentType(doc); - } + xContentType = in.readEnum(XContentType.class); } else { id = in.readString(); } @@ -242,9 +236,7 @@ public void writeTo(StreamOutput out) throws IOException { 
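Several of the hunks above (PutStoredScriptRequest, the ingest pipeline requests, TermVectorsRequest, and the MoreLikeThis Item here) drop the same fallback: pre-5.3 senders never put the XContentType on the wire, so readers had to sniff it from the leading bytes of the source; now the enum is always present and read directly. A rough sketch of the two code paths, with hypothetical stand-ins rather than the real XContentType/XContentHelper API:

// Sketch of the content-type handling simplified above: the byte-sniffing fallback is
// only needed when the sender might predate 5.3. Hypothetical types and heuristic.
import java.io.DataInput;
import java.io.IOException;
import java.nio.charset.StandardCharsets;

final class ContentTypeSketch {
    enum ContentType { JSON, SMILE, YAML, CBOR }

    // Old fallback: guess the type from the payload itself (very rough heuristic here).
    static ContentType sniff(byte[] source) {
        return source.length > 0 && source[0] == '{' ? ContentType.JSON : ContentType.SMILE;
    }

    // New behaviour: the sender always wrote the enum, so just read it back.
    static ContentType read(DataInput in) throws IOException {
        return ContentType.values()[in.readUnsignedByte()];
    }

    public static void main(String[] args) {
        System.out.println(sniff("{\"field\":1}".getBytes(StandardCharsets.UTF_8))); // JSON
    }
}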
out.writeBoolean(doc != null); if (doc != null) { out.writeGenericValue(doc); - if (out.getVersion().onOrAfter(Version.V_5_3_0)) { - out.writeEnum(xContentType); - } + out.writeEnum(xContentType); } else { out.writeString(id); } diff --git a/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java index 991628578942c..8d7c0190eb210 100644 --- a/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/NestedQueryBuilder.java @@ -32,7 +32,6 @@ import org.apache.lucene.search.join.BitSetProducer; import org.apache.lucene.search.join.ParentChildrenBlockJoinQuery; import org.apache.lucene.search.join.ScoreMode; -import org.elasticsearch.Version; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.io.stream.StreamInput; @@ -103,15 +102,7 @@ protected void doWriteTo(StreamOutput out) throws IOException { out.writeString(path); out.writeVInt(scoreMode.ordinal()); out.writeNamedWriteable(query); - if (out.getVersion().before(Version.V_5_5_0)) { - final boolean hasInnerHit = innerHitBuilder != null; - out.writeBoolean(hasInnerHit); - if (hasInnerHit) { - innerHitBuilder.writeToNestedBWC(out, query, path); - } - } else { - out.writeOptionalWriteable(innerHitBuilder); - } + out.writeOptionalWriteable(innerHitBuilder); out.writeBoolean(ignoreUnmapped); } diff --git a/server/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java index 19687464edca7..0289ce6f6ae44 100644 --- a/server/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/QueryStringQueryBuilder.java @@ -175,9 +175,6 @@ public QueryStringQueryBuilder(StreamInput in) throws IOException { analyzer = in.readOptionalString(); quoteAnalyzer = in.readOptionalString(); quoteFieldSuffix = in.readOptionalString(); - if (in.getVersion().before(Version.V_6_0_0_beta1)) { - in.readBoolean(); // auto_generate_phrase_query - } allowLeadingWildcard = in.readOptionalBoolean(); analyzeWildcard = in.readOptionalBoolean(); enablePositionIncrements = in.readBoolean(); @@ -186,27 +183,15 @@ public QueryStringQueryBuilder(StreamInput in) throws IOException { fuzzyMaxExpansions = in.readVInt(); fuzzyRewrite = in.readOptionalString(); phraseSlop = in.readVInt(); - if (in.getVersion().before(Version.V_6_0_0_beta1)) { - in.readBoolean(); // use_dismax - tieBreaker = in.readFloat(); - type = DEFAULT_TYPE; - } else { - type = MultiMatchQueryBuilder.Type.readFromStream(in); - tieBreaker = in.readOptionalFloat(); - } + type = MultiMatchQueryBuilder.Type.readFromStream(in); + tieBreaker = in.readOptionalFloat(); + rewrite = in.readOptionalString(); minimumShouldMatch = in.readOptionalString(); lenient = in.readOptionalBoolean(); timeZone = in.readOptionalTimeZone(); escape = in.readBoolean(); maxDeterminizedStates = in.readVInt(); - if (in.getVersion().onOrAfter(Version.V_5_1_1) && in.getVersion().before(Version.V_6_0_0_beta1)) { - in.readBoolean(); // split_on_whitespace - Boolean useAllField = in.readOptionalBoolean(); - if (useAllField != null && useAllField) { - defaultField = "*"; - } - } if (in.getVersion().onOrAfter(Version.V_6_1_0)) { autoGenerateSynonymsPhraseQuery = in.readBoolean(); fuzzyTranspositions = in.readBoolean(); @@ -226,9 +211,6 @@ 
protected void doWriteTo(StreamOutput out) throws IOException { out.writeOptionalString(this.analyzer); out.writeOptionalString(this.quoteAnalyzer); out.writeOptionalString(this.quoteFieldSuffix); - if (out.getVersion().before(Version.V_6_0_0_beta1)) { - out.writeBoolean(false); // auto_generate_phrase_query - } out.writeOptionalBoolean(this.allowLeadingWildcard); out.writeOptionalBoolean(this.analyzeWildcard); out.writeBoolean(this.enablePositionIncrements); @@ -237,24 +219,14 @@ protected void doWriteTo(StreamOutput out) throws IOException { out.writeVInt(this.fuzzyMaxExpansions); out.writeOptionalString(this.fuzzyRewrite); out.writeVInt(this.phraseSlop); - if (out.getVersion().before(Version.V_6_0_0_beta1)) { - out.writeBoolean(true); // use_dismax - out.writeFloat(tieBreaker != null ? tieBreaker : 0.0f); - } else { - type.writeTo(out); - out.writeOptionalFloat(tieBreaker); - } + type.writeTo(out); + out.writeOptionalFloat(tieBreaker); out.writeOptionalString(this.rewrite); out.writeOptionalString(this.minimumShouldMatch); out.writeOptionalBoolean(this.lenient); out.writeOptionalTimeZone(timeZone); out.writeBoolean(this.escape); out.writeVInt(this.maxDeterminizedStates); - if (out.getVersion().onOrAfter(Version.V_5_1_1) && out.getVersion().before(Version.V_6_0_0_beta1)) { - out.writeBoolean(false); // split_on_whitespace - Boolean useAllFields = defaultField == null ? null : Regex.isMatchAllPattern(defaultField); - out.writeOptionalBoolean(useAllFields); - } if (out.getVersion().onOrAfter(Version.V_6_1_0)) { out.writeBoolean(autoGenerateSynonymsPhraseQuery); out.writeBoolean(fuzzyTranspositions); diff --git a/server/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java b/server/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java index 6223254874d07..b297036f2f37b 100644 --- a/server/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/RangeQueryBuilder.java @@ -23,7 +23,6 @@ import org.apache.lucene.search.Query; import org.apache.lucene.search.TermRangeQuery; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.Version; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; @@ -108,14 +107,12 @@ public RangeQueryBuilder(StreamInput in) throws IOException { if (formatString != null) { format = Joda.forPattern(formatString); } - if (in.getVersion().onOrAfter(Version.V_5_2_0)) { - String relationString = in.readOptionalString(); - if (relationString != null) { - relation = ShapeRelation.getRelationByName(relationString); - if (relation != null && !isRelationAllowed(relation)) { - throw new IllegalArgumentException( - "[range] query does not support relation [" + relationString + "]"); - } + String relationString = in.readOptionalString(); + if (relationString != null) { + relation = ShapeRelation.getRelationByName(relationString); + if (relation != null && !isRelationAllowed(relation)) { + throw new IllegalArgumentException( + "[range] query does not support relation [" + relationString + "]"); } } } @@ -139,13 +136,11 @@ protected void doWriteTo(StreamOutput out) throws IOException { formatString = this.format.format(); } out.writeOptionalString(formatString); - if (out.getVersion().onOrAfter(Version.V_5_2_0)) { - String relationString = null; - if (this.relation != null) { - relationString = this.relation.getRelationName(); - } - out.writeOptionalString(relationString); + String relationString = 
null; + if (this.relation != null) { + relationString = this.relation.getRelationName(); } + out.writeOptionalString(relationString); } /** diff --git a/server/src/main/java/org/elasticsearch/index/query/SimpleQueryStringBuilder.java b/server/src/main/java/org/elasticsearch/index/query/SimpleQueryStringBuilder.java index 46a958b58fe28..473aa636caab0 100644 --- a/server/src/main/java/org/elasticsearch/index/query/SimpleQueryStringBuilder.java +++ b/server/src/main/java/org/elasticsearch/index/query/SimpleQueryStringBuilder.java @@ -168,27 +168,11 @@ public SimpleQueryStringBuilder(StreamInput in) throws IOException { flags = in.readInt(); analyzer = in.readOptionalString(); defaultOperator = Operator.readFromStream(in); - if (in.getVersion().before(Version.V_5_1_1)) { - in.readBoolean(); // lowercase_expanded_terms - } settings.lenient(in.readBoolean()); - if (in.getVersion().onOrAfter(Version.V_5_1_1)) { - this.lenientSet = in.readBoolean(); - } + this.lenientSet = in.readBoolean(); settings.analyzeWildcard(in.readBoolean()); - if (in.getVersion().before(Version.V_5_1_1)) { - in.readString(); // locale - } minimumShouldMatch = in.readOptionalString(); - if (in.getVersion().onOrAfter(Version.V_5_1_1)) { - settings.quoteFieldSuffix(in.readOptionalString()); - if (in.getVersion().before(Version.V_6_0_0_beta2)) { - Boolean useAllFields = in.readOptionalBoolean(); - if (useAllFields != null && useAllFields) { - useAllFields(true); - } - } - } + settings.quoteFieldSuffix(in.readOptionalString()); if (in.getVersion().onOrAfter(Version.V_6_1_0)) { settings.autoGenerateSynonymsPhraseQuery(in.readBoolean()); settings.fuzzyPrefixLength(in.readVInt()); @@ -208,28 +192,11 @@ protected void doWriteTo(StreamOutput out) throws IOException { out.writeInt(flags); out.writeOptionalString(analyzer); defaultOperator.writeTo(out); - if (out.getVersion().before(Version.V_5_1_1)) { - out.writeBoolean(true); // lowercase_expanded_terms - } out.writeBoolean(settings.lenient()); - if (out.getVersion().onOrAfter(Version.V_5_1_1)) { - out.writeBoolean(lenientSet); - } + out.writeBoolean(lenientSet); out.writeBoolean(settings.analyzeWildcard()); - if (out.getVersion().before(Version.V_5_1_1)) { - out.writeString(Locale.ROOT.toLanguageTag()); // locale - } out.writeOptionalString(minimumShouldMatch); - if (out.getVersion().onOrAfter(Version.V_5_1_1)) { - out.writeOptionalString(settings.quoteFieldSuffix()); - if (out.getVersion().before(Version.V_6_0_0_beta2)) { - if (useAllFields()) { - out.writeOptionalBoolean(true); - } else { - out.writeOptionalBoolean(null); - } - } - } + out.writeOptionalString(settings.quoteFieldSuffix()); if (out.getVersion().onOrAfter(Version.V_6_1_0)) { out.writeBoolean(settings.autoGenerateSynonymsPhraseQuery()); out.writeVInt(settings.fuzzyPrefixLength()); diff --git a/server/src/main/java/org/elasticsearch/index/reindex/BulkByScrollTask.java b/server/src/main/java/org/elasticsearch/index/reindex/BulkByScrollTask.java index 9ff26b13212c7..66e83907d4993 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/BulkByScrollTask.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/BulkByScrollTask.java @@ -20,7 +20,6 @@ package org.elasticsearch.index.reindex; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -303,11 +302,7 @@ public Status(List sliceStatuses, @Nullable String reasonCanc } 
public Status(StreamInput in) throws IOException { - if (in.getVersion().onOrAfter(Version.V_5_1_1)) { - sliceId = in.readOptionalVInt(); - } else { - sliceId = null; - } + sliceId = in.readOptionalVInt(); total = in.readVLong(); updated = in.readVLong(); created = in.readVLong(); @@ -321,18 +316,12 @@ public Status(StreamInput in) throws IOException { requestsPerSecond = in.readFloat(); reasonCancelled = in.readOptionalString(); throttledUntil = in.readTimeValue(); - if (in.getVersion().onOrAfter(Version.V_5_1_1)) { - sliceStatuses = in.readList(stream -> stream.readOptionalWriteable(StatusOrException::new)); - } else { - sliceStatuses = emptyList(); - } + sliceStatuses = in.readList(stream -> stream.readOptionalWriteable(StatusOrException::new)); } @Override public void writeTo(StreamOutput out) throws IOException { - if (out.getVersion().onOrAfter(Version.V_5_1_1)) { - out.writeOptionalVInt(sliceId); - } + out.writeOptionalVInt(sliceId); out.writeVLong(total); out.writeVLong(updated); out.writeVLong(created); @@ -346,11 +335,9 @@ public void writeTo(StreamOutput out) throws IOException { out.writeFloat(requestsPerSecond); out.writeOptionalString(reasonCancelled); out.writeTimeValue(throttledUntil); - if (out.getVersion().onOrAfter(Version.V_5_1_1)) { - out.writeVInt(sliceStatuses.size()); - for (StatusOrException sliceStatus : sliceStatuses) { - out.writeOptionalWriteable(sliceStatus); - } + out.writeVInt(sliceStatuses.size()); + for (StatusOrException sliceStatus : sliceStatuses) { + out.writeOptionalWriteable(sliceStatus); } } diff --git a/server/src/main/java/org/elasticsearch/index/reindex/RemoteInfo.java b/server/src/main/java/org/elasticsearch/index/reindex/RemoteInfo.java index 70f79a9def605..3ebd261b5847c 100644 --- a/server/src/main/java/org/elasticsearch/index/reindex/RemoteInfo.java +++ b/server/src/main/java/org/elasticsearch/index/reindex/RemoteInfo.java @@ -92,13 +92,8 @@ public RemoteInfo(StreamInput in) throws IOException { headers.put(in.readString(), in.readString()); } this.headers = unmodifiableMap(headers); - if (in.getVersion().onOrAfter(Version.V_5_2_0)) { - socketTimeout = in.readTimeValue(); - connectTimeout = in.readTimeValue(); - } else { - socketTimeout = DEFAULT_SOCKET_TIMEOUT; - connectTimeout = DEFAULT_CONNECT_TIMEOUT; - } + socketTimeout = in.readTimeValue(); + connectTimeout = in.readTimeValue(); if (in.getVersion().onOrAfter(Version.V_6_4_0)) { pathPrefix = in.readOptionalString(); } else { @@ -119,10 +114,8 @@ public void writeTo(StreamOutput out) throws IOException { out.writeString(header.getKey()); out.writeString(header.getValue()); } - if (out.getVersion().onOrAfter(Version.V_5_2_0)) { - out.writeTimeValue(socketTimeout); - out.writeTimeValue(connectTimeout); - } + out.writeTimeValue(socketTimeout); + out.writeTimeValue(connectTimeout); if (out.getVersion().onOrAfter(Version.V_6_4_0)) { out.writeOptionalString(pathPrefix); } diff --git a/server/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java b/server/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java index f01b4bb312174..fb7885a217e01 100644 --- a/server/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java +++ b/server/src/main/java/org/elasticsearch/indices/flush/SyncedFlushService.java @@ -560,9 +560,6 @@ static final class PreSyncedFlushResponse extends TransportResponse { } boolean includeNumDocs(Version version) { - if (version.major == Version.V_5_6_8.major) { - return version.onOrAfter(Version.V_5_6_8); - } return 
version.onOrAfter(Version.V_6_2_2); } diff --git a/server/src/main/java/org/elasticsearch/ingest/PipelineConfiguration.java b/server/src/main/java/org/elasticsearch/ingest/PipelineConfiguration.java index a2aa8e385e3f9..6778f3d1eaa6a 100644 --- a/server/src/main/java/org/elasticsearch/ingest/PipelineConfiguration.java +++ b/server/src/main/java/org/elasticsearch/ingest/PipelineConfiguration.java @@ -19,7 +19,6 @@ package org.elasticsearch.ingest; -import org.elasticsearch.Version; import org.elasticsearch.cluster.AbstractDiffable; import org.elasticsearch.cluster.Diff; import org.elasticsearch.common.ParseField; @@ -117,13 +116,7 @@ public XContentBuilder toXContent(XContentBuilder builder, Params params) throws } public static PipelineConfiguration readFrom(StreamInput in) throws IOException { - if (in.getVersion().onOrAfter(Version.V_5_3_0)) { - return new PipelineConfiguration(in.readString(), in.readBytesReference(), in.readEnum(XContentType.class)); - } else { - final String id = in.readString(); - final BytesReference config = in.readBytesReference(); - return new PipelineConfiguration(id, config, XContentHelper.xContentType(config)); - } + return new PipelineConfiguration(in.readString(), in.readBytesReference(), in.readEnum(XContentType.class)); } public static Diff readDiffFrom(StreamInput in) throws IOException { @@ -134,9 +127,7 @@ public static Diff readDiffFrom(StreamInput in) throws IO public void writeTo(StreamOutput out) throws IOException { out.writeString(id); out.writeBytesReference(config); - if (out.getVersion().onOrAfter(Version.V_5_3_0)) { - out.writeEnum(xContentType); - } + out.writeEnum(xContentType); } @Override diff --git a/server/src/main/java/org/elasticsearch/monitor/os/OsStats.java b/server/src/main/java/org/elasticsearch/monitor/os/OsStats.java index 637f4cf1cbe00..3bdfe95f1e2c6 100644 --- a/server/src/main/java/org/elasticsearch/monitor/os/OsStats.java +++ b/server/src/main/java/org/elasticsearch/monitor/os/OsStats.java @@ -52,11 +52,7 @@ public OsStats(StreamInput in) throws IOException { this.cpu = new Cpu(in); this.mem = new Mem(in); this.swap = new Swap(in); - if (in.getVersion().onOrAfter(Version.V_5_1_1)) { - this.cgroup = in.readOptionalWriteable(Cgroup::new); - } else { - this.cgroup = null; - } + this.cgroup = in.readOptionalWriteable(Cgroup::new); } @Override @@ -65,9 +61,7 @@ public void writeTo(StreamOutput out) throws IOException { cpu.writeTo(out); mem.writeTo(out); swap.writeTo(out); - if (out.getVersion().onOrAfter(Version.V_5_1_1)) { - out.writeOptionalWriteable(cgroup); - } + out.writeOptionalWriteable(cgroup); } public long getTimestamp() { diff --git a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksCustomMetaData.java b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksCustomMetaData.java index f81b7c770e56c..b7a179e41e381 100644 --- a/server/src/main/java/org/elasticsearch/persistent/PersistentTasksCustomMetaData.java +++ b/server/src/main/java/org/elasticsearch/persistent/PersistentTasksCustomMetaData.java @@ -188,7 +188,7 @@ public long getNumberOfTasksOnNode(String nodeId, String taskName) { @Override public Version getMinimalSupportedVersion() { - return Version.V_5_4_0; + return Version.CURRENT.minimumCompatibilityVersion(); } @Override diff --git a/server/src/main/java/org/elasticsearch/plugins/PluginInfo.java b/server/src/main/java/org/elasticsearch/plugins/PluginInfo.java index 74a911b0ae4fc..d211efef5173e 100644 --- a/server/src/main/java/org/elasticsearch/plugins/PluginInfo.java +++ 
b/server/src/main/java/org/elasticsearch/plugins/PluginInfo.java @@ -107,11 +107,7 @@ public PluginInfo(final StreamInput in) throws IOException { } else { extendedPlugins = Collections.emptyList(); } - if (in.getVersion().onOrAfter(Version.V_5_4_0)) { - hasNativeController = in.readBoolean(); - } else { - hasNativeController = false; - } + hasNativeController = in.readBoolean(); if (in.getVersion().onOrAfter(Version.V_6_0_0_beta2) && in.getVersion().before(Version.V_6_3_0)) { /* * Elasticsearch versions in [6.0.0-beta2, 6.3.0) allowed plugins to specify that they require the keystore and this was @@ -134,9 +130,7 @@ public void writeTo(final StreamOutput out) throws IOException { if (out.getVersion().onOrAfter(Version.V_6_2_0)) { out.writeStringList(extendedPlugins); } - if (out.getVersion().onOrAfter(Version.V_5_4_0)) { - out.writeBoolean(hasNativeController); - } + out.writeBoolean(hasNativeController); if (out.getVersion().onOrAfter(Version.V_6_0_0_beta2) && out.getVersion().before(Version.V_6_3_0)) { /* * Elasticsearch versions in [6.0.0-beta2, 6.3.0) allowed plugins to specify that they require the keystore and this was diff --git a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java index cc1d27425e133..a4d6518e9af92 100644 --- a/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java +++ b/server/src/main/java/org/elasticsearch/repositories/blobstore/BlobStoreRepository.java @@ -39,7 +39,6 @@ import org.elasticsearch.ExceptionsHelper; import org.elasticsearch.ResourceNotFoundException; import org.elasticsearch.Version; -import org.elasticsearch.cluster.SnapshotsInProgress; import org.elasticsearch.cluster.metadata.IndexMetaData; import org.elasticsearch.cluster.metadata.MetaData; import org.elasticsearch.cluster.metadata.RepositoryMetaData; @@ -719,7 +718,7 @@ public boolean isReadOnly() { protected void writeIndexGen(final RepositoryData repositoryData, final long repositoryStateId) throws IOException { assert isReadOnly() == false; // can not write to a read only repository final long currentGen = latestIndexBlobId(); - if (repositoryStateId != SnapshotsInProgress.UNDEFINED_REPOSITORY_STATE_ID && currentGen != repositoryStateId) { + if (currentGen != repositoryStateId) { // the index file was updated by a concurrent operation, so we were operating on stale // repository data throw new RepositoryException(metadata.name(), "concurrent modification of the index-N file, expected current generation [" + diff --git a/server/src/main/java/org/elasticsearch/script/Script.java b/server/src/main/java/org/elasticsearch/script/Script.java index a64a3ecd37640..67ea4f24b83f8 100644 --- a/server/src/main/java/org/elasticsearch/script/Script.java +++ b/server/src/main/java/org/elasticsearch/script/Script.java @@ -19,7 +19,6 @@ package org.elasticsearch.script; -import org.elasticsearch.Version; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; @@ -451,133 +450,24 @@ public Script(ScriptType type, String lang, String idOrCode, Map * Creates a {@link Script} read from an input stream. */ public Script(StreamInput in) throws IOException { - // Version 5.3 allows lang to be an optional parameter for stored scripts and expects - // options to be null for stored and file scripts. 
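In the BlobStoreRepository hunk above, dropping UNDEFINED_REPOSITORY_STATE_ID (-2, which only a pre-5.2 master could have sent) means writeIndexGen no longer has a sentinel to special-case, so every mismatch between the expected and the current generation is treated as a concurrent modification. A sketch of that guard in isolation, with hypothetical method signatures rather than the real BlobStoreRepository code:

// Sketch of the generation guard after removing the "undefined" sentinel: every caller
// now supplies a real expected generation, so a mismatch always fails fast.
final class RepoGenerationSketch {
    static void checkGeneration(long currentGen, long expectedGen, String repoName) {
        // Previously a sentinel of -2 (from pre-5.2 cluster states) skipped this comparison.
        if (currentGen != expectedGen) {
            throw new IllegalStateException("concurrent modification of the index-N file in [" + repoName
                + "]: expected generation [" + expectedGen + "] but found [" + currentGen + "]");
        }
    }

    public static void main(String[] args) {
        checkGeneration(7, 7, "backups"); // ok
        try {
            checkGeneration(8, 7, "backups"); // stale expected generation
        } catch (IllegalStateException e) {
            System.out.println(e.getMessage());
        }
    }
}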
- if (in.getVersion().onOrAfter(Version.V_5_3_0)) { - this.type = ScriptType.readFrom(in); - this.lang = in.readOptionalString(); - this.idOrCode = in.readString(); - @SuppressWarnings("unchecked") - Map options = (Map)(Map)in.readMap(); - this.options = options; - this.params = in.readMap(); - // Version 5.1 to 5.3 (exclusive) requires all Script members to be non-null and supports the potential - // for more options than just XContentType. Reorders the read in contents to be in - // same order as the constructor. - } else if (in.getVersion().onOrAfter(Version.V_5_1_1)) { - this.type = ScriptType.readFrom(in); - String lang = in.readString(); - this.lang = this.type == ScriptType.STORED ? null : lang; - - this.idOrCode = in.readString(); - @SuppressWarnings("unchecked") - Map options = (Map)(Map)in.readMap(); - - if (this.type != ScriptType.INLINE && options.isEmpty()) { - this.options = null; - } else { - this.options = options; - } - - this.params = in.readMap(); - // Prior to version 5.1 the script members are read in certain cases as optional and given - // default values when necessary. Also the only option supported is for XContentType. - } else { - this.idOrCode = in.readString(); - - if (in.readBoolean()) { - this.type = ScriptType.readFrom(in); - } else { - this.type = DEFAULT_SCRIPT_TYPE; - } - - String lang = in.readOptionalString(); - - if (lang == null) { - this.lang = this.type == ScriptType.STORED ? null : DEFAULT_SCRIPT_LANG; - } else { - this.lang = lang; - } - - Map params = in.readMap(); - - if (params == null) { - this.params = new HashMap<>(); - } else { - this.params = params; - } - - if (in.readBoolean()) { - this.options = new HashMap<>(); - XContentType contentType = in.readEnum(XContentType.class); - this.options.put(CONTENT_TYPE_OPTION, contentType.mediaType()); - } else if (type == ScriptType.INLINE) { - options = new HashMap<>(); - } else { - this.options = null; - } - } + this.type = ScriptType.readFrom(in); + this.lang = in.readOptionalString(); + this.idOrCode = in.readString(); + @SuppressWarnings("unchecked") + Map options = (Map)(Map)in.readMap(); + this.options = options; + this.params = in.readMap(); } @Override public void writeTo(StreamOutput out) throws IOException { - // Version 5.3+ allows lang to be an optional parameter for stored scripts and expects - // options to be null for stored and file scripts. - if (out.getVersion().onOrAfter(Version.V_5_3_0)) { - type.writeTo(out); - out.writeOptionalString(lang); - out.writeString(idOrCode); - @SuppressWarnings("unchecked") - Map options = (Map)(Map)this.options; - out.writeMap(options); - out.writeMap(params); - // Version 5.1 to 5.3 (exclusive) requires all Script members to be non-null and supports the potential - // for more options than just XContentType. Reorders the written out contents to be in - // same order as the constructor. - } else if (out.getVersion().onOrAfter(Version.V_5_1_1)) { - type.writeTo(out); - - if (lang == null) { - out.writeString(""); - } else { - out.writeString(lang); - } - - out.writeString(idOrCode); - @SuppressWarnings("unchecked") - Map options = (Map)(Map)this.options; - - if (options == null) { - out.writeMap(new HashMap<>()); - } else { - out.writeMap(options); - } - - out.writeMap(params); - // Prior to version 5.1 the Script members were possibly written as optional or null, though there is no case where a null - // value wasn't equivalent to it's default value when actually compiling/executing a script. 
Meaning, there are no - // backwards compatibility issues, and now there's enforced consistency. Also the only supported compiler - // option was XContentType. - } else { - out.writeString(idOrCode); - out.writeBoolean(true); - type.writeTo(out); - out.writeOptionalString(lang); - - if (params.isEmpty()) { - out.writeMap(null); - } else { - out.writeMap(params); - } - - if (options != null && options.containsKey(CONTENT_TYPE_OPTION)) { - XContentType contentType = XContentType.fromMediaTypeOrFormat(options.get(CONTENT_TYPE_OPTION)); - out.writeBoolean(true); - out.writeEnum(contentType); - } else { - out.writeBoolean(false); - } - } + type.writeTo(out); + out.writeOptionalString(lang); + out.writeString(idOrCode); + @SuppressWarnings("unchecked") + Map options = (Map) (Map) this.options; + out.writeMap(options); + out.writeMap(params); } /** diff --git a/server/src/main/java/org/elasticsearch/script/ScriptMetaData.java b/server/src/main/java/org/elasticsearch/script/ScriptMetaData.java index 59d824eb313e0..35a7c2e60d685 100644 --- a/server/src/main/java/org/elasticsearch/script/ScriptMetaData.java +++ b/server/src/main/java/org/elasticsearch/script/ScriptMetaData.java @@ -292,25 +292,7 @@ public ScriptMetaData(StreamInput in) throws IOException { for (int i = 0; i < size; i++) { String id = in.readString(); - - // Prior to version 5.3 all scripts were stored using the deprecated namespace. - // Split the id to find the language then use StoredScriptSource to parse the - // expected BytesReference after which a new StoredScriptSource is created - // with the appropriate language and options. - if (in.getVersion().before(Version.V_5_3_0)) { - int split = id.indexOf('#'); - - if (split == -1) { - throw new IllegalArgumentException("illegal stored script id [" + id + "], does not contain lang"); - } else { - source = new StoredScriptSource(in); - source = new StoredScriptSource(id.substring(0, split), source.getSource(), Collections.emptyMap()); - } - // Version 5.3+ can just be parsed normally using StoredScriptSource. - } else { - source = new StoredScriptSource(in); - } - + source = new StoredScriptSource(in); scripts.put(id, source); } @@ -319,34 +301,11 @@ public ScriptMetaData(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { - // Version 5.3+ will output the contents of the scripts' Map using - // StoredScriptSource to stored the language, code, and options. - if (out.getVersion().onOrAfter(Version.V_5_3_0)) { - out.writeVInt(scripts.size()); - - for (Map.Entry entry : scripts.entrySet()) { - out.writeString(entry.getKey()); - entry.getValue().writeTo(out); - } - // Prior to Version 5.3, stored scripts can only be read using the deprecated - // namespace. Scripts using the deprecated namespace are first isolated in a - // temporary Map, then written out. Since all scripts will be stored using the - // deprecated namespace, no scripts will be lost. 
- } else { - Map filtered = new HashMap<>(); - - for (Map.Entry entry : scripts.entrySet()) { - if (entry.getKey().contains("#")) { - filtered.put(entry.getKey(), entry.getValue()); - } - } - - out.writeVInt(filtered.size()); + out.writeVInt(scripts.size()); - for (Map.Entry entry : filtered.entrySet()) { - out.writeString(entry.getKey()); - entry.getValue().writeTo(out); - } + for (Map.Entry entry : scripts.entrySet()) { + out.writeString(entry.getKey()); + entry.getValue().writeTo(out); } } diff --git a/server/src/main/java/org/elasticsearch/search/SearchShardTarget.java b/server/src/main/java/org/elasticsearch/search/SearchShardTarget.java index 19c0f8c64d58b..4a46c7202d14e 100644 --- a/server/src/main/java/org/elasticsearch/search/SearchShardTarget.java +++ b/server/src/main/java/org/elasticsearch/search/SearchShardTarget.java @@ -19,7 +19,6 @@ package org.elasticsearch.search; -import org.elasticsearch.Version; import org.elasticsearch.action.OriginalIndices; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.io.stream.StreamInput; @@ -52,11 +51,7 @@ public SearchShardTarget(StreamInput in) throws IOException { } shardId = ShardId.readShardId(in); this.originalIndices = null; - if (in.getVersion().onOrAfter(Version.V_5_6_0)) { - clusterAlias = in.readOptionalString(); - } else { - clusterAlias = null; - } + clusterAlias = in.readOptionalString(); } public SearchShardTarget(String nodeId, ShardId shardId, String clusterAlias, OriginalIndices originalIndices) { @@ -121,9 +116,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeText(nodeId); } shardId.writeTo(out); - if (out.getVersion().onOrAfter(Version.V_5_6_0)) { - out.writeOptionalString(clusterAlias); - } + out.writeOptionalString(clusterAlias); } @Override diff --git a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/IncludeExclude.java b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/IncludeExclude.java index 9e3012c5eb9d6..8154108f9f0bc 100644 --- a/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/IncludeExclude.java +++ b/server/src/main/java/org/elasticsearch/search/aggregations/bucket/terms/IncludeExclude.java @@ -36,7 +36,6 @@ import org.apache.lucene.util.automaton.Operations; import org.apache.lucene.util.automaton.RegExp; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.Version; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -424,13 +423,8 @@ public IncludeExclude(StreamInput in) throws IOException { } else { excludeValues = null; } - if (in.getVersion().onOrAfter(Version.V_5_2_0)) { - incNumPartitions = in.readVInt(); - incZeroBasedPartition = in.readVInt(); - } else { - incNumPartitions = 0; - incZeroBasedPartition = 0; - } + incNumPartitions = in.readVInt(); + incZeroBasedPartition = in.readVInt(); } @Override @@ -457,10 +451,8 @@ public void writeTo(StreamOutput out) throws IOException { out.writeBytesRef(value); } } - if (out.getVersion().onOrAfter(Version.V_5_2_0)) { - out.writeVInt(incNumPartitions); - out.writeVInt(incZeroBasedPartition); - } + out.writeVInt(incNumPartitions); + out.writeVInt(incZeroBasedPartition); } } diff --git a/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java b/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java index c42a1a12a1877..c7564dc5ea835 100644 --- 
a/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/builder/SearchSourceBuilder.java @@ -248,9 +248,7 @@ public SearchSourceBuilder(StreamInput in) throws IOException { profile = in.readBoolean(); searchAfterBuilder = in.readOptionalWriteable(SearchAfterBuilder::new); sliceBuilder = in.readOptionalWriteable(SliceBuilder::new); - if (in.getVersion().onOrAfter(Version.V_5_3_0)) { - collapse = in.readOptionalWriteable(CollapseBuilder::new); - } + collapse = in.readOptionalWriteable(CollapseBuilder::new); if (in.getVersion().onOrAfter(Version.V_6_0_0_beta1)) { trackTotalHits = in.readBoolean(); } else { @@ -313,9 +311,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeBoolean(profile); out.writeOptionalWriteable(searchAfterBuilder); out.writeOptionalWriteable(sliceBuilder); - if (out.getVersion().onOrAfter(Version.V_5_3_0)) { - out.writeOptionalWriteable(collapse); - } + out.writeOptionalWriteable(collapse); if (out.getVersion().onOrAfter(Version.V_6_0_0_beta1)) { out.writeBoolean(trackTotalHits); } diff --git a/server/src/main/java/org/elasticsearch/search/collapse/CollapseBuilder.java b/server/src/main/java/org/elasticsearch/search/collapse/CollapseBuilder.java index ccab5e2cb93b3..2ebf413b1405d 100644 --- a/server/src/main/java/org/elasticsearch/search/collapse/CollapseBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/collapse/CollapseBuilder.java @@ -19,7 +19,6 @@ package org.elasticsearch.search.collapse; import org.apache.lucene.index.IndexOptions; -import org.elasticsearch.Version; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; @@ -94,31 +93,14 @@ public CollapseBuilder(String field) { public CollapseBuilder(StreamInput in) throws IOException { this.field = in.readString(); this.maxConcurrentGroupRequests = in.readVInt(); - if (in.getVersion().onOrAfter(Version.V_5_5_0)) { - this.innerHits = in.readList(InnerHitBuilder::new); - } else { - InnerHitBuilder innerHitBuilder = in.readOptionalWriteable(InnerHitBuilder::new); - if (innerHitBuilder != null) { - this.innerHits = Collections.singletonList(innerHitBuilder); - } else { - this.innerHits = Collections.emptyList(); - } - } + this.innerHits = in.readList(InnerHitBuilder::new); } @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(field); out.writeVInt(maxConcurrentGroupRequests); - if (out.getVersion().onOrAfter(Version.V_5_5_0)) { - out.writeList(innerHits); - } else { - boolean hasInnerHit = innerHits.isEmpty() == false; - out.writeBoolean(hasInnerHit); - if (hasInnerHit) { - innerHits.get(0).writeToCollapseBWC(out); - } - } + out.writeList(innerHits); } public static CollapseBuilder fromXContent(XContentParser parser) { diff --git a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AbstractHighlighterBuilder.java b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AbstractHighlighterBuilder.java index 7888f6cd5a098..161ca9279f094 100644 --- a/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AbstractHighlighterBuilder.java +++ b/server/src/main/java/org/elasticsearch/search/fetch/subphase/highlight/AbstractHighlighterBuilder.java @@ -21,7 +21,6 @@ import org.apache.lucene.search.highlight.SimpleFragmenter; import org.apache.lucene.search.highlight.SimpleSpanFragmenter; -import org.elasticsearch.Version; import 
org.elasticsearch.common.ParseField; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; @@ -152,17 +151,13 @@ protected AbstractHighlighterBuilder(StreamInput in) throws IOException { order(in.readOptionalWriteable(Order::readFromStream)); highlightFilter(in.readOptionalBoolean()); forceSource(in.readOptionalBoolean()); - if (in.getVersion().onOrAfter(Version.V_5_4_0)) { - boundaryScannerType(in.readOptionalWriteable(BoundaryScannerType::readFromStream)); - } + boundaryScannerType(in.readOptionalWriteable(BoundaryScannerType::readFromStream)); boundaryMaxScan(in.readOptionalVInt()); if (in.readBoolean()) { boundaryChars(in.readString().toCharArray()); } - if (in.getVersion().onOrAfter(Version.V_5_4_0)) { - if (in.readBoolean()) { - boundaryScannerLocale(in.readString()); - } + if (in.readBoolean()) { + boundaryScannerLocale(in.readString()); } noMatchSize(in.readOptionalVInt()); phraseLimit(in.readOptionalVInt()); @@ -191,21 +186,17 @@ public final void writeTo(StreamOutput out) throws IOException { out.writeOptionalWriteable(order); out.writeOptionalBoolean(highlightFilter); out.writeOptionalBoolean(forceSource); - if (out.getVersion().onOrAfter(Version.V_5_4_0)) { - out.writeOptionalWriteable(boundaryScannerType); - } + out.writeOptionalWriteable(boundaryScannerType); out.writeOptionalVInt(boundaryMaxScan); boolean hasBounaryChars = boundaryChars != null; out.writeBoolean(hasBounaryChars); if (hasBounaryChars) { out.writeString(String.valueOf(boundaryChars)); } - if (out.getVersion().onOrAfter(Version.V_5_4_0)) { - boolean hasBoundaryScannerLocale = boundaryScannerLocale != null; - out.writeBoolean(hasBoundaryScannerLocale); - if (hasBoundaryScannerLocale) { - out.writeString(boundaryScannerLocale.toLanguageTag()); - } + boolean hasBoundaryScannerLocale = boundaryScannerLocale != null; + out.writeBoolean(hasBoundaryScannerLocale); + if (hasBoundaryScannerLocale) { + out.writeString(boundaryScannerLocale.toLanguageTag()); } out.writeOptionalVInt(noMatchSize); out.writeOptionalVInt(phraseLimit); diff --git a/server/src/main/java/org/elasticsearch/search/internal/ShardSearchLocalRequest.java b/server/src/main/java/org/elasticsearch/search/internal/ShardSearchLocalRequest.java index cf656ed3b9cb2..72a12b805eb17 100644 --- a/server/src/main/java/org/elasticsearch/search/internal/ShardSearchLocalRequest.java +++ b/server/src/main/java/org/elasticsearch/search/internal/ShardSearchLocalRequest.java @@ -35,7 +35,6 @@ import org.elasticsearch.search.builder.SearchSourceBuilder; import java.io.IOException; -import java.util.Optional; /** * Shard level search request that gets created and consumed on the local node. @@ -213,25 +212,10 @@ protected void innerReadFrom(StreamInput in) throws IOException { source = in.readOptionalWriteable(SearchSourceBuilder::new); types = in.readStringArray(); aliasFilter = new AliasFilter(in); - if (in.getVersion().onOrAfter(Version.V_5_2_0)) { - indexBoost = in.readFloat(); - } else { - // Nodes < 5.2.0 doesn't send index boost. Read it from source. - if (source != null) { - Optional boost = source.indexBoosts() - .stream() - .filter(ib -> ib.getIndex().equals(shardId.getIndexName())) - .findFirst(); - indexBoost = boost.isPresent() ? 
boost.get().getBoost() : 1.0f; - } else { - indexBoost = 1.0f; - } - } + indexBoost = in.readFloat(); nowInMillis = in.readVLong(); requestCache = in.readOptionalBoolean(); - if (in.getVersion().onOrAfter(Version.V_5_6_0)) { - clusterAlias = in.readOptionalString(); - } + clusterAlias = in.readOptionalString(); if (in.getVersion().onOrAfter(Version.V_6_3_0)) { allowPartialSearchResults = in.readOptionalBoolean(); } @@ -254,16 +238,12 @@ protected void innerWriteTo(StreamOutput out, boolean asKey) throws IOException out.writeOptionalWriteable(source); out.writeStringArray(types); aliasFilter.writeTo(out); - if (out.getVersion().onOrAfter(Version.V_5_2_0)) { - out.writeFloat(indexBoost); - } + out.writeFloat(indexBoost); if (asKey == false) { out.writeVLong(nowInMillis); } out.writeOptionalBoolean(requestCache); - if (out.getVersion().onOrAfter(Version.V_5_6_0)) { - out.writeOptionalString(clusterAlias); - } + out.writeOptionalString(clusterAlias); if (out.getVersion().onOrAfter(Version.V_6_3_0)) { out.writeOptionalBoolean(allowPartialSearchResults); } diff --git a/server/src/main/java/org/elasticsearch/snapshots/SnapshotInfo.java b/server/src/main/java/org/elasticsearch/snapshots/SnapshotInfo.java index 67ddabc37fa30..fdbe74d8d4dd9 100644 --- a/server/src/main/java/org/elasticsearch/snapshots/SnapshotInfo.java +++ b/server/src/main/java/org/elasticsearch/snapshots/SnapshotInfo.java @@ -76,9 +76,7 @@ public final class SnapshotInfo implements Comparable, ToXContent, private static final String SUCCESSFUL_SHARDS = "successful_shards"; private static final String INCLUDE_GLOBAL_STATE = "include_global_state"; - private static final Version VERSION_INCOMPATIBLE_INTRODUCED = Version.V_5_2_0; private static final Version INCLUDE_GLOBAL_STATE_INTRODUCED = Version.V_6_2_0; - public static final Version VERBOSE_INTRODUCED = Version.V_5_5_0; private static final Comparator COMPARATOR = Comparator.comparing(SnapshotInfo::startTime).thenComparing(SnapshotInfo::snapshotId); @@ -275,11 +273,7 @@ public SnapshotInfo(final StreamInput in) throws IOException { indicesListBuilder.add(in.readString()); } indices = Collections.unmodifiableList(indicesListBuilder); - if (in.getVersion().onOrAfter(VERBOSE_INTRODUCED)) { - state = in.readBoolean() ? SnapshotState.fromValue(in.readByte()) : null; - } else { - state = SnapshotState.fromValue(in.readByte()); - } + state = in.readBoolean() ? SnapshotState.fromValue(in.readByte()) : null; reason = in.readOptionalString(); startTime = in.readVLong(); endTime = in.readVLong(); @@ -295,11 +289,7 @@ public SnapshotInfo(final StreamInput in) throws IOException { } else { shardFailures = Collections.emptyList(); } - if (in.getVersion().before(VERSION_INCOMPATIBLE_INTRODUCED)) { - version = Version.readVersion(in); - } else { - version = in.readBoolean() ? Version.readVersion(in) : null; - } + version = in.readBoolean() ? 
Version.readVersion(in) : null; if (in.getVersion().onOrAfter(INCLUDE_GLOBAL_STATE_INTRODUCED)) { includeGlobalState = in.readOptionalBoolean(); } @@ -681,19 +671,11 @@ public void writeTo(final StreamOutput out) throws IOException { for (String index : indices) { out.writeString(index); } - if (out.getVersion().onOrAfter(VERBOSE_INTRODUCED)) { - if (state != null) { - out.writeBoolean(true); - out.writeByte(state.value()); - } else { - out.writeBoolean(false); - } + if (state != null) { + out.writeBoolean(true); + out.writeByte(state.value()); } else { - if (out.getVersion().before(VERSION_INCOMPATIBLE_INTRODUCED) && state == SnapshotState.INCOMPATIBLE) { - out.writeByte(SnapshotState.FAILED.value()); - } else { - out.writeByte(state.value()); - } + out.writeBoolean(false); } out.writeOptionalString(reason); out.writeVLong(startTime); @@ -704,19 +686,11 @@ public void writeTo(final StreamOutput out) throws IOException { for (SnapshotShardFailure failure : shardFailures) { failure.writeTo(out); } - if (out.getVersion().before(VERSION_INCOMPATIBLE_INTRODUCED)) { - Version versionToWrite = version; - if (versionToWrite == null) { - versionToWrite = Version.CURRENT; - } - Version.writeVersion(versionToWrite, out); + if (version != null) { + out.writeBoolean(true); + Version.writeVersion(version, out); } else { - if (version != null) { - out.writeBoolean(true); - Version.writeVersion(version, out); - } else { - out.writeBoolean(false); - } + out.writeBoolean(false); } if (out.getVersion().onOrAfter(INCLUDE_GLOBAL_STATE_INTRODUCED)) { out.writeOptionalBoolean(includeGlobalState); diff --git a/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java b/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java index 1f62eb706a84b..5c8c25cbfddfe 100644 --- a/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java +++ b/server/src/test/java/org/elasticsearch/ExceptionSerializationTests.java @@ -41,8 +41,6 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.breaker.CircuitBreakingException; -import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.Tuple; import org.elasticsearch.common.io.PathUtils; import org.elasticsearch.common.io.stream.BytesStreamOutput; @@ -104,7 +102,6 @@ import java.nio.file.Path; import java.nio.file.attribute.BasicFileAttributes; import java.util.Arrays; -import java.util.Base64; import java.util.HashMap; import java.util.HashSet; import java.util.Map; @@ -116,7 +113,6 @@ import static java.util.Collections.emptySet; import static java.util.Collections.singleton; import static org.hamcrest.Matchers.greaterThanOrEqualTo; -import static org.hamcrest.Matchers.instanceOf; public class ExceptionSerializationTests extends ESTestCase { @@ -872,89 +868,12 @@ public void testElasticsearchRemoteException() throws IOException { public void testShardLockObtainFailedException() throws IOException { ShardId shardId = new ShardId("foo", "_na_", 1); ShardLockObtainFailedException orig = new ShardLockObtainFailedException(shardId, "boom"); - Version version = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0, Version.CURRENT); - if (version.before(Version.V_5_0_2)) { - version = Version.V_5_0_2; - } + Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, Version.CURRENT); ShardLockObtainFailedException ex = serialize(orig, version); assertEquals(orig.getMessage(), 
ex.getMessage()); assertEquals(orig.getShardId(), ex.getShardId()); } - public void testBWCShardLockObtainFailedException() throws IOException { - ShardId shardId = new ShardId("foo", "_na_", 1); - ShardLockObtainFailedException orig = new ShardLockObtainFailedException(shardId, "boom"); - Exception ex = serialize((Exception)orig, randomFrom(Version.V_5_0_0, Version.V_5_0_1)); - assertThat(ex, instanceOf(NotSerializableExceptionWrapper.class)); - assertEquals("shard_lock_obtain_failed_exception: [foo][1]: boom", ex.getMessage()); - } - - public void testBWCHeadersAndMetadata() throws IOException { - //this is a request serialized with headers only, no metadata as they were added in 5.3.0 - BytesReference decoded = new BytesArray(Base64.getDecoder().decode - ("AQ10ZXN0ICBtZXNzYWdlACYtb3JnLmVsYXN0aWNzZWFyY2guRXhjZXB0aW9uU2VyaWFsaXphdGlvblRlc3RzASBFeGNlcHRpb25TZXJpYWxpemF0aW9uVG" + - "VzdHMuamF2YQR0ZXN03wYkc3VuLnJlZmxlY3QuTmF0aXZlTWV0aG9kQWNjZXNzb3JJbXBsAR1OYXRpdmVNZXRob2RBY2Nlc3NvckltcGwuamF2Y" + - "QdpbnZva2Uw/v///w8kc3VuLnJlZmxlY3QuTmF0aXZlTWV0aG9kQWNjZXNzb3JJbXBsAR1OYXRpdmVNZXRob2RBY2Nlc3NvckltcGwuamF2YQZp" + - "bnZva2U+KHN1bi5yZWZsZWN0LkRlbGVnYXRpbmdNZXRob2RBY2Nlc3NvckltcGwBIURlbGVnYXRpbmdNZXRob2RBY2Nlc3NvckltcGwuamF2YQZ" + - "pbnZva2UrGGphdmEubGFuZy5yZWZsZWN0Lk1ldGhvZAELTWV0aG9kLmphdmEGaW52b2tl8QMzY29tLmNhcnJvdHNlYXJjaC5yYW5kb21pemVkdG" + - "VzdGluZy5SYW5kb21pemVkUnVubmVyARVSYW5kb21pemVkUnVubmVyLmphdmEGaW52b2tlsQ01Y29tLmNhcnJvdHNlYXJjaC5yYW5kb21pemVkd" + - "GVzdGluZy5SYW5kb21pemVkUnVubmVyJDgBFVJhbmRvbWl6ZWRSdW5uZXIuamF2YQhldmFsdWF0ZYsHNWNvbS5jYXJyb3RzZWFyY2gucmFuZG9t" + - "aXplZHRlc3RpbmcuUmFuZG9taXplZFJ1bm5lciQ5ARVSYW5kb21pemVkUnVubmVyLmphdmEIZXZhbHVhdGWvBzZjb20uY2Fycm90c2VhcmNoLnJ" + - "hbmRvbWl6ZWR0ZXN0aW5nLlJhbmRvbWl6ZWRSdW5uZXIkMTABFVJhbmRvbWl6ZWRSdW5uZXIuamF2YQhldmFsdWF0Zb0HOWNvbS5jYXJyb3RzZW" + - "FyY2gucmFuZG9taXplZHRlc3RpbmcucnVsZXMuU3RhdGVtZW50QWRhcHRlcgEVU3RhdGVtZW50QWRhcHRlci5qYXZhCGV2YWx1YXRlJDVvcmcuY" + - "XBhY2hlLmx1Y2VuZS51dGlsLlRlc3RSdWxlU2V0dXBUZWFyZG93bkNoYWluZWQkMQEhVGVzdFJ1bGVTZXR1cFRlYXJkb3duQ2hhaW5lZC5qYXZh" + - "CGV2YWx1YXRlMTBvcmcuYXBhY2hlLmx1Y2VuZS51dGlsLkFic3RyYWN0QmVmb3JlQWZ0ZXJSdWxlJDEBHEFic3RyYWN0QmVmb3JlQWZ0ZXJSdWx" + - "lLmphdmEIZXZhbHVhdGUtMm9yZy5hcGFjaGUubHVjZW5lLnV0aWwuVGVzdFJ1bGVUaHJlYWRBbmRUZXN0TmFtZSQxAR5UZXN0UnVsZVRocmVhZE" + - "FuZFRlc3ROYW1lLmphdmEIZXZhbHVhdGUwN29yZy5hcGFjaGUubHVjZW5lLnV0aWwuVGVzdFJ1bGVJZ25vcmVBZnRlck1heEZhaWx1cmVzJDEBI" + - "1Rlc3RSdWxlSWdub3JlQWZ0ZXJNYXhGYWlsdXJlcy5qYXZhCGV2YWx1YXRlQCxvcmcuYXBhY2hlLmx1Y2VuZS51dGlsLlRlc3RSdWxlTWFya0Zh" + - "aWx1cmUkMQEYVGVzdFJ1bGVNYXJrRmFpbHVyZS5qYXZhCGV2YWx1YXRlLzljb20uY2Fycm90c2VhcmNoLnJhbmRvbWl6ZWR0ZXN0aW5nLnJ1bGV" + - "zLlN0YXRlbWVudEFkYXB0ZXIBFVN0YXRlbWVudEFkYXB0ZXIuamF2YQhldmFsdWF0ZSREY29tLmNhcnJvdHNlYXJjaC5yYW5kb21pemVkdGVzdG" + - "luZy5UaHJlYWRMZWFrQ29udHJvbCRTdGF0ZW1lbnRSdW5uZXIBFlRocmVhZExlYWtDb250cm9sLmphdmEDcnVu7wI0Y29tLmNhcnJvdHNlYXJja" + - "C5yYW5kb21pemVkdGVzdGluZy5UaHJlYWRMZWFrQ29udHJvbAEWVGhyZWFkTGVha0NvbnRyb2wuamF2YRJmb3JrVGltZW91dGluZ1Rhc2urBjZj" + - "b20uY2Fycm90c2VhcmNoLnJhbmRvbWl6ZWR0ZXN0aW5nLlRocmVhZExlYWtDb250cm9sJDMBFlRocmVhZExlYWtDb250cm9sLmphdmEIZXZhbHV" + - "hdGXOAzNjb20uY2Fycm90c2VhcmNoLnJhbmRvbWl6ZWR0ZXN0aW5nLlJhbmRvbWl6ZWRSdW5uZXIBFVJhbmRvbWl6ZWRSdW5uZXIuamF2YQ1ydW" + - "5TaW5nbGVUZXN0lAc1Y29tLmNhcnJvdHNlYXJjaC5yYW5kb21pemVkdGVzdGluZy5SYW5kb21pemVkUnVubmVyJDUBFVJhbmRvbWl6ZWRSdW5uZ" + - "XIuamF2YQhldmFsdWF0ZaIGNWNvbS5jYXJyb3RzZWFyY2gucmFuZG9taXplZHRlc3RpbmcuUmFuZG9taXplZFJ1bm5lciQ2ARVSYW5kb21pemVk" + - 
"UnVubmVyLmphdmEIZXZhbHVhdGXUBjVjb20uY2Fycm90c2VhcmNoLnJhbmRvbWl6ZWR0ZXN0aW5nLlJhbmRvbWl6ZWRSdW5uZXIkNwEVUmFuZG9" + - "taXplZFJ1bm5lci5qYXZhCGV2YWx1YXRl3wYwb3JnLmFwYWNoZS5sdWNlbmUudXRpbC5BYnN0cmFjdEJlZm9yZUFmdGVyUnVsZSQxARxBYnN0cm" + - "FjdEJlZm9yZUFmdGVyUnVsZS5qYXZhCGV2YWx1YXRlLTljb20uY2Fycm90c2VhcmNoLnJhbmRvbWl6ZWR0ZXN0aW5nLnJ1bGVzLlN0YXRlbWVud" + - "EFkYXB0ZXIBFVN0YXRlbWVudEFkYXB0ZXIuamF2YQhldmFsdWF0ZSQvb3JnLmFwYWNoZS5sdWNlbmUudXRpbC5UZXN0UnVsZVN0b3JlQ2xhc3NO" + - "YW1lJDEBG1Rlc3RSdWxlU3RvcmVDbGFzc05hbWUuamF2YQhldmFsdWF0ZSlOY29tLmNhcnJvdHNlYXJjaC5yYW5kb21pemVkdGVzdGluZy5ydWx" + - "lcy5Ob1NoYWRvd2luZ09yT3ZlcnJpZGVzT25NZXRob2RzUnVsZSQxAShOb1NoYWRvd2luZ09yT3ZlcnJpZGVzT25NZXRob2RzUnVsZS5qYXZhCG" + - "V2YWx1YXRlKE5jb20uY2Fycm90c2VhcmNoLnJhbmRvbWl6ZWR0ZXN0aW5nLnJ1bGVzLk5vU2hhZG93aW5nT3JPdmVycmlkZXNPbk1ldGhvZHNSd" + - "WxlJDEBKE5vU2hhZG93aW5nT3JPdmVycmlkZXNPbk1ldGhvZHNSdWxlLmphdmEIZXZhbHVhdGUoOWNvbS5jYXJyb3RzZWFyY2gucmFuZG9taXpl" + - "ZHRlc3RpbmcucnVsZXMuU3RhdGVtZW50QWRhcHRlcgEVU3RhdGVtZW50QWRhcHRlci5qYXZhCGV2YWx1YXRlJDljb20uY2Fycm90c2VhcmNoLnJ" + - "hbmRvbWl6ZWR0ZXN0aW5nLnJ1bGVzLlN0YXRlbWVudEFkYXB0ZXIBFVN0YXRlbWVudEFkYXB0ZXIuamF2YQhldmFsdWF0ZSQ5Y29tLmNhcnJvdH" + - "NlYXJjaC5yYW5kb21pemVkdGVzdGluZy5ydWxlcy5TdGF0ZW1lbnRBZGFwdGVyARVTdGF0ZW1lbnRBZGFwdGVyLmphdmEIZXZhbHVhdGUkM29yZ" + - "y5hcGFjaGUubHVjZW5lLnV0aWwuVGVzdFJ1bGVBc3NlcnRpb25zUmVxdWlyZWQkMQEfVGVzdFJ1bGVBc3NlcnRpb25zUmVxdWlyZWQuamF2YQhl" + - "dmFsdWF0ZTUsb3JnLmFwYWNoZS5sdWNlbmUudXRpbC5UZXN0UnVsZU1hcmtGYWlsdXJlJDEBGFRlc3RSdWxlTWFya0ZhaWx1cmUuamF2YQhldmF" + - "sdWF0ZS83b3JnLmFwYWNoZS5sdWNlbmUudXRpbC5UZXN0UnVsZUlnbm9yZUFmdGVyTWF4RmFpbHVyZXMkMQEjVGVzdFJ1bGVJZ25vcmVBZnRlck" + - "1heEZhaWx1cmVzLmphdmEIZXZhbHVhdGVAMW9yZy5hcGFjaGUubHVjZW5lLnV0aWwuVGVzdFJ1bGVJZ25vcmVUZXN0U3VpdGVzJDEBHVRlc3RSd" + - "WxlSWdub3JlVGVzdFN1aXRlcy5qYXZhCGV2YWx1YXRlNjljb20uY2Fycm90c2VhcmNoLnJhbmRvbWl6ZWR0ZXN0aW5nLnJ1bGVzLlN0YXRlbWVu" + - "dEFkYXB0ZXIBFVN0YXRlbWVudEFkYXB0ZXIuamF2YQhldmFsdWF0ZSREY29tLmNhcnJvdHNlYXJjaC5yYW5kb21pemVkdGVzdGluZy5UaHJlYWR" + - "MZWFrQ29udHJvbCRTdGF0ZW1lbnRSdW5uZXIBFlRocmVhZExlYWtDb250cm9sLmphdmEDcnVu7wIQamF2YS5sYW5nLlRocmVhZAELVGhyZWFkLm" + - "phdmEDcnVu6QUABAdoZWFkZXIyAQZ2YWx1ZTIKZXMuaGVhZGVyMwEGdmFsdWUzB2hlYWRlcjEBBnZhbHVlMQplcy5oZWFkZXI0AQZ2YWx1ZTQAA" + - "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" + - "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA" + - "AAAAA")); - - try (StreamInput in = decoded.streamInput()) { - //randomize the version across released and unreleased ones - Version version = randomFrom(Version.V_5_0_0, Version.V_5_0_1, Version.V_5_0_2, - Version.V_5_1_1, Version.V_5_1_2, Version.V_5_2_0); - in.setVersion(version); - ElasticsearchException exception = new ElasticsearchException(in); - assertEquals("test message", exception.getMessage()); - //the headers received as part of a single set get split based on their prefix - assertEquals(2, exception.getHeaderKeys().size()); - assertEquals("value1", exception.getHeader("header1").get(0)); - assertEquals("value2", exception.getHeader("header2").get(0)); - assertEquals(2, exception.getMetadataKeys().size()); - assertEquals("value3", exception.getMetadata("es.header3").get(0)); - assertEquals("value4", exception.getMetadata("es.header4").get(0)); - } - } - private static class UnknownException extends Exception { UnknownException(final String message, final Exception cause) { super(message, cause); diff --git 
a/server/src/test/java/org/elasticsearch/VersionTests.java b/server/src/test/java/org/elasticsearch/VersionTests.java index 74303bfb6d851..4c7dc9eb094b7 100644 --- a/server/src/test/java/org/elasticsearch/VersionTests.java +++ b/server/src/test/java/org/elasticsearch/VersionTests.java @@ -36,8 +36,8 @@ import java.util.Map; import java.util.Set; -import static org.elasticsearch.Version.V_5_3_0; -import static org.elasticsearch.Version.V_6_0_0_beta1; +import static org.elasticsearch.Version.V_6_3_0; +import static org.elasticsearch.Version.V_7_0_0_alpha1; import static org.elasticsearch.test.VersionUtils.allVersions; import static org.elasticsearch.test.VersionUtils.randomVersion; import static org.hamcrest.CoreMatchers.equalTo; @@ -50,30 +50,30 @@ public class VersionTests extends ESTestCase { public void testVersionComparison() throws Exception { - assertThat(V_5_3_0.before(V_6_0_0_beta1), is(true)); - assertThat(V_5_3_0.before(V_5_3_0), is(false)); - assertThat(V_6_0_0_beta1.before(V_5_3_0), is(false)); + assertThat(V_6_3_0.before(V_7_0_0_alpha1), is(true)); + assertThat(V_6_3_0.before(V_6_3_0), is(false)); + assertThat(V_7_0_0_alpha1.before(V_6_3_0), is(false)); - assertThat(V_5_3_0.onOrBefore(V_6_0_0_beta1), is(true)); - assertThat(V_5_3_0.onOrBefore(V_5_3_0), is(true)); - assertThat(V_6_0_0_beta1.onOrBefore(V_5_3_0), is(false)); + assertThat(V_6_3_0.onOrBefore(V_7_0_0_alpha1), is(true)); + assertThat(V_6_3_0.onOrBefore(V_6_3_0), is(true)); + assertThat(V_7_0_0_alpha1.onOrBefore(V_6_3_0), is(false)); - assertThat(V_5_3_0.after(V_6_0_0_beta1), is(false)); - assertThat(V_5_3_0.after(V_5_3_0), is(false)); - assertThat(V_6_0_0_beta1.after(V_5_3_0), is(true)); + assertThat(V_6_3_0.after(V_7_0_0_alpha1), is(false)); + assertThat(V_6_3_0.after(V_6_3_0), is(false)); + assertThat(V_7_0_0_alpha1.after(V_6_3_0), is(true)); - assertThat(V_5_3_0.onOrAfter(V_6_0_0_beta1), is(false)); - assertThat(V_5_3_0.onOrAfter(V_5_3_0), is(true)); - assertThat(V_6_0_0_beta1.onOrAfter(V_5_3_0), is(true)); + assertThat(V_6_3_0.onOrAfter(V_7_0_0_alpha1), is(false)); + assertThat(V_6_3_0.onOrAfter(V_6_3_0), is(true)); + assertThat(V_7_0_0_alpha1.onOrAfter(V_6_3_0), is(true)); assertTrue(Version.fromString("5.0.0-alpha2").onOrAfter(Version.fromString("5.0.0-alpha1"))); assertTrue(Version.fromString("5.0.0").onOrAfter(Version.fromString("5.0.0-beta2"))); assertTrue(Version.fromString("5.0.0-rc1").onOrAfter(Version.fromString("5.0.0-beta24"))); assertTrue(Version.fromString("5.0.0-alpha24").before(Version.fromString("5.0.0-beta0"))); - assertThat(V_5_3_0, is(lessThan(V_6_0_0_beta1))); - assertThat(V_5_3_0.compareTo(V_5_3_0), is(0)); - assertThat(V_6_0_0_beta1, is(greaterThan(V_5_3_0))); + assertThat(V_6_3_0, is(lessThan(V_7_0_0_alpha1))); + assertThat(V_6_3_0.compareTo(V_6_3_0), is(0)); + assertThat(V_7_0_0_alpha1, is(greaterThan(V_6_3_0))); } public void testMin() { @@ -101,12 +101,12 @@ public void testMax() { } public void testMinimumIndexCompatibilityVersion() { - assertEquals(Version.V_5_0_0, Version.V_6_0_0_beta1.minimumIndexCompatibilityVersion()); - assertEquals(Version.fromId(2000099), Version.V_5_0_0.minimumIndexCompatibilityVersion()); + assertEquals(Version.fromId(5000099), Version.V_6_0_0_beta1.minimumIndexCompatibilityVersion()); + assertEquals(Version.fromId(2000099), Version.fromId(5000099).minimumIndexCompatibilityVersion()); assertEquals(Version.fromId(2000099), - Version.V_5_1_1.minimumIndexCompatibilityVersion()); + Version.fromId(5010000).minimumIndexCompatibilityVersion()); 
assertEquals(Version.fromId(2000099), - Version.V_5_0_0_alpha1.minimumIndexCompatibilityVersion()); + Version.fromId(5000001).minimumIndexCompatibilityVersion()); } public void testVersionConstantPresent() { @@ -160,31 +160,38 @@ public void testVersionNoPresentInSettings() { public void testIndexCreatedVersion() { // an actual index has a IndexMetaData.SETTING_INDEX_UUID - final Version version = randomFrom(Version.V_5_0_0, Version.V_5_0_2, - Version.V_5_2_0, Version.V_6_0_0_beta1); + final Version version = Version.V_6_0_0_beta1; assertEquals(version, Version.indexCreated(Settings.builder().put(IndexMetaData.SETTING_INDEX_UUID, "foo").put(IndexMetaData.SETTING_VERSION_CREATED, version).build())); } public void testMinCompatVersion() { - Version prerelease = VersionUtils.getFirstVersion(); - assertThat(prerelease.minimumCompatibilityVersion(), equalTo(prerelease)); Version major = Version.fromString("2.0.0"); assertThat(Version.fromString("2.0.0").minimumCompatibilityVersion(), equalTo(major)); assertThat(Version.fromString("2.2.0").minimumCompatibilityVersion(), equalTo(major)); assertThat(Version.fromString("2.3.0").minimumCompatibilityVersion(), equalTo(major)); - // from 6.0 on we are supporting the latest minor of the previous major... this might fail once we add a new version ie. 5.x is + + Version major5x = Version.fromString("5.0.0"); + assertThat(Version.fromString("5.0.0").minimumCompatibilityVersion(), equalTo(major5x)); + assertThat(Version.fromString("5.2.0").minimumCompatibilityVersion(), equalTo(major5x)); + assertThat(Version.fromString("5.3.0").minimumCompatibilityVersion(), equalTo(major5x)); + + Version major56x = Version.fromString("5.6.0"); + assertThat(Version.V_6_5_0.minimumCompatibilityVersion(), equalTo(major56x)); + assertThat(Version.V_6_3_1.minimumCompatibilityVersion(), equalTo(major56x)); + + // from 7.0 on we are supporting the latest minor of the previous major... this might fail once we add a new version ie. 
5.x is // released since we need to bump the supported minor in Version#minimumCompatibilityVersion() - Version lastVersion = Version.V_5_6_0; // TODO: remove this once min compat version is a constant instead of method - assertEquals(lastVersion.major, Version.V_6_0_0_beta1.minimumCompatibilityVersion().major); + Version lastVersion = Version.V_6_5_0; // TODO: remove this once min compat version is a constant instead of method + assertEquals(lastVersion.major, Version.V_7_0_0_alpha1.minimumCompatibilityVersion().major); assertEquals("did you miss to bump the minor in Version#minimumCompatibilityVersion()", - lastVersion.minor, Version.V_6_0_0_beta1.minimumCompatibilityVersion().minor); - assertEquals(0, Version.V_6_0_0_beta1.minimumCompatibilityVersion().revision); + lastVersion.minor, Version.V_7_0_0_alpha1.minimumCompatibilityVersion().minor); + assertEquals(0, Version.V_7_0_0_alpha1.minimumCompatibilityVersion().revision); } public void testToString() { // with 2.0.beta we lowercase assertEquals("2.0.0-beta1", Version.fromString("2.0.0-beta1").toString()); - assertEquals("5.0.0-alpha1", Version.V_5_0_0_alpha1.toString()); + assertEquals("5.0.0-alpha1", Version.fromId(5000001).toString()); assertEquals("2.3.0", Version.fromString("2.3.0").toString()); assertEquals("0.90.0.Beta1", Version.fromString("0.90.0.Beta1").toString()); assertEquals("1.0.0.Beta1", Version.fromString("1.0.0.Beta1").toString()); @@ -334,11 +341,11 @@ public static void assertUnknownVersion(Version version) { public void testIsCompatible() { assertTrue(isCompatible(Version.CURRENT, Version.CURRENT.minimumCompatibilityVersion())); - assertTrue(isCompatible(Version.V_5_6_0, Version.V_6_0_0_alpha2)); - assertFalse(isCompatible(Version.fromId(2000099), Version.V_6_0_0_alpha2)); - assertFalse(isCompatible(Version.fromId(2000099), Version.V_5_0_0)); - assertFalse(isCompatible(Version.fromString("6.0.0"), Version.fromString("7.0.0"))); - assertFalse(isCompatible(Version.fromString("6.0.0-alpha1"), Version.fromString("7.0.0"))); + assertTrue(isCompatible(Version.V_6_5_0, Version.V_7_0_0_alpha1)); + assertFalse(isCompatible(Version.fromId(2000099), Version.V_7_0_0_alpha1)); + assertFalse(isCompatible(Version.fromId(2000099), Version.V_6_5_0)); + assertFalse(isCompatible(Version.fromString("7.0.0"), Version.fromString("8.0.0"))); + assertFalse(isCompatible(Version.fromString("7.0.0-alpha1"), Version.fromString("8.0.0"))); final Version currentMajorVersion = Version.fromId(Version.CURRENT.major * 1000000 + 99); final Version currentOrNextMajorVersion; @@ -373,8 +380,8 @@ public void testIsCompatible() { isCompatible(VersionUtils.getPreviousMinorVersion(), currentOrNextMajorVersion), equalTo(isCompatible)); - assertFalse(isCompatible(Version.V_5_0_0, Version.fromString("6.0.0"))); - assertFalse(isCompatible(Version.V_5_0_0, Version.fromString("7.0.0"))); + assertFalse(isCompatible(Version.fromId(5000099), Version.fromString("6.0.0"))); + assertFalse(isCompatible(Version.fromId(5000099), Version.fromString("7.0.0"))); Version a = randomVersion(random()); Version b = randomVersion(random()); diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java index 7bf43b828c05a..3384efcf836c6 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/node/stats/NodeStatsTests.java @@ -49,7 
+49,6 @@ import java.util.List; import java.util.Map; -import static com.carrotsearch.randomizedtesting.RandomizedTest.randomLongBetween; import static java.util.Collections.emptyMap; import static java.util.Collections.emptySet; diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequestTests.java index 232259948fb2f..5f5fe54321bbb 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsRequestTests.java @@ -54,7 +54,7 @@ public void testSerialization() throws Exception { request.routing(routings); } - Version version = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, Version.CURRENT); try (BytesStreamOutput out = new BytesStreamOutput()) { out.setVersion(version); request.writeTo(out); diff --git a/server/src/test/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsResponseTests.java index 90eb7cdcfd46a..f685be02141ad 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/cluster/shards/ClusterSearchShardsResponseTests.java @@ -77,7 +77,7 @@ public void testSerialization() throws Exception { List entries = new ArrayList<>(); entries.addAll(searchModule.getNamedWriteables()); NamedWriteableRegistry namedWriteableRegistry = new NamedWriteableRegistry(entries); - Version version = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, Version.CURRENT); try(BytesStreamOutput out = new BytesStreamOutput()) { out.setVersion(version); clusterSearchShardsResponse.writeTo(out); @@ -93,11 +93,7 @@ public void testSerialization() throws Exception { assertEquals(clusterSearchShardsGroup.getShardId(), deserializedGroup.getShardId()); assertArrayEquals(clusterSearchShardsGroup.getShards(), deserializedGroup.getShards()); } - if (version.onOrAfter(Version.V_5_1_1)) { - assertEquals(clusterSearchShardsResponse.getIndicesAndFilters(), deserialized.getIndicesAndFilters()); - } else { - assertNull(deserialized.getIndicesAndFilters()); - } + assertEquals(clusterSearchShardsResponse.getIndicesAndFilters(), deserialized.getIndicesAndFilters()); } } } diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexResponseTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexResponseTests.java index 0cb0063727fe7..c0685d5d17d29 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/create/CreateIndexResponseTests.java @@ -19,10 +19,7 @@ package org.elasticsearch.action.admin.indices.create; -import org.elasticsearch.Version; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.xcontent.XContentParser; import 
org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.test.AbstractStreamableXContentTestCase; @@ -67,25 +64,6 @@ protected CreateIndexResponse doParseInstance(XContentParser parser) { return CreateIndexResponse.fromXContent(parser); } - public void testSerializationWithOldVersion() throws IOException { - Version oldVersion = Version.V_5_4_0; - CreateIndexResponse response = new CreateIndexResponse(true, true, "foo"); - - try (BytesStreamOutput output = new BytesStreamOutput()) { - output.setVersion(oldVersion); - response.writeTo(output); - - try (StreamInput in = output.bytes().streamInput()) { - in.setVersion(oldVersion); - CreateIndexResponse serialized = new CreateIndexResponse(); - serialized.readFrom(in); - assertEquals(response.isShardsAcknowledged(), serialized.isShardsAcknowledged()); - assertEquals(response.isAcknowledged(), serialized.isAcknowledged()); - assertNull(serialized.index()); - } - } - } - public void testToXContent() { CreateIndexResponse response = new CreateIndexResponse(true, false, "index_name"); String output = Strings.toString(response); diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequestTests.java index 86c2b67be9c54..5243ffd33b39c 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/mapping/put/PutMappingRequestTests.java @@ -19,20 +19,14 @@ package org.elasticsearch.action.admin.indices.mapping.put; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.common.Strings; -import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContent; -import org.elasticsearch.common.xcontent.yaml.YamlXContent; import org.elasticsearch.index.Index; import org.elasticsearch.index.RandomCreateIndexGenerator; import org.elasticsearch.test.ESTestCase; @@ -87,27 +81,6 @@ public void testBuildFromSimplifiedDef() { assertEquals("mapping source must be pairs of fieldnames and properties definition.", e.getMessage()); } - public void testPutMappingRequestSerialization() throws IOException { - PutMappingRequest request = new PutMappingRequest("foo"); - String mapping = Strings.toString(YamlXContent.contentBuilder().startObject().field("foo", "bar").endObject()); - request.source(mapping, XContentType.YAML); - assertEquals(XContentHelper.convertToJson(new BytesArray(mapping), false, XContentType.YAML), request.source()); - - final Version version = randomFrom(Version.CURRENT, Version.V_5_3_0, Version.V_5_3_1, Version.V_5_3_2, Version.V_5_4_0); - try (BytesStreamOutput bytesStreamOutput = new BytesStreamOutput()) { - bytesStreamOutput.setVersion(version); - request.writeTo(bytesStreamOutput); - try (StreamInput in = StreamInput.wrap(bytesStreamOutput.bytes().toBytesRef().bytes)) { - in.setVersion(version); - PutMappingRequest 
serialized = new PutMappingRequest(); - serialized.readFrom(in); - - String source = serialized.source(); - assertEquals(XContentHelper.convertToJson(new BytesArray(mapping), false, XContentType.YAML), source); - } - } - } - public void testToXContent() throws IOException { PutMappingRequest request = new PutMappingRequest("foo"); request.type("my_type"); diff --git a/server/src/test/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequestTests.java b/server/src/test/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequestTests.java index c21e6b3c225f0..2d037d7c024d5 100644 --- a/server/src/test/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/admin/indices/template/put/PutIndexTemplateRequestTests.java @@ -18,25 +18,16 @@ */ package org.elasticsearch.action.admin.indices.template.put; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.admin.indices.alias.Alias; -import org.elasticsearch.common.Strings; -import org.elasticsearch.common.bytes.BytesArray; -import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.XContentFactory; -import org.elasticsearch.common.xcontent.XContentHelper; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.XContentType; -import org.elasticsearch.common.xcontent.yaml.YamlXContent; import org.elasticsearch.test.AbstractXContentTestCase; import java.io.IOException; import java.io.UncheckedIOException; import java.util.Arrays; -import java.util.Base64; import java.util.Collections; import static org.hamcrest.Matchers.containsString; @@ -46,81 +37,6 @@ import static org.hamcrest.core.Is.is; public class PutIndexTemplateRequestTests extends AbstractXContentTestCase { - - // bwc for #21009 - public void testPutIndexTemplateRequest510() throws IOException { - PutIndexTemplateRequest putRequest = new PutIndexTemplateRequest("test"); - putRequest.patterns(Collections.singletonList("test*")); - putRequest.order(5); - - PutIndexTemplateRequest multiPatternRequest = new PutIndexTemplateRequest("test"); - multiPatternRequest.patterns(Arrays.asList("test*", "*test2", "*test3*")); - multiPatternRequest.order(5); - - // These bytes were retrieved by Base64 encoding the result of the above with 5_0_0 code. - // Note: Instead of a list for the template, in 5_0_0 the element was provided as a string. - String putRequestBytes = "ADwDAAR0ZXN0BXRlc3QqAAAABQAAAAAAAA=="; - BytesArray bytes = new BytesArray(Base64.getDecoder().decode(putRequestBytes)); - - try (StreamInput in = bytes.streamInput()) { - in.setVersion(Version.V_5_0_0); - PutIndexTemplateRequest readRequest = new PutIndexTemplateRequest(); - readRequest.readFrom(in); - assertEquals(putRequest.patterns(), readRequest.patterns()); - assertEquals(putRequest.order(), readRequest.order()); - - BytesStreamOutput output = new BytesStreamOutput(); - output.setVersion(Version.V_5_0_0); - readRequest.writeTo(output); - assertEquals(bytes.toBytesRef(), output.bytes().toBytesRef()); - - // test that multi templates are reverse-compatible. - // for the bwc case, if multiple patterns, use only the first pattern seen. 
- output.reset(); - multiPatternRequest.writeTo(output); - assertEquals(bytes.toBytesRef(), output.bytes().toBytesRef()); - } - } - - public void testPutIndexTemplateRequestSerializationXContent() throws IOException { - PutIndexTemplateRequest request = new PutIndexTemplateRequest("foo"); - String mapping = Strings.toString(YamlXContent.contentBuilder().startObject().field("foo", "bar").endObject()); - request.patterns(Collections.singletonList("foo")); - request.mapping("bar", mapping, XContentType.YAML); - assertNotEquals(mapping, request.mappings().get("bar")); - assertEquals(XContentHelper.convertToJson(new BytesArray(mapping), false, XContentType.YAML), request.mappings().get("bar")); - - final Version version = randomFrom(Version.CURRENT, Version.V_5_3_0, Version.V_5_3_1, Version.V_5_3_2, Version.V_5_4_0); - try (BytesStreamOutput out = new BytesStreamOutput()) { - out.setVersion(version); - request.writeTo(out); - - try (StreamInput in = StreamInput.wrap(out.bytes().toBytesRef().bytes)) { - in.setVersion(version); - PutIndexTemplateRequest serialized = new PutIndexTemplateRequest(); - serialized.readFrom(in); - assertEquals(XContentHelper.convertToJson(new BytesArray(mapping), false, XContentType.YAML), - serialized.mappings().get("bar")); - } - } - } - - public void testPutIndexTemplateRequestSerializationXContentBwc() throws IOException { - final byte[] data = Base64.getDecoder().decode("ADwDAANmb28IdGVtcGxhdGUAAAAAAAABA2Jhcg8tLS0KZm9vOiAiYmFyIgoAAAAAAAAAAAAAAAA="); - final Version version = randomFrom(Version.V_5_0_0, Version.V_5_0_1, Version.V_5_0_2, - Version.V_5_1_1, Version.V_5_1_2, Version.V_5_2_0); - try (StreamInput in = StreamInput.wrap(data)) { - in.setVersion(version); - PutIndexTemplateRequest request = new PutIndexTemplateRequest(); - request.readFrom(in); - String mapping = Strings.toString(YamlXContent.contentBuilder().startObject().field("foo", "bar").endObject()); - assertNotEquals(mapping, request.mappings().get("bar")); - assertEquals(XContentHelper.convertToJson(new BytesArray(mapping), false, XContentType.YAML), request.mappings().get("bar")); - assertEquals("foo", request.name()); - assertEquals("template", request.patterns().get(0)); - } - } - public void testValidateErrorMessage() throws Exception { PutIndexTemplateRequest request = new PutIndexTemplateRequest(); ActionRequestValidationException withoutNameAndPattern = request.validate(); diff --git a/server/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineRequestTests.java b/server/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineRequestTests.java index 5cd82be8cb04c..53c307c430815 100644 --- a/server/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineRequestTests.java +++ b/server/src/test/java/org/elasticsearch/action/ingest/SimulatePipelineRequestTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.action.ingest; -import org.elasticsearch.Version; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.StreamInput; @@ -28,7 +27,6 @@ import java.io.IOException; import java.nio.charset.StandardCharsets; -import java.util.Base64; import static org.hamcrest.CoreMatchers.equalTo; @@ -68,22 +66,4 @@ public void testSerializationWithXContent() throws IOException { assertEquals(XContentType.JSON, serialized.getXContentType()); assertEquals("{}", serialized.getSource().utf8ToString()); } - - public void testSerializationWithXContentBwc() throws IOException { - final byte[] data 
= Base64.getDecoder().decode("AAAAAnt9AAA="); - final Version version = randomFrom(Version.V_5_0_0, Version.V_5_0_1, Version.V_5_0_2, - Version.V_5_1_1, Version.V_5_1_2, Version.V_5_2_0); - try (StreamInput in = StreamInput.wrap(data)) { - in.setVersion(version); - SimulatePipelineRequest request = new SimulatePipelineRequest(in); - assertEquals(XContentType.JSON, request.getXContentType()); - assertEquals("{}", request.getSource().utf8ToString()); - - try (BytesStreamOutput out = new BytesStreamOutput()) { - out.setVersion(version); - request.writeTo(out); - assertArrayEquals(data, out.bytes().toBytesRef().bytes); - } - } - } } diff --git a/server/src/test/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhaseTests.java b/server/src/test/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhaseTests.java index 8b1741967734c..50bbad16ab73b 100644 --- a/server/src/test/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhaseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/CanMatchPreFilterSearchPhaseTests.java @@ -33,7 +33,6 @@ import org.elasticsearch.search.internal.AliasFilter; import org.elasticsearch.search.internal.ShardSearchTransportRequest; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.test.VersionUtils; import org.elasticsearch.transport.Transport; import java.io.IOException; @@ -110,17 +109,6 @@ public void run() throws IOException { } } - public void testOldNodesTriggerException() { - SearchTransportService searchTransportService = new SearchTransportService( - Settings.builder().put("search.remote.connect", false).build(), null, null); - DiscoveryNode node = new DiscoveryNode("node_1", buildNewFakeTransportAddress(), VersionUtils.randomVersionBetween(random(), - VersionUtils.getFirstVersion(), VersionUtils.getPreviousVersion(Version.V_5_6_0))); - SearchAsyncActionTests.MockConnection mockConnection = new SearchAsyncActionTests.MockConnection(node); - IllegalArgumentException illegalArgumentException = expectThrows(IllegalArgumentException.class, - () -> searchTransportService.sendCanMatch(mockConnection, null, null, null)); - assertEquals("can_match is not supported on pre 5.6 nodes", illegalArgumentException.getMessage()); - } - public void testFilterWithFailure() throws InterruptedException { final TransportSearchAction.SearchTimeProvider timeProvider = new TransportSearchAction.SearchTimeProvider(0, System.nanoTime(), System::nanoTime); diff --git a/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java b/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java index 87e66477a0411..feb5ef50795dc 100644 --- a/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java +++ b/server/src/test/java/org/elasticsearch/action/search/SearchResponseTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.action.search; -import org.elasticsearch.Version; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; @@ -47,13 +46,11 @@ import org.elasticsearch.search.suggest.SuggestTests; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.InternalAggregationTestCase; -import org.elasticsearch.test.VersionUtils; import org.junit.After; import org.junit.Before; import java.io.IOException; import java.util.ArrayList; -import java.util.Base64; import java.util.Collections; import java.util.List; @@ -290,27 +287,4 @@ public void testSerialization() 
throws IOException { assertEquals(searchResponse.getClusters(), serialized.getClusters()); } } - - public void testSerializationBwc() throws IOException { - final byte[] data = Base64.getDecoder().decode("AAAAAAAAAAAAAgABBQUAAAoAAAAAAAAA"); - final Version version = VersionUtils.randomVersionBetween(random(), Version.V_5_6_5, Version.V_6_0_0); - try (StreamInput in = new NamedWriteableAwareStreamInput(StreamInput.wrap(data), namedWriteableRegistry)) { - in.setVersion(version); - SearchResponse deserialized = new SearchResponse(); - deserialized.readFrom(in); - assertSame(SearchResponse.Clusters.EMPTY, deserialized.getClusters()); - - try (BytesStreamOutput out = new BytesStreamOutput()) { - out.setVersion(version); - deserialized.writeTo(out); - try (StreamInput in2 = new NamedWriteableAwareStreamInput(StreamInput.wrap(out.bytes().toBytesRef().bytes), - namedWriteableRegistry)) { - in2.setVersion(version); - SearchResponse deserialized2 = new SearchResponse(); - deserialized2.readFrom(in2); - assertSame(SearchResponse.Clusters.EMPTY, deserialized2.getClusters()); - } - } - } - } } diff --git a/server/src/test/java/org/elasticsearch/action/termvectors/TermVectorsUnitTests.java b/server/src/test/java/org/elasticsearch/action/termvectors/TermVectorsUnitTests.java index a16a8f628f98b..216c1802956e8 100644 --- a/server/src/test/java/org/elasticsearch/action/termvectors/TermVectorsUnitTests.java +++ b/server/src/test/java/org/elasticsearch/action/termvectors/TermVectorsUnitTests.java @@ -36,14 +36,11 @@ import org.apache.lucene.search.TermQuery; import org.apache.lucene.search.TopDocs; import org.apache.lucene.store.Directory; -import org.elasticsearch.Version; import org.elasticsearch.action.termvectors.TermVectorsRequest.Flag; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; -import org.elasticsearch.common.io.stream.BytesStreamOutput; import org.elasticsearch.common.io.stream.InputStreamStreamInput; import org.elasticsearch.common.io.stream.OutputStreamStreamOutput; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.xcontent.XContentParser; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContent; @@ -60,7 +57,6 @@ import java.io.ByteArrayOutputStream; import java.io.IOException; import java.util.Arrays; -import java.util.Base64; import java.util.EnumSet; import java.util.HashSet; import java.util.Set; @@ -264,34 +260,6 @@ public void testStreamRequest() throws IOException { } } - public void testStreamRequestWithXContentBwc() throws IOException { - final byte[] data = Base64.getDecoder().decode("AAABBWluZGV4BHR5cGUCaWQBAnt9AAABDnNvbWVQcmVmZXJlbmNlFgAAAAEA//////////0AAAA="); - final Version version = randomFrom(Version.V_5_0_0, Version.V_5_0_1, Version.V_5_0_2, - Version.V_5_1_1, Version.V_5_1_2, Version.V_5_2_0); - try (StreamInput in = StreamInput.wrap(data)) { - in.setVersion(version); - TermVectorsRequest request = new TermVectorsRequest(); - request.readFrom(in); - assertEquals("index", request.index()); - assertEquals("type", request.type()); - assertEquals("id", request.id()); - assertTrue(request.offsets()); - assertFalse(request.fieldStatistics()); - assertTrue(request.payloads()); - assertFalse(request.positions()); - assertTrue(request.termStatistics()); - assertEquals("somePreference", request.preference()); - assertEquals("{}", request.doc().utf8ToString()); - assertEquals(XContentType.JSON, request.xContentType()); - - try 
(BytesStreamOutput out = new BytesStreamOutput()) { - out.setVersion(version); - request.writeTo(out); - assertArrayEquals(data, out.bytes().toBytesRef().bytes); - } - } - } - public void testFieldTypeToTermVectorString() throws Exception { FieldType ft = new FieldType(); ft.setStoreTermVectorOffsets(false); diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaDataTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaDataTests.java index 6d489f5feb314..c98587c4cc63f 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaDataTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/IndexTemplateMetaDataTests.java @@ -18,12 +18,9 @@ */ package org.elasticsearch.cluster.metadata; -import org.elasticsearch.Version; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.collect.ImmutableOpenMap; -import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.settings.Settings; import org.elasticsearch.common.xcontent.DeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; @@ -35,62 +32,15 @@ import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.test.ESTestCase; -import java.io.IOException; import java.util.Arrays; -import java.util.Base64; import java.util.Collections; import static java.util.Collections.singletonMap; -import static org.elasticsearch.cluster.metadata.AliasMetaData.newAliasMetaDataBuilder; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.Matchers.contains; public class IndexTemplateMetaDataTests extends ESTestCase { - // bwc for #21009 - public void testIndexTemplateMetaData510() throws IOException { - IndexTemplateMetaData metaData = IndexTemplateMetaData.builder("foo") - .patterns(Collections.singletonList("bar")) - .order(1) - .settings(Settings.builder() - .put("setting1", "value1") - .put("setting2", "value2")) - .putAlias(newAliasMetaDataBuilder("alias-bar1")).build(); - - IndexTemplateMetaData multiMetaData = IndexTemplateMetaData.builder("foo") - .patterns(Arrays.asList("bar", "foo")) - .order(1) - .settings(Settings.builder() - .put("setting1", "value1") - .put("setting2", "value2")) - .putAlias(newAliasMetaDataBuilder("alias-bar1")).build(); - - // These bytes were retrieved by Base64 encoding the result of the above with 5_0_0 code - String templateBytes = "A2ZvbwAAAAEDYmFyAghzZXR0aW5nMQEGdmFsdWUxCHNldHRpbmcyAQZ2YWx1ZTIAAQphbGlhcy1iYXIxAAAAAAA="; - BytesArray bytes = new BytesArray(Base64.getDecoder().decode(templateBytes)); - - try (StreamInput in = bytes.streamInput()) { - in.setVersion(Version.V_5_0_0); - IndexTemplateMetaData readMetaData = IndexTemplateMetaData.readFrom(in); - assertEquals(0, in.available()); - assertEquals(metaData.getName(), readMetaData.getName()); - assertEquals(metaData.getPatterns(), readMetaData.getPatterns()); - assertTrue(metaData.aliases().containsKey("alias-bar1")); - assertEquals(1, metaData.aliases().size()); - - BytesStreamOutput output = new BytesStreamOutput(); - output.setVersion(Version.V_5_0_0); - readMetaData.writeTo(output); - assertEquals(bytes.toBytesRef(), output.bytes().toBytesRef()); - - // test that multi templates are reverse-compatible. - // for the bwc case, if multiple patterns, use only the first pattern seen. 
- output.reset(); - multiMetaData.writeTo(output); - assertEquals(bytes.toBytesRef(), output.bytes().toBytesRef()); - } - } - public void testIndexTemplateMetaDataXContentRoundTrip() throws Exception { ToXContent.Params params = new ToXContent.MapParams(singletonMap("reduce_mappings", "true")); diff --git a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeServiceTests.java b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeServiceTests.java index e329e70134c0c..c1e341fd5bc2f 100644 --- a/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeServiceTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/metadata/MetaDataIndexUpgradeServiceTests.java @@ -147,7 +147,7 @@ public static IndexMetaData newIndexMeta(String name, Settings indexSettings) { .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetaData.SETTING_CREATION_DATE, 1) .put(IndexMetaData.SETTING_INDEX_UUID, "BOOM") - .put(IndexMetaData.SETTING_VERSION_UPGRADED, Version.V_5_0_0_beta1) + .put(IndexMetaData.SETTING_VERSION_UPGRADED, Version.V_6_0_0_alpha1) .put(indexSettings) .build(); return IndexMetaData.builder(name).settings(build).build(); diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedNodeRoutingTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedNodeRoutingTests.java index 8038d9b5e18de..d4645208071a3 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedNodeRoutingTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedNodeRoutingTests.java @@ -228,7 +228,7 @@ protected DiscoveryNode createNode(DiscoveryNode.Role... mustHaveRoles) { } final String id = String.format(Locale.ROOT, "node_%03d", nodeIdGenerator.incrementAndGet()); return new DiscoveryNode(id, id, buildNewFakeTransportAddress(), Collections.emptyMap(), roles, - VersionUtils.randomVersionBetween(random(), Version.V_5_6_0, null)); + VersionUtils.randomVersionBetween(random(), Version.V_6_0_0_alpha1, null)); } } diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedShardsRoutingTests.java b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedShardsRoutingTests.java index 1fa1ff3a154af..787789d410ff9 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedShardsRoutingTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/FailedShardsRoutingTests.java @@ -576,7 +576,7 @@ public void testReplicaOnNewestVersionIsPromoted() { // add a single node clusterState = ClusterState.builder(clusterState).nodes( DiscoveryNodes.builder() - .add(newNode("node1-5.x", Version.V_5_6_0))) + .add(newNode("node1-5.x", Version.fromId(5060099)))) .build(); clusterState = ClusterState.builder(clusterState).routingTable(allocation.reroute(clusterState, "reroute").routingTable()).build(); assertThat(clusterState.getRoutingNodes().shardsWithState(INITIALIZING).size(), equalTo(1)); @@ -590,7 +590,7 @@ public void testReplicaOnNewestVersionIsPromoted() { // add another 5.6 node clusterState = ClusterState.builder(clusterState).nodes( DiscoveryNodes.builder(clusterState.nodes()) - .add(newNode("node2-5.x", Version.V_5_6_0))) + .add(newNode("node2-5.x", Version.fromId(5060099)))) .build(); // start the shards, should have 1 primary and 1 replica available diff --git a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ResizeAllocationDeciderTests.java 
b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ResizeAllocationDeciderTests.java index 2022ecb945ba0..536e3cbb7e08d 100644 --- a/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ResizeAllocationDeciderTests.java +++ b/server/src/test/java/org/elasticsearch/cluster/routing/allocation/ResizeAllocationDeciderTests.java @@ -19,7 +19,6 @@ package org.elasticsearch.cluster.routing.allocation; import org.elasticsearch.Version; -import org.elasticsearch.action.admin.indices.shrink.ResizeAction; import org.elasticsearch.cluster.ClusterName; import org.elasticsearch.cluster.ClusterState; import org.elasticsearch.cluster.ESAllocationTestCase; @@ -39,7 +38,6 @@ import org.elasticsearch.common.settings.Settings; import org.elasticsearch.index.Index; import org.elasticsearch.index.shard.ShardId; -import org.elasticsearch.test.VersionUtils; import org.elasticsearch.test.gateway.TestGatewayAllocator; import java.util.Arrays; @@ -243,46 +241,4 @@ public void testSourcePrimaryActive() { routingAllocation).getExplanation()); } } - - public void testAllocateOnOldNode() { - Version version = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0, - VersionUtils.getPreviousVersion(ResizeAction.COMPATIBILITY_VERSION)); - ClusterState clusterState = createInitialClusterState(true, version); - MetaData.Builder metaBuilder = MetaData.builder(clusterState.metaData()); - metaBuilder.put(IndexMetaData.builder("target").settings(settings(Version.CURRENT) - .put(IndexMetaData.INDEX_RESIZE_SOURCE_NAME.getKey(), "source") - .put(IndexMetaData.INDEX_RESIZE_SOURCE_UUID_KEY, IndexMetaData.INDEX_UUID_NA_VALUE)) - .numberOfShards(4).numberOfReplicas(0)); - MetaData metaData = metaBuilder.build(); - RoutingTable.Builder routingTableBuilder = RoutingTable.builder(clusterState.routingTable()); - routingTableBuilder.addAsNew(metaData.index("target")); - - clusterState = ClusterState.builder(clusterState) - .routingTable(routingTableBuilder.build()) - .metaData(metaData).build(); - Index idx = clusterState.metaData().index("target").getIndex(); - - - ResizeAllocationDecider resizeAllocationDecider = new ResizeAllocationDecider(Settings.EMPTY); - RoutingAllocation routingAllocation = new RoutingAllocation(null, clusterState.getRoutingNodes(), clusterState, null, 0); - int shardId = randomIntBetween(0, 3); - int sourceShardId = IndexMetaData.selectSplitShard(shardId, clusterState.metaData().index("source"), 4).id(); - ShardRouting shardRouting = TestShardRouting.newShardRouting(new ShardId(idx, shardId), null, true, RecoverySource - .LocalShardsRecoverySource.INSTANCE, ShardRoutingState.UNASSIGNED); - assertEquals(Decision.YES, resizeAllocationDecider.canAllocate(shardRouting, routingAllocation)); - - assertEquals(Decision.NO, resizeAllocationDecider.canAllocate(shardRouting, clusterState.getRoutingNodes().node("node1"), - routingAllocation)); - assertEquals(Decision.NO, resizeAllocationDecider.canAllocate(shardRouting, clusterState.getRoutingNodes().node("node2"), - routingAllocation)); - - routingAllocation.debugDecision(true); - assertEquals("source primary is active", resizeAllocationDecider.canAllocate(shardRouting, routingAllocation).getExplanation()); - assertEquals("node [node1] is too old to split a shard", - resizeAllocationDecider.canAllocate(shardRouting, clusterState.getRoutingNodes().node("node1"), - routingAllocation).getExplanation()); - assertEquals("node [node2] is too old to split a shard", - resizeAllocationDecider.canAllocate(shardRouting, 
clusterState.getRoutingNodes().node("node2"), - routingAllocation).getExplanation()); - } } diff --git a/server/src/test/java/org/elasticsearch/common/unit/ByteSizeValueTests.java b/server/src/test/java/org/elasticsearch/common/unit/ByteSizeValueTests.java index e193ea34498cf..feaa7c4a0ae58 100644 --- a/server/src/test/java/org/elasticsearch/common/unit/ByteSizeValueTests.java +++ b/server/src/test/java/org/elasticsearch/common/unit/ByteSizeValueTests.java @@ -20,7 +20,6 @@ package org.elasticsearch.common.unit; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.Writeable.Reader; import org.elasticsearch.test.AbstractWireSerializingTestCase; import org.hamcrest.MatcherAssert; @@ -319,9 +318,4 @@ public void testGetBytesAsInt() { } } } - - public void testOldSerialisation() throws IOException { - ByteSizeValue original = createTestInstance(); - assertSerialization(original, randomFrom(Version.V_5_6_4, Version.V_5_6_5, Version.V_6_0_0, Version.V_6_0_1, Version.V_6_1_0)); - } } diff --git a/server/src/test/java/org/elasticsearch/common/util/IndexFolderUpgraderTests.java b/server/src/test/java/org/elasticsearch/common/util/IndexFolderUpgraderTests.java index 76dd8e343a266..dd2627f4bc206 100644 --- a/server/src/test/java/org/elasticsearch/common/util/IndexFolderUpgraderTests.java +++ b/server/src/test/java/org/elasticsearch/common/util/IndexFolderUpgraderTests.java @@ -63,7 +63,7 @@ public void testUpgradeCustomDataPath() throws IOException { Settings settings = Settings.builder() .put(nodeSettings) .put(IndexMetaData.SETTING_INDEX_UUID, index.getUUID()) - .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_5_0_0) + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_6_0_0) .put(IndexMetaData.SETTING_DATA_PATH, customPath.toAbsolutePath().toString()) .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, randomIntBetween(1, 5)) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) @@ -91,7 +91,7 @@ public void testPartialUpgradeCustomDataPath() throws IOException { Settings settings = Settings.builder() .put(nodeSettings) .put(IndexMetaData.SETTING_INDEX_UUID, index.getUUID()) - .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_5_0_0) + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_6_0_0) .put(IndexMetaData.SETTING_DATA_PATH, customPath.toAbsolutePath().toString()) .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, randomIntBetween(1, 5)) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) @@ -129,7 +129,7 @@ public void testUpgrade() throws IOException { Settings settings = Settings.builder() .put(nodeSettings) .put(IndexMetaData.SETTING_INDEX_UUID, index.getUUID()) - .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_5_0_0) + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_6_0_0) .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, randomIntBetween(1, 5)) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) .build(); @@ -153,7 +153,7 @@ public void testUpgradeIndices() throws IOException { Settings settings = Settings.builder() .put(nodeSettings) .put(IndexMetaData.SETTING_INDEX_UUID, index.getUUID()) - .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_5_0_0) + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_6_0_0) .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, randomIntBetween(1, 5)) .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 0) .build(); diff --git a/server/src/test/java/org/elasticsearch/discovery/zen/MembershipActionTests.java 
b/server/src/test/java/org/elasticsearch/discovery/zen/MembershipActionTests.java index 2f4be2fcd5394..3c06838593fb9 100644 --- a/server/src/test/java/org/elasticsearch/discovery/zen/MembershipActionTests.java +++ b/server/src/test/java/org/elasticsearch/discovery/zen/MembershipActionTests.java @@ -80,7 +80,7 @@ public void testPreventJoinClusterWithUnsupportedNodeVersions() { final Version maxNodeVersion = nodes.getMaxNodeVersion(); final Version minNodeVersion = nodes.getMinNodeVersion(); - if (maxNodeVersion.onOrAfter(Version.V_6_0_0_alpha1)) { + if (maxNodeVersion.onOrAfter(Version.V_7_0_0_alpha1)) { final Version tooLow = getPreviousVersion(maxNodeVersion.minimumCompatibilityVersion()); expectThrows(IllegalStateException.class, () -> { if (randomBoolean()) { @@ -91,7 +91,7 @@ public void testPreventJoinClusterWithUnsupportedNodeVersions() { }); } - if (minNodeVersion.before(Version.V_5_5_0)) { + if (minNodeVersion.before(Version.V_6_0_0)) { Version tooHigh = incompatibleFutureVersion(minNodeVersion); expectThrows(IllegalStateException.class, () -> { if (randomBoolean()) { @@ -102,7 +102,7 @@ public void testPreventJoinClusterWithUnsupportedNodeVersions() { }); } - if (minNodeVersion.onOrAfter(Version.V_6_0_0_alpha1)) { + if (minNodeVersion.onOrAfter(Version.V_7_0_0_alpha1)) { Version oldMajor = randomFrom(allVersions().stream().filter(v -> v.major < 6).collect(Collectors.toList())); expectThrows(IllegalStateException.class, () -> MembershipAction.ensureMajorVersionBarrier(oldMajor, minNodeVersion)); } diff --git a/server/src/test/java/org/elasticsearch/get/GetActionIT.java b/server/src/test/java/org/elasticsearch/get/GetActionIT.java index 5ed6b957c78a4..829d6ff7c1458 100644 --- a/server/src/test/java/org/elasticsearch/get/GetActionIT.java +++ b/server/src/test/java/org/elasticsearch/get/GetActionIT.java @@ -528,7 +528,7 @@ public void testGetFieldsMetaDataWithRouting() throws Exception { assertAcked(prepareCreate("test") .addMapping("_doc", "field1", "type=keyword,store=true") .addAlias(new Alias("alias")) - .setSettings(Settings.builder().put("index.refresh_interval", -1).put("index.version.created", Version.V_5_6_0.id))); + .setSettings(Settings.builder().put("index.refresh_interval", -1).put("index.version.created", Version.V_6_0_0.id))); // multi types in 5.6 client().prepareIndex("test", "_doc", "1") diff --git a/server/src/test/java/org/elasticsearch/index/IndexSortSettingsTests.java b/server/src/test/java/org/elasticsearch/index/IndexSortSettingsTests.java index 78569d927be76..0dcba53df88e7 100644 --- a/server/src/test/java/org/elasticsearch/index/IndexSortSettingsTests.java +++ b/server/src/test/java/org/elasticsearch/index/IndexSortSettingsTests.java @@ -146,15 +146,4 @@ public void testInvalidMissing() throws IOException { assertThat(exc.getMessage(), containsString("Illegal missing value:[default]," + " must be one of [_last, _first]")); } - - public void testInvalidVersion() throws IOException { - final Settings settings = Settings.builder() - .put("index.sort.field", "field1") - .build(); - IllegalArgumentException exc = - expectThrows(IllegalArgumentException.class, () -> indexSettings(settings, Version.V_5_4_0)); - assertThat(exc.getMessage(), - containsString("unsupported index.version.created:5.4.0, " + - "can't set index.sort on versions prior to 6.0.0-alpha1")); - } } diff --git a/server/src/test/java/org/elasticsearch/index/analysis/AnalysisRegistryTests.java b/server/src/test/java/org/elasticsearch/index/analysis/AnalysisRegistryTests.java index 
26a5b87866c21..04dc98deb7bf5 100644 --- a/server/src/test/java/org/elasticsearch/index/analysis/AnalysisRegistryTests.java +++ b/server/src/test/java/org/elasticsearch/index/analysis/AnalysisRegistryTests.java @@ -103,7 +103,7 @@ public void testOverrideDefaultAnalyzer() throws IOException { } public void testOverrideDefaultIndexAnalyzerIsUnsupported() { - Version version = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0_alpha1, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0_alpha1, Version.CURRENT); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); AnalyzerProvider defaultIndex = new PreBuiltAnalyzerProvider("default_index", AnalyzerScope.INDEX, new EnglishAnalyzer()); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, diff --git a/server/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java b/server/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java index 9aba48f7de55b..33ec090c61e01 100644 --- a/server/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java +++ b/server/src/test/java/org/elasticsearch/index/analysis/PreBuiltAnalyzerTests.java @@ -56,21 +56,21 @@ public void testThatDefaultAndStandardAnalyzerAreTheSameInstance() { public void testThatInstancesAreTheSameAlwaysForKeywordAnalyzer() { assertThat(PreBuiltAnalyzers.KEYWORD.getAnalyzer(Version.CURRENT), - is(PreBuiltAnalyzers.KEYWORD.getAnalyzer(Version.V_5_0_0))); + is(PreBuiltAnalyzers.KEYWORD.getAnalyzer(Version.V_6_0_0))); } public void testThatInstancesAreCachedAndReused() { assertSame(PreBuiltAnalyzers.STANDARD.getAnalyzer(Version.CURRENT), PreBuiltAnalyzers.STANDARD.getAnalyzer(Version.CURRENT)); // same es version should be cached - assertSame(PreBuiltAnalyzers.STANDARD.getAnalyzer(Version.V_5_2_1), - PreBuiltAnalyzers.STANDARD.getAnalyzer(Version.V_5_2_1)); - assertNotSame(PreBuiltAnalyzers.STANDARD.getAnalyzer(Version.V_5_0_0), - PreBuiltAnalyzers.STANDARD.getAnalyzer(Version.V_5_0_1)); + assertSame(PreBuiltAnalyzers.STANDARD.getAnalyzer(Version.V_6_2_1), + PreBuiltAnalyzers.STANDARD.getAnalyzer(Version.V_6_2_1)); + assertNotSame(PreBuiltAnalyzers.STANDARD.getAnalyzer(Version.V_6_0_0), + PreBuiltAnalyzers.STANDARD.getAnalyzer(Version.V_6_0_1)); // Same Lucene version should be cached: - assertSame(PreBuiltAnalyzers.STOP.getAnalyzer(Version.V_5_2_1), - PreBuiltAnalyzers.STOP.getAnalyzer(Version.V_5_2_2)); + assertSame(PreBuiltAnalyzers.STOP.getAnalyzer(Version.V_6_2_1), + PreBuiltAnalyzers.STOP.getAnalyzer(Version.V_6_2_2)); } public void testThatAnalyzersAreUsedInMapping() throws IOException { diff --git a/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplateTests.java b/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplateTests.java index f48603d30515f..a910c2c86bab8 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplateTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/DynamicTemplateTests.java @@ -40,18 +40,11 @@ public void testParseUnknownParam() throws Exception { templateDef.put("random_param", "random_value"); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, - () -> DynamicTemplate.parse("my_template", templateDef, Version.V_5_0_0_alpha1)); + () -> DynamicTemplate.parse("my_template", templateDef, Version.V_6_0_0_alpha1)); assertEquals("Illegal dynamic template parameter: [random_param]", e.getMessage()); } public void 
testParseUnknownMatchType() { - Map templateDef = new HashMap<>(); - templateDef.put("match_mapping_type", "short"); - templateDef.put("mapping", Collections.singletonMap("store", true)); - // if a wrong match type is specified, we ignore the template - assertNull(DynamicTemplate.parse("my_template", templateDef, Version.V_5_0_0_alpha5)); - assertWarnings("match_mapping_type [short] is invalid and will be ignored: No field type matched on [short], " + - "possible values are [object, string, long, double, boolean, date, binary]"); Map templateDef2 = new HashMap<>(); templateDef2.put("match_mapping_type", "text"); templateDef2.put("mapping", Collections.singletonMap("store", true)); @@ -79,7 +72,7 @@ public void testMatchAllTemplate() { Map templateDef = new HashMap<>(); templateDef.put("match_mapping_type", "*"); templateDef.put("mapping", Collections.singletonMap("store", true)); - DynamicTemplate template = DynamicTemplate.parse("my_template", templateDef, Version.V_5_0_0_alpha5); + DynamicTemplate template = DynamicTemplate.parse("my_template", templateDef, Version.V_6_0_0_alpha1); assertTrue(template.match("a.b", "b", randomFrom(XContentFieldType.values()))); } @@ -87,7 +80,7 @@ public void testMatchTypeTemplate() { Map templateDef = new HashMap<>(); templateDef.put("match_mapping_type", "string"); templateDef.put("mapping", Collections.singletonMap("store", true)); - DynamicTemplate template = DynamicTemplate.parse("my_template", templateDef, Version.V_5_0_0_alpha5); + DynamicTemplate template = DynamicTemplate.parse("my_template", templateDef, Version.V_6_0_0_alpha1); assertTrue(template.match("a.b", "b", XContentFieldType.STRING)); assertFalse(template.match("a.b", "b", XContentFieldType.BOOLEAN)); } @@ -97,7 +90,7 @@ public void testSerialization() throws Exception { Map templateDef = new HashMap<>(); templateDef.put("match_mapping_type", "string"); templateDef.put("mapping", Collections.singletonMap("store", true)); - DynamicTemplate template = DynamicTemplate.parse("my_template", templateDef, Version.V_5_0_0_alpha1); + DynamicTemplate template = DynamicTemplate.parse("my_template", templateDef, Version.V_6_0_0_alpha1); XContentBuilder builder = JsonXContent.contentBuilder(); template.toXContent(builder, ToXContent.EMPTY_PARAMS); assertEquals("{\"match_mapping_type\":\"string\",\"mapping\":{\"store\":true}}", Strings.toString(builder)); @@ -107,7 +100,7 @@ public void testSerialization() throws Exception { templateDef.put("match", "*name"); templateDef.put("unmatch", "first_name"); templateDef.put("mapping", Collections.singletonMap("store", true)); - template = DynamicTemplate.parse("my_template", templateDef, Version.V_5_0_0_alpha1); + template = DynamicTemplate.parse("my_template", templateDef, Version.V_6_0_0_alpha1); builder = JsonXContent.contentBuilder(); template.toXContent(builder, ToXContent.EMPTY_PARAMS); assertEquals("{\"match\":\"*name\",\"unmatch\":\"first_name\",\"mapping\":{\"store\":true}}", Strings.toString(builder)); @@ -117,7 +110,7 @@ public void testSerialization() throws Exception { templateDef.put("path_match", "*name"); templateDef.put("path_unmatch", "first_name"); templateDef.put("mapping", Collections.singletonMap("store", true)); - template = DynamicTemplate.parse("my_template", templateDef, Version.V_5_0_0_alpha1); + template = DynamicTemplate.parse("my_template", templateDef, Version.V_6_0_0_alpha1); builder = JsonXContent.contentBuilder(); template.toXContent(builder, ToXContent.EMPTY_PARAMS); 
assertEquals("{\"path_match\":\"*name\",\"path_unmatch\":\"first_name\",\"mapping\":{\"store\":true}}", @@ -128,7 +121,7 @@ public void testSerialization() throws Exception { templateDef.put("match", "^a$"); templateDef.put("match_pattern", "regex"); templateDef.put("mapping", Collections.singletonMap("store", true)); - template = DynamicTemplate.parse("my_template", templateDef, Version.V_5_0_0_alpha1); + template = DynamicTemplate.parse("my_template", templateDef, Version.V_6_0_0_alpha1); builder = JsonXContent.contentBuilder(); template.toXContent(builder, ToXContent.EMPTY_PARAMS); assertEquals("{\"match\":\"^a$\",\"match_pattern\":\"regex\",\"mapping\":{\"store\":true}}", Strings.toString(builder)); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/ExternalFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/ExternalFieldMapperTests.java index 8f2a51bbfc2bd..5172e7b0b8839 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/ExternalFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/ExternalFieldMapperTests.java @@ -57,7 +57,7 @@ protected Collection> getPlugins() { } public void testExternalValues() throws Exception { - Version version = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0, + Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, Version.CURRENT); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); IndexService indexService = createIndex("test", settings); @@ -107,7 +107,7 @@ public void testExternalValues() throws Exception { } public void testExternalValuesWithMultifield() throws Exception { - Version version = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0, + Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, Version.CURRENT); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); IndexService indexService = createIndex("test", settings); @@ -173,7 +173,7 @@ public void testExternalValuesWithMultifield() throws Exception { } public void testExternalValuesWithMultifieldTwoLevels() throws Exception { - Version version = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0, + Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, Version.CURRENT); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); IndexService indexService = createIndex("test", settings); diff --git a/server/src/test/java/org/elasticsearch/index/mapper/TypeFieldMapperTests.java b/server/src/test/java/org/elasticsearch/index/mapper/TypeFieldMapperTests.java index 0af663219903f..3bec98d33eec7 100644 --- a/server/src/test/java/org/elasticsearch/index/mapper/TypeFieldMapperTests.java +++ b/server/src/test/java/org/elasticsearch/index/mapper/TypeFieldMapperTests.java @@ -61,7 +61,7 @@ public void testDocValuesSingleType() throws Exception { public void testDocValues(boolean singleType) throws IOException { Settings indexSettings = singleType ? 
Settings.EMPTY : Settings.builder() - .put("index.version.created", Version.V_5_6_0) + .put("index.version.created", Version.V_6_0_0) .build(); MapperService mapperService = createIndex("test", indexSettings).mapperService(); DocumentMapper mapper = mapperService.merge("type", new CompressedXContent("{\"type\":{}}"), MergeReason.MAPPING_UPDATE); diff --git a/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java index 0de9cac885502..496d8512d4e28 100644 --- a/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/MatchQueryBuilderTests.java @@ -30,7 +30,6 @@ import org.apache.lucene.search.PointRangeQuery; import org.apache.lucene.search.Query; import org.apache.lucene.search.TermQuery; -import org.elasticsearch.Version; import org.elasticsearch.action.admin.indices.mapping.put.PutMappingRequest; import org.elasticsearch.common.ParsingException; import org.elasticsearch.common.Strings; @@ -366,9 +365,6 @@ protected void initializeAdditionalMappings(MapperService mapperService) throws public void testMatchPhrasePrefixWithBoost() throws Exception { QueryShardContext context = createShardContext(); - assumeTrue("test runs only when the index version is on or after V_5_0_0_alpha1", - context.indexVersionCreated().onOrAfter(Version.V_5_0_0_alpha1)); - { // field boost is applied on a single term query MatchPhrasePrefixQueryBuilder builder = new MatchPhrasePrefixQueryBuilder("string_boost", "foo"); diff --git a/server/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java index 6ac97373dfa1a..72898dd3911cd 100644 --- a/server/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/MoreLikeThisQueryBuilderTests.java @@ -27,7 +27,6 @@ import org.apache.lucene.search.BooleanQuery; import org.apache.lucene.search.Query; import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.Version; import org.elasticsearch.action.termvectors.MultiTermVectorsItemResponse; import org.elasticsearch.action.termvectors.MultiTermVectorsRequest; import org.elasticsearch.action.termvectors.MultiTermVectorsResponse; @@ -36,13 +35,11 @@ import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.lucene.search.MoreLikeThisQuery; import org.elasticsearch.common.xcontent.ToXContent; import org.elasticsearch.common.xcontent.XContentBuilder; import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.common.xcontent.XContentParser; -import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.common.xcontent.json.JsonXContent; import org.elasticsearch.index.VersionType; import org.elasticsearch.index.query.MoreLikeThisQueryBuilder.Item; @@ -52,7 +49,6 @@ import java.io.IOException; import java.util.Arrays; -import java.util.Base64; import java.util.Collections; import java.util.EnumSet; import java.util.HashMap; @@ -333,26 +329,6 @@ public void testItemFromXContent() throws IOException { assertEquals(expectedItem, newItem); } - public void testItemSerializationBwc() throws IOException { - final byte[] data = 
Base64.getDecoder().decode("AQVpbmRleAEEdHlwZQEODXsiZm9vIjoiYmFyIn0A/wD//////////QAAAAAAAAAA"); - final Version version = randomFrom(Version.V_5_0_0, Version.V_5_0_1, Version.V_5_0_2, - Version.V_5_1_1, Version.V_5_1_2, Version.V_5_2_0); - try (StreamInput in = StreamInput.wrap(data)) { - in.setVersion(version); - Item item = new Item(in); - assertEquals(XContentType.JSON, item.xContentType()); - assertEquals("{\"foo\":\"bar\"}", item.doc().utf8ToString()); - assertEquals("index", item.index()); - assertEquals("type", item.type()); - - try (BytesStreamOutput out = new BytesStreamOutput()) { - out.setVersion(version); - item.writeTo(out); - assertArrayEquals(data, out.bytes().toBytesRef().bytes); - } - } - } - @Override protected boolean isCachable(MoreLikeThisQueryBuilder queryBuilder) { return queryBuilder.likeItems().length == 0; // items are always fetched diff --git a/server/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java b/server/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java index a2e6018d0ef6b..76479791283b4 100644 --- a/server/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java +++ b/server/src/test/java/org/elasticsearch/index/query/NestedQueryBuilderTests.java @@ -124,10 +124,6 @@ protected void doAssertLuceneQuery(NestedQueryBuilder queryBuilder, Query query, public void testSerializationBWC() throws IOException { for (Version version : VersionUtils.allReleasedVersions()) { NestedQueryBuilder testQuery = createTestQueryBuilder(); - if (version.before(Version.V_5_2_0) && testQuery.innerHit() != null) { - // ignore unmapped for inner_hits has been added on 5.2 - testQuery.innerHit().setIgnoreUnmapped(false); - } assertSerialization(testQuery, version); } } diff --git a/server/src/test/java/org/elasticsearch/index/reindex/BulkByScrollTaskStatusTests.java b/server/src/test/java/org/elasticsearch/index/reindex/BulkByScrollTaskStatusTests.java index 9e5383a259adc..dff07e0f215e7 100644 --- a/server/src/test/java/org/elasticsearch/index/reindex/BulkByScrollTaskStatusTests.java +++ b/server/src/test/java/org/elasticsearch/index/reindex/BulkByScrollTaskStatusTests.java @@ -24,7 +24,6 @@ import org.elasticsearch.Version; import org.elasticsearch.common.Randomness; import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.test.ESTestCase; import org.hamcrest.Matchers; @@ -33,7 +32,6 @@ import java.util.stream.IntStream; import static java.lang.Math.abs; -import static java.util.Collections.emptyList; import static java.util.stream.Collectors.toList; import static org.apache.lucene.util.TestUtil.randomSimpleString; import static org.elasticsearch.common.unit.TimeValue.parseTimeValue; @@ -45,15 +43,6 @@ public void testBulkByTaskStatus() throws IOException { status.writeTo(out); BulkByScrollTask.Status tripped = new BulkByScrollTask.Status(out.bytes().streamInput()); assertTaskStatusEquals(out.getVersion(), status, tripped); - - // Also check round tripping pre-5.1 which is the first version to support parallelized scroll - out = new BytesStreamOutput(); - out.setVersion(Version.V_5_0_0_rc1); // This can be V_5_0_0 - status.writeTo(out); - StreamInput in = out.bytes().streamInput(); - in.setVersion(Version.V_5_0_0_rc1); - tripped = new BulkByScrollTask.Status(in); - assertTaskStatusEquals(Version.V_5_0_0_rc1, status, tripped); } /** @@ -74,23 +63,19 @@ public static void assertTaskStatusEquals(Version version, BulkByScrollTask.Stat 
assertEquals(expected.getRequestsPerSecond(), actual.getRequestsPerSecond(), 0f); assertEquals(expected.getReasonCancelled(), actual.getReasonCancelled()); assertEquals(expected.getThrottledUntil(), actual.getThrottledUntil()); - if (version.onOrAfter(Version.V_5_1_1)) { - assertThat(actual.getSliceStatuses(), Matchers.hasSize(expected.getSliceStatuses().size())); - for (int i = 0; i < expected.getSliceStatuses().size(); i++) { - BulkByScrollTask.StatusOrException sliceStatus = expected.getSliceStatuses().get(i); - if (sliceStatus == null) { - assertNull(actual.getSliceStatuses().get(i)); - } else if (sliceStatus.getException() == null) { - assertNull(actual.getSliceStatuses().get(i).getException()); - assertTaskStatusEquals(version, sliceStatus.getStatus(), actual.getSliceStatuses().get(i).getStatus()); - } else { - assertNull(actual.getSliceStatuses().get(i).getStatus()); - // Just check the message because we're not testing exception serialization in general here. - assertEquals(sliceStatus.getException().getMessage(), actual.getSliceStatuses().get(i).getException().getMessage()); - } + assertThat(actual.getSliceStatuses(), Matchers.hasSize(expected.getSliceStatuses().size())); + for (int i = 0; i < expected.getSliceStatuses().size(); i++) { + BulkByScrollTask.StatusOrException sliceStatus = expected.getSliceStatuses().get(i); + if (sliceStatus == null) { + assertNull(actual.getSliceStatuses().get(i)); + } else if (sliceStatus.getException() == null) { + assertNull(actual.getSliceStatuses().get(i).getException()); + assertTaskStatusEquals(version, sliceStatus.getStatus(), actual.getSliceStatuses().get(i).getStatus()); + } else { + assertNull(actual.getSliceStatuses().get(i).getStatus()); + // Just check the message because we're not testing exception serialization in general here. 
+ assertEquals(sliceStatus.getException().getMessage(), actual.getSliceStatuses().get(i).getException().getMessage()); } - } else { - assertEquals(emptyList(), actual.getSliceStatuses()); } } diff --git a/server/src/test/java/org/elasticsearch/index/shard/ShardGetServiceTests.java b/server/src/test/java/org/elasticsearch/index/shard/ShardGetServiceTests.java index 95772910747c4..04d15d39b58e9 100644 --- a/server/src/test/java/org/elasticsearch/index/shard/ShardGetServiceTests.java +++ b/server/src/test/java/org/elasticsearch/index/shard/ShardGetServiceTests.java @@ -77,48 +77,4 @@ public void testGetForUpdate() throws IOException { closeShards(primary); } - - public void testGetForUpdateWithParentField() throws IOException { - Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, Version.CURRENT) - .put(IndexMetaData.SETTING_NUMBER_OF_REPLICAS, 1) - .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) - .put("index.version.created", Version.V_5_6_0) // for parent field mapper - .build(); - IndexMetaData metaData = IndexMetaData.builder("test") - .putMapping("test", "{ \"properties\": { \"foo\": { \"type\": \"text\"}}}") - .settings(settings) - .primaryTerm(0, 1).build(); - IndexShard primary = newShard(new ShardId(metaData.getIndex(), 0), true, "n1", metaData, null); - recoverShardFromStore(primary); - Engine.IndexResult test = indexDoc(primary, "test", "0", "{\"foo\" : \"bar\"}"); - assertTrue(primary.getEngine().refreshNeeded()); - GetResult testGet = primary.getService().getForUpdate("test", "0", test.getVersion(), VersionType.INTERNAL); - assertFalse(testGet.getFields().containsKey(RoutingFieldMapper.NAME)); - assertEquals(new String(testGet.source(), StandardCharsets.UTF_8), "{\"foo\" : \"bar\"}"); - try (Engine.Searcher searcher = primary.getEngine().acquireSearcher("test", Engine.SearcherScope.INTERNAL)) { - assertEquals(searcher.reader().maxDoc(), 1); // we refreshed - } - - Engine.IndexResult test1 = indexDoc(primary, "test", "1", "{\"foo\" : \"baz\"}", XContentType.JSON, null); - assertTrue(primary.getEngine().refreshNeeded()); - GetResult testGet1 = primary.getService().getForUpdate("test", "1", test1.getVersion(), VersionType.INTERNAL); - assertEquals(new String(testGet1.source(), StandardCharsets.UTF_8), "{\"foo\" : \"baz\"}"); - assertFalse(testGet1.getFields().containsKey(RoutingFieldMapper.NAME)); - try (Engine.Searcher searcher = primary.getEngine().acquireSearcher("test", Engine.SearcherScope.INTERNAL)) { - assertEquals(searcher.reader().maxDoc(), 1); // we read from the translog - } - primary.getEngine().refresh("test"); - try (Engine.Searcher searcher = primary.getEngine().acquireSearcher("test", Engine.SearcherScope.INTERNAL)) { - assertEquals(searcher.reader().maxDoc(), 2); - } - - // now again from the reader - test1 = indexDoc(primary, "test", "1", "{\"foo\" : \"baz\"}", XContentType.JSON, null); - assertTrue(primary.getEngine().refreshNeeded()); - testGet1 = primary.getService().getForUpdate("test", "1", test1.getVersion(), VersionType.INTERNAL); - assertEquals(new String(testGet1.source(), StandardCharsets.UTF_8), "{\"foo\" : \"baz\"}"); - assertFalse(testGet1.getFields().containsKey(RoutingFieldMapper.NAME)); - - closeShards(primary); - } } diff --git a/server/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java b/server/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java index 47f30e10ef912..485fd92099630 100644 --- a/server/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java 
+++ b/server/src/test/java/org/elasticsearch/indices/analysis/AnalysisModuleTests.java @@ -136,7 +136,7 @@ public void testAnalyzerAliasNotAllowedPost5x() throws IOException { .put("index.analysis.analyzer.foobar.type", "standard") .put("index.analysis.analyzer.foobar.alias","foobaz") // analyzer aliases were removed in v5.0.0 alpha6 - .put(IndexMetaData.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_5_0_0_beta1, null)) + .put(IndexMetaData.SETTING_VERSION_CREATED, VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, null)) .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) .build(); AnalysisRegistry registry = getNewRegistry(settings); @@ -149,7 +149,7 @@ public void testVersionedAnalyzers() throws Exception { Settings settings2 = Settings.builder() .loadFromStream(yaml, getClass().getResourceAsStream(yaml), false) .put(Environment.PATH_HOME_SETTING.getKey(), createTempDir().toString()) - .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_5_0_0) + .put(IndexMetaData.SETTING_VERSION_CREATED, Version.V_6_0_0) .build(); AnalysisRegistry newRegistry = getNewRegistry(settings2); IndexAnalyzers indexAnalyzers = getIndexAnalyzers(newRegistry, settings2); @@ -162,9 +162,9 @@ public void testVersionedAnalyzers() throws Exception { // analysis service has the expected version assertThat(indexAnalyzers.get("standard").analyzer(), is(instanceOf(StandardAnalyzer.class))); - assertEquals(Version.V_5_0_0.luceneVersion, + assertEquals(Version.V_6_0_0.luceneVersion, indexAnalyzers.get("standard").analyzer().getVersion()); - assertEquals(Version.V_5_0_0.luceneVersion, + assertEquals(Version.V_6_0_0.luceneVersion, indexAnalyzers.get("stop").analyzer().getVersion()); assertThat(indexAnalyzers.get("custom7").analyzer(), is(instanceOf(StandardAnalyzer.class))); diff --git a/server/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java b/server/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java index 5ed4b37030786..fa591411bba11 100644 --- a/server/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java +++ b/server/src/test/java/org/elasticsearch/indices/stats/IndexStatsIT.java @@ -725,7 +725,7 @@ public void testMultiIndex() throws Exception { public void testFieldDataFieldsParam() throws Exception { assertAcked(client().admin().indices().prepareCreate("test1") - .setSettings(Settings.builder().put("index.version.created", Version.V_5_6_0.id)) + .setSettings(Settings.builder().put("index.version.created", Version.V_6_0_0.id)) .addMapping("_doc", "bar", "type=text,fielddata=true", "baz", "type=text,fielddata=true").get()); diff --git a/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java b/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java index 5f1d1f612d7ad..f6649853eda10 100644 --- a/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java +++ b/server/src/test/java/org/elasticsearch/plugins/PluginsServiceTests.java @@ -590,10 +590,10 @@ public void testNonExtensibleDep() throws Exception { } public void testIncompatibleElasticsearchVersion() throws Exception { - PluginInfo info = new PluginInfo("my_plugin", "desc", "1.0", Version.V_5_0_0, + PluginInfo info = new PluginInfo("my_plugin", "desc", "1.0", Version.V_6_0_0, "1.8", "FakePlugin", Collections.emptyList(), false); IllegalArgumentException e = expectThrows(IllegalArgumentException.class, () -> PluginsService.verifyCompatibility(info)); - assertThat(e.getMessage(), containsString("was built for 
Elasticsearch version 5.0.0")); + assertThat(e.getMessage(), containsString("was built for Elasticsearch version 6.0.0")); } public void testIncompatibleJavaVersion() throws Exception { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java index c50fb89f334af..ce45d222dd757 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoDistanceIT.java @@ -67,7 +67,7 @@ protected Collection> nodePlugins() { return Arrays.asList(InternalSettingsPlugin.class); // uses index.version.created } - private Version version = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0, + private Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, Version.CURRENT); private IndexRequestBuilder indexCity(String idx, String name, String... latLons) throws Exception { diff --git a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoHashGridIT.java b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoHashGridIT.java index fc080dd0f04c4..971742aec2d04 100644 --- a/server/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoHashGridIT.java +++ b/server/src/test/java/org/elasticsearch/search/aggregations/bucket/GeoHashGridIT.java @@ -65,7 +65,7 @@ protected Collection> nodePlugins() { return Arrays.asList(InternalSettingsPlugin.class); // uses index.version.created } - private Version version = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0, + private Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, Version.CURRENT); static ObjectIntMap expectedDocCountsForGeoHash = null; diff --git a/server/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java b/server/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java index a21893db3920f..0a860a636d4aa 100644 --- a/server/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java +++ b/server/src/test/java/org/elasticsearch/search/functionscore/DecayFunctionScoreIT.java @@ -613,7 +613,7 @@ public void testDateWithoutOrigin() throws Exception { } public void testManyDocsLin() throws Exception { - Version version = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0, Version.CURRENT); + Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, Version.CURRENT); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); XContentBuilder xContentBuilder = jsonBuilder().startObject().startObject("type").startObject("properties") .startObject("test").field("type", "text").endObject().startObject("date").field("type", "date") diff --git a/server/src/test/java/org/elasticsearch/search/geo/GeoBoundingBoxIT.java b/server/src/test/java/org/elasticsearch/search/geo/GeoBoundingBoxIT.java index 12a64d80a1489..80b40042801b5 100644 --- a/server/src/test/java/org/elasticsearch/search/geo/GeoBoundingBoxIT.java +++ b/server/src/test/java/org/elasticsearch/search/geo/GeoBoundingBoxIT.java @@ -51,7 +51,7 @@ protected Collection> nodePlugins() { } public void testSimpleBoundingBoxTest() throws Exception { - Version version = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0, + Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, Version.CURRENT); Settings settings = 
Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1") @@ -123,7 +123,7 @@ public void testSimpleBoundingBoxTest() throws Exception { } public void testLimit2BoundingBox() throws Exception { - Version version = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0, + Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, Version.CURRENT); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1") @@ -176,7 +176,7 @@ public void testLimit2BoundingBox() throws Exception { } public void testCompleteLonRange() throws Exception { - Version version = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0, + Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, Version.CURRENT); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1") diff --git a/server/src/test/java/org/elasticsearch/search/geo/GeoDistanceIT.java b/server/src/test/java/org/elasticsearch/search/geo/GeoDistanceIT.java index 5966ea6a49dcc..143fd611c3f5e 100644 --- a/server/src/test/java/org/elasticsearch/search/geo/GeoDistanceIT.java +++ b/server/src/test/java/org/elasticsearch/search/geo/GeoDistanceIT.java @@ -101,7 +101,7 @@ static Double distanceScript(Map vars, Function> nodePlugins() { @Override protected void setupSuiteScopeCluster() throws Exception { - Version version = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0, + Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, Version.CURRENT); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); diff --git a/server/src/test/java/org/elasticsearch/search/sort/GeoDistanceIT.java b/server/src/test/java/org/elasticsearch/search/sort/GeoDistanceIT.java index 965dcb3e8ccf1..e134b20c309f4 100644 --- a/server/src/test/java/org/elasticsearch/search/sort/GeoDistanceIT.java +++ b/server/src/test/java/org/elasticsearch/search/sort/GeoDistanceIT.java @@ -59,7 +59,7 @@ protected Collection> nodePlugins() { } public void testDistanceSortingMVFields() throws Exception { - Version version = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0, + Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, Version.CURRENT); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") @@ -189,7 +189,7 @@ public void testDistanceSortingMVFields() throws Exception { // Regression bug: // https://github.com/elastic/elasticsearch/issues/2851 public void testDistanceSortingWithMissingGeoPoint() throws Exception { - Version version = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0, + Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, Version.CURRENT); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("type1").startObject("properties") @@ -234,7 +234,7 @@ public void 
testDistanceSortingWithMissingGeoPoint() throws Exception { } public void testDistanceSortingNestedFields() throws Exception { - Version version = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0, + Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, Version.CURRENT); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); XContentBuilder xContentBuilder = XContentFactory.jsonBuilder().startObject().startObject("company").startObject("properties") @@ -383,7 +383,7 @@ public void testDistanceSortingNestedFields() throws Exception { * Issue 3073 */ public void testGeoDistanceFilter() throws IOException { - Version version = VersionUtils.randomVersionBetween(random(), Version.V_5_0_0, + Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, Version.CURRENT); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); double lat = 40.720611; diff --git a/server/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderIT.java b/server/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderIT.java index 200043a6668ab..cac5fede848a4 100644 --- a/server/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderIT.java +++ b/server/src/test/java/org/elasticsearch/search/sort/GeoDistanceSortBuilderIT.java @@ -70,7 +70,7 @@ public void testManyToManyGeoPoints() throws ExecutionException, InterruptedExce * 1 2 3 4 5 6 7 */ Version version = randomBoolean() ? Version.CURRENT - : VersionUtils.randomVersionBetween(random(), Version.V_5_0_0, Version.CURRENT); + : VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, Version.CURRENT); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); assertAcked(prepareCreate("index").setSettings(settings).addMapping("type", LOCATION_FIELD, "type=geo_point")); XContentBuilder d1Builder = jsonBuilder(); @@ -136,7 +136,7 @@ public void testSingeToManyAvgMedian() throws ExecutionException, InterruptedExc * d2 = (0, 1), (0, 5), (0, 6); so avg. distance is 4, median distance is 5 */ Version version = randomBoolean() ? Version.CURRENT - : VersionUtils.randomVersionBetween(random(), Version.V_5_0_0, Version.CURRENT); + : VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, Version.CURRENT); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); assertAcked(prepareCreate("index").setSettings(settings).addMapping("type", LOCATION_FIELD, "type=geo_point")); XContentBuilder d1Builder = jsonBuilder(); @@ -197,7 +197,7 @@ public void testManyToManyGeoPointsWithDifferentFormats() throws ExecutionExcept * 1 2 3 4 5 6 */ Version version = randomBoolean() ? 
Version.CURRENT - : VersionUtils.randomVersionBetween(random(), Version.V_5_0_0, Version.CURRENT); + : VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, Version.CURRENT); Settings settings = Settings.builder().put(IndexMetaData.SETTING_VERSION_CREATED, version).build(); assertAcked(prepareCreate("index").setSettings(settings).addMapping("type", LOCATION_FIELD, "type=geo_point")); XContentBuilder d1Builder = jsonBuilder(); diff --git a/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java b/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java index f1929e72d8b33..84a6ce54d1ed1 100644 --- a/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java +++ b/server/src/test/java/org/elasticsearch/transport/RemoteClusterServiceTests.java @@ -821,10 +821,5 @@ public void testGetNodePredicatesCombination() { allRoles, Version.CURRENT); assertTrue(nodePredicate.test(node)); } - { - DiscoveryNode node = new DiscoveryNode("id", address, Collections.singletonMap("gateway", "true"), - allRoles, Version.V_5_3_0); - assertFalse(nodePredicate.test(node)); - } } } diff --git a/server/src/test/java/org/elasticsearch/transport/TcpTransportTests.java b/server/src/test/java/org/elasticsearch/transport/TcpTransportTests.java index 0b6112eb51c90..0bf12ba82c821 100644 --- a/server/src/test/java/org/elasticsearch/transport/TcpTransportTests.java +++ b/server/src/test/java/org/elasticsearch/transport/TcpTransportTests.java @@ -156,19 +156,26 @@ public void testEnsureVersionCompatibility() { TcpTransport.ensureVersionCompatibility(VersionUtils.randomVersionBetween(random(), Version.CURRENT.minimumCompatibilityVersion(), Version.CURRENT), Version.CURRENT, randomBoolean()); - TcpTransport.ensureVersionCompatibility(Version.fromString("5.0.0"), Version.fromString("6.0.0"), true); + TcpTransport.ensureVersionCompatibility(Version.fromString("6.0.0"), Version.fromString("7.0.0"), true); IllegalStateException ise = expectThrows(IllegalStateException.class, () -> - TcpTransport.ensureVersionCompatibility(Version.fromString("5.0.0"), Version.fromString("6.0.0"), false)); - assertEquals("Received message from unsupported version: [5.0.0] minimal compatible version is: [5.6.0]", ise.getMessage()); + TcpTransport.ensureVersionCompatibility(Version.fromString("6.0.0"), Version.fromString("7.0.0"), false)); + assertEquals("Received message from unsupported version: [6.0.0] minimal compatible version is: [6.5.0]", ise.getMessage()); + // For handshake we are compatible with N-2 + TcpTransport.ensureVersionCompatibility(Version.fromString("5.6.0"), Version.fromString("7.0.0"), true); ise = expectThrows(IllegalStateException.class, () -> - TcpTransport.ensureVersionCompatibility(Version.fromString("2.3.0"), Version.fromString("6.0.0"), true)); - assertEquals("Received handshake message from unsupported version: [2.3.0] minimal compatible version is: [5.6.0]", + TcpTransport.ensureVersionCompatibility(Version.fromString("5.6.0"), Version.fromString("7.0.0"), false)); + assertEquals("Received message from unsupported version: [5.6.0] minimal compatible version is: [6.5.0]", ise.getMessage()); ise = expectThrows(IllegalStateException.class, () -> - TcpTransport.ensureVersionCompatibility(Version.fromString("2.3.0"), Version.fromString("6.0.0"), false)); - assertEquals("Received message from unsupported version: [2.3.0] minimal compatible version is: [5.6.0]", + TcpTransport.ensureVersionCompatibility(Version.fromString("2.3.0"), 
Version.fromString("7.0.0"), true)); + assertEquals("Received handshake message from unsupported version: [2.3.0] minimal compatible version is: [6.5.0]", + ise.getMessage()); + + ise = expectThrows(IllegalStateException.class, () -> + TcpTransport.ensureVersionCompatibility(Version.fromString("2.3.0"), Version.fromString("7.0.0"), false)); + assertEquals("Received message from unsupported version: [2.3.0] minimal compatible version is: [6.5.0]", ise.getMessage()); } diff --git a/test/framework/src/main/java/org/elasticsearch/test/OldIndexUtils.java b/test/framework/src/main/java/org/elasticsearch/test/OldIndexUtils.java index 4c4fe8f76ad89..b9a0e4a9b1ea0 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/OldIndexUtils.java +++ b/test/framework/src/main/java/org/elasticsearch/test/OldIndexUtils.java @@ -75,27 +75,20 @@ public static Path getIndexDir( final String indexFile, final Path dataDir) throws IOException { final Version version = Version.fromString(indexName.substring("index-".length())); - if (version.before(Version.V_5_0_0_alpha1)) { - // the bwc scripts packs the indices under this path - Path src = dataDir.resolve("nodes/0/indices/" + indexName); - assertTrue("[" + indexFile + "] missing index dir: " + src.toString(), Files.exists(src)); - return src; - } else { - final List indexFolders = new ArrayList<>(); - try (DirectoryStream stream = Files.newDirectoryStream(dataDir.resolve("0/indices"), - (p) -> p.getFileName().toString().startsWith("extra") == false)) { // extra FS can break this... - for (final Path path : stream) { - indexFolders.add(path); - } + final List indexFolders = new ArrayList<>(); + try (DirectoryStream stream = Files.newDirectoryStream(dataDir.resolve("0/indices"), + (p) -> p.getFileName().toString().startsWith("extra") == false)) { // extra FS can break this... 
+ for (final Path path : stream) { + indexFolders.add(path); } - assertThat(indexFolders.toString(), indexFolders.size(), equalTo(1)); - final IndexMetaData indexMetaData = IndexMetaData.FORMAT.loadLatestState(logger, NamedXContentRegistry.EMPTY, - indexFolders.get(0)); - assertNotNull(indexMetaData); - assertThat(indexFolders.get(0).getFileName().toString(), equalTo(indexMetaData.getIndexUUID())); - assertThat(indexMetaData.getCreationVersion(), equalTo(version)); - return indexFolders.get(0); } + assertThat(indexFolders.toString(), indexFolders.size(), equalTo(1)); + final IndexMetaData indexMetaData = IndexMetaData.FORMAT.loadLatestState(logger, NamedXContentRegistry.EMPTY, + indexFolders.get(0)); + assertNotNull(indexMetaData); + assertThat(indexFolders.get(0).getFileName().toString(), equalTo(indexMetaData.getIndexUUID())); + assertThat(indexMetaData.getCreationVersion(), equalTo(version)); + return indexFolders.get(0); } // randomly distribute the files from src over dests paths diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSectionTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSectionTests.java index 5da8601a9f340..500cff893cb1f 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSectionTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSectionTests.java @@ -152,7 +152,7 @@ public void testParseTestSectionWithDoSetAndSkipSectionsNoSkip() throws Exceptio parser = createParser(YamlXContent.yamlXContent, "\"First test section\": \n" + " - skip:\n" + - " version: \"5.0.0 - 5.2.0\"\n" + + " version: \"6.0.0 - 6.2.0\"\n" + " reason: \"Update doesn't return metadata fields, waiting for #3259\"\n" + " - do :\n" + " catch: missing\n" + @@ -167,9 +167,9 @@ public void testParseTestSectionWithDoSetAndSkipSectionsNoSkip() throws Exceptio assertThat(testSection, notNullValue()); assertThat(testSection.getName(), equalTo("First test section")); assertThat(testSection.getSkipSection(), notNullValue()); - assertThat(testSection.getSkipSection().getLowerVersion(), equalTo(Version.V_5_0_0)); + assertThat(testSection.getSkipSection().getLowerVersion(), equalTo(Version.V_6_0_0)); assertThat(testSection.getSkipSection().getUpperVersion(), - equalTo(Version.V_5_2_0)); + equalTo(Version.V_6_2_0)); assertThat(testSection.getSkipSection().getReason(), equalTo("Update doesn't return metadata fields, waiting for #3259")); assertThat(testSection.getExecutableSections().size(), equalTo(2)); DoSection doSection = (DoSection)testSection.getExecutableSections().get(0); diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSuiteTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSuiteTests.java index 4c97eb453610e..71814593ad487 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSuiteTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/ClientYamlTestSuiteTests.java @@ -66,10 +66,10 @@ public void testParseTestSetupTeardownAndSections() throws Exception { " - match: {test_index.test_type.properties.text.analyzer: whitespace}\n" + "\n" + "---\n" + - "\"Get type mapping - pre 5.0\":\n" + + "\"Get type mapping - pre 6.0\":\n" + "\n" + " - skip:\n" + - " version: \"5.0.0 - \"\n" + + " version: \"6.0.0 - \"\n" + " reason: \"for newer versions the index name is 
always returned\"\n" + "\n" + " - do:\n" + @@ -97,7 +97,7 @@ public void testParseTestSetupTeardownAndSections() throws Exception { } else { assertThat(restTestSuite.getSetupSection().isEmpty(), equalTo(true)); } - + assertThat(restTestSuite.getTeardownSection(), notNullValue()); if (includeTeardown) { assertThat(restTestSuite.getTeardownSection().isEmpty(), equalTo(false)); @@ -131,12 +131,12 @@ public void testParseTestSetupTeardownAndSections() throws Exception { assertThat(matchAssertion.getExpectedValue().toString(), equalTo("whitespace")); assertThat(restTestSuite.getTestSections().get(1).getName(), - equalTo("Get type mapping - pre 5.0")); + equalTo("Get type mapping - pre 6.0")); assertThat(restTestSuite.getTestSections().get(1).getSkipSection().isEmpty(), equalTo(false)); assertThat(restTestSuite.getTestSections().get(1).getSkipSection().getReason(), equalTo("for newer versions the index name is always returned")); assertThat(restTestSuite.getTestSections().get(1).getSkipSection().getLowerVersion(), - equalTo(Version.V_5_0_0)); + equalTo(Version.V_6_0_0)); assertThat(restTestSuite.getTestSections().get(1).getSkipSection().getUpperVersion(), equalTo(Version.CURRENT)); assertThat(restTestSuite.getTestSections().get(1).getExecutableSections().size(), equalTo(3)); assertThat(restTestSuite.getTestSections().get(1).getExecutableSections().get(0), instanceOf(DoSection.class)); diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/SetupSectionTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/SetupSectionTests.java index cb9ab009b2594..e883e8e062af2 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/SetupSectionTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/SetupSectionTests.java @@ -53,7 +53,7 @@ public void testParseSetupSection() throws Exception { public void testParseSetupAndSkipSectionNoSkip() throws Exception { parser = createParser(YamlXContent.yamlXContent, " - skip:\n" + - " version: \"5.0.0 - 5.3.0\"\n" + + " version: \"6.0.0 - 6.3.0\"\n" + " reason: \"Update doesn't return metadata fields, waiting for #3259\"\n" + " - do:\n" + " index1:\n" + @@ -74,9 +74,9 @@ public void testParseSetupAndSkipSectionNoSkip() throws Exception { assertThat(setupSection, notNullValue()); assertThat(setupSection.getSkipSection().isEmpty(), equalTo(false)); assertThat(setupSection.getSkipSection(), notNullValue()); - assertThat(setupSection.getSkipSection().getLowerVersion(), equalTo(Version.V_5_0_0)); + assertThat(setupSection.getSkipSection().getLowerVersion(), equalTo(Version.V_6_0_0)); assertThat(setupSection.getSkipSection().getUpperVersion(), - equalTo(Version.V_5_3_0)); + equalTo(Version.V_6_3_0)); assertThat(setupSection.getSkipSection().getReason(), equalTo("Update doesn't return metadata fields, waiting for #3259")); assertThat(setupSection.getDoSections().size(), equalTo(2)); assertThat(setupSection.getDoSections().get(0).getApiCallSection().getApi(), equalTo("index1")); diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/SkipSectionTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/SkipSectionTests.java index 3ab9583335e7c..e5e466a82cc18 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/SkipSectionTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/SkipSectionTests.java @@ -34,17 +34,17 @@ public class SkipSectionTests extends 
AbstractClientYamlTestFragmentParserTestCase { public void testSkip() { - SkipSection section = new SkipSection("5.0.0 - 5.1.0", + SkipSection section = new SkipSection("6.0.0 - 6.1.0", randomBoolean() ? Collections.emptyList() : Collections.singletonList("warnings"), "foobar"); assertFalse(section.skip(Version.CURRENT)); - assertTrue(section.skip(Version.V_5_0_0)); - section = new SkipSection(randomBoolean() ? null : "5.0.0 - 5.1.0", + assertTrue(section.skip(Version.V_6_0_0)); + section = new SkipSection(randomBoolean() ? null : "6.0.0 - 6.1.0", Collections.singletonList("boom"), "foobar"); assertTrue(section.skip(Version.CURRENT)); } public void testMessage() { - SkipSection section = new SkipSection("5.0.0 - 5.1.0", + SkipSection section = new SkipSection("6.0.0 - 6.1.0", Collections.singletonList("warnings"), "foobar"); assertEquals("[FOOBAR] skipped, reason: [foobar] unsupported features [warnings]", section.getSkipMessage("FOOBAR")); section = new SkipSection(null, Collections.singletonList("warnings"), "foobar"); @@ -55,14 +55,14 @@ public void testMessage() { public void testParseSkipSectionVersionNoFeature() throws Exception { parser = createParser(YamlXContent.yamlXContent, - "version: \" - 5.1.1\"\n" + + "version: \" - 6.1.1\"\n" + "reason: Delete ignores the parent param" ); SkipSection skipSection = SkipSection.parse(parser); assertThat(skipSection, notNullValue()); assertThat(skipSection.getLowerVersion(), equalTo(VersionUtils.getFirstVersion())); - assertThat(skipSection.getUpperVersion(), equalTo(Version.V_5_1_1)); + assertThat(skipSection.getUpperVersion(), equalTo(Version.V_6_1_1)); assertThat(skipSection.getFeatures().size(), equalTo(0)); assertThat(skipSection.getReason(), equalTo("Delete ignores the parent param")); } diff --git a/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/TeardownSectionTests.java b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/TeardownSectionTests.java index 15ca1ec0096e3..07afa9f33b5b1 100644 --- a/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/TeardownSectionTests.java +++ b/test/framework/src/test/java/org/elasticsearch/test/rest/yaml/section/TeardownSectionTests.java @@ -56,7 +56,7 @@ public void testParseTeardownSection() throws Exception { public void testParseWithSkip() throws Exception { parser = createParser(YamlXContent.yamlXContent, " - skip:\n" + - " version: \"5.0.0 - 5.3.0\"\n" + + " version: \"6.0.0 - 6.3.0\"\n" + " reason: \"there is a reason\"\n" + " - do:\n" + " delete:\n" + @@ -75,8 +75,8 @@ public void testParseWithSkip() throws Exception { TeardownSection section = TeardownSection.parse(parser); assertThat(section, notNullValue()); assertThat(section.getSkipSection().isEmpty(), equalTo(false)); - assertThat(section.getSkipSection().getLowerVersion(), equalTo(Version.V_5_0_0)); - assertThat(section.getSkipSection().getUpperVersion(), equalTo(Version.V_5_3_0)); + assertThat(section.getSkipSection().getLowerVersion(), equalTo(Version.V_6_0_0)); + assertThat(section.getSkipSection().getUpperVersion(), equalTo(Version.V_6_3_0)); assertThat(section.getSkipSection().getReason(), equalTo("there is a reason")); assertThat(section.getDoSections().size(), equalTo(2)); assertThat(section.getDoSections().get(0).getApiCallSection().getApi(), equalTo("delete")); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackInfoResponse.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackInfoResponse.java index 
2a7eddcf35395..b51a451a67faa 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackInfoResponse.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/XPackInfoResponse.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.protocol.xpack; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; @@ -399,8 +398,7 @@ public FeatureSet(String name, @Nullable String description, boolean available, } public FeatureSet(StreamInput in) throws IOException { - this(in.readString(), in.readOptionalString(), in.readBoolean(), in.readBoolean(), - in.getVersion().onOrAfter(Version.V_5_4_0) ? in.readMap() : null); + this(in.readString(), in.readOptionalString(), in.readBoolean(), in.readBoolean(), in.readMap()); } @Override @@ -409,9 +407,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalString(description); out.writeBoolean(available); out.writeBoolean(enabled); - if (out.getVersion().onOrAfter(Version.V_5_4_0)) { - out.writeMap(nativeCodeInfo); - } + out.writeMap(nativeCodeInfo); } public String name() { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/security/User.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/security/User.java index e5b116a3a7a98..16ed33ae94087 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/security/User.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/protocol/xpack/security/User.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.protocol.xpack.security; -import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; @@ -185,12 +184,7 @@ public static User partialReadFrom(String username, StreamInput input) throws IO boolean hasInnerUser = input.readBoolean(); if (hasInnerUser) { User innerUser = readFrom(input); - if (input.getVersion().onOrBefore(Version.V_5_4_0)) { - // backcompat: runas user was read first, so reverse outer and inner - return new User(innerUser, outerUser); - } else { - return new User(outerUser, innerUser); - } + return new User(outerUser, innerUser); } else { return outerUser; } @@ -207,11 +201,6 @@ public static void writeTo(User user, StreamOutput output) throws IOException { if (user.authenticatedUser == null) { // no backcompat necessary, since there is no inner user writeUser(user, output); - } else if (output.getVersion().onOrBefore(Version.V_5_4_0)) { - // backcompat: write runas user as the "inner" user - writeUser(user.authenticatedUser, output); - output.writeBoolean(true); - writeUser(user, output); } else { writeUser(user, output); output.writeBoolean(true); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlMetadata.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlMetadata.java index e0b71abe966db..193695ac69362 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlMetadata.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/MlMetadata.java @@ -115,7 +115,7 @@ public Set expandDatafeedIds(String expression, boolean allowNoDatafeeds @Override public Version getMinimalSupportedVersion() { - return Version.V_5_4_0; + return Version.V_6_0_0_alpha1; } @Override diff --git 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteDatafeedAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteDatafeedAction.java index fb3ac55cda027..73cdbeef44259 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteDatafeedAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteDatafeedAction.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.xpack.core.ml.action; -import org.elasticsearch.Version; import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.support.master.AcknowledgedRequest; @@ -72,18 +71,14 @@ public ActionRequestValidationException validate() { public void readFrom(StreamInput in) throws IOException { super.readFrom(in); datafeedId = in.readString(); - if (in.getVersion().onOrAfter(Version.V_5_5_0)) { - force = in.readBoolean(); - } + force = in.readBoolean(); } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeString(datafeedId); - if (out.getVersion().onOrAfter(Version.V_5_5_0)) { - out.writeBoolean(force); - } + out.writeBoolean(force); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteJobAction.java index 933e98b80ff80..56b7ec2b52fc1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/DeleteJobAction.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.xpack.core.ml.action; -import org.elasticsearch.Version; import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.action.support.master.AcknowledgedRequest; @@ -79,18 +78,14 @@ public Task createTask(long id, String type, String action, TaskId parentTaskId, public void readFrom(StreamInput in) throws IOException { super.readFrom(in); jobId = in.readString(); - if (in.getVersion().onOrAfter(Version.V_5_5_0)) { - force = in.readBoolean(); - } + force = in.readBoolean(); } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeString(jobId); - if (out.getVersion().onOrAfter(Version.V_5_5_0)) { - out.writeBoolean(force); - } + out.writeBoolean(force); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FlushJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FlushJobAction.java index ef086b5126228..4b96a4d6b2746 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FlushJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/FlushJobAction.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.xpack.core.ml.action; -import org.elasticsearch.Version; import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionRequestBuilder; import org.elasticsearch.action.support.tasks.BaseTasksResponse; @@ -127,9 +126,7 @@ public void readFrom(StreamInput in) throws IOException { start = in.readOptionalString(); end = in.readOptionalString(); advanceTime = in.readOptionalString(); - if (in.getVersion().after(Version.V_5_5_0)) { - skipTime = in.readOptionalString(); - } + skipTime = in.readOptionalString(); } @Override @@ 
-139,9 +136,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalString(start); out.writeOptionalString(end); out.writeOptionalString(advanceTime); - if (out.getVersion().after(Version.V_5_5_0)) { - out.writeOptionalString(skipTime); - } + out.writeOptionalString(skipTime); } @Override @@ -222,18 +217,14 @@ public Date getLastFinalizedBucketEnd() { public void readFrom(StreamInput in) throws IOException { super.readFrom(in); flushed = in.readBoolean(); - if (in.getVersion().after(Version.V_5_5_0)) { - lastFinalizedBucketEnd = new Date(in.readVLong()); - } + lastFinalizedBucketEnd = new Date(in.readVLong()); } @Override public void writeTo(StreamOutput out) throws IOException { super.writeTo(out); out.writeBoolean(flushed); - if (out.getVersion().after(Version.V_5_5_0)) { - out.writeVLong(lastFinalizedBucketEnd.getTime()); - } + out.writeVLong(lastFinalizedBucketEnd.getTime()); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetBucketsAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetBucketsAction.java index 29b3d4bb8d557..c6c87ef0e465d 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetBucketsAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/GetBucketsAction.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.xpack.core.ml.action; -import org.elasticsearch.Version; import org.elasticsearch.action.Action; import org.elasticsearch.action.ActionRequest; import org.elasticsearch.action.ActionRequestBuilder; @@ -162,7 +161,7 @@ public PageParams getPageParams() { public void setPageParams(PageParams pageParams) { if (timestamp != null) { - throw new IllegalArgumentException("Param [" + PageParams.FROM.getPreferredName() + throw new IllegalArgumentException("Param [" + PageParams.FROM.getPreferredName() + ", " + PageParams.SIZE.getPreferredName() + "] is incompatible with [" + TIMESTAMP.getPreferredName() + "]."); } this.pageParams = ExceptionsHelper.requireNonNull(pageParams, PageParams.PAGE.getPreferredName()); @@ -212,10 +211,8 @@ public void readFrom(StreamInput in) throws IOException { end = in.readOptionalString(); anomalyScore = in.readOptionalDouble(); pageParams = in.readOptionalWriteable(PageParams::new); - if (in.getVersion().onOrAfter(Version.V_5_5_0)) { - sort = in.readString(); - descending = in.readBoolean(); - } + sort = in.readString(); + descending = in.readBoolean(); } @Override @@ -229,10 +226,8 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalString(end); out.writeOptionalDouble(anomalyScore); out.writeOptionalWriteable(pageParams); - if (out.getVersion().onOrAfter(Version.V_5_5_0)) { - out.writeString(sort); - out.writeBoolean(descending); - } + out.writeString(sort); + out.writeBoolean(descending); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/OpenJobAction.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/OpenJobAction.java index c108a983aa17b..fc38d974defff 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/OpenJobAction.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/action/OpenJobAction.java @@ -168,10 +168,6 @@ public JobParams(String jobId) { public JobParams(StreamInput in) throws IOException { jobId = in.readString(); - if (in.getVersion().onOrBefore(Version.V_5_5_0)) { - // Read `ignoreDowntime` - 
in.readBoolean(); - } timeout = TimeValue.timeValueMillis(in.readVLong()); } @@ -199,10 +195,6 @@ public String getWriteableName() { @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(jobId); - if (out.getVersion().onOrBefore(Version.V_5_5_0)) { - // Write `ignoreDowntime` - true by default - out.writeBoolean(true); - } out.writeVLong(timeout.millis()); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfig.java index 1034b00af0a34..cdf25438cea33 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedConfig.java @@ -189,10 +189,6 @@ public DatafeedConfig(StreamInput in) throws IOException { this.scriptFields = null; } this.scrollSize = in.readOptionalVInt(); - if (in.getVersion().before(Version.V_5_5_0)) { - // read former _source field - in.readBoolean(); - } this.chunkingConfig = in.readOptionalWriteable(ChunkingConfig::new); if (in.getVersion().onOrAfter(Version.V_6_2_0)) { this.headers = Collections.unmodifiableMap(in.readMap(StreamInput::readString, StreamInput::readString)); @@ -290,10 +286,6 @@ public void writeTo(StreamOutput out) throws IOException { out.writeBoolean(false); } out.writeOptionalVInt(scrollSize); - if (out.getVersion().before(Version.V_5_5_0)) { - // write former _source field - out.writeBoolean(false); - } out.writeOptionalWriteable(chunkingConfig); if (out.getVersion().onOrAfter(Version.V_6_2_0)) { out.writeMap(headers, StreamOutput::writeString, StreamOutput::writeString); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedState.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedState.java index d894f7b339fe5..70102f27a5669 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedState.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedState.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.xpack.core.ml.datafeed; -import org.elasticsearch.Version; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -49,14 +48,6 @@ public String getWriteableName() { @Override public void writeTo(StreamOutput out) throws IOException { DatafeedState state = this; - // STARTING & STOPPING states were introduced in v5.5. 
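The ML hunks above all follow the same shape: a field that first appeared on the wire in 5.5 was read and written behind an in/out.getVersion() guard, and once no 5.x node can sit on the other end of the stream the guard is dead code and collapses to a plain read or write. A minimal sketch of the before/after, using a hypothetical ExampleRequest rather than any of the real actions:

    import java.io.IOException;
    import org.elasticsearch.common.io.stream.StreamInput;
    import org.elasticsearch.common.io.stream.StreamOutput;

    // Hypothetical class illustrating the pattern; 'force' mirrors the real request fields above.
    class ExampleRequest {
        private boolean force;

        void readFrom(StreamInput in) throws IOException {
            // Before: if (in.getVersion().onOrAfter(Version.V_5_5_0)) { force = in.readBoolean(); }
            // With 6.0.0 as the oldest version on the wire, the guard is unreachable, so:
            force = in.readBoolean();
        }

        void writeTo(StreamOutput out) throws IOException {
            // Guards remain only for versions still possible on the wire, e.g. the V_6_2_0
            // check kept in DatafeedConfig below.
            out.writeBoolean(force);
        }
    }

Note that guards against 6.x constants (V_6_1_0, V_6_2_0, V_6_4_0, V_6_5_0) are deliberately left in place throughout the patch, since mixed 6.x/7.x clusters still need them.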
- if (out.getVersion().before(Version.V_5_5_0)) { - if (this == STARTING) { - state = STOPPED; - } else if (this == STOPPING) { - state = STARTED; - } - } out.writeEnum(state); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdate.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdate.java index f3748cefc51bc..d5425bdd1f469 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdate.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/datafeed/DatafeedUpdate.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.xpack.core.ml.datafeed; -import org.elasticsearch.Version; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; @@ -122,10 +121,6 @@ public DatafeedUpdate(StreamInput in) throws IOException { this.scriptFields = null; } this.scrollSize = in.readOptionalVInt(); - if (in.getVersion().before(Version.V_5_5_0)) { - // TODO for former _source param - remove in v7.0.0 - in.readOptionalBoolean(); - } this.chunkingConfig = in.readOptionalWriteable(ChunkingConfig::new); } @@ -163,10 +158,6 @@ public void writeTo(StreamOutput out) throws IOException { out.writeBoolean(false); } out.writeOptionalVInt(scrollSize); - if (out.getVersion().before(Version.V_5_5_0)) { - // TODO for former _source param - remove in v7.0.0 - out.writeOptionalBoolean(null); - } out.writeOptionalWriteable(chunkingConfig); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Detector.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Detector.java index 93aa5495c409e..b5083aeecb9ae 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Detector.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Detector.java @@ -6,7 +6,6 @@ package org.elasticsearch.xpack.core.ml.job.config; import org.elasticsearch.ElasticsearchParseException; -import org.elasticsearch.Version; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; @@ -248,12 +247,7 @@ public Detector(StreamInput in) throws IOException { useNull = in.readBoolean(); excludeFrequent = in.readBoolean() ? 
ExcludeFrequent.readFromStream(in) : null; rules = Collections.unmodifiableList(in.readList(DetectionRule::new)); - if (in.getVersion().onOrAfter(Version.V_5_5_0)) { - detectorIndex = in.readInt(); - } else { - // negative means unknown, and is expected for 5.4 jobs - detectorIndex = -1; - } + detectorIndex = in.readInt(); } @Override @@ -276,9 +270,7 @@ public void writeTo(StreamOutput out) throws IOException { } else { out.writeList(Collections.emptyList()); } - if (out.getVersion().onOrAfter(Version.V_5_5_0)) { - out.writeInt(detectorIndex); - } + out.writeInt(detectorIndex); } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Job.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Job.java index 0005d16a99c94..a978612fd02e5 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Job.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/Job.java @@ -214,11 +214,7 @@ private Job(String jobId, String jobType, Version jobVersion, List group public Job(StreamInput in) throws IOException { jobId = in.readString(); jobType = in.readString(); - if (in.getVersion().onOrAfter(Version.V_5_5_0)) { - jobVersion = in.readBoolean() ? Version.readVersion(in) : null; - } else { - jobVersion = null; - } + jobVersion = in.readBoolean() ? Version.readVersion(in) : null; if (in.getVersion().onOrAfter(Version.V_6_1_0)) { groups = Collections.unmodifiableList(in.readList(StreamInput::readString)); } else { @@ -482,13 +478,11 @@ public long earliestValidTimestamp(DataCounts dataCounts) { public void writeTo(StreamOutput out) throws IOException { out.writeString(jobId); out.writeString(jobType); - if (out.getVersion().onOrAfter(Version.V_5_5_0)) { - if (jobVersion != null) { - out.writeBoolean(true); - Version.writeVersion(jobVersion, out); - } else { - out.writeBoolean(false); - } + if (jobVersion != null) { + out.writeBoolean(true); + Version.writeVersion(jobVersion, out); + } else { + out.writeBoolean(false); } if (out.getVersion().onOrAfter(Version.V_6_1_0)) { out.writeStringList(groups); @@ -666,9 +660,7 @@ private static void checkValueNotLessThan(long minVal, String name, Long value) */ public static Set getCompatibleJobTypes(Version nodeVersion) { Set compatibleTypes = new HashSet<>(); - if (nodeVersion.onOrAfter(Version.V_5_4_0)) { - compatibleTypes.add(ANOMALY_DETECTOR_JOB_TYPE); - } + compatibleTypes.add(ANOMALY_DETECTOR_JOB_TYPE); return compatibleTypes; } @@ -732,9 +724,7 @@ public Builder(Job job) { public Builder(StreamInput in) throws IOException { id = in.readOptionalString(); jobType = in.readString(); - if (in.getVersion().onOrAfter(Version.V_5_5_0)) { - jobVersion = in.readBoolean() ? Version.readVersion(in) : null; - } + jobVersion = in.readBoolean() ? 
Version.readVersion(in) : null; if (in.getVersion().onOrAfter(Version.V_6_1_0)) { groups = in.readList(StreamInput::readString); } else { @@ -921,13 +911,11 @@ public List invalidCreateTimeSettings() { public void writeTo(StreamOutput out) throws IOException { out.writeOptionalString(id); out.writeString(jobType); - if (out.getVersion().onOrAfter(Version.V_5_5_0)) { - if (jobVersion != null) { - out.writeBoolean(true); - Version.writeVersion(jobVersion, out); - } else { - out.writeBoolean(false); - } + if (jobVersion != null) { + out.writeBoolean(true); + Version.writeVersion(jobVersion, out); + } else { + out.writeBoolean(false); } if (out.getVersion().onOrAfter(Version.V_6_1_0)) { out.writeStringList(groups); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/JobState.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/JobState.java index e89149a062b68..948284d5e0080 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/JobState.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/config/JobState.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.xpack.core.ml.job.config; -import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.common.io.stream.Writeable; @@ -34,10 +33,6 @@ public static JobState fromStream(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { JobState state = this; - // Pre v5.5 the OPENING state didn't exist - if (this == OPENING && out.getVersion().before(Version.V_5_5_0)) { - state = CLOSED; - } out.writeEnum(state); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/output/FlushAcknowledgement.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/output/FlushAcknowledgement.java index ad8b24e66c643..2d9afa833c3c5 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/output/FlushAcknowledgement.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/output/FlushAcknowledgement.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.xpack.core.ml.job.process.autodetect.output; -import org.elasticsearch.Version; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -58,17 +57,13 @@ public FlushAcknowledgement(String id, Date lastFinalizedBucketEnd) { public FlushAcknowledgement(StreamInput in) throws IOException { id = in.readString(); - if (in.getVersion().after(Version.V_5_5_0)) { - lastFinalizedBucketEnd = new Date(in.readVLong()); - } + lastFinalizedBucketEnd = new Date(in.readVLong()); } @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(id); - if (out.getVersion().after(Version.V_5_5_0)) { - out.writeVLong(lastFinalizedBucketEnd.getTime()); - } + out.writeVLong(lastFinalizedBucketEnd.getTime()); } public String getId() { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/ModelSnapshot.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/ModelSnapshot.java index 03487500d8a8b..068b998dc251a 100644 --- 
a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/ModelSnapshot.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/process/autodetect/state/ModelSnapshot.java @@ -143,7 +143,7 @@ public ModelSnapshot(StreamInput in) throws IOException { if (in.getVersion().onOrAfter(Version.V_7_0_0_alpha1)) { minVersion = Version.readVersion(in); } else { - minVersion = Version.V_5_5_0; + minVersion = Version.CURRENT.minimumCompatibilityVersion(); } timestamp = in.readBoolean() ? new Date(in.readVLong()) : null; description = in.readOptionalString(); @@ -357,9 +357,8 @@ public static class Builder { private String jobId; // Stored snapshot documents created prior to 6.3.0 will have no - // value for min_version. We default it to 5.5.0 as there were - // no model changes between 5.5.0 and 6.3.0. - private Version minVersion = Version.V_5_5_0; + // value for min_version. + private Version minVersion = Version.V_6_3_0; private Date timestamp; private String description; diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/AnomalyRecord.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/AnomalyRecord.java index 360bcfaaeadfd..869cdcb437e1f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/AnomalyRecord.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/AnomalyRecord.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.xpack.core.ml.job.results; -import org.elasticsearch.Version; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; import org.elasticsearch.common.io.stream.StreamInput; @@ -163,10 +162,6 @@ public AnomalyRecord(String jobId, Date timestamp, long bucketSpan) { @SuppressWarnings("unchecked") public AnomalyRecord(StreamInput in) throws IOException { jobId = in.readString(); - // bwc for removed sequenceNum field - if (in.getVersion().before(Version.V_5_5_0)) { - in.readInt(); - } detectorIndex = in.readInt(); probability = in.readDouble(); byFieldName = in.readOptionalString(); @@ -201,10 +196,6 @@ public AnomalyRecord(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(jobId); - // bwc for removed sequenceNum field - if (out.getVersion().before(Version.V_5_5_0)) { - out.writeInt(0); - } out.writeInt(detectorIndex); out.writeDouble(probability); out.writeOptionalString(byFieldName); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/Bucket.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/Bucket.java index 8a7fe2395b4e0..8280ee9f22ef0 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/Bucket.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/Bucket.java @@ -137,19 +137,11 @@ public Bucket(StreamInput in) throws IOException { anomalyScore = in.readDouble(); bucketSpan = in.readLong(); initialAnomalyScore = in.readDouble(); - // bwc for recordCount - if (in.getVersion().before(Version.V_5_5_0)) { - in.readInt(); - } records = in.readList(AnomalyRecord::new); eventCount = in.readLong(); isInterim = in.readBoolean(); bucketInfluencers = in.readList(BucketInfluencer::new); processingTimeMs = in.readLong(); - // bwc for perPartitionMaxProbability - if (in.getVersion().before(Version.V_5_5_0)) { - in.readGenericValue(); - } // bwc for 
perPartitionNormalization if (in.getVersion().before(Version.V_6_5_0)) { in.readList(Bucket::readOldPerPartitionNormalization); @@ -171,19 +163,11 @@ public void writeTo(StreamOutput out) throws IOException { out.writeDouble(anomalyScore); out.writeLong(bucketSpan); out.writeDouble(initialAnomalyScore); - // bwc for recordCount - if (out.getVersion().before(Version.V_5_5_0)) { - out.writeInt(0); - } out.writeList(records); out.writeLong(eventCount); out.writeBoolean(isInterim); out.writeList(bucketInfluencers); out.writeLong(processingTimeMs); - // bwc for perPartitionMaxProbability - if (out.getVersion().before(Version.V_5_5_0)) { - out.writeGenericValue(Collections.emptyMap()); - } // bwc for perPartitionNormalization if (out.getVersion().before(Version.V_6_5_0)) { out.writeList(Collections.emptyList()); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/BucketInfluencer.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/BucketInfluencer.java index 8b18562ec6d1e..38d76789a2ea6 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/BucketInfluencer.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/BucketInfluencer.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.xpack.core.ml.job.results; -import org.elasticsearch.Version; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -100,10 +99,6 @@ public BucketInfluencer(StreamInput in) throws IOException { isInterim = in.readBoolean(); timestamp = new Date(in.readLong()); bucketSpan = in.readLong(); - // bwc for removed sequenceNum field - if (in.getVersion().before(Version.V_5_5_0)) { - in.readInt(); - } } @Override @@ -117,10 +112,6 @@ public void writeTo(StreamOutput out) throws IOException { out.writeBoolean(isInterim); out.writeLong(timestamp.getTime()); out.writeLong(bucketSpan); - // bwc for removed sequenceNum field - if (out.getVersion().before(Version.V_5_5_0)) { - out.writeInt(0); - } } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/Influencer.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/Influencer.java index 97ed643c44dd5..8ee49cb88d05f 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/Influencer.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/Influencer.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.xpack.core.ml.job.results; -import org.elasticsearch.Version; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.io.stream.StreamOutput; @@ -97,10 +96,6 @@ public Influencer(StreamInput in) throws IOException { influencerScore = in.readDouble(); isInterim = in.readBoolean(); bucketSpan = in.readLong(); - // bwc for removed sequenceNum field - if (in.getVersion().before(Version.V_5_5_0)) { - in.readInt(); - } } @Override @@ -114,10 +109,6 @@ public void writeTo(StreamOutput out) throws IOException { out.writeDouble(influencerScore); out.writeBoolean(isInterim); out.writeLong(bucketSpan); - // bwc for removed sequenceNum field - if (out.getVersion().before(Version.V_5_5_0)) { - out.writeInt(0); - } } @Override diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/ModelPlot.java 
b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/ModelPlot.java index c331d8b043797..9f066b6e98ec3 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/ModelPlot.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/ml/job/results/ModelPlot.java @@ -109,20 +109,7 @@ public ModelPlot(String jobId, Date timestamp, long bucketSpan, int detectorInde public ModelPlot(StreamInput in) throws IOException { jobId = in.readString(); - // timestamp isn't optional in v5.5 - if (in.getVersion().before(Version.V_5_5_0)) { - if (in.readBoolean()) { - timestamp = new Date(in.readLong()); - } else { - timestamp = new Date(); - } - } else { - timestamp = new Date(in.readLong()); - } - // bwc for removed id field - if (in.getVersion().before(Version.V_5_5_0)) { - in.readOptionalString(); - } + timestamp = new Date(in.readLong()); partitionFieldName = in.readOptionalString(); partitionFieldValue = in.readOptionalString(); overFieldName = in.readOptionalString(); @@ -138,11 +125,7 @@ public ModelPlot(StreamInput in) throws IOException { } else { actual = in.readOptionalDouble(); } - if (in.getVersion().onOrAfter(Version.V_5_5_0)) { - bucketSpan = in.readLong(); - } else { - bucketSpan = 0; - } + bucketSpan = in.readLong(); if (in.getVersion().onOrAfter(Version.V_6_1_0)) { detectorIndex = in.readInt(); } else { @@ -154,20 +137,7 @@ public ModelPlot(StreamInput in) throws IOException { @Override public void writeTo(StreamOutput out) throws IOException { out.writeString(jobId); - // timestamp isn't optional in v5.5 - if (out.getVersion().before(Version.V_5_5_0)) { - boolean hasTimestamp = timestamp != null; - out.writeBoolean(hasTimestamp); - if (hasTimestamp) { - out.writeLong(timestamp.getTime()); - } - } else { - out.writeLong(timestamp.getTime()); - } - // bwc for removed id field - if (out.getVersion().before(Version.V_5_5_0)) { - out.writeOptionalString(null); - } + out.writeLong(timestamp.getTime()); out.writeOptionalString(partitionFieldName); out.writeOptionalString(partitionFieldValue); out.writeOptionalString(overFieldName); @@ -189,9 +159,7 @@ public void writeTo(StreamOutput out) throws IOException { } else { out.writeOptionalDouble(actual); } - if (out.getVersion().onOrAfter(Version.V_5_5_0)) { - out.writeLong(bucketSpan); - } + out.writeLong(bucketSpan); if (out.getVersion().onOrAfter(Version.V_6_1_0)) { out.writeInt(detectorIndex); } diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java index 38bd84888a886..69712a6f33de7 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/authz/RoleDescriptor.java @@ -243,12 +243,7 @@ public static RoleDescriptor readFrom(StreamInput in) throws IOException { String[] runAs = in.readStringArray(); Map metadata = in.readMap(); - final Map transientMetadata; - if (in.getVersion().onOrAfter(Version.V_5_2_0)) { - transientMetadata = in.readMap(); - } else { - transientMetadata = Collections.emptyMap(); - } + final Map transientMetadata = in.readMap(); final ApplicationResourcePrivileges[] applicationPrivileges; final ConditionalClusterPrivilege[] conditionalClusterPrivileges; @@ -273,9 +268,7 @@ public static void writeTo(RoleDescriptor descriptor, StreamOutput out) throws I } 
out.writeStringArray(descriptor.runAs); out.writeMap(descriptor.metadata); - if (out.getVersion().onOrAfter(Version.V_5_2_0)) { - out.writeMap(descriptor.transientMetadata); - } + out.writeMap(descriptor.transientMetadata); if (out.getVersion().onOrAfter(Version.V_6_4_0)) { out.writeArray(ApplicationResourcePrivileges::write, descriptor.applicationPrivileges); ConditionalClusterPrivileges.writeArray(out, descriptor.getConditionalClusterPrivileges()); diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/LogstashSystemUser.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/LogstashSystemUser.java index 047758177fb0b..71e43ff5a30fe 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/LogstashSystemUser.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/security/user/LogstashSystemUser.java @@ -5,7 +5,6 @@ */ package org.elasticsearch.xpack.core.security.user; -import org.elasticsearch.Version; import org.elasticsearch.protocol.xpack.security.User; import org.elasticsearch.xpack.core.security.support.MetadataUtils; @@ -16,8 +15,6 @@ public class LogstashSystemUser extends User { public static final String NAME = UsernamesField.LOGSTASH_NAME; public static final String ROLE_NAME = UsernamesField.LOGSTASH_ROLE; - public static final Version DEFINED_SINCE = Version.V_5_2_0; - public static final BuiltinUserInfo USER_INFO = new BuiltinUserInfo(NAME, ROLE_NAME, DEFINED_SINCE); public LogstashSystemUser(boolean enabled) { super(NAME, new String[]{ ROLE_NAME }, null, null, MetadataUtils.DEFAULT_RESERVED_METADATA, enabled); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/XPackLicenseStateTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/XPackLicenseStateTests.java index bb21ddbd1a13e..c2cb5af130538 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/license/XPackLicenseStateTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/license/XPackLicenseStateTests.java @@ -229,7 +229,7 @@ public void testNewTrialDefaultsSecurityOff() { public void testOldTrialDefaultsSecurityOn() { XPackLicenseState licenseState = new XPackLicenseState(Settings.EMPTY); - licenseState.update(TRIAL, true, rarely() ? null : VersionUtils.randomVersionBetween(random(), Version.V_5_6_0, Version.V_6_2_4)); + licenseState.update(TRIAL, true, rarely() ? 
null : VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, Version.V_6_2_4)); assertThat(licenseState.isSecurityEnabled(), is(true)); assertThat(licenseState.isAuthAllowed(), is(true)); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/JobTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/JobTests.java index 88d9b07816d44..7e53478533eb3 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/JobTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/ml/job/config/JobTests.java @@ -39,7 +39,6 @@ import java.util.Map; import java.util.Set; -import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.is; @@ -479,19 +478,6 @@ public void testBuilder_givenTimeFieldInAnalysisConfig() { assertThat(e.getMessage(), equalTo(Messages.getMessage(Messages.JOB_CONFIG_TIME_FIELD_NOT_ALLOWED_IN_ANALYSIS_CONFIG))); } - public void testGetCompatibleJobTypes_givenVersionBefore_V_5_4() { - assertThat(Job.getCompatibleJobTypes(Version.V_5_0_0).isEmpty(), is(true)); - assertThat(Job.getCompatibleJobTypes(Version.V_5_3_0).isEmpty(), is(true)); - assertThat(Job.getCompatibleJobTypes(Version.V_5_3_2).isEmpty(), is(true)); - } - - public void testGetCompatibleJobTypes_givenVersionAfter_V_5_4() { - assertThat(Job.getCompatibleJobTypes(Version.V_5_4_0), contains(Job.ANOMALY_DETECTOR_JOB_TYPE)); - assertThat(Job.getCompatibleJobTypes(Version.V_5_4_0).size(), equalTo(1)); - assertThat(Job.getCompatibleJobTypes(Version.V_5_5_0), contains(Job.ANOMALY_DETECTOR_JOB_TYPE)); - assertThat(Job.getCompatibleJobTypes(Version.V_5_5_0).size(), equalTo(1)); - } - public void testInvalidCreateTimeSettings() { Job.Builder builder = new Job.Builder("invalid-settings"); builder.setModelSnapshotId("snapshot-foo"); diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequestTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequestTests.java index a2b8d40e44c0b..a68a522f0242c 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequestTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/security/action/role/PutRoleRequestTests.java @@ -78,7 +78,7 @@ public void testSerializationV63AndBefore() throws IOException { final PutRoleRequest original = buildRandomRequest(); final BytesStreamOutput out = new BytesStreamOutput(); - final Version version = VersionUtils.randomVersionBetween(random(), Version.V_5_6_0, Version.V_6_3_2); + final Version version = VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, Version.V_6_3_2); out.setVersion(version); original.writeTo(out); diff --git a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecksTests.java b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecksTests.java index 0f54784a33f46..d496eea2f0d13 100644 --- a/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecksTests.java +++ b/x-pack/plugin/deprecation/src/test/java/org/elasticsearch/xpack/deprecation/IndexDeprecationChecksTests.java @@ -7,10 +7,7 @@ import org.elasticsearch.Version; import org.elasticsearch.cluster.metadata.IndexMetaData; -import 
org.elasticsearch.common.Strings; import org.elasticsearch.common.settings.Settings; -import org.elasticsearch.common.xcontent.XContentBuilder; -import org.elasticsearch.common.xcontent.XContentFactory; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.VersionUtils; import org.elasticsearch.xpack.core.deprecation.DeprecationInfoAction; @@ -23,153 +20,9 @@ import static org.elasticsearch.xpack.deprecation.DeprecationChecks.INDEX_SETTINGS_CHECKS; public class IndexDeprecationChecksTests extends ESTestCase { - - private static void assertSettingsAndIssue(String key, String value, DeprecationIssue expected) { - IndexMetaData indexMetaData = IndexMetaData.builder("test") - .settings(settings(Version.V_5_6_0) - .put(key, value)) - .numberOfShards(1) - .numberOfReplicas(0) - .build(); - List issues = DeprecationInfoAction.filterChecks(INDEX_SETTINGS_CHECKS, c -> c.apply(indexMetaData)); - assertEquals(singletonList(expected), issues); - } - - public void testCoerceBooleanDeprecation() throws IOException { - XContentBuilder mapping = XContentFactory.jsonBuilder(); - mapping.startObject(); { - mapping.startObject("properties"); { - mapping.startObject("my_boolean"); { - mapping.field("type", "boolean"); - } - mapping.endObject(); - mapping.startObject("my_object"); { - mapping.startObject("properties"); { - mapping.startObject("my_inner_boolean"); { - mapping.field("type", "boolean"); - } - mapping.endObject(); - mapping.startObject("my_text"); { - mapping.field("type", "text"); - mapping.startObject("fields"); { - mapping.startObject("raw"); { - mapping.field("type", "boolean"); - } - mapping.endObject(); - } - mapping.endObject(); - } - mapping.endObject(); - } - mapping.endObject(); - } - mapping.endObject(); - } - mapping.endObject(); - } - mapping.endObject(); - - IndexMetaData indexMetaData = IndexMetaData.builder("test") - .putMapping("testBooleanCoercion", Strings.toString(mapping)) - .settings(settings(Version.V_5_6_0)) - .numberOfShards(1) - .numberOfReplicas(0) - .build(); - - DeprecationIssue expected = new DeprecationIssue(DeprecationIssue.Level.INFO, - "Coercion of boolean fields", - "https://www.elastic.co/guide/en/elasticsearch/reference/master/" + - "breaking_60_mappings_changes.html#_coercion_of_boolean_fields", - "[[type: testBooleanCoercion, field: my_boolean], [type: testBooleanCoercion, field: my_inner_boolean]," + - " [type: testBooleanCoercion, field: my_text, multifield: raw]]"); - List issues = DeprecationInfoAction.filterChecks(INDEX_SETTINGS_CHECKS, c -> c.apply(indexMetaData)); - assertEquals(singletonList(expected), issues); - } - - public void testMatchMappingTypeCheck() throws IOException { - XContentBuilder mapping = XContentFactory.jsonBuilder(); - mapping.startObject(); { - mapping.startArray("dynamic_templates"); - { - mapping.startObject(); - { - mapping.startObject("integers"); - { - mapping.field("match_mapping_type", "UNKNOWN_VALUE"); - mapping.startObject("mapping"); - { - mapping.field("type", "integer"); - } - mapping.endObject(); - } - mapping.endObject(); - } - mapping.endObject(); - } - mapping.endArray(); - } - mapping.endObject(); - - IndexMetaData indexMetaData = IndexMetaData.builder("test") - .putMapping("test", Strings.toString(mapping)) - .settings(settings(Version.V_5_6_0)) - .numberOfShards(1) - .numberOfReplicas(0) - .build(); - - DeprecationIssue expected = new DeprecationIssue(DeprecationIssue.Level.CRITICAL, - "Unrecognized match_mapping_type options not silently ignored", - 
"https://www.elastic.co/guide/en/elasticsearch/reference/master/" + - "breaking_60_mappings_changes.html#_unrecognized_literal_match_mapping_type_literal_options_not_silently_ignored", - "[type: test, dynamicFieldDefinitionintegers, unknown match_mapping_type[UNKNOWN_VALUE]]"); - List issues = DeprecationInfoAction.filterChecks(INDEX_SETTINGS_CHECKS, c -> c.apply(indexMetaData)); - assertEquals(singletonList(expected), issues); - } - - public void testBaseSimilarityDefinedCheck() { - assertSettingsAndIssue("index.similarity.base.type", "classic", - new DeprecationIssue(DeprecationIssue.Level.WARNING, - "The base similarity is now ignored as coords and query normalization have been removed." + - "If provided, this setting will be ignored and issue a deprecation warning", - "https://www.elastic.co/guide/en/elasticsearch/reference/master/" + - "breaking_60_settings_changes.html#_similarity_settings", null)); - } - - public void testIndexStoreTypeCheck() { - assertSettingsAndIssue("index.store.type", "niofs", - new DeprecationIssue(DeprecationIssue.Level.CRITICAL, - "The default index.store.type has been removed. If you were using it, " + - "we advise that you simply remove it from your index settings and Elasticsearch" + - "will use the best store implementation for your operating system.", - "https://www.elastic.co/guide/en/elasticsearch/reference/master/" + - "breaking_60_settings_changes.html#_store_settings", null)); - } - public void testStoreThrottleSettingsCheck() { - assertSettingsAndIssue("index.store.throttle.max_bytes_per_sec", "32", - new DeprecationIssue(DeprecationIssue.Level.CRITICAL, - "index.store.throttle settings are no longer recognized. these settings should be removed", - "https://www.elastic.co/guide/en/elasticsearch/reference/master/" + - "breaking_60_settings_changes.html#_store_throttling_settings", - "present settings: [index.store.throttle.max_bytes_per_sec]")); - assertSettingsAndIssue("index.store.throttle.type", "none", - new DeprecationIssue(DeprecationIssue.Level.CRITICAL, - "index.store.throttle settings are no longer recognized. 
these settings should be removed", - "https://www.elastic.co/guide/en/elasticsearch/reference/master/" + - "breaking_60_settings_changes.html#_store_throttling_settings", - "present settings: [index.store.throttle.type]")); - } - - public void testSharedFileSystemSettingsCheck() { - assertSettingsAndIssue("index.shared_filesystem", "true", - new DeprecationIssue(DeprecationIssue.Level.CRITICAL, - "[index.shared_filesystem] setting should be removed", - "https://www.elastic.co/guide/en/elasticsearch/reference/6.0/" + - "breaking_60_indices_changes.html#_shadow_replicas_have_been_removed", null)); - } - public void testDelimitedPayloadFilterCheck() throws IOException { Settings settings = settings( - VersionUtils.randomVersionBetween(random(), Version.V_5_0_0, VersionUtils.getPreviousVersion(Version.V_7_0_0_alpha1))) + VersionUtils.randomVersionBetween(random(), Version.V_6_0_0, VersionUtils.getPreviousVersion(Version.V_7_0_0_alpha1))) .put("index.analysis.filter.my_delimited_payload_filter.type", "delimited_payload_filter") .put("index.analysis.filter.my_delimited_payload_filter.delimiter", "^") .put("index.analysis.filter.my_delimited_payload_filter.encoding", "identity").build(); @@ -183,4 +36,4 @@ public void testDelimitedPayloadFilterCheck() throws IOException { List issues = DeprecationInfoAction.filterChecks(INDEX_SETTINGS_CHECKS, c -> c.apply(indexMetaData)); assertEquals(singletonList(expected), issues); } -} \ No newline at end of file +} diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportIsolateDatafeedAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportIsolateDatafeedAction.java index 3ca3c3154506a..252cf97d0c519 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportIsolateDatafeedAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportIsolateDatafeedAction.java @@ -5,8 +5,6 @@ */ package org.elasticsearch.xpack.ml.action; -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.FailedNodeException; import org.elasticsearch.action.TaskOperationFailure; @@ -53,11 +51,6 @@ protected void doExecute(Task task, IsolateDatafeedAction.Request request, Actio String executorNode = datafeedTask.getExecutorNode(); DiscoveryNodes nodes = state.nodes(); - if (nodes.resolveNode(executorNode).getVersion().before(Version.V_5_5_0)) { - listener.onFailure(new ElasticsearchException("Force delete datafeed is not supported because the datafeed task " + - "is running on a node [" + executorNode + "] with a version prior to " + Version.V_5_5_0)); - return; - } request.setNodes(datafeedTask.getExecutorNode()); super.doExecute(task, request, listener); diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportKillProcessAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportKillProcessAction.java index b40f0368a1554..a9b43c3bcc47d 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportKillProcessAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportKillProcessAction.java @@ -5,8 +5,6 @@ */ package org.elasticsearch.xpack.ml.action; -import org.elasticsearch.ElasticsearchException; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionListener; import org.elasticsearch.action.support.ActionFilters; import 
org.elasticsearch.cluster.node.DiscoveryNode; @@ -73,12 +71,6 @@ protected void doExecute(Task task, KillProcessAction.Request request, ActionLis return; } - Version nodeVersion = executorNode.getVersion(); - if (nodeVersion.before(Version.V_5_5_0)) { - listener.onFailure(new ElasticsearchException("Cannot kill the process on node with version " + nodeVersion)); - return; - } - super.doExecute(task, request, listener); } diff --git a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java index 56d03dd1aacc6..512d8188abfac 100644 --- a/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java +++ b/x-pack/plugin/ml/src/main/java/org/elasticsearch/xpack/ml/action/TransportOpenJobAction.java @@ -179,14 +179,6 @@ static PersistentTasksCustomMetaData.Assignment selectLeastLoadedMlNode(String j continue; } - if (nodeSupportsJobVersion(node.getVersion()) == false) { - String reason = "Not opening job [" + jobId + "] on node [" + nodeNameAndVersion(node) - + "], because this node does not support jobs of version [" + job.getJobVersion() + "]"; - logger.trace(reason); - reasons.add(reason); - continue; - } - if (nodeSupportsModelSnapshotVersion(node, job) == false) { String reason = "Not opening job [" + jobId + "] on node [" + nodeNameAndVersion(node) + "], because the job's model snapshot requires a node of version [" @@ -385,10 +377,6 @@ static List verifyIndicesPrimaryShardsAreActive(String jobId, ClusterSta return unavailableIndices; } - private static boolean nodeSupportsJobVersion(Version nodeVersion) { - return nodeVersion.onOrAfter(Version.V_5_5_0); - } - private static boolean nodeSupportsModelSnapshotVersion(DiscoveryNode node, Job job) { if (job.getModelSnapshotId() == null || job.getModelSnapshotMinVersion() == null) { // There is no snapshot to restore or the min model snapshot version is 5.5.0 diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportOpenJobActionTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportOpenJobActionTests.java index 02bfb1b326fd9..5bf8fb6956bfe 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportOpenJobActionTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/action/TransportOpenJobActionTests.java @@ -423,33 +423,6 @@ public void testSelectLeastLoadedMlNode_noCompatibleJobTypeNodes() { assertNull(result.getExecutorNode()); } - public void testSelectLeastLoadedMlNode_noNodesPriorTo_V_5_5() { - Map nodeAttr = new HashMap<>(); - nodeAttr.put(MachineLearning.ML_ENABLED_NODE_ATTR, "true"); - DiscoveryNodes nodes = DiscoveryNodes.builder() - .add(new DiscoveryNode("_node_name1", "_node_id1", new TransportAddress(InetAddress.getLoopbackAddress(), 9300), - nodeAttr, Collections.emptySet(), Version.V_5_4_0)) - .add(new DiscoveryNode("_node_name2", "_node_id2", new TransportAddress(InetAddress.getLoopbackAddress(), 9301), - nodeAttr, Collections.emptySet(), Version.V_5_4_0)) - .build(); - - PersistentTasksCustomMetaData.Builder tasksBuilder = PersistentTasksCustomMetaData.builder(); - addJobTask("incompatible_type_job", "_node_id1", null, tasksBuilder); - PersistentTasksCustomMetaData tasks = tasksBuilder.build(); - - ClusterState.Builder cs = ClusterState.builder(new ClusterName("_name")); - MetaData.Builder metaData = MetaData.builder(); - RoutingTable.Builder routingTable = 
RoutingTable.builder(); - addJobAndIndices(metaData, routingTable, "incompatible_type_job"); - cs.nodes(nodes); - metaData.putCustom(PersistentTasksCustomMetaData.TYPE, tasks); - cs.metaData(metaData); - cs.routingTable(routingTable.build()); - Assignment result = TransportOpenJobAction.selectLeastLoadedMlNode("incompatible_type_job", cs.build(), 2, 10, 30, logger); - assertThat(result.getExplanation(), containsString("because this node does not support jobs of version [" + Version.CURRENT + "]")); - assertNull(result.getExecutorNode()); - } - public void testSelectLeastLoadedMlNode_noNodesMatchingModelSnapshotMinVersion() { Map nodeAttr = new HashMap<>(); nodeAttr.put(MachineLearning.ML_ENABLED_NODE_ATTR, "true"); @@ -606,12 +579,6 @@ public void testMappingRequiresUpdateMaliciousMappingVersion() throws IOExceptio assertArrayEquals(indices, TransportOpenJobAction.mappingRequiresUpdate(cs, indices, Version.CURRENT, logger)); } - public void testMappingRequiresUpdateOldMappingVersion() throws IOException { - ClusterState cs = getClusterStateWithMappingsWithMetaData(Collections.singletonMap("version_54", Version.V_5_4_0.toString())); - String[] indices = new String[] { "version_54" }; - assertArrayEquals(indices, TransportOpenJobAction.mappingRequiresUpdate(cs, indices, Version.CURRENT, logger)); - } - public void testMappingRequiresUpdateBogusMappingVersion() throws IOException { ClusterState cs = getClusterStateWithMappingsWithMetaData(Collections.singletonMap("version_bogus", "0.0")); String[] indices = new String[] { "version_bogus" }; @@ -632,21 +599,6 @@ public void testMappingRequiresUpdateNewerMappingVersionMinor() throws IOExcepti TransportOpenJobAction.mappingRequiresUpdate(cs, indices, VersionUtils.getPreviousMinorVersion(), logger)); } - public void testMappingRequiresUpdateSomeVersionMix() throws IOException { - Map versionMix = new HashMap<>(); - versionMix.put("version_54", Version.V_5_4_0); - versionMix.put("version_current", Version.CURRENT); - versionMix.put("version_null", null); - versionMix.put("version_current2", Version.CURRENT); - versionMix.put("version_bogus", "0.0.0"); - versionMix.put("version_current3", Version.CURRENT); - versionMix.put("version_bogus2", "0.0.0"); - - ClusterState cs = getClusterStateWithMappingsWithMetaData(versionMix); - String[] indices = new String[] { "version_54", "version_null", "version_bogus", "version_bogus2" }; - assertArrayEquals(indices, TransportOpenJobAction.mappingRequiresUpdate(cs, indices, Version.CURRENT, logger)); - } - public void testNodeNameAndVersion() { TransportAddress ta = new TransportAddress(InetAddress.getLoopbackAddress(), 9300); Map attributes = new HashMap<>(); diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedStateTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedStateTests.java index 8b3e68b1e5714..32699f60cbdb9 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedStateTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/datafeed/DatafeedStateTests.java @@ -5,19 +5,8 @@ */ package org.elasticsearch.xpack.ml.datafeed; -import org.elasticsearch.Version; -import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.ml.datafeed.DatafeedState; -import org.mockito.ArgumentCaptor; -import org.mockito.invocation.InvocationOnMock; -import org.mockito.stubbing.Answer; - -import java.io.IOException; - -import static 
org.mockito.Mockito.doAnswer; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; public class DatafeedStateTests extends ESTestCase { @@ -37,35 +26,4 @@ public void testValidOrdinals() { assertEquals(2, DatafeedState.STARTING.ordinal()); assertEquals(3, DatafeedState.STOPPING.ordinal()); } - - @SuppressWarnings("unchecked") - public void testStreaming_v54BackwardsCompatibility() throws IOException { - StreamOutput out = mock(StreamOutput.class); - when(out.getVersion()).thenReturn(Version.V_5_4_0); - ArgumentCaptor enumCaptor = ArgumentCaptor.forClass(Enum.class); - - doAnswer(new Answer() { - @Override - public Void answer(InvocationOnMock invocationOnMock) { - return null; - } - }).when(out).writeEnum(enumCaptor.capture()); - - // STARTING & STOPPING states were introduced in v5.5. - // Pre v5.5 STARTING translated as STOPPED - DatafeedState.STARTING.writeTo(out); - assertEquals(DatafeedState.STOPPED, enumCaptor.getValue()); - - // Pre v5.5 STOPPING means the datafeed is STARTED - DatafeedState.STOPPING.writeTo(out); - assertEquals(DatafeedState.STARTED, enumCaptor.getValue()); - - // POST 5.5 enums a written as is - when(out.getVersion()).thenReturn(Version.V_5_5_0); - - DatafeedState.STARTING.writeTo(out); - assertEquals(DatafeedState.STARTING, enumCaptor.getValue()); - DatafeedState.STOPPING.writeTo(out); - assertEquals(DatafeedState.STOPPING, enumCaptor.getValue()); - } } diff --git a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/config/JobStateTests.java b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/config/JobStateTests.java index cd983c6b0302b..2e324b6a1c201 100644 --- a/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/config/JobStateTests.java +++ b/x-pack/plugin/ml/src/test/java/org/elasticsearch/xpack/ml/job/config/JobStateTests.java @@ -5,19 +5,8 @@ */ package org.elasticsearch.xpack.ml.job.config; -import org.elasticsearch.Version; -import org.elasticsearch.common.io.stream.StreamOutput; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.ml.job.config.JobState; -import org.mockito.ArgumentCaptor; -import org.mockito.invocation.InvocationOnMock; -import org.mockito.stubbing.Answer; - -import java.io.IOException; - -import static org.mockito.Mockito.doAnswer; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; public class JobStateTests extends ESTestCase { @@ -60,35 +49,4 @@ public void testIsAnyOf() { assertTrue(JobState.CLOSED.isAnyOf(JobState.CLOSED)); assertTrue(JobState.CLOSING.isAnyOf(JobState.CLOSING)); } - - @SuppressWarnings("unchecked") - public void testStreaming_v54BackwardsCompatibility() throws IOException { - StreamOutput out = mock(StreamOutput.class); - when(out.getVersion()).thenReturn(Version.V_5_4_0); - ArgumentCaptor enumCaptor = ArgumentCaptor.forClass(Enum.class); - - doAnswer(new Answer() { - @Override - public Void answer(InvocationOnMock invocationOnMock) { - return null; - } - }).when(out).writeEnum(enumCaptor.capture()); - - // OPENING state was introduced in v5.5. 
- // Pre v5.5 its translated as CLOSED - JobState.OPENING.writeTo(out); - assertEquals(JobState.CLOSED, enumCaptor.getValue()); - - when(out.getVersion()).thenReturn(Version.V_5_5_0); - - doAnswer(new Answer() { - @Override - public Void answer(InvocationOnMock invocationOnMock) { - return null; - } - }).when(out).writeEnum(enumCaptor.capture()); - - JobState.OPENING.writeTo(out); - assertEquals(JobState.OPENING, enumCaptor.getValue()); - } } diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/action/MonitoringBulkDocTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/action/MonitoringBulkDocTests.java index 57106363bc199..dc294ef53de52 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/action/MonitoringBulkDocTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/action/MonitoringBulkDocTests.java @@ -5,12 +5,10 @@ */ package org.elasticsearch.xpack.monitoring.action; -import org.elasticsearch.Version; import org.elasticsearch.common.Strings; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.xcontent.XContentType; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.test.EqualsHashCodeTestUtils; @@ -21,7 +19,6 @@ import java.io.IOException; import java.util.ArrayList; -import java.util.Base64; import java.util.List; import static java.util.Collections.emptyList; @@ -158,23 +155,6 @@ public void testSerialization() throws IOException { } } - public void testSerializationBwc() throws IOException { - final byte[] data = Base64.getDecoder().decode("AQNtSWQBBTUuMS4yAAAAAQEEdHlwZQECaWQNeyJmb28iOiJiYXIifQAAAAAAAAAA"); - final Version version = randomFrom(Version.V_5_0_0, Version.V_5_0_1, Version.V_5_0_2, - Version.V_5_1_1, Version.V_5_1_2, Version.V_5_2_0); - try (StreamInput in = StreamInput.wrap(data)) { - in.setVersion(version); - MonitoringBulkDoc bulkDoc = MonitoringBulkDoc.readFrom(in); - assertEquals(MonitoredSystem.UNKNOWN, bulkDoc.getSystem()); - assertEquals("type", bulkDoc.getType()); - assertEquals("id", bulkDoc.getId()); - assertEquals(0L, bulkDoc.getTimestamp()); - assertEquals(0L, bulkDoc.getIntervalMillis()); - assertEquals("{\"foo\":\"bar\"}", bulkDoc.getSource().utf8ToString()); - assertEquals(XContentType.JSON, bulkDoc.getXContentType()); - } - } - /** * Test that we allow strings to be "" because Logstash 5.2 - 5.3 would submit empty _id values for time-based documents */ diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/action/MonitoringBulkRequestTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/action/MonitoringBulkRequestTests.java index b336b3c885310..dc5cad7c94fd4 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/action/MonitoringBulkRequestTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/action/MonitoringBulkRequestTests.java @@ -6,7 +6,6 @@ package org.elasticsearch.xpack.monitoring.action; import org.apache.lucene.util.BytesRef; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionRequestValidationException; import org.elasticsearch.common.bytes.BytesArray; import org.elasticsearch.common.bytes.BytesReference; @@ -26,7 +25,6 @@ import 
java.util.Collection; import java.util.List; -import static org.elasticsearch.test.VersionUtils.randomVersionBetween; import static org.hamcrest.Matchers.containsString; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.hasItem; @@ -254,52 +252,6 @@ public void testSerialization() throws IOException { assertArrayEquals(originalBulkDocs, deserializedBulkDocs); } - public void testSerializationBwc() throws IOException { - final MonitoringBulkRequest originalRequest = new MonitoringBulkRequest(); - - final int numDocs = iterations(10, 30); - for (int i = 0; i < numDocs; i++) { - originalRequest.add(randomMonitoringBulkDoc()); - } - - final Version version = randomVersionBetween(random(), Version.V_5_0_0, Version.V_6_0_0_rc1); - - final BytesStreamOutput out = new BytesStreamOutput(); - out.setVersion(version); - originalRequest.writeTo(out); - - final StreamInput in = out.bytes().streamInput(); - in.setVersion(out.getVersion()); - - final MonitoringBulkRequest deserializedRequest = new MonitoringBulkRequest(); - deserializedRequest.readFrom(in); - - assertThat(in.available(), equalTo(0)); - - final MonitoringBulkDoc[] originalBulkDocs = originalRequest.getDocs().toArray(new MonitoringBulkDoc[]{}); - final MonitoringBulkDoc[] deserializedBulkDocs = deserializedRequest.getDocs().toArray(new MonitoringBulkDoc[]{}); - - assertThat(originalBulkDocs.length, equalTo(deserializedBulkDocs.length)); - - for (int i = 0; i < originalBulkDocs.length; i++) { - final MonitoringBulkDoc original = originalBulkDocs[i]; - final MonitoringBulkDoc deserialized = deserializedBulkDocs[i]; - - assertThat(deserialized.getSystem(), equalTo(original.getSystem())); - assertThat(deserialized.getType(), equalTo(original.getType())); - assertThat(deserialized.getId(), equalTo(original.getId())); - assertThat(deserialized.getTimestamp(), equalTo(original.getTimestamp())); - assertThat(deserialized.getSource(), equalTo(original.getSource())); - assertThat(deserialized.getXContentType(), equalTo(original.getXContentType())); - - if (version.onOrAfter(Version.V_6_0_0_rc1)) { - assertThat(deserialized.getIntervalMillis(), equalTo(original.getIntervalMillis())); - } else { - assertThat(deserialized.getIntervalMillis(), equalTo(0L)); - } - } - } - /** * Return a {@link XContentType} supported by the Monitoring Bulk API (JSON or Smile) */ diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/CollectorTests.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/CollectorTests.java index 79279faa6f405..3d1a0bf9adedb 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/CollectorTests.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/collector/CollectorTests.java @@ -5,39 +5,11 @@ */ package org.elasticsearch.xpack.monitoring.collector; -import org.elasticsearch.Version; -import org.elasticsearch.cluster.node.DiscoveryNode; -import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.test.ESTestCase; -import org.elasticsearch.xpack.core.monitoring.exporter.MonitoringDoc; - -import java.util.Arrays; -import java.util.Collections; -import java.util.HashSet; -import java.util.Set; public class CollectorTests extends ESTestCase { public void testConvertNullNode() { assertEquals(null, Collector.convertNode(randomNonNegativeLong(), null)); } - - public void testConvertNode() { - final String name = randomBoolean() ? 
randomAlphaOfLength(5) : ""; - final String nodeId = randomAlphaOfLength(5); - final TransportAddress address = buildNewFakeTransportAddress(); - final Version version = randomFrom(Version.V_5_0_1, Version.V_5_3_0, Version.CURRENT); - final long timestamp = randomNonNegativeLong(); - - final Set roles = new HashSet<>(); - if (randomBoolean()) { - roles.addAll(randomSubsetOf(Arrays.asList(DiscoveryNode.Role.values()))); - } - - final MonitoringDoc.Node expectedNode = new MonitoringDoc.Node(nodeId, address.address().getHostString(), address.toString(), - address.getAddress(), name, timestamp); - - DiscoveryNode discoveryNode = new DiscoveryNode(name, nodeId, address, Collections.emptyMap(), roles, version); - assertEquals(expectedNode, Collector.convertNode(timestamp, discoveryNode)); - } } diff --git a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/BaseMonitoringDocTestCase.java b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/BaseMonitoringDocTestCase.java index 513ee3bdbb66b..46ba34dcd1a50 100644 --- a/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/BaseMonitoringDocTestCase.java +++ b/x-pack/plugin/monitoring/src/test/java/org/elasticsearch/xpack/monitoring/exporter/BaseMonitoringDocTestCase.java @@ -5,12 +5,10 @@ */ package org.elasticsearch.xpack.monitoring.exporter; -import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.UUIDs; import org.elasticsearch.common.bytes.BytesReference; import org.elasticsearch.common.io.stream.NamedWriteableRegistry; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.common.transport.TransportAddress; import org.elasticsearch.common.xcontent.LoggingDeprecationHandler; import org.elasticsearch.common.xcontent.NamedXContentRegistry; @@ -31,14 +29,12 @@ import java.io.IOException; import java.util.ArrayList; -import java.util.Base64; import java.util.List; import java.util.Map; import static java.util.Collections.emptyList; import static org.elasticsearch.common.xcontent.XContentHelper.toXContent; import static org.elasticsearch.test.EqualsHashCodeTestUtils.checkEqualsAndHashCode; -import static org.elasticsearch.test.VersionUtils.randomVersionBetween; import static org.elasticsearch.test.hamcrest.ElasticsearchAssertions.assertToXContentEquivalent; import static org.hamcrest.Matchers.equalTo; import static org.hamcrest.Matchers.notNullValue; @@ -273,22 +269,4 @@ public void testMonitoringNodeSerialization() throws IOException { assertEquals(deserialized.hashCode(), original.hashCode()); assertNotSame(deserialized, original); } - - public void testMonitoringNodeBwcSerialization() throws IOException { - final Version version = randomVersionBetween(random(), Version.V_5_0_0, Version.V_6_0_0_beta2); - - final byte[] data = Base64.getDecoder() - .decode("AQVFSWJKdgEDdFFOAQV3cGtMagEFa2xqeWEBBVZTamF2AwVrZXkjMgEyBWtleSMxATEFa2V5IzABMAAAAAAAAA=="); - try (StreamInput in = StreamInput.wrap(data)) { - in.setVersion(version); - - final MonitoringDoc.Node node = new MonitoringDoc.Node(in); - assertEquals("EIbJv", node.getUUID()); - assertEquals("VSjav", node.getName()); - assertEquals("tQN", node.getHost()); - assertEquals("wpkLj", node.getTransportAddress()); - assertEquals("kljya", node.getIp()); - assertEquals(0L, node.getTimestamp()); - } - } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealm.java 
b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealm.java index 99c138bbb121d..0b8dbd0233550 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealm.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealm.java @@ -226,12 +226,10 @@ private boolean userIsDefinedForCurrentSecurityMapping(String username) { private Version getDefinedVersion(String username) { switch (username) { - case LogstashSystemUser.NAME: - return LogstashSystemUser.DEFINED_SINCE; case BeatsSystemUser.NAME: return BeatsSystemUser.DEFINED_SINCE; default: - return Version.V_5_0_0; + return Version.V_6_0_0; } } diff --git a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/ServerTransportFilter.java b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/ServerTransportFilter.java index 761af81b08ec5..b686994a2ee98 100644 --- a/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/ServerTransportFilter.java +++ b/x-pack/plugin/security/src/main/java/org/elasticsearch/xpack/security/transport/ServerTransportFilter.java @@ -25,7 +25,6 @@ import org.elasticsearch.transport.nio.NioTcpChannel; import org.elasticsearch.xpack.core.security.SecurityContext; import org.elasticsearch.xpack.core.security.authc.Authentication; -import org.elasticsearch.xpack.core.security.user.KibanaUser; import org.elasticsearch.xpack.core.security.user.SystemUser; import org.elasticsearch.protocol.xpack.security.User; import org.elasticsearch.xpack.security.action.SecurityActionMapper; @@ -116,50 +115,28 @@ requests from all the nodes are attached with a user (either a serialize } } - final Version version = transportChannel.getVersion().equals(Version.V_5_4_0) ? 
Version.CURRENT : transportChannel.getVersion(); + final Version version = transportChannel.getVersion(); authcService.authenticate(securityAction, request, (User)null, ActionListener.wrap((authentication) -> { - if (reservedRealmEnabled && authentication.getVersion().before(Version.V_5_2_0) && - KibanaUser.NAME.equals(authentication.getUser().authenticatedUser().principal())) { - executeAsCurrentVersionKibanaUser(securityAction, request, transportChannel, listener, authentication); - } else if (securityAction.equals(TransportService.HANDSHAKE_ACTION_NAME) && - SystemUser.is(authentication.getUser()) == false) { - securityContext.executeAsUser(SystemUser.INSTANCE, (ctx) -> { - final Authentication replaced = Authentication.getAuthentication(threadContext); - final AuthorizationUtils.AsyncAuthorizer asyncAuthorizer = - new AuthorizationUtils.AsyncAuthorizer(replaced, listener, (userRoles, runAsRoles) -> { - authzService.authorize(replaced, securityAction, request, userRoles, runAsRoles); - listener.onResponse(null); - }); - asyncAuthorizer.authorize(authzService); - }, version); - } else { + if (securityAction.equals(TransportService.HANDSHAKE_ACTION_NAME) && + SystemUser.is(authentication.getUser()) == false) { + securityContext.executeAsUser(SystemUser.INSTANCE, (ctx) -> { + final Authentication replaced = Authentication.getAuthentication(threadContext); final AuthorizationUtils.AsyncAuthorizer asyncAuthorizer = - new AuthorizationUtils.AsyncAuthorizer(authentication, listener, (userRoles, runAsRoles) -> { - authzService.authorize(authentication, securityAction, request, userRoles, runAsRoles); - listener.onResponse(null); - }); + new AuthorizationUtils.AsyncAuthorizer(replaced, listener, (userRoles, runAsRoles) -> { + authzService.authorize(replaced, securityAction, request, userRoles, runAsRoles); + listener.onResponse(null); + }); asyncAuthorizer.authorize(authzService); - } - }, listener::onFailure)); - } - - private void executeAsCurrentVersionKibanaUser(String securityAction, TransportRequest request, TransportChannel transportChannel, - ActionListener listener, Authentication authentication) { - // the authentication came from an older node - so let's replace the user with our version - final User kibanaUser = new KibanaUser(authentication.getUser().enabled()); - if (kibanaUser.enabled()) { - securityContext.executeAsUser(kibanaUser, (original) -> { - final Authentication replacedUserAuth = securityContext.getAuthentication(); + }, version); + } else { final AuthorizationUtils.AsyncAuthorizer asyncAuthorizer = - new AuthorizationUtils.AsyncAuthorizer(replacedUserAuth, listener, (userRoles, runAsRoles) -> { - authzService.authorize(replacedUserAuth, securityAction, request, userRoles, runAsRoles); + new AuthorizationUtils.AsyncAuthorizer(authentication, listener, (userRoles, runAsRoles) -> { + authzService.authorize(authentication, securityAction, request, userRoles, runAsRoles); listener.onResponse(null); }); asyncAuthorizer.authorize(authzService); - }, transportChannel.getVersion()); - } else { - throw new IllegalStateException("a disabled user should never be sent. 
" + kibanaUser); - } + } + }, listener::onFailure)); } } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilterTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilterTests.java index 1ac5490dc0c6a..e4e1e7ca1c015 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilterTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/action/filter/SecurityActionFilterTests.java @@ -79,7 +79,7 @@ public void init() throws Exception { ClusterState state = mock(ClusterState.class); DiscoveryNodes nodes = DiscoveryNodes.builder() .add(new DiscoveryNode("id1", buildNewFakeTransportAddress(), Version.CURRENT)) - .add(new DiscoveryNode("id2", buildNewFakeTransportAddress(), Version.V_5_4_0)) + .add(new DiscoveryNode("id2", buildNewFakeTransportAddress(), Version.V_6_0_0)) .build(); when(state.nodes()).thenReturn(nodes); diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmTests.java index 04e0afcf88293..39d518a73f3b4 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authc/esnative/ReservedRealmTests.java @@ -444,23 +444,15 @@ public static void mockGetAllReservedUserInfo(NativeUsersStore usersStore, Map versionPredicate) { - assertThat(versionPredicate.test(Version.V_5_0_0_rc1), is(false)); switch (principal) { case LogstashSystemUser.NAME: - assertThat(versionPredicate.test(Version.V_5_0_0), is(false)); - assertThat(versionPredicate.test(Version.V_5_1_1), is(false)); - assertThat(versionPredicate.test(Version.V_5_2_0), is(true)); assertThat(versionPredicate.test(Version.V_6_3_0), is(true)); break; case BeatsSystemUser.NAME: - assertThat(versionPredicate.test(Version.V_5_6_9), is(false)); assertThat(versionPredicate.test(Version.V_6_2_3), is(false)); assertThat(versionPredicate.test(Version.V_6_3_0), is(true)); break; default: - assertThat(versionPredicate.test(Version.V_5_0_0), is(true)); - assertThat(versionPredicate.test(Version.V_5_1_1), is(true)); - assertThat(versionPredicate.test(Version.V_5_2_0), is(true)); assertThat(versionPredicate.test(Version.V_6_3_0), is(true)); break; } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/accesscontrol/IndicesPermissionTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/accesscontrol/IndicesPermissionTests.java index 825ce4ee44c60..34a0685c2fd21 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/accesscontrol/IndicesPermissionTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/accesscontrol/IndicesPermissionTests.java @@ -195,7 +195,7 @@ public void testIndicesPrivilegesStreaming() throws IOException { assertEquals(readIndicesPrivileges, indicesPrivileges.build()); out = new BytesStreamOutput(); - out.setVersion(Version.V_5_0_0); + out.setVersion(Version.V_6_0_0); indicesPrivileges = RoleDescriptor.IndicesPrivileges.builder(); indicesPrivileges.grantedFields(allowed); indicesPrivileges.deniedFields(denied); @@ -205,7 +205,7 @@ public void testIndicesPrivilegesStreaming() throws 
IOException { indicesPrivileges.build().writeTo(out); out.close(); in = out.bytes().streamInput(); - in.setVersion(Version.V_5_0_0); + in.setVersion(Version.V_6_0_0); RoleDescriptor.IndicesPrivileges readIndicesPrivileges2 = RoleDescriptor.IndicesPrivileges.createFrom(in); assertEquals(readIndicesPrivileges, readIndicesPrivileges2); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerTests.java index 7d10198c6aea8..c3a6d7e920d1a 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/support/SecurityIndexManagerTests.java @@ -347,10 +347,10 @@ public void testIndexTemplateVersionMatching() throws Exception { assertTrue(SecurityIndexManager.checkTemplateExistsAndVersionMatches( SecurityIndexManager.SECURITY_TEMPLATE_NAME, clusterState, logger, - Version.V_5_0_0::before)); + Version.V_6_0_0::before)); assertFalse(SecurityIndexManager.checkTemplateExistsAndVersionMatches( SecurityIndexManager.SECURITY_TEMPLATE_NAME, clusterState, logger, - Version.V_5_0_0::after)); + Version.V_6_0_0::after)); } public void testUpToDateMappingsAreIdentifiedAsUpToDate() throws IOException { @@ -448,4 +448,4 @@ private static IndexTemplateMetaData.Builder getIndexTemplateMetaData(String tem } return templateBuilder; } -} \ No newline at end of file +} diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ServerTransportFilterTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ServerTransportFilterTests.java index 08a991eb3ec29..bf8d8042546fd 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ServerTransportFilterTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/transport/ServerTransportFilterTests.java @@ -27,7 +27,6 @@ import org.elasticsearch.xpack.core.security.authc.Authentication.RealmRef; import org.elasticsearch.xpack.core.security.authz.permission.Role; import org.elasticsearch.xpack.core.security.authz.store.ReservedRolesStore; -import org.elasticsearch.xpack.core.security.user.KibanaUser; import org.elasticsearch.xpack.core.security.user.SystemUser; import org.elasticsearch.protocol.xpack.security.User; import org.elasticsearch.xpack.core.security.user.XPackUser; @@ -37,12 +36,10 @@ import java.io.IOException; import java.util.Collections; -import java.util.concurrent.atomic.AtomicReference; import static org.elasticsearch.mock.orig.Mockito.times; import static org.elasticsearch.xpack.core.security.support.Exceptions.authenticationError; import static org.elasticsearch.xpack.core.security.support.Exceptions.authorizationError; -import static org.hamcrest.Matchers.arrayContaining; import static org.hamcrest.Matchers.equalTo; import static org.mockito.Matchers.any; import static org.mockito.Matchers.eq; @@ -220,47 +217,6 @@ public void testNodeProfileAllowsNodeActions() throws Exception { verifyNoMoreInteractions(authcService, authzService); } - public void testHandlesKibanaUserCompatibility() throws Exception { - TransportRequest request = mock(TransportRequest.class); - User user = new User("kibana", "kibana"); - Authentication authentication = mock(Authentication.class); - final Version version = 
Version.fromId(randomIntBetween(Version.V_5_0_0_ID, Version.V_5_2_0_ID - 100)); - when(authentication.getVersion()).thenReturn(version); - when(authentication.getUser()).thenReturn(user); - doAnswer((i) -> { - ActionListener callback = - (ActionListener) i.getArguments()[3]; - callback.onResponse(authentication); - return Void.TYPE; - }).when(authcService).authenticate(eq("_action"), eq(request), eq((User)null), any(ActionListener.class)); - AtomicReference rolesRef = new AtomicReference<>(); - final Role empty = Role.EMPTY; - doAnswer((i) -> { - ActionListener callback = - (ActionListener) i.getArguments()[1]; - rolesRef.set(((User) i.getArguments()[0]).roles()); - callback.onResponse(empty); - return Void.TYPE; - }).when(authzService).roles(any(User.class), any(ActionListener.class)); - ServerTransportFilter filter = getClientOrNodeFilter(); - PlainActionFuture future = new PlainActionFuture<>(); - when(channel.getVersion()).thenReturn(version); - filter.inbound("_action", request, channel, future); - assertNotNull(rolesRef.get()); - assertThat(rolesRef.get(), arrayContaining("kibana_system")); - - // test with a version that doesn't need changing - filter = getClientOrNodeFilter(); - rolesRef.set(null); - user = new KibanaUser(true); - when(authentication.getUser()).thenReturn(user); - when(authentication.getVersion()).thenReturn(Version.V_5_2_0); - future = new PlainActionFuture<>(); - filter.inbound("_action", request, channel, future); - assertNotNull(rolesRef.get()); - assertThat(rolesRef.get(), arrayContaining("kibana_system")); - } - private ServerTransportFilter getClientOrNodeFilter() throws IOException { return randomBoolean() ? getNodeFilter(true) : getClientFilter(true); } diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/user/UserSerializationTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/user/UserSerializationTests.java index 6bea620982fac..0d5941eaf2674 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/user/UserSerializationTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/user/UserSerializationTests.java @@ -5,9 +5,7 @@ */ package org.elasticsearch.xpack.security.user; -import org.elasticsearch.Version; import org.elasticsearch.common.io.stream.BytesStreamOutput; -import org.elasticsearch.common.io.stream.StreamInput; import org.elasticsearch.test.ESTestCase; import org.elasticsearch.xpack.core.security.user.ElasticUser; import org.elasticsearch.xpack.core.security.user.InternalUserSerializationHelper; @@ -60,46 +58,6 @@ public void testWriteToAndReadFromWithRunAs() throws Exception { assertThat(readFromAuthUser.authenticatedUser(), is(authUser)); } - public void testRunAsBackcompatRead() throws Exception { - User user = new User(randomAlphaOfLengthBetween(4, 30), - randomBoolean() ? 
generateRandomStringArray(20, 30, false) : null); - // store the runAs user as the "authenticationUser" here to mimic old format for writing - User authUser = new User(randomAlphaOfLengthBetween(4, 30), generateRandomStringArray(20, 30, false), user); - - BytesStreamOutput output = new BytesStreamOutput(); - User.writeTo(authUser, output); - StreamInput input = output.bytes().streamInput(); - input.setVersion(randomFrom(Version.V_5_0_0, Version.V_5_4_0)); - User readFrom = User.readFrom(input); - - assertThat(readFrom.principal(), is(user.principal())); - assertThat(Arrays.equals(readFrom.roles(), user.roles()), is(true)); - User readFromAuthUser = readFrom.authenticatedUser(); - assertThat(authUser, is(notNullValue())); - assertThat(readFromAuthUser.principal(), is(authUser.principal())); - assertThat(Arrays.equals(readFromAuthUser.roles(), authUser.roles()), is(true)); - } - - public void testRunAsBackcompatWrite() throws Exception { - User user = new User(randomAlphaOfLengthBetween(4, 30), - randomBoolean() ? generateRandomStringArray(20, 30, false) : null); - // store the runAs user as the "authenticationUser" here to mimic old format for writing - User authUser = new User(randomAlphaOfLengthBetween(4, 30), generateRandomStringArray(20, 30, false), user); - - BytesStreamOutput output = new BytesStreamOutput(); - output.setVersion(randomFrom(Version.V_5_0_0, Version.V_5_4_0)); - User.writeTo(authUser, output); - StreamInput input = output.bytes().streamInput(); - User readFrom = User.readFrom(input); - - assertThat(readFrom.principal(), is(user.principal())); - assertThat(Arrays.equals(readFrom.roles(), user.roles()), is(true)); - User readFromAuthUser = readFrom.authenticatedUser(); - assertThat(authUser, is(notNullValue())); - assertThat(readFromAuthUser.principal(), is(authUser.principal())); - assertThat(Arrays.equals(readFromAuthUser.roles(), authUser.roles()), is(true)); - } - public void testSystemUserReadAndWrite() throws Exception { BytesStreamOutput output = new BytesStreamOutput(); diff --git a/x-pack/plugin/upgrade/src/main/java/org/elasticsearch/xpack/upgrade/IndexUpgradeService.java b/x-pack/plugin/upgrade/src/main/java/org/elasticsearch/xpack/upgrade/IndexUpgradeService.java index 07017e6fc0014..ad0ebd6815f2d 100644 --- a/x-pack/plugin/upgrade/src/main/java/org/elasticsearch/xpack/upgrade/IndexUpgradeService.java +++ b/x-pack/plugin/upgrade/src/main/java/org/elasticsearch/xpack/upgrade/IndexUpgradeService.java @@ -79,7 +79,7 @@ private UpgradeActionRequired upgradeInfo(IndexMetaData indexMetaData, String in } } // Catch all check for all indices that didn't match the specific checks - if (indexMetaData.getCreationVersion().before(Version.V_5_0_0)) { + if (indexMetaData.getCreationVersion().before(Version.V_6_0_0)) { return UpgradeActionRequired.REINDEX; } else { return null; diff --git a/x-pack/plugin/upgrade/src/main/java/org/elasticsearch/xpack/upgrade/Upgrade.java b/x-pack/plugin/upgrade/src/main/java/org/elasticsearch/xpack/upgrade/Upgrade.java index 568397e37395a..e454ac4a0140b 100644 --- a/x-pack/plugin/upgrade/src/main/java/org/elasticsearch/xpack/upgrade/Upgrade.java +++ b/x-pack/plugin/upgrade/src/main/java/org/elasticsearch/xpack/upgrade/Upgrade.java @@ -44,7 +44,7 @@ public class Upgrade extends Plugin implements ActionPlugin { - public static final Version UPGRADE_INTRODUCED = Version.V_5_6_0; + public static final Version UPGRADE_INTRODUCED = Version.CURRENT.minimumCompatibilityVersion(); private final Settings settings; private final List> 
upgradeCheckFactories; diff --git a/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeServiceTests.java b/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeServiceTests.java index 5939777572b48..f980450c07f7c 100644 --- a/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeServiceTests.java +++ b/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/IndexUpgradeServiceTests.java @@ -166,7 +166,7 @@ public static IndexMetaData newTestIndexMeta(String name, String alias, Settings .put(IndexMetaData.SETTING_NUMBER_OF_SHARDS, 1) .put(IndexMetaData.SETTING_CREATION_DATE, 1) .put(IndexMetaData.SETTING_INDEX_UUID, UUIDs.randomBase64UUID()) - .put(IndexMetaData.SETTING_VERSION_UPGRADED, Version.V_5_0_0_beta1) + .put(IndexMetaData.SETTING_VERSION_UPGRADED, Version.V_6_0_0) .put(indexSettings) .build(); IndexMetaData.Builder builder = IndexMetaData.builder(name).settings(build); diff --git a/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/InternalIndexReindexerIT.java b/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/InternalIndexReindexerIT.java index cd83803d1884c..71e3348b058b6 100644 --- a/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/InternalIndexReindexerIT.java +++ b/x-pack/plugin/upgrade/src/test/java/org/elasticsearch/xpack/upgrade/InternalIndexReindexerIT.java @@ -206,9 +206,9 @@ private ClusterState withRandomOldNode() { DiscoveryNode node = discoveryNodes.get(nodeId); DiscoveryNode newNode = new DiscoveryNode(node.getName(), node.getId(), node.getEphemeralId(), node.getHostName(), node.getHostAddress(), node.getAddress(), node.getAttributes(), node.getRoles(), - randomVersionBetween(random(), Version.V_5_0_0, Version.V_5_4_0)); + randomVersionBetween(random(), Version.V_6_0_0, Version.V_6_4_0)); return ClusterState.builder(clusterState).nodes(DiscoveryNodes.builder(discoveryNodes).remove(node).add(newNode)).build(); } -} \ No newline at end of file +} diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/XPackInfoResponse.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/XPackInfoResponse.java index 3b9032f092185..1d3e51c11e027 100644 --- a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/XPackInfoResponse.java +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/XPackInfoResponse.java @@ -18,7 +18,6 @@ */ package org.elasticsearch.protocol.xpack; -import org.elasticsearch.Version; import org.elasticsearch.action.ActionResponse; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; @@ -412,8 +411,7 @@ public FeatureSet(String name, @Nullable String description, boolean available, } public FeatureSet(StreamInput in) throws IOException { - this(in.readString(), in.readOptionalString(), in.readBoolean(), in.readBoolean(), - in.getVersion().onOrAfter(Version.V_5_4_0) ? 
in.readMap() : null); + this(in.readString(), in.readOptionalString(), in.readBoolean(), in.readBoolean(), in.readMap()); } @Override @@ -422,9 +420,7 @@ public void writeTo(StreamOutput out) throws IOException { out.writeOptionalString(description); out.writeBoolean(available); out.writeBoolean(enabled); - if (out.getVersion().onOrAfter(Version.V_5_4_0)) { - out.writeMap(nativeCodeInfo); - } + out.writeMap(nativeCodeInfo); } public String name() { diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/process/ModelSnapshot.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/process/ModelSnapshot.java index 2b9957f9bc756..ea5f016993101 100644 --- a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/process/ModelSnapshot.java +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/ml/job/process/ModelSnapshot.java @@ -221,10 +221,8 @@ public boolean equals(Object other) { public static class Builder { private String jobId; - // Stored snapshot documents created prior to 6.3.0 will have no - // value for min_version. We default it to 5.5.0 as there were - // no model changes between 5.5.0 and 6.3.0. - private Version minVersion = Version.V_5_5_0; + // Stored snapshot documents created prior to 6.3.0 will have no value for min_version. + private Version minVersion = Version.V_6_3_0; private Date timestamp; private String description; diff --git a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/security/User.java b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/security/User.java index 42e957ecf2d51..e08289e98215c 100644 --- a/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/security/User.java +++ b/x-pack/protocol/src/main/java/org/elasticsearch/protocol/xpack/security/User.java @@ -19,7 +19,6 @@ package org.elasticsearch.protocol.xpack.security; -import org.elasticsearch.Version; import org.elasticsearch.common.Nullable; import org.elasticsearch.common.ParseField; import org.elasticsearch.common.Strings; @@ -199,12 +198,7 @@ public static User partialReadFrom(String username, StreamInput input) throws IO boolean hasInnerUser = input.readBoolean(); if (hasInnerUser) { User innerUser = readFrom(input); - if (input.getVersion().onOrBefore(Version.V_5_4_0)) { - // backcompat: runas user was read first, so reverse outer and inner - return new User(innerUser, outerUser); - } else { - return new User(outerUser, innerUser); - } + return new User(outerUser, innerUser); } else { return outerUser; } @@ -221,11 +215,6 @@ public static void writeTo(User user, StreamOutput output) throws IOException { if (user.authenticatedUser == null) { // no backcompat necessary, since there is no inner user writeUser(user, output); - } else if (output.getVersion().onOrBefore(Version.V_5_4_0)) { - // backcompat: write runas user as the "inner" user - writeUser(user.authenticatedUser, output); - output.writeBoolean(true); - writeUser(user, output); } else { writeUser(user, output); output.writeBoolean(true); From c5e5a97a343d970f796b5d5748408ff765127d8f Mon Sep 17 00:00:00 2001 From: Tanguy Leroux Date: Fri, 24 Aug 2018 10:55:23 +0200 Subject: [PATCH 2/6] Update Google Cloud Storage Library for Java (#32940) This commit updated the google-cloud-storage library from version 1.28.0 to version 1.40.0. 
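For reference, a minimal sketch of the upgraded dependency block in plugins/repository-gcs/build.gradle; the coordinates and versions are copied from the hunk further down, and the block is abbreviated to the main entries (the full list, including the transitive pins, is in the diff below):

    dependencies {
        compile 'com.google.cloud:google-cloud-storage:1.40.0'
        compile 'com.google.cloud:google-cloud-core:1.40.0'
        compile 'com.google.cloud:google-cloud-core-http:1.40.0'
        compile 'com.google.http-client:google-http-client:1.24.1'
        compile 'com.google.api:gax:1.30.0'
        compile 'com.google.api:gax-httpjson:0.47.0'
        // remaining pins (protobuf, gson, opencensus, grpc-context, jackson, ...)
        // follow the complete list in the build.gradle hunk below
    }
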
---
 plugins/repository-gcs/build.gradle | 200 ++-----------
 .../licenses/api-common-1.5.0.jar.sha1 | 1 -
 .../licenses/api-common-1.7.0.jar.sha1 | 1 +
 .../licenses/commons-codec-1.10.jar.sha1 | 1 +
 .../{old => }/commons-codec-LICENSE.txt | 0
 .../{old => }/commons-codec-NOTICE.txt | 0
 .../licenses/commons-logging-1.1.3.jar.sha1 | 1 +
 .../{old => }/commons-logging-LICENSE.txt | 0
 .../{old => }/commons-logging-NOTICE.txt | 0
 .../licenses/gax-1.25.0.jar.sha1 | 1 -
 .../licenses/gax-1.30.0.jar.sha1 | 1 +
 .../licenses/gax-httpjson-0.40.0.jar.sha1 | 1 -
 .../licenses/gax-httpjson-0.47.0.jar.sha1 | 1 +
 .../google-api-client-1.23.0.jar.sha1 | 1 -
 .../google-api-client-1.24.1.jar.sha1 | 1 +
 ...services-storage-v1-rev115-1.23.0.jar.sha1 | 1 -
 ...services-storage-v1-rev135-1.24.1.jar.sha1 | 1 +
 ...e-auth-library-credentials-0.10.0.jar.sha1 | 1 +
 ...le-auth-library-credentials-0.9.1.jar.sha1 | 1 -
 ...e-auth-library-oauth2-http-0.10.0.jar.sha1 | 1 +
 ...le-auth-library-oauth2-http-0.9.1.jar.sha1 | 1 -
 .../google-cloud-core-1.28.0.jar.sha1 | 1 -
 .../google-cloud-core-1.40.0.jar.sha1 | 1 +
 .../google-cloud-core-http-1.28.0.jar.sha1 | 1 -
 .../google-cloud-core-http-1.40.0.jar.sha1 | 1 +
 .../google-cloud-storage-1.28.0.jar.sha1 | 1 -
 .../google-cloud-storage-1.40.0.jar.sha1 | 1 +
 .../google-http-client-1.23.0.jar.sha1 | 1 -
 .../google-http-client-1.24.1.jar.sha1 | 1 +
 ...ogle-http-client-appengine-1.23.0.jar.sha1 | 1 -
 ...ogle-http-client-appengine-1.24.1.jar.sha1 | 1 +
 ...google-http-client-jackson-1.23.0.jar.sha1 | 1 -
 ...google-http-client-jackson-1.24.1.jar.sha1 | 1 +
 ...oogle-http-client-jackson2-1.23.0.jar.sha1 | 1 -
 ...oogle-http-client-jackson2-1.24.1.jar.sha1 | 1 +
 .../google-oauth-client-1.23.0.jar.sha1 | 1 -
 .../google-oauth-client-1.24.1.jar.sha1 | 1 +
 .../licenses/grpc-context-1.12.0.jar.sha1 | 1 +
 .../licenses/grpc-context-1.9.0.jar.sha1 | 1 -
 .../repository-gcs/licenses/gson-2.7.jar.sha1 | 1 +
 ...-core-asl-LICENSE.txt => gson-LICENSE.txt} | 0
 ...on-core-asl-NOTICE.txt => gson-NOTICE.txt} | 0
 .../licenses/httpclient-4.5.2.jar.sha1 | 1 +
 .../licenses/{old => }/httpclient-LICENSE.txt | 0
 .../licenses/{old => }/httpclient-NOTICE.txt | 0
 .../licenses/httpcore-4.4.5.jar.sha1 | 1 +
 .../repository-gcs/licenses/jackson-LICENSE | 8 +
 .../repository-gcs/licenses/jackson-NOTICE | 20 ++
 .../licenses/jackson-core-asl-1.9.11.jar.sha1 | 1 +
 .../licenses/jackson-core-asl-1.9.13.jar.sha1 | 1 -
 .../licenses/old/google-LICENSE.txt | 201 ---------------
 .../licenses/old/google-NOTICE.txt | 1 -
 .../licenses/old/httpcore-LICENSE.txt | 241 ------------------
 .../licenses/old/httpcore-NOTICE.txt | 8 -
 .../licenses/opencensus-api-0.11.1.jar.sha1 | 1 -
 .../licenses/opencensus-api-0.15.0.jar.sha1 | 1 +
 ...encensus-contrib-http-util-0.11.1.jar.sha1 | 1 -
 ...encensus-contrib-http-util-0.15.0.jar.sha1 | 1 +
 ...s-LICENSE.txt => proto-google-LICENSE.txt} | 0
 ...tos-NOTICE.txt => proto-google-NOTICE.txt} | 0
 ...proto-google-common-protos-1.12.0.jar.sha1 | 1 +
 .../proto-google-common-protos-1.8.0.jar.sha1 | 1 -
 .../proto-google-iam-v1-0.12.0.jar.sha1 | 1 +
 .../licenses/protobuf-LICENSE.txt | 32 +++
 .../licenses/protobuf-NOTICE.txt | 32 +++
 .../licenses/protobuf-java-3.6.0.jar.sha1 | 1 +
 .../protobuf-java-util-3.6.0.jar.sha1 | 1 +
 .../licenses/threetenbp-1.3.3.jar.sha1 | 1 +
 .../licenses/threetenbp-1.3.6.jar.sha1 | 1 -
 69 files changed, 167 insertions(+), 626 deletions(-)
delete mode 100644 plugins/repository-gcs/licenses/api-common-1.5.0.jar.sha1
create mode 100644 plugins/repository-gcs/licenses/api-common-1.7.0.jar.sha1
create mode 100644 plugins/repository-gcs/licenses/commons-codec-1.10.jar.sha1
rename plugins/repository-gcs/licenses/{old => }/commons-codec-LICENSE.txt (100%)
rename plugins/repository-gcs/licenses/{old => }/commons-codec-NOTICE.txt (100%)
create mode 100644 plugins/repository-gcs/licenses/commons-logging-1.1.3.jar.sha1
rename plugins/repository-gcs/licenses/{old => }/commons-logging-LICENSE.txt (100%)
rename plugins/repository-gcs/licenses/{old => }/commons-logging-NOTICE.txt (100%)
delete mode 100644 plugins/repository-gcs/licenses/gax-1.25.0.jar.sha1
create mode 100644 plugins/repository-gcs/licenses/gax-1.30.0.jar.sha1
delete mode 100644 plugins/repository-gcs/licenses/gax-httpjson-0.40.0.jar.sha1
create mode 100644 plugins/repository-gcs/licenses/gax-httpjson-0.47.0.jar.sha1
delete mode 100644 plugins/repository-gcs/licenses/google-api-client-1.23.0.jar.sha1
create mode 100644 plugins/repository-gcs/licenses/google-api-client-1.24.1.jar.sha1
delete mode 100644 plugins/repository-gcs/licenses/google-api-services-storage-v1-rev115-1.23.0.jar.sha1
create mode 100644 plugins/repository-gcs/licenses/google-api-services-storage-v1-rev135-1.24.1.jar.sha1
create mode 100644 plugins/repository-gcs/licenses/google-auth-library-credentials-0.10.0.jar.sha1
delete mode 100644 plugins/repository-gcs/licenses/google-auth-library-credentials-0.9.1.jar.sha1
create mode 100644 plugins/repository-gcs/licenses/google-auth-library-oauth2-http-0.10.0.jar.sha1
delete mode 100644 plugins/repository-gcs/licenses/google-auth-library-oauth2-http-0.9.1.jar.sha1
delete mode 100644 plugins/repository-gcs/licenses/google-cloud-core-1.28.0.jar.sha1
create mode 100644 plugins/repository-gcs/licenses/google-cloud-core-1.40.0.jar.sha1
delete mode 100644 plugins/repository-gcs/licenses/google-cloud-core-http-1.28.0.jar.sha1
create mode 100644 plugins/repository-gcs/licenses/google-cloud-core-http-1.40.0.jar.sha1
delete mode 100644 plugins/repository-gcs/licenses/google-cloud-storage-1.28.0.jar.sha1
create mode 100644 plugins/repository-gcs/licenses/google-cloud-storage-1.40.0.jar.sha1
delete mode 100644 plugins/repository-gcs/licenses/google-http-client-1.23.0.jar.sha1
create mode 100644 plugins/repository-gcs/licenses/google-http-client-1.24.1.jar.sha1
delete mode 100644 plugins/repository-gcs/licenses/google-http-client-appengine-1.23.0.jar.sha1
create mode 100644 plugins/repository-gcs/licenses/google-http-client-appengine-1.24.1.jar.sha1
delete mode 100644 plugins/repository-gcs/licenses/google-http-client-jackson-1.23.0.jar.sha1
create mode 100644 plugins/repository-gcs/licenses/google-http-client-jackson-1.24.1.jar.sha1
delete mode 100644 plugins/repository-gcs/licenses/google-http-client-jackson2-1.23.0.jar.sha1
create mode 100644 plugins/repository-gcs/licenses/google-http-client-jackson2-1.24.1.jar.sha1
delete mode 100644 plugins/repository-gcs/licenses/google-oauth-client-1.23.0.jar.sha1
create mode 100644 plugins/repository-gcs/licenses/google-oauth-client-1.24.1.jar.sha1
create mode 100644 plugins/repository-gcs/licenses/grpc-context-1.12.0.jar.sha1
delete mode 100644 plugins/repository-gcs/licenses/grpc-context-1.9.0.jar.sha1
create mode 100644 plugins/repository-gcs/licenses/gson-2.7.jar.sha1
rename plugins/repository-gcs/licenses/{jackson-core-asl-LICENSE.txt => gson-LICENSE.txt} (100%)
rename plugins/repository-gcs/licenses/{jackson-core-asl-NOTICE.txt => gson-NOTICE.txt} (100%)
create mode 100644 plugins/repository-gcs/licenses/httpclient-4.5.2.jar.sha1
rename plugins/repository-gcs/licenses/{old => }/httpclient-LICENSE.txt (100%)
rename plugins/repository-gcs/licenses/{old => }/httpclient-NOTICE.txt (100%)
create mode 100644 plugins/repository-gcs/licenses/httpcore-4.4.5.jar.sha1
create mode 100644 plugins/repository-gcs/licenses/jackson-LICENSE
create mode 100644 plugins/repository-gcs/licenses/jackson-NOTICE
create mode 100644 plugins/repository-gcs/licenses/jackson-core-asl-1.9.11.jar.sha1
delete mode 100644 plugins/repository-gcs/licenses/jackson-core-asl-1.9.13.jar.sha1
delete mode 100644 plugins/repository-gcs/licenses/old/google-LICENSE.txt
delete mode 100644 plugins/repository-gcs/licenses/old/google-NOTICE.txt
delete mode 100644 plugins/repository-gcs/licenses/old/httpcore-LICENSE.txt
delete mode 100644 plugins/repository-gcs/licenses/old/httpcore-NOTICE.txt
delete mode 100644 plugins/repository-gcs/licenses/opencensus-api-0.11.1.jar.sha1
create mode 100644 plugins/repository-gcs/licenses/opencensus-api-0.15.0.jar.sha1
delete mode 100644 plugins/repository-gcs/licenses/opencensus-contrib-http-util-0.11.1.jar.sha1
create mode 100644 plugins/repository-gcs/licenses/opencensus-contrib-http-util-0.15.0.jar.sha1
rename plugins/repository-gcs/licenses/{proto-google-common-protos-LICENSE.txt => proto-google-LICENSE.txt} (100%)
rename plugins/repository-gcs/licenses/{proto-google-common-protos-NOTICE.txt => proto-google-NOTICE.txt} (100%)
create mode 100644 plugins/repository-gcs/licenses/proto-google-common-protos-1.12.0.jar.sha1
delete mode 100644 plugins/repository-gcs/licenses/proto-google-common-protos-1.8.0.jar.sha1
create mode 100644 plugins/repository-gcs/licenses/proto-google-iam-v1-0.12.0.jar.sha1
create mode 100644 plugins/repository-gcs/licenses/protobuf-LICENSE.txt
create mode 100644 plugins/repository-gcs/licenses/protobuf-NOTICE.txt
create mode 100644 plugins/repository-gcs/licenses/protobuf-java-3.6.0.jar.sha1
create mode 100644 plugins/repository-gcs/licenses/protobuf-java-util-3.6.0.jar.sha1
create mode 100644 plugins/repository-gcs/licenses/threetenbp-1.3.3.jar.sha1
delete mode 100644 plugins/repository-gcs/licenses/threetenbp-1.3.6.jar.sha1

diff --git a/plugins/repository-gcs/build.gradle b/plugins/repository-gcs/build.gradle
index 07ef4b4be5e62..510c101379d2f 100644
--- a/plugins/repository-gcs/build.gradle
+++ b/plugins/repository-gcs/build.gradle
@@ -23,28 +23,38 @@ esplugin {
 }
 dependencies {
- compile 'com.google.cloud:google-cloud-storage:1.28.0'
- compile 'com.google.cloud:google-cloud-core:1.28.0'
- compile 'com.google.cloud:google-cloud-core-http:1.28.0'
- compile 'com.google.auth:google-auth-library-oauth2-http:0.9.1'
- compile 'com.google.auth:google-auth-library-credentials:0.9.1'
- compile 'com.google.oauth-client:google-oauth-client:1.23.0'
- compile 'com.google.http-client:google-http-client:1.23.0'
- compile 'com.google.http-client:google-http-client-jackson:1.23.0'
- compile 'com.google.http-client:google-http-client-jackson2:1.23.0'
- compile 'com.google.http-client:google-http-client-appengine:1.23.0'
- compile 'com.google.api-client:google-api-client:1.23.0'
- compile 'com.google.api:gax:1.25.0'
- compile 'com.google.api:gax-httpjson:0.40.0'
- compile 'com.google.api:api-common:1.5.0'
- compile 'com.google.api.grpc:proto-google-common-protos:1.8.0'
+ compile 'com.google.cloud:google-cloud-storage:1.40.0'
+ compile 'com.google.cloud:google-cloud-core:1.40.0'
  compile 'com.google.guava:guava:20.0'
- compile 'com.google.apis:google-api-services-storage:v1-rev115-1.23.0'
- compile 'org.codehaus.jackson:jackson-core-asl:1.9.13'
- compile 'io.grpc:grpc-context:1.9.0'
- compile 'io.opencensus:opencensus-api:0.11.1'
- compile 'io.opencensus:opencensus-contrib-http-util:0.11.1'
- compile 'org.threeten:threetenbp:1.3.6'
+ compile 'joda-time:joda-time:2.10'
+ compile 'com.google.http-client:google-http-client:1.24.1'
+ compile "org.apache.httpcomponents:httpclient:${versions.httpclient}"
+ compile "org.apache.httpcomponents:httpcore:${versions.httpcore}"
+ compile "commons-logging:commons-logging:${versions.commonslogging}"
+ compile "commons-codec:commons-codec:${versions.commonscodec}"
+ compile 'com.google.api:api-common:1.7.0'
+ compile 'com.google.api:gax:1.30.0'
+ compile 'org.threeten:threetenbp:1.3.3'
+ compile 'com.google.protobuf:protobuf-java-util:3.6.0'
+ compile 'com.google.protobuf:protobuf-java:3.6.0'
+ compile 'com.google.code.gson:gson:2.7'
+ compile 'com.google.api.grpc:proto-google-common-protos:1.12.0'
+ compile 'com.google.api.grpc:proto-google-iam-v1:0.12.0'
+ compile 'com.google.cloud:google-cloud-core-http:1.40.0'
+ compile 'com.google.auth:google-auth-library-credentials:0.10.0'
+ compile 'com.google.auth:google-auth-library-oauth2-http:0.10.0'
+ compile 'com.google.oauth-client:google-oauth-client:1.24.1'
+ compile 'com.google.api-client:google-api-client:1.24.1'
+ compile 'com.google.http-client:google-http-client-appengine:1.24.1'
+ compile 'com.google.http-client:google-http-client-jackson:1.24.1'
+ compile 'org.codehaus.jackson:jackson-core-asl:1.9.11'
+ compile 'com.google.http-client:google-http-client-jackson2:1.24.1'
+ compile "com.fasterxml.jackson.core:jackson-core:${versions.jackson}"
+ compile 'com.google.api:gax-httpjson:0.47.0'
+ compile 'io.opencensus:opencensus-api:0.15.0'
+ compile 'io.grpc:grpc-context:1.12.0'
+ compile 'io.opencensus:opencensus-contrib-http-util:0.15.0'
+ compile 'com.google.apis:google-api-services-storage:v1-rev135-1.24.1'
 }
 dependencyLicenses {
@@ -52,10 +62,18 @@ dependencyLicenses {
  mapping from: /google-auth-.*/, to: 'google-auth'
  mapping from: /google-http-.*/, to: 'google-http'
  mapping from: /opencensus.*/, to: 'opencensus'
+ mapping from: /jackson-.*/, to: 'jackson'
+ mapping from: /http.*/, to: 'httpclient'
+ mapping from: /protobuf.*/, to: 'protobuf'
+ mapping from: /proto-google.*/, to: 'proto-google'
 }
 thirdPartyAudit.excludes = [
  // uses internal java api: sun.misc.Unsafe
+ 'com.google.protobuf.UnsafeUtil',
+ 'com.google.protobuf.UnsafeUtil$1',
+ 'com.google.protobuf.UnsafeUtil$JvmMemoryAccessor',
+ 'com.google.protobuf.UnsafeUtil$MemoryAccessor',
  'com.google.common.cache.Striped64',
  'com.google.common.cache.Striped64$1',
  'com.google.common.cache.Striped64$Cell',
@@ -87,139 +105,13 @@ thirdPartyAudit.excludes = [
  'com.google.appengine.api.urlfetch.HTTPResponse',
  'com.google.appengine.api.urlfetch.URLFetchService',
  'com.google.appengine.api.urlfetch.URLFetchServiceFactory',
- 'com.google.gson.Gson',
- 'com.google.gson.GsonBuilder',
- 'com.google.gson.TypeAdapter',
- 'com.google.gson.stream.JsonReader',
- 'com.google.gson.stream.JsonWriter',
- 'com.google.iam.v1.Binding$Builder',
- 'com.google.iam.v1.Binding',
- 'com.google.iam.v1.Policy$Builder',
- 'com.google.iam.v1.Policy',
- 'com.google.protobuf.AbstractMessageLite$Builder',
- 'com.google.protobuf.AbstractParser',
- 'com.google.protobuf.Any$Builder',
- 'com.google.protobuf.Any',
- 'com.google.protobuf.AnyOrBuilder',
- 'com.google.protobuf.AnyProto',
- 'com.google.protobuf.Api$Builder',
- 'com.google.protobuf.Api',
- 'com.google.protobuf.ApiOrBuilder',
- 'com.google.protobuf.ApiProto',
- 'com.google.protobuf.ByteString',
- 'com.google.protobuf.CodedInputStream',
- 'com.google.protobuf.CodedOutputStream',
- 'com.google.protobuf.DescriptorProtos',
- 'com.google.protobuf.Descriptors$Descriptor',
- 'com.google.protobuf.Descriptors$EnumDescriptor',
- 'com.google.protobuf.Descriptors$EnumValueDescriptor',
- 'com.google.protobuf.Descriptors$FieldDescriptor',
- 'com.google.protobuf.Descriptors$FileDescriptor$InternalDescriptorAssigner',
- 'com.google.protobuf.Descriptors$FileDescriptor',
- 'com.google.protobuf.Descriptors$OneofDescriptor',
- 'com.google.protobuf.Duration$Builder',
- 'com.google.protobuf.Duration',
- 'com.google.protobuf.DurationOrBuilder',
- 'com.google.protobuf.DurationProto',
- 'com.google.protobuf.EmptyProto',
- 'com.google.protobuf.Enum$Builder',
- 'com.google.protobuf.Enum',
- 'com.google.protobuf.EnumOrBuilder',
- 'com.google.protobuf.ExtensionRegistry',
- 'com.google.protobuf.ExtensionRegistryLite',
- 'com.google.protobuf.FloatValue$Builder',
- 'com.google.protobuf.FloatValue',
- 'com.google.protobuf.FloatValueOrBuilder',
- 'com.google.protobuf.GeneratedMessage$GeneratedExtension',
- 'com.google.protobuf.GeneratedMessage',
- 'com.google.protobuf.GeneratedMessageV3$Builder',
- 'com.google.protobuf.GeneratedMessageV3$BuilderParent',
- 'com.google.protobuf.GeneratedMessageV3$FieldAccessorTable',
- 'com.google.protobuf.GeneratedMessageV3',
- 'com.google.protobuf.Internal$EnumLite',
- 'com.google.protobuf.Internal$EnumLiteMap',
- 'com.google.protobuf.Internal',
- 'com.google.protobuf.InvalidProtocolBufferException',
- 'com.google.protobuf.LazyStringArrayList',
- 'com.google.protobuf.LazyStringList',
- 'com.google.protobuf.MapEntry$Builder',
- 'com.google.protobuf.MapEntry',
- 'com.google.protobuf.MapField',
- 'com.google.protobuf.Message',
- 'com.google.protobuf.MessageOrBuilder',
- 'com.google.protobuf.Parser',
- 'com.google.protobuf.ProtocolMessageEnum',
- 'com.google.protobuf.ProtocolStringList',
- 'com.google.protobuf.RepeatedFieldBuilderV3',
- 'com.google.protobuf.SingleFieldBuilderV3',
- 'com.google.protobuf.Struct$Builder',
- 'com.google.protobuf.Struct',
- 'com.google.protobuf.StructOrBuilder',
- 'com.google.protobuf.StructProto',
- 'com.google.protobuf.Timestamp$Builder',
- 'com.google.protobuf.Timestamp',
- 'com.google.protobuf.TimestampProto',
- 'com.google.protobuf.Type$Builder',
- 'com.google.protobuf.Type',
- 'com.google.protobuf.TypeOrBuilder',
- 'com.google.protobuf.TypeProto',
- 'com.google.protobuf.UInt32Value$Builder',
- 'com.google.protobuf.UInt32Value',
- 'com.google.protobuf.UInt32ValueOrBuilder',
- 'com.google.protobuf.UnknownFieldSet$Builder',
- 'com.google.protobuf.UnknownFieldSet',
- 'com.google.protobuf.WireFormat$FieldType',
- 'com.google.protobuf.WrappersProto',
- 'com.google.protobuf.util.Timestamps',
- 'org.apache.http.ConnectionReuseStrategy',
- 'org.apache.http.Header',
- 'org.apache.http.HttpEntity',
- 'org.apache.http.HttpEntityEnclosingRequest',
- 'org.apache.http.HttpHost',
- 'org.apache.http.HttpRequest',
- 'org.apache.http.HttpResponse',
- 'org.apache.http.HttpVersion',
- 'org.apache.http.RequestLine',
- 'org.apache.http.StatusLine',
- 'org.apache.http.client.AuthenticationHandler',
- 'org.apache.http.client.HttpClient',
- 'org.apache.http.client.HttpRequestRetryHandler',
- 'org.apache.http.client.RedirectHandler',
- 'org.apache.http.client.RequestDirector',
- 'org.apache.http.client.UserTokenHandler',
- 'org.apache.http.client.methods.HttpDelete',
- 'org.apache.http.client.methods.HttpEntityEnclosingRequestBase',
- 'org.apache.http.client.methods.HttpGet',
- 'org.apache.http.client.methods.HttpHead',
- 'org.apache.http.client.methods.HttpOptions',
- 'org.apache.http.client.methods.HttpPost',
- 'org.apache.http.client.methods.HttpPut',
- 'org.apache.http.client.methods.HttpRequestBase',
- 'org.apache.http.client.methods.HttpTrace',
- 'org.apache.http.conn.ClientConnectionManager',
- 'org.apache.http.conn.ConnectionKeepAliveStrategy',
- 'org.apache.http.conn.params.ConnManagerParams',
- 'org.apache.http.conn.params.ConnPerRouteBean',
- 'org.apache.http.conn.params.ConnRouteParams',
- 'org.apache.http.conn.routing.HttpRoutePlanner',
- 'org.apache.http.conn.scheme.PlainSocketFactory',
- 'org.apache.http.conn.scheme.Scheme',
- 'org.apache.http.conn.scheme.SchemeRegistry',
- 'org.apache.http.conn.ssl.SSLSocketFactory',
- 'org.apache.http.conn.ssl.X509HostnameVerifier',
- 'org.apache.http.entity.AbstractHttpEntity',
- 'org.apache.http.impl.client.DefaultHttpClient',
- 'org.apache.http.impl.client.DefaultHttpRequestRetryHandler',
- 'org.apache.http.impl.conn.ProxySelectorRoutePlanner',
- 'org.apache.http.impl.conn.tsccm.ThreadSafeClientConnManager',
- 'org.apache.http.message.BasicHttpResponse',
- 'org.apache.http.params.BasicHttpParams',
- 'org.apache.http.params.HttpConnectionParams',
- 'org.apache.http.params.HttpParams',
- 'org.apache.http.params.HttpProtocolParams',
- 'org.apache.http.protocol.HttpContext',
- 'org.apache.http.protocol.HttpProcessor',
- 'org.apache.http.protocol.HttpRequestExecutor'
+ // commons-logging optional dependencies
+ 'org.apache.avalon.framework.logger.Logger',
+ 'org.apache.log.Hierarchy',
+ 'org.apache.log.Logger',
+ // commons-logging provided dependencies
+ 'javax.servlet.ServletContextEvent',
+ 'javax.servlet.ServletContextListener'
 ]
 check {
diff --git a/plugins/repository-gcs/licenses/api-common-1.5.0.jar.sha1 b/plugins/repository-gcs/licenses/api-common-1.5.0.jar.sha1
deleted file mode 100644
index 64435356e5eaf..0000000000000
--- a/plugins/repository-gcs/licenses/api-common-1.5.0.jar.sha1
+++ /dev/null
@@ -1 +0,0 @@
-7e537338d40a57ad469239acb6d828fa544fb52b
\ No newline at end of file
diff --git a/plugins/repository-gcs/licenses/api-common-1.7.0.jar.sha1 b/plugins/repository-gcs/licenses/api-common-1.7.0.jar.sha1
new file mode 100644
index 0000000000000..67291b658e5c5
--- /dev/null
+++ b/plugins/repository-gcs/licenses/api-common-1.7.0.jar.sha1
@@ -0,0 +1 @@
+ea59fb8b2450999345035dec8a6f472543391766
\ No newline at end of file
diff --git a/plugins/repository-gcs/licenses/commons-codec-1.10.jar.sha1 b/plugins/repository-gcs/licenses/commons-codec-1.10.jar.sha1
new file mode 100644
index 0000000000000..3fe8682a1b0f9
--- /dev/null
+++ b/plugins/repository-gcs/licenses/commons-codec-1.10.jar.sha1
@@ -0,0 +1 @@
+4b95f4897fa13f2cd904aee711aeafc0c5295cd8
\ No newline at end of file
diff --git a/plugins/repository-gcs/licenses/old/commons-codec-LICENSE.txt b/plugins/repository-gcs/licenses/commons-codec-LICENSE.txt
similarity index 100%
rename from plugins/repository-gcs/licenses/old/commons-codec-LICENSE.txt
rename to plugins/repository-gcs/licenses/commons-codec-LICENSE.txt
diff --git a/plugins/repository-gcs/licenses/old/commons-codec-NOTICE.txt b/plugins/repository-gcs/licenses/commons-codec-NOTICE.txt
similarity index 100%
rename from plugins/repository-gcs/licenses/old/commons-codec-NOTICE.txt
rename to plugins/repository-gcs/licenses/commons-codec-NOTICE.txt
diff --git a/plugins/repository-gcs/licenses/commons-logging-1.1.3.jar.sha1
b/plugins/repository-gcs/licenses/commons-logging-1.1.3.jar.sha1 new file mode 100644 index 0000000000000..5b8f029e58293 --- /dev/null +++ b/plugins/repository-gcs/licenses/commons-logging-1.1.3.jar.sha1 @@ -0,0 +1 @@ +f6f66e966c70a83ffbdb6f17a0919eaf7c8aca7f \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/old/commons-logging-LICENSE.txt b/plugins/repository-gcs/licenses/commons-logging-LICENSE.txt similarity index 100% rename from plugins/repository-gcs/licenses/old/commons-logging-LICENSE.txt rename to plugins/repository-gcs/licenses/commons-logging-LICENSE.txt diff --git a/plugins/repository-gcs/licenses/old/commons-logging-NOTICE.txt b/plugins/repository-gcs/licenses/commons-logging-NOTICE.txt similarity index 100% rename from plugins/repository-gcs/licenses/old/commons-logging-NOTICE.txt rename to plugins/repository-gcs/licenses/commons-logging-NOTICE.txt diff --git a/plugins/repository-gcs/licenses/gax-1.25.0.jar.sha1 b/plugins/repository-gcs/licenses/gax-1.25.0.jar.sha1 deleted file mode 100644 index 594177047c140..0000000000000 --- a/plugins/repository-gcs/licenses/gax-1.25.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -36ab73c0b5d4a67447eb89a3174cc76ced150bd1 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/gax-1.30.0.jar.sha1 b/plugins/repository-gcs/licenses/gax-1.30.0.jar.sha1 new file mode 100644 index 0000000000000..d6d2bb20ed840 --- /dev/null +++ b/plugins/repository-gcs/licenses/gax-1.30.0.jar.sha1 @@ -0,0 +1 @@ +58fa2feb11b092be0a6ebe705a28736f12374230 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/gax-httpjson-0.40.0.jar.sha1 b/plugins/repository-gcs/licenses/gax-httpjson-0.40.0.jar.sha1 deleted file mode 100644 index c251ea1dd956c..0000000000000 --- a/plugins/repository-gcs/licenses/gax-httpjson-0.40.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -cb4bafbfd45b9d24efbb6138a31e37918fac015f \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/gax-httpjson-0.47.0.jar.sha1 b/plugins/repository-gcs/licenses/gax-httpjson-0.47.0.jar.sha1 new file mode 100644 index 0000000000000..fdc722d1520d6 --- /dev/null +++ b/plugins/repository-gcs/licenses/gax-httpjson-0.47.0.jar.sha1 @@ -0,0 +1 @@ +d096f3142eb3adbf877588d1044895d148d9efcb \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-api-client-1.23.0.jar.sha1 b/plugins/repository-gcs/licenses/google-api-client-1.23.0.jar.sha1 deleted file mode 100644 index 0c35d8e08b91f..0000000000000 --- a/plugins/repository-gcs/licenses/google-api-client-1.23.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -522ea860eb48dee71dfe2c61a1fd09663539f556 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-api-client-1.24.1.jar.sha1 b/plugins/repository-gcs/licenses/google-api-client-1.24.1.jar.sha1 new file mode 100644 index 0000000000000..27dafe58a0182 --- /dev/null +++ b/plugins/repository-gcs/licenses/google-api-client-1.24.1.jar.sha1 @@ -0,0 +1 @@ +37de23fb9b8b077de4ecec3192d98e752b0e5d72 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-api-services-storage-v1-rev115-1.23.0.jar.sha1 b/plugins/repository-gcs/licenses/google-api-services-storage-v1-rev115-1.23.0.jar.sha1 deleted file mode 100644 index 9f6f77ada3a69..0000000000000 --- a/plugins/repository-gcs/licenses/google-api-services-storage-v1-rev115-1.23.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -ba4fb6c5dc8d5ad94dedd9927ceee10a31a59abd \ No newline at end of file diff --git 
a/plugins/repository-gcs/licenses/google-api-services-storage-v1-rev135-1.24.1.jar.sha1 b/plugins/repository-gcs/licenses/google-api-services-storage-v1-rev135-1.24.1.jar.sha1 new file mode 100644 index 0000000000000..e3042ee6ea07e --- /dev/null +++ b/plugins/repository-gcs/licenses/google-api-services-storage-v1-rev135-1.24.1.jar.sha1 @@ -0,0 +1 @@ +28d3d391dfc7e7e7951760708ad2f48cecacf38f \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-auth-library-credentials-0.10.0.jar.sha1 b/plugins/repository-gcs/licenses/google-auth-library-credentials-0.10.0.jar.sha1 new file mode 100644 index 0000000000000..c8258d69326b8 --- /dev/null +++ b/plugins/repository-gcs/licenses/google-auth-library-credentials-0.10.0.jar.sha1 @@ -0,0 +1 @@ +f981288bd84fe6d140ed70d1d8dbe994a64fa3cc \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-auth-library-credentials-0.9.1.jar.sha1 b/plugins/repository-gcs/licenses/google-auth-library-credentials-0.9.1.jar.sha1 deleted file mode 100644 index 0922a53d2e356..0000000000000 --- a/plugins/repository-gcs/licenses/google-auth-library-credentials-0.9.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -25e0f45f3b3d1b4fccc8944845e51a7a4f359652 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-auth-library-oauth2-http-0.10.0.jar.sha1 b/plugins/repository-gcs/licenses/google-auth-library-oauth2-http-0.10.0.jar.sha1 new file mode 100644 index 0000000000000..f55ef7c9c2150 --- /dev/null +++ b/plugins/repository-gcs/licenses/google-auth-library-oauth2-http-0.10.0.jar.sha1 @@ -0,0 +1 @@ +c079a62086121973a23d90f54e2b8c13050fa39d \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-auth-library-oauth2-http-0.9.1.jar.sha1 b/plugins/repository-gcs/licenses/google-auth-library-oauth2-http-0.9.1.jar.sha1 deleted file mode 100644 index 100a44c187218..0000000000000 --- a/plugins/repository-gcs/licenses/google-auth-library-oauth2-http-0.9.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c0fe3a39b0f28d59de1986b3c50f018cd7cb9ec2 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-cloud-core-1.28.0.jar.sha1 b/plugins/repository-gcs/licenses/google-cloud-core-1.28.0.jar.sha1 deleted file mode 100644 index 071533f227839..0000000000000 --- a/plugins/repository-gcs/licenses/google-cloud-core-1.28.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -c0e88c78ce17c92d76bf46345faf3fa68833b216 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-cloud-core-1.40.0.jar.sha1 b/plugins/repository-gcs/licenses/google-cloud-core-1.40.0.jar.sha1 new file mode 100644 index 0000000000000..7562ead12e9f9 --- /dev/null +++ b/plugins/repository-gcs/licenses/google-cloud-core-1.40.0.jar.sha1 @@ -0,0 +1 @@ +4985701f989030e262cf8f4e38cc954115f5b082 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-cloud-core-http-1.28.0.jar.sha1 b/plugins/repository-gcs/licenses/google-cloud-core-http-1.28.0.jar.sha1 deleted file mode 100644 index fed3fc257c32c..0000000000000 --- a/plugins/repository-gcs/licenses/google-cloud-core-http-1.28.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -7b4559a9513abd98da50958c56a10f8ae00cb0f7 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-cloud-core-http-1.40.0.jar.sha1 b/plugins/repository-gcs/licenses/google-cloud-core-http-1.40.0.jar.sha1 new file mode 100644 index 0000000000000..2761bfdc745c6 --- /dev/null +++ b/plugins/repository-gcs/licenses/google-cloud-core-http-1.40.0.jar.sha1 @@ -0,0 +1 @@ 
+67f5806beda32894f1e6c9527925b64199fd2e4f \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-cloud-storage-1.28.0.jar.sha1 b/plugins/repository-gcs/licenses/google-cloud-storage-1.28.0.jar.sha1 deleted file mode 100644 index f49152ea05646..0000000000000 --- a/plugins/repository-gcs/licenses/google-cloud-storage-1.28.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -226019ae816b42c59f1b06999aeeb73722b87200 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-cloud-storage-1.40.0.jar.sha1 b/plugins/repository-gcs/licenses/google-cloud-storage-1.40.0.jar.sha1 new file mode 100644 index 0000000000000..33e83b73712f7 --- /dev/null +++ b/plugins/repository-gcs/licenses/google-cloud-storage-1.40.0.jar.sha1 @@ -0,0 +1 @@ +fabefef46f07d1e334123f0de17702708b4dfbd1 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-http-client-1.23.0.jar.sha1 b/plugins/repository-gcs/licenses/google-http-client-1.23.0.jar.sha1 deleted file mode 100644 index 5526275d5a15f..0000000000000 --- a/plugins/repository-gcs/licenses/google-http-client-1.23.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -8e86c84ff3c98eca6423e97780325b299133d858 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-http-client-1.24.1.jar.sha1 b/plugins/repository-gcs/licenses/google-http-client-1.24.1.jar.sha1 new file mode 100644 index 0000000000000..46b99f23e470a --- /dev/null +++ b/plugins/repository-gcs/licenses/google-http-client-1.24.1.jar.sha1 @@ -0,0 +1 @@ +396eac8d3fb1332675f82b208f48a469d64f3b4a \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-http-client-appengine-1.23.0.jar.sha1 b/plugins/repository-gcs/licenses/google-http-client-appengine-1.23.0.jar.sha1 deleted file mode 100644 index 823c3a85089a5..0000000000000 --- a/plugins/repository-gcs/licenses/google-http-client-appengine-1.23.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -0eda0d0f758c1cc525866e52e1226c4eb579d130 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-http-client-appengine-1.24.1.jar.sha1 b/plugins/repository-gcs/licenses/google-http-client-appengine-1.24.1.jar.sha1 new file mode 100644 index 0000000000000..e39f63fe33ae3 --- /dev/null +++ b/plugins/repository-gcs/licenses/google-http-client-appengine-1.24.1.jar.sha1 @@ -0,0 +1 @@ +8535031ae10bf6a196e68f25e10c0d6382699cb6 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-http-client-jackson-1.23.0.jar.sha1 b/plugins/repository-gcs/licenses/google-http-client-jackson-1.23.0.jar.sha1 deleted file mode 100644 index 85ba0ab798d05..0000000000000 --- a/plugins/repository-gcs/licenses/google-http-client-jackson-1.23.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -a72ea3a197937ef63a893e73df312dac0d813663 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-http-client-jackson-1.24.1.jar.sha1 b/plugins/repository-gcs/licenses/google-http-client-jackson-1.24.1.jar.sha1 new file mode 100644 index 0000000000000..f6b9694abaa6c --- /dev/null +++ b/plugins/repository-gcs/licenses/google-http-client-jackson-1.24.1.jar.sha1 @@ -0,0 +1 @@ +02c88e77c14effdda76f02a0eac968de74e0bd4e \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-http-client-jackson2-1.23.0.jar.sha1 b/plugins/repository-gcs/licenses/google-http-client-jackson2-1.23.0.jar.sha1 deleted file mode 100644 index 510856a517f04..0000000000000 --- a/plugins/repository-gcs/licenses/google-http-client-jackson2-1.23.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ 
-fd6761f4046a8cb0455e6fa5f58e12b061e9826e \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-http-client-jackson2-1.24.1.jar.sha1 b/plugins/repository-gcs/licenses/google-http-client-jackson2-1.24.1.jar.sha1 new file mode 100644 index 0000000000000..634b7d9198c8e --- /dev/null +++ b/plugins/repository-gcs/licenses/google-http-client-jackson2-1.24.1.jar.sha1 @@ -0,0 +1 @@ +2ad1dffd8a450055e68d8004fe003033b751d761 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-oauth-client-1.23.0.jar.sha1 b/plugins/repository-gcs/licenses/google-oauth-client-1.23.0.jar.sha1 deleted file mode 100644 index 036812b88b5e0..0000000000000 --- a/plugins/repository-gcs/licenses/google-oauth-client-1.23.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -e57ea1e2220bda5a2bd24ff17860212861f3c5cf \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/google-oauth-client-1.24.1.jar.sha1 b/plugins/repository-gcs/licenses/google-oauth-client-1.24.1.jar.sha1 new file mode 100644 index 0000000000000..2d89939674a51 --- /dev/null +++ b/plugins/repository-gcs/licenses/google-oauth-client-1.24.1.jar.sha1 @@ -0,0 +1 @@ +7b0e0218b96808868c23a7d0b40566a713931d9f \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/grpc-context-1.12.0.jar.sha1 b/plugins/repository-gcs/licenses/grpc-context-1.12.0.jar.sha1 new file mode 100644 index 0000000000000..57f37a81c960f --- /dev/null +++ b/plugins/repository-gcs/licenses/grpc-context-1.12.0.jar.sha1 @@ -0,0 +1 @@ +5b63a170b786051a42cce08118d5ea3c8f60f749 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/grpc-context-1.9.0.jar.sha1 b/plugins/repository-gcs/licenses/grpc-context-1.9.0.jar.sha1 deleted file mode 100644 index 02bac0e492074..0000000000000 --- a/plugins/repository-gcs/licenses/grpc-context-1.9.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -28b0836f48c9705abf73829bbc536dba29a1329a \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/gson-2.7.jar.sha1 b/plugins/repository-gcs/licenses/gson-2.7.jar.sha1 new file mode 100644 index 0000000000000..b3433f306eb3f --- /dev/null +++ b/plugins/repository-gcs/licenses/gson-2.7.jar.sha1 @@ -0,0 +1 @@ +751f548c85fa49f330cecbb1875893f971b33c4e \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/jackson-core-asl-LICENSE.txt b/plugins/repository-gcs/licenses/gson-LICENSE.txt similarity index 100% rename from plugins/repository-gcs/licenses/jackson-core-asl-LICENSE.txt rename to plugins/repository-gcs/licenses/gson-LICENSE.txt diff --git a/plugins/repository-gcs/licenses/jackson-core-asl-NOTICE.txt b/plugins/repository-gcs/licenses/gson-NOTICE.txt similarity index 100% rename from plugins/repository-gcs/licenses/jackson-core-asl-NOTICE.txt rename to plugins/repository-gcs/licenses/gson-NOTICE.txt diff --git a/plugins/repository-gcs/licenses/httpclient-4.5.2.jar.sha1 b/plugins/repository-gcs/licenses/httpclient-4.5.2.jar.sha1 new file mode 100644 index 0000000000000..6937112a09fb6 --- /dev/null +++ b/plugins/repository-gcs/licenses/httpclient-4.5.2.jar.sha1 @@ -0,0 +1 @@ +733db77aa8d9b2d68015189df76ab06304406e50 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/old/httpclient-LICENSE.txt b/plugins/repository-gcs/licenses/httpclient-LICENSE.txt similarity index 100% rename from plugins/repository-gcs/licenses/old/httpclient-LICENSE.txt rename to plugins/repository-gcs/licenses/httpclient-LICENSE.txt diff --git a/plugins/repository-gcs/licenses/old/httpclient-NOTICE.txt 
b/plugins/repository-gcs/licenses/httpclient-NOTICE.txt similarity index 100% rename from plugins/repository-gcs/licenses/old/httpclient-NOTICE.txt rename to plugins/repository-gcs/licenses/httpclient-NOTICE.txt diff --git a/plugins/repository-gcs/licenses/httpcore-4.4.5.jar.sha1 b/plugins/repository-gcs/licenses/httpcore-4.4.5.jar.sha1 new file mode 100644 index 0000000000000..581726601745b --- /dev/null +++ b/plugins/repository-gcs/licenses/httpcore-4.4.5.jar.sha1 @@ -0,0 +1 @@ +e7501a1b34325abb00d17dde96150604a0658b54 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/jackson-LICENSE b/plugins/repository-gcs/licenses/jackson-LICENSE new file mode 100644 index 0000000000000..f5f45d26a49d6 --- /dev/null +++ b/plugins/repository-gcs/licenses/jackson-LICENSE @@ -0,0 +1,8 @@ +This copy of Jackson JSON processor streaming parser/generator is licensed under the +Apache (Software) License, version 2.0 ("the License"). +See the License for details about distribution rights, and the +specific rights regarding derivate works. + +You may obtain a copy of the License at: + +http://www.apache.org/licenses/LICENSE-2.0 diff --git a/plugins/repository-gcs/licenses/jackson-NOTICE b/plugins/repository-gcs/licenses/jackson-NOTICE new file mode 100644 index 0000000000000..4c976b7b4cc58 --- /dev/null +++ b/plugins/repository-gcs/licenses/jackson-NOTICE @@ -0,0 +1,20 @@ +# Jackson JSON processor + +Jackson is a high-performance, Free/Open Source JSON processing library. +It was originally written by Tatu Saloranta (tatu.saloranta@iki.fi), and has +been in development since 2007. +It is currently developed by a community of developers, as well as supported +commercially by FasterXML.com. + +## Licensing + +Jackson core and extension components may licensed under different licenses. +To find the details that apply to this artifact see the accompanying LICENSE file. +For more information, including possible other licensing options, contact +FasterXML.com (http://fasterxml.com). + +## Credits + +A list of contributors may be found from CREDITS file, which is included +in some artifacts (usually source distributions); but is always available +from the source code management (SCM) system project uses. diff --git a/plugins/repository-gcs/licenses/jackson-core-asl-1.9.11.jar.sha1 b/plugins/repository-gcs/licenses/jackson-core-asl-1.9.11.jar.sha1 new file mode 100644 index 0000000000000..ed70030899aa0 --- /dev/null +++ b/plugins/repository-gcs/licenses/jackson-core-asl-1.9.11.jar.sha1 @@ -0,0 +1 @@ +e32303ef8bd18a5c9272780d49b81c95e05ddf43 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/jackson-core-asl-1.9.13.jar.sha1 b/plugins/repository-gcs/licenses/jackson-core-asl-1.9.13.jar.sha1 deleted file mode 100644 index c5016bf828d60..0000000000000 --- a/plugins/repository-gcs/licenses/jackson-core-asl-1.9.13.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -3c304d70f42f832e0a86d45bd437f692129299a4 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/old/google-LICENSE.txt b/plugins/repository-gcs/licenses/old/google-LICENSE.txt deleted file mode 100644 index 980a15ac24eeb..0000000000000 --- a/plugins/repository-gcs/licenses/old/google-LICENSE.txt +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. 
- - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. 
This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "{}" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright {yyyy} {name of copyright owner} - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. 
diff --git a/plugins/repository-gcs/licenses/old/google-NOTICE.txt b/plugins/repository-gcs/licenses/old/google-NOTICE.txt deleted file mode 100644 index 8d1c8b69c3fce..0000000000000 --- a/plugins/repository-gcs/licenses/old/google-NOTICE.txt +++ /dev/null @@ -1 +0,0 @@ - diff --git a/plugins/repository-gcs/licenses/old/httpcore-LICENSE.txt b/plugins/repository-gcs/licenses/old/httpcore-LICENSE.txt deleted file mode 100644 index 72819a9f06f2a..0000000000000 --- a/plugins/repository-gcs/licenses/old/httpcore-LICENSE.txt +++ /dev/null @@ -1,241 +0,0 @@ - - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. 
For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. 
The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. 
- - END OF TERMS AND CONDITIONS - -========================================================================= - -This project contains annotations in the package org.apache.http.annotation -which are derived from JCIP-ANNOTATIONS -Copyright (c) 2005 Brian Goetz and Tim Peierls. -See http://www.jcip.net and the Creative Commons Attribution License -(http://creativecommons.org/licenses/by/2.5) -Full text: http://creativecommons.org/licenses/by/2.5/legalcode - -License - -THE WORK (AS DEFINED BELOW) IS PROVIDED UNDER THE TERMS OF THIS CREATIVE COMMONS PUBLIC LICENSE ("CCPL" OR "LICENSE"). THE WORK IS PROTECTED BY COPYRIGHT AND/OR OTHER APPLICABLE LAW. ANY USE OF THE WORK OTHER THAN AS AUTHORIZED UNDER THIS LICENSE OR COPYRIGHT LAW IS PROHIBITED. - -BY EXERCISING ANY RIGHTS TO THE WORK PROVIDED HERE, YOU ACCEPT AND AGREE TO BE BOUND BY THE TERMS OF THIS LICENSE. THE LICENSOR GRANTS YOU THE RIGHTS CONTAINED HERE IN CONSIDERATION OF YOUR ACCEPTANCE OF SUCH TERMS AND CONDITIONS. - -1. Definitions - - "Collective Work" means a work, such as a periodical issue, anthology or encyclopedia, in which the Work in its entirety in unmodified form, along with a number of other contributions, constituting separate and independent works in themselves, are assembled into a collective whole. A work that constitutes a Collective Work will not be considered a Derivative Work (as defined below) for the purposes of this License. - "Derivative Work" means a work based upon the Work or upon the Work and other pre-existing works, such as a translation, musical arrangement, dramatization, fictionalization, motion picture version, sound recording, art reproduction, abridgment, condensation, or any other form in which the Work may be recast, transformed, or adapted, except that a work that constitutes a Collective Work will not be considered a Derivative Work for the purpose of this License. For the avoidance of doubt, where the Work is a musical composition or sound recording, the synchronization of the Work in timed-relation with a moving image ("synching") will be considered a Derivative Work for the purpose of this License. - "Licensor" means the individual or entity that offers the Work under the terms of this License. - "Original Author" means the individual or entity who created the Work. - "Work" means the copyrightable work of authorship offered under the terms of this License. - "You" means an individual or entity exercising rights under this License who has not previously violated the terms of this License with respect to the Work, or who has received express permission from the Licensor to exercise rights under this License despite a previous violation. - -2. Fair Use Rights. Nothing in this license is intended to reduce, limit, or restrict any rights arising from fair use, first sale or other limitations on the exclusive rights of the copyright owner under copyright law or other applicable laws. - -3. License Grant. 
Subject to the terms and conditions of this License, Licensor hereby grants You a worldwide, royalty-free, non-exclusive, perpetual (for the duration of the applicable copyright) license to exercise the rights in the Work as stated below: - - to reproduce the Work, to incorporate the Work into one or more Collective Works, and to reproduce the Work as incorporated in the Collective Works; - to create and reproduce Derivative Works; - to distribute copies or phonorecords of, display publicly, perform publicly, and perform publicly by means of a digital audio transmission the Work including as incorporated in Collective Works; - to distribute copies or phonorecords of, display publicly, perform publicly, and perform publicly by means of a digital audio transmission Derivative Works. - - For the avoidance of doubt, where the work is a musical composition: - Performance Royalties Under Blanket Licenses. Licensor waives the exclusive right to collect, whether individually or via a performance rights society (e.g. ASCAP, BMI, SESAC), royalties for the public performance or public digital performance (e.g. webcast) of the Work. - Mechanical Rights and Statutory Royalties. Licensor waives the exclusive right to collect, whether individually or via a music rights agency or designated agent (e.g. Harry Fox Agency), royalties for any phonorecord You create from the Work ("cover version") and distribute, subject to the compulsory license created by 17 USC Section 115 of the US Copyright Act (or the equivalent in other jurisdictions). - Webcasting Rights and Statutory Royalties. For the avoidance of doubt, where the Work is a sound recording, Licensor waives the exclusive right to collect, whether individually or via a performance-rights society (e.g. SoundExchange), royalties for the public digital performance (e.g. webcast) of the Work, subject to the compulsory license created by 17 USC Section 114 of the US Copyright Act (or the equivalent in other jurisdictions). - -The above rights may be exercised in all media and formats whether now known or hereafter devised. The above rights include the right to make such modifications as are technically necessary to exercise the rights in other media and formats. All rights not expressly granted by Licensor are hereby reserved. - -4. Restrictions.The license granted in Section 3 above is expressly made subject to and limited by the following restrictions: - - You may distribute, publicly display, publicly perform, or publicly digitally perform the Work only under the terms of this License, and You must include a copy of, or the Uniform Resource Identifier for, this License with every copy or phonorecord of the Work You distribute, publicly display, publicly perform, or publicly digitally perform. You may not offer or impose any terms on the Work that alter or restrict the terms of this License or the recipients' exercise of the rights granted hereunder. You may not sublicense the Work. You must keep intact all notices that refer to this License and to the disclaimer of warranties. You may not distribute, publicly display, publicly perform, or publicly digitally perform the Work with any technological measures that control access or use of the Work in a manner inconsistent with the terms of this License Agreement. The above applies to the Work as incorporated in a Collective Work, but this does not require the Collective Work apart from the Work itself to be made subject to the terms of this License. 
If You create a Collective Work, upon notice from any Licensor You must, to the extent practicable, remove from the Collective Work any credit as required by clause 4(b), as requested. If You create a Derivative Work, upon notice from any Licensor You must, to the extent practicable, remove from the Derivative Work any credit as required by clause 4(b), as requested. - If you distribute, publicly display, publicly perform, or publicly digitally perform the Work or any Derivative Works or Collective Works, You must keep intact all copyright notices for the Work and provide, reasonable to the medium or means You are utilizing: (i) the name of the Original Author (or pseudonym, if applicable) if supplied, and/or (ii) if the Original Author and/or Licensor designate another party or parties (e.g. a sponsor institute, publishing entity, journal) for attribution in Licensor's copyright notice, terms of service or by other reasonable means, the name of such party or parties; the title of the Work if supplied; to the extent reasonably practicable, the Uniform Resource Identifier, if any, that Licensor specifies to be associated with the Work, unless such URI does not refer to the copyright notice or licensing information for the Work; and in the case of a Derivative Work, a credit identifying the use of the Work in the Derivative Work (e.g., "French translation of the Work by Original Author," or "Screenplay based on original Work by Original Author"). Such credit may be implemented in any reasonable manner; provided, however, that in the case of a Derivative Work or Collective Work, at a minimum such credit will appear where any other comparable authorship credit appears and in a manner at least as prominent as such other comparable authorship credit. - -5. Representations, Warranties and Disclaimer - -UNLESS OTHERWISE MUTUALLY AGREED TO BY THE PARTIES IN WRITING, LICENSOR OFFERS THE WORK AS-IS AND MAKES NO REPRESENTATIONS OR WARRANTIES OF ANY KIND CONCERNING THE WORK, EXPRESS, IMPLIED, STATUTORY OR OTHERWISE, INCLUDING, WITHOUT LIMITATION, WARRANTIES OF TITLE, MERCHANTIBILITY, FITNESS FOR A PARTICULAR PURPOSE, NONINFRINGEMENT, OR THE ABSENCE OF LATENT OR OTHER DEFECTS, ACCURACY, OR THE PRESENCE OF ABSENCE OF ERRORS, WHETHER OR NOT DISCOVERABLE. SOME JURISDICTIONS DO NOT ALLOW THE EXCLUSION OF IMPLIED WARRANTIES, SO SUCH EXCLUSION MAY NOT APPLY TO YOU. - -6. Limitation on Liability. EXCEPT TO THE EXTENT REQUIRED BY APPLICABLE LAW, IN NO EVENT WILL LICENSOR BE LIABLE TO YOU ON ANY LEGAL THEORY FOR ANY SPECIAL, INCIDENTAL, CONSEQUENTIAL, PUNITIVE OR EXEMPLARY DAMAGES ARISING OUT OF THIS LICENSE OR THE USE OF THE WORK, EVEN IF LICENSOR HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. - -7. Termination - - This License and the rights granted hereunder will terminate automatically upon any breach by You of the terms of this License. Individuals or entities who have received Derivative Works or Collective Works from You under this License, however, will not have their licenses terminated provided such individuals or entities remain in full compliance with those licenses. Sections 1, 2, 5, 6, 7, and 8 will survive any termination of this License. - Subject to the above terms and conditions, the license granted here is perpetual (for the duration of the applicable copyright in the Work). 
Notwithstanding the above, Licensor reserves the right to release the Work under different license terms or to stop distributing the Work at any time; provided, however that any such election will not serve to withdraw this License (or any other license that has been, or is required to be, granted under the terms of this License), and this License will continue in full force and effect unless terminated as stated above. - -8. Miscellaneous - - Each time You distribute or publicly digitally perform the Work or a Collective Work, the Licensor offers to the recipient a license to the Work on the same terms and conditions as the license granted to You under this License. - Each time You distribute or publicly digitally perform a Derivative Work, Licensor offers to the recipient a license to the original Work on the same terms and conditions as the license granted to You under this License. - If any provision of this License is invalid or unenforceable under applicable law, it shall not affect the validity or enforceability of the remainder of the terms of this License, and without further action by the parties to this agreement, such provision shall be reformed to the minimum extent necessary to make such provision valid and enforceable. - No term or provision of this License shall be deemed waived and no breach consented to unless such waiver or consent shall be in writing and signed by the party to be charged with such waiver or consent. - This License constitutes the entire agreement between the parties with respect to the Work licensed here. There are no understandings, agreements or representations with respect to the Work not specified here. Licensor shall not be bound by any additional provisions that may appear in any communication from You. This License may not be modified without the mutual written agreement of the Licensor and You. diff --git a/plugins/repository-gcs/licenses/old/httpcore-NOTICE.txt b/plugins/repository-gcs/licenses/old/httpcore-NOTICE.txt deleted file mode 100644 index c0be50a505ec1..0000000000000 --- a/plugins/repository-gcs/licenses/old/httpcore-NOTICE.txt +++ /dev/null @@ -1,8 +0,0 @@ -Apache HttpComponents Core -Copyright 2005-2014 The Apache Software Foundation - -This product includes software developed at -The Apache Software Foundation (http://www.apache.org/). - -This project contains annotations derived from JCIP-ANNOTATIONS -Copyright (c) 2005 Brian Goetz and Tim Peierls. 
See http://www.jcip.net diff --git a/plugins/repository-gcs/licenses/opencensus-api-0.11.1.jar.sha1 b/plugins/repository-gcs/licenses/opencensus-api-0.11.1.jar.sha1 deleted file mode 100644 index 61d8e3b148144..0000000000000 --- a/plugins/repository-gcs/licenses/opencensus-api-0.11.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -54689fbf750a7f26e34fa1f1f96b883c53f51486 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/opencensus-api-0.15.0.jar.sha1 b/plugins/repository-gcs/licenses/opencensus-api-0.15.0.jar.sha1 new file mode 100644 index 0000000000000..e200e2e24a7df --- /dev/null +++ b/plugins/repository-gcs/licenses/opencensus-api-0.15.0.jar.sha1 @@ -0,0 +1 @@ +9a098392b287d7924660837f4eba0ce252013683 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/opencensus-contrib-http-util-0.11.1.jar.sha1 b/plugins/repository-gcs/licenses/opencensus-contrib-http-util-0.11.1.jar.sha1 deleted file mode 100644 index c0b04f0f8ccce..0000000000000 --- a/plugins/repository-gcs/licenses/opencensus-contrib-http-util-0.11.1.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -82e572b41e81ecf58d0d1e9a3953a05aa8f9c84b \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/opencensus-contrib-http-util-0.15.0.jar.sha1 b/plugins/repository-gcs/licenses/opencensus-contrib-http-util-0.15.0.jar.sha1 new file mode 100644 index 0000000000000..b642e1ebebd59 --- /dev/null +++ b/plugins/repository-gcs/licenses/opencensus-contrib-http-util-0.15.0.jar.sha1 @@ -0,0 +1 @@ +d88690591669d9b5ba6d91d9eac7736e58ccf3da \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/proto-google-common-protos-LICENSE.txt b/plugins/repository-gcs/licenses/proto-google-LICENSE.txt similarity index 100% rename from plugins/repository-gcs/licenses/proto-google-common-protos-LICENSE.txt rename to plugins/repository-gcs/licenses/proto-google-LICENSE.txt diff --git a/plugins/repository-gcs/licenses/proto-google-common-protos-NOTICE.txt b/plugins/repository-gcs/licenses/proto-google-NOTICE.txt similarity index 100% rename from plugins/repository-gcs/licenses/proto-google-common-protos-NOTICE.txt rename to plugins/repository-gcs/licenses/proto-google-NOTICE.txt diff --git a/plugins/repository-gcs/licenses/proto-google-common-protos-1.12.0.jar.sha1 b/plugins/repository-gcs/licenses/proto-google-common-protos-1.12.0.jar.sha1 new file mode 100644 index 0000000000000..47f3c178a68c6 --- /dev/null +++ b/plugins/repository-gcs/licenses/proto-google-common-protos-1.12.0.jar.sha1 @@ -0,0 +1 @@ +1140cc74df039deb044ed0e320035e674dc13062 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/proto-google-common-protos-1.8.0.jar.sha1 b/plugins/repository-gcs/licenses/proto-google-common-protos-1.8.0.jar.sha1 deleted file mode 100644 index 0a2dee4447e92..0000000000000 --- a/plugins/repository-gcs/licenses/proto-google-common-protos-1.8.0.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -b3282312ba82536fc9a7778cabfde149a875e877 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/proto-google-iam-v1-0.12.0.jar.sha1 b/plugins/repository-gcs/licenses/proto-google-iam-v1-0.12.0.jar.sha1 new file mode 100644 index 0000000000000..2bfae3456d499 --- /dev/null +++ b/plugins/repository-gcs/licenses/proto-google-iam-v1-0.12.0.jar.sha1 @@ -0,0 +1 @@ +ea312c0250a5d0a7cdd1b20bc2c3259938b79855 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/protobuf-LICENSE.txt b/plugins/repository-gcs/licenses/protobuf-LICENSE.txt new file mode 100644 index 0000000000000..19b305b00060a 
--- /dev/null +++ b/plugins/repository-gcs/licenses/protobuf-LICENSE.txt @@ -0,0 +1,32 @@ +Copyright 2008 Google Inc. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +Code generated by the Protocol Buffer compiler is owned by the owner +of the input file used when generating it. This code is not +standalone and requires a support library to be linked with it. This +support library is itself covered by the above license. diff --git a/plugins/repository-gcs/licenses/protobuf-NOTICE.txt b/plugins/repository-gcs/licenses/protobuf-NOTICE.txt new file mode 100644 index 0000000000000..19b305b00060a --- /dev/null +++ b/plugins/repository-gcs/licenses/protobuf-NOTICE.txt @@ -0,0 +1,32 @@ +Copyright 2008 Google Inc. All rights reserved. + +Redistribution and use in source and binary forms, with or without +modification, are permitted provided that the following conditions are +met: + + * Redistributions of source code must retain the above copyright +notice, this list of conditions and the following disclaimer. + * Redistributions in binary form must reproduce the above +copyright notice, this list of conditions and the following disclaimer +in the documentation and/or other materials provided with the +distribution. + * Neither the name of Google Inc. nor the names of its +contributors may be used to endorse or promote products derived from +this software without specific prior written permission. + +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT +OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +Code generated by the Protocol Buffer compiler is owned by the owner +of the input file used when generating it. This code is not +standalone and requires a support library to be linked with it. This +support library is itself covered by the above license. diff --git a/plugins/repository-gcs/licenses/protobuf-java-3.6.0.jar.sha1 b/plugins/repository-gcs/licenses/protobuf-java-3.6.0.jar.sha1 new file mode 100644 index 0000000000000..050ebd44c9282 --- /dev/null +++ b/plugins/repository-gcs/licenses/protobuf-java-3.6.0.jar.sha1 @@ -0,0 +1 @@ +5333f7e422744d76840c08a106e28e519fbe3acd \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/protobuf-java-util-3.6.0.jar.sha1 b/plugins/repository-gcs/licenses/protobuf-java-util-3.6.0.jar.sha1 new file mode 100644 index 0000000000000..cc85974499a65 --- /dev/null +++ b/plugins/repository-gcs/licenses/protobuf-java-util-3.6.0.jar.sha1 @@ -0,0 +1 @@ +3680d0042d4fe0b95ada844ff24da0698a7f0773 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/threetenbp-1.3.3.jar.sha1 b/plugins/repository-gcs/licenses/threetenbp-1.3.3.jar.sha1 new file mode 100644 index 0000000000000..9273043e14520 --- /dev/null +++ b/plugins/repository-gcs/licenses/threetenbp-1.3.3.jar.sha1 @@ -0,0 +1 @@ +3ea31c96676ff12ab56be0b1af6fff61d1a4f1f2 \ No newline at end of file diff --git a/plugins/repository-gcs/licenses/threetenbp-1.3.6.jar.sha1 b/plugins/repository-gcs/licenses/threetenbp-1.3.6.jar.sha1 deleted file mode 100644 index 65c16fed4a07b..0000000000000 --- a/plugins/repository-gcs/licenses/threetenbp-1.3.6.jar.sha1 +++ /dev/null @@ -1 +0,0 @@ -89dcc04a7e028c3c963413a71f950703cf51f057 \ No newline at end of file From 66e458b78b547d68ed74c4ddae4924b4fdb0d0b2 Mon Sep 17 00:00:00 2001 From: Andrei Stefan Date: Fri, 24 Aug 2018 12:36:23 +0300 Subject: [PATCH 3/6] Muted testEmptyAuthorizedIndicesSearchForAllDisallowNoIndices --- .../org/elasticsearch/xpack/security/authz/ReadActionsTests.java | 1 + 1 file changed, 1 insertion(+) diff --git a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/ReadActionsTests.java b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/ReadActionsTests.java index 76568d3d48b5a..a88dafece3251 100644 --- a/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/ReadActionsTests.java +++ b/x-pack/plugin/security/src/test/java/org/elasticsearch/xpack/security/authz/ReadActionsTests.java @@ -102,6 +102,7 @@ public void testEmptyAuthorizedIndicesSearchForAll() { assertNoSearchHits(client().prepareSearch().get()); } + @AwaitsFix(bugUrl="https://github.com/elastic/elasticsearch/issues/33123") public void testEmptyAuthorizedIndicesSearchForAllDisallowNoIndices() { createIndicesWithRandomAliases("index1", "index2"); IndexNotFoundException e = expectThrows(IndexNotFoundException.class, () -> client().prepareSearch() From 879a90b99922f29c8764ce5de7d4349647cd56b7 Mon Sep 17 00:00:00 2001 From: Tanguy Leroux Date: Fri, 24 Aug 2018 11:57:46 +0200 
Subject: [PATCH 4/6] [Rollup] Move getMetadata() methods out of rollup config objects (#32579) This commit removes the getMetadata() methods from the DateHistogramGroupConfig and HistogramGroupConfig objects. This way the configuration objects do not rely on RollupField.formatMetaField() anymore and do not expose a getMetadata() method that is tightly coupled to the rollup indexer. --- .../rollup/job/DateHistogramGroupConfig.java | 4 -- .../core/rollup/job/HistogramGroupConfig.java | 5 +- .../xpack/rollup/job/RollupIndexer.java | 21 ++++++-- .../xpack/rollup/job/RollupIndexerTests.java | 49 +++++++++++++++++++ 4 files changed, 70 insertions(+), 9 deletions(-) create mode 100644 x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerTests.java diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/DateHistogramGroupConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/DateHistogramGroupConfig.java index 77dfa1cbbb1c3..281277043c829 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/DateHistogramGroupConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/DateHistogramGroupConfig.java @@ -211,10 +211,6 @@ public Map toAggCap() { return map; } - public Map getMetadata() { - return Collections.singletonMap(RollupField.formatMetaField(RollupField.INTERVAL), interval.toString()); - } - public void validateMappings(Map> fieldCapsResponse, ActionRequestValidationException validationException) { diff --git a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/HistogramGroupConfig.java b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/HistogramGroupConfig.java index 0480050bf52f0..1e1f88a7c20e1 100644 --- a/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/HistogramGroupConfig.java +++ b/x-pack/plugin/core/src/main/java/org/elasticsearch/xpack/core/rollup/job/HistogramGroupConfig.java @@ -28,6 +28,7 @@ import java.util.List; import java.util.Map; import java.util.Objects; +import java.util.Set; import java.util.stream.Collectors; import static org.elasticsearch.common.xcontent.ConstructingObjectParser.constructorArg; @@ -115,8 +116,8 @@ public Map toAggCap() { return map; } - public Map getMetadata() { - return Collections.singletonMap(RollupField.formatMetaField(RollupField.INTERVAL), interval); + public Set getAllFields() { + return Arrays.stream(fields).collect(Collectors.toSet()); } public void validateMappings(Map> fieldCapsResponse, diff --git a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java index 87294706b3b7d..d1db021361c8c 100644 --- a/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java +++ b/x-pack/plugin/rollup/src/main/java/org/elasticsearch/xpack/rollup/job/RollupIndexer.java @@ -23,6 +23,7 @@ import org.elasticsearch.xpack.core.rollup.RollupField; import org.elasticsearch.xpack.core.rollup.job.DateHistogramGroupConfig; import org.elasticsearch.xpack.core.rollup.job.GroupConfig; +import org.elasticsearch.xpack.core.rollup.job.HistogramGroupConfig; import org.elasticsearch.xpack.core.rollup.job.IndexerState; import org.elasticsearch.xpack.core.rollup.job.RollupJob; import org.elasticsearch.xpack.core.rollup.job.RollupJobConfig; @@ -392,15 +393,12 @@ private SearchRequest buildSearchRequest() { private 
CompositeAggregationBuilder createCompositeBuilder(RollupJobConfig config) { final GroupConfig groupConfig = config.getGroupConfig(); List> builders = new ArrayList<>(); - Map metadata = new HashMap<>(); // Add all the agg builders to our request in order: date_histo -> histo -> terms if (groupConfig != null) { builders.addAll(groupConfig.getDateHistogram().toBuilders()); - metadata.putAll(groupConfig.getDateHistogram().getMetadata()); if (groupConfig.getHistogram() != null) { builders.addAll(groupConfig.getHistogram().toBuilders()); - metadata.putAll(groupConfig.getHistogram().getMetadata()); } if (groupConfig.getTerms() != null) { builders.addAll(groupConfig.getTerms().toBuilders()); @@ -409,6 +407,8 @@ private CompositeAggregationBuilder createCompositeBuilder(RollupJobConfig confi CompositeAggregationBuilder composite = new CompositeAggregationBuilder(AGGREGATION_NAME, builders); config.getMetricsConfig().forEach(m -> m.toBuilders().forEach(composite::subAggregation)); + + final Map metadata = createMetadata(groupConfig); if (metadata.isEmpty() == false) { composite.setMetaData(metadata); } @@ -441,5 +441,20 @@ private QueryBuilder createBoundaryQuery(Map position) { .format("epoch_millis"); return query; } + + static Map createMetadata(final GroupConfig groupConfig) { + final Map metadata = new HashMap<>(); + if (groupConfig != null) { + // Add all the metadata in order: date_histo -> histo + final DateHistogramGroupConfig dateHistogram = groupConfig.getDateHistogram(); + metadata.put(RollupField.formatMetaField(RollupField.INTERVAL), dateHistogram.getInterval().toString()); + + final HistogramGroupConfig histogram = groupConfig.getHistogram(); + if (histogram != null) { + metadata.put(RollupField.formatMetaField(RollupField.INTERVAL), histogram.getInterval()); + } + } + return metadata; + } } diff --git a/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerTests.java b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerTests.java new file mode 100644 index 0000000000000..5ab85e2ffa743 --- /dev/null +++ b/x-pack/plugin/rollup/src/test/java/org/elasticsearch/xpack/rollup/job/RollupIndexerTests.java @@ -0,0 +1,49 @@ +/* + * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one + * or more contributor license agreements. Licensed under the Elastic License; + * you may not use this file except in compliance with the Elastic License. 
+ */ +package org.elasticsearch.xpack.rollup.job; + +import org.elasticsearch.test.ESTestCase; +import org.elasticsearch.xpack.core.rollup.ConfigTestHelpers; +import org.elasticsearch.xpack.core.rollup.job.DateHistogramGroupConfig; +import org.elasticsearch.xpack.core.rollup.job.GroupConfig; +import org.elasticsearch.xpack.core.rollup.job.HistogramGroupConfig; + +import java.util.Map; + +import static org.hamcrest.Matchers.equalTo; + +public class RollupIndexerTests extends ESTestCase { + + public void testCreateMetadataNoGroupConfig() { + final Map metadata = RollupIndexer.createMetadata(null); + assertNotNull(metadata); + assertTrue(metadata.isEmpty()); + } + + public void testCreateMetadataWithDateHistogramGroupConfigOnly() { + final DateHistogramGroupConfig dateHistogram = ConfigTestHelpers.randomDateHistogramGroupConfig(random()); + final GroupConfig groupConfig = new GroupConfig(dateHistogram); + + final Map metadata = RollupIndexer.createMetadata(groupConfig); + assertEquals(1, metadata.size()); + assertTrue(metadata.containsKey("_rollup.interval")); + Object value = metadata.get("_rollup.interval"); + assertThat(value, equalTo(dateHistogram.getInterval().toString())); + } + + public void testCreateMetadata() { + final DateHistogramGroupConfig dateHistogram = ConfigTestHelpers.randomDateHistogramGroupConfig(random()); + final HistogramGroupConfig histogram = ConfigTestHelpers.randomHistogramGroupConfig(random()); + final GroupConfig groupConfig = new GroupConfig(dateHistogram, histogram, null); + + final Map metadata = RollupIndexer.createMetadata(groupConfig); + assertEquals(1, metadata.size()); + assertTrue(metadata.containsKey("_rollup.interval")); + Object value = metadata.get("_rollup.interval"); + assertThat(value, equalTo(histogram.getInterval())); + } +} + From 1d8745036f19786f62808cc78a5dedbf2cfcda21 Mon Sep 17 00:00:00 2001 From: Andrei Stefan Date: Fri, 24 Aug 2018 13:14:03 +0300 Subject: [PATCH 5/6] Muted testListenersThrowingExceptionsDoNotCauseOtherListenersToBeSkipped --- .../elasticsearch/xpack/core/scheduler/SchedulerEngineTests.java | 1 + 1 file changed, 1 insertion(+) diff --git a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/scheduler/SchedulerEngineTests.java b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/scheduler/SchedulerEngineTests.java index 869a320fb6386..0f98acefe5b7c 100644 --- a/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/scheduler/SchedulerEngineTests.java +++ b/x-pack/plugin/core/src/test/java/org/elasticsearch/xpack/core/scheduler/SchedulerEngineTests.java @@ -31,6 +31,7 @@ public class SchedulerEngineTests extends ESTestCase { + @AwaitsFix(bugUrl="https://github.com/elastic/elasticsearch/issues/33124") public void testListenersThrowingExceptionsDoNotCauseOtherListenersToBeSkipped() throws InterruptedException { final Logger mockLogger = mock(Logger.class); final SchedulerEngine engine = new SchedulerEngine(Settings.EMPTY, Clock.systemUTC(), mockLogger); From 619e0b28b97b8ea7cddf6b29aaa1a2756da9b363 Mon Sep 17 00:00:00 2001 From: Jason Tedor Date: Fri, 24 Aug 2018 06:53:44 -0400 Subject: [PATCH 6/6] Add hook to skip asserting x-content equivalence (#33114) This commit adds a hook to AbstractSerializingTestCase to enable skipping asserting that the x-content of the test instance and an instance parsed from the x-content of the test instance are the same. While we usually expect these to be the same, they will not be the same when exceptions are involved because the x-content there is lossy. 
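
For illustration of the pattern this change introduces (the hunk below is truncated): the base test case exposes a protected hook that defaults to asserting x-content equivalence, and subclasses whose round trip is lossy (for example, instances carrying exceptions) override it to opt out. The following is a minimal, self-contained sketch of that idea only; the names BaseXContentTestCase, runFromXContentTest and the hook's exact signature are placeholders for illustration, not the actual test-framework API.

import java.util.Objects;

// Sketch of the "skip x-content equivalence" hook pattern (hypothetical names).
abstract class BaseXContentTestCase<T> {

    // Build a random test instance.
    protected abstract T createTestInstance();

    // Serialize to x-content (simplified here to a String) and parse it back.
    protected abstract String toXContent(T instance);

    protected abstract T fromXContent(String xContent);

    // Hook: subclasses whose x-content round trip is lossy (e.g. instances that
    // wrap exceptions) override this to return false and skip the assertion.
    protected boolean assertToXContentEquivalence() {
        return true;
    }

    public final void runFromXContentTest() {
        T original = createTestInstance();
        T parsed = fromXContent(toXContent(original));
        if (assertToXContentEquivalence()
                && Objects.equals(toXContent(original), toXContent(parsed)) == false) {
            throw new AssertionError("x-content of original and parsed instances differ");
        }
    }
}

A test class for a lossy type would then simply override the hook to return false while still exercising serialization and parsing.
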
--- .../test/AbstractSerializingTestCase.java | 26 ++++++++++++++++--- 1 file changed, 23 insertions(+), 3 deletions(-) diff --git a/test/framework/src/main/java/org/elasticsearch/test/AbstractSerializingTestCase.java b/test/framework/src/main/java/org/elasticsearch/test/AbstractSerializingTestCase.java index 6ec32f6654fff..5aeb30bfdbd5d 100644 --- a/test/framework/src/main/java/org/elasticsearch/test/AbstractSerializingTestCase.java +++ b/test/framework/src/main/java/org/elasticsearch/test/AbstractSerializingTestCase.java @@ -16,6 +16,7 @@ * specific language governing permissions and limitations * under the License. */ + package org.elasticsearch.test; import org.elasticsearch.common.Strings; @@ -34,9 +35,17 @@ public abstract class AbstractSerializingTestCase