diff --git a/build.gradle b/build.gradle
index 63779a569..22678d38d 100644
--- a/build.gradle
+++ b/build.gradle
@@ -59,7 +59,7 @@ buildscript {
         classpath "org.opensearch.gradle:build-tools:${opensearch_version}"
         classpath "org.jetbrains.kotlin:kotlin-gradle-plugin:${kotlin_version}"
         classpath "org.jetbrains.kotlin:kotlin-allopen:${kotlin_version}"
-        classpath "io.gitlab.arturbosch.detekt:detekt-gradle-plugin:1.17.1"
+        classpath "io.gitlab.arturbosch.detekt:detekt-gradle-plugin:1.21.0"
         classpath "org.jacoco:org.jacoco.agent:0.8.7"
     }
 }
@@ -95,7 +95,7 @@ configurations.all {
         force 'org.apache.httpcomponents.client5:httpclient5:5.0.3'
         force 'org.apache.httpcomponents.client5:httpclient5-osgi:5.0.3'
         force 'com.fasterxml.jackson.core:jackson-databind:2.10.4'
-        force 'org.yaml:snakeyaml:1.26'
+        force 'org.yaml:snakeyaml:1.31'
         force 'org.codehaus.plexus:plexus-utils:3.0.24'
     }
 }
diff --git a/src/main/kotlin/org/opensearch/indexmanagement/IndexManagementPlugin.kt b/src/main/kotlin/org/opensearch/indexmanagement/IndexManagementPlugin.kt
index 116a5b055..4cd69b92e 100644
--- a/src/main/kotlin/org/opensearch/indexmanagement/IndexManagementPlugin.kt
+++ b/src/main/kotlin/org/opensearch/indexmanagement/IndexManagementPlugin.kt
@@ -73,7 +73,7 @@ import org.opensearch.indexmanagement.indexstatemanagement.transport.action.retr
 import org.opensearch.indexmanagement.indexstatemanagement.transport.action.updateindexmetadata.TransportUpdateManagedIndexMetaDataAction
 import org.opensearch.indexmanagement.indexstatemanagement.transport.action.updateindexmetadata.UpdateManagedIndexMetaDataAction
 import org.opensearch.indexmanagement.indexstatemanagement.util.DEFAULT_INDEX_TYPE
-import org.opensearch.indexmanagement.migration.ISMTemplateService
+import org.opensearch.indexmanagement.indexstatemanagement.migration.ISMTemplateService
 import org.opensearch.indexmanagement.refreshanalyzer.RefreshSearchAnalyzerAction
 import org.opensearch.indexmanagement.refreshanalyzer.RestRefreshSearchAnalyzerAction
 import org.opensearch.indexmanagement.refreshanalyzer.TransportRefreshSearchAnalyzerAction
@@ -280,7 +280,7 @@ class IndexManagementPlugin : JobSchedulerExtension, NetworkPlugin, ActionPlugin
         indexManagementExtensions.forEach { extension ->
             val extensionName = extension.getExtensionName()
             if (extensionName in extensions) {
-                throw IllegalStateException("Multiple extensions of IndexManagement have same name $extensionName - not supported")
+                error("Multiple extensions of IndexManagement have same name $extensionName - not supported")
             }
             extension.getISMActionParsers().forEach { parser ->
                 ISMActionsParser.instance.addParser(parser, extensionName)
@@ -288,7 +288,7 @@ class IndexManagementPlugin : JobSchedulerExtension, NetworkPlugin, ActionPlugin
             indexMetadataServices.add(extension.getIndexMetadataService())
             extension.overrideClusterStateIndexUuidSetting()?.let {
                 if (customIndexUUIDSetting != null) {
-                    throw IllegalStateException(
+                    error(
                         "Multiple extensions of IndexManagement plugin overriding ClusterStateIndexUUIDSetting - not supported"
                     )
                 }
diff --git a/src/main/kotlin/org/opensearch/indexmanagement/common/model/notification/Channel.kt b/src/main/kotlin/org/opensearch/indexmanagement/common/model/notification/Channel.kt
index 8b4c1cec4..f26c8f0d8 100644
--- a/src/main/kotlin/org/opensearch/indexmanagement/common/model/notification/Channel.kt
+++ b/src/main/kotlin/org/opensearch/indexmanagement/common/model/notification/Channel.kt
@@ -61,7 +61,7 @@ data class Channel(val id: String) : ToXContent, Writeable {
                 when (fieldName) {
                     ID -> id = xcp.text()
                     else -> {
-                        throw IllegalStateException("Unexpected field: $fieldName, while parsing Channel destination")
+                        error("Unexpected field: $fieldName, while parsing Channel destination")
                     }
                 }
             }
diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/ManagedIndexCoordinator.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/ManagedIndexCoordinator.kt
index 9cdf89b5f..26d1ac590 100644
--- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/ManagedIndexCoordinator.kt
+++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/ManagedIndexCoordinator.kt
@@ -73,7 +73,7 @@ import org.opensearch.indexmanagement.indexstatemanagement.util.isFailed
 import org.opensearch.indexmanagement.indexstatemanagement.util.isPolicyCompleted
 import org.opensearch.indexmanagement.indexstatemanagement.util.managedIndexConfigIndexRequest
 import org.opensearch.indexmanagement.indexstatemanagement.util.updateEnableManagedIndexRequest
-import org.opensearch.indexmanagement.migration.ISMTemplateService
+import org.opensearch.indexmanagement.indexstatemanagement.migration.ISMTemplateService
 import org.opensearch.indexmanagement.opensearchapi.IndexManagementSecurityContext
 import org.opensearch.indexmanagement.opensearchapi.contentParser
 import org.opensearch.indexmanagement.opensearchapi.parseFromSearchResponse
diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/migration/MigrationServices.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/migration/MigrationServices.kt
index 43845844f..154ce65d5 100644
--- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/migration/MigrationServices.kt
+++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/migration/MigrationServices.kt
@@ -3,7 +3,7 @@
  * SPDX-License-Identifier: Apache-2.0
  */
 
-package org.opensearch.indexmanagement.migration
+package org.opensearch.indexmanagement.indexstatemanagement.migration
 
 import kotlinx.coroutines.Dispatchers
 import kotlinx.coroutines.withContext
diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/destination/Chime.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/destination/Chime.kt
index c7e5102ce..1b1f4a3e2 100644
--- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/destination/Chime.kt
+++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/destination/Chime.kt
@@ -15,7 +15,6 @@ import org.opensearch.common.xcontent.XContentParser
 import org.opensearch.common.xcontent.XContentParser.Token
 import org.opensearch.common.xcontent.XContentParserUtils.ensureExpectedToken
 import java.io.IOException
-import java.lang.IllegalStateException
 
 /**
  * A value object that represents a Chime message. Chime message will be
@@ -58,7 +57,7 @@ data class Chime(val url: String) : ToXContent, Writeable {
                 when (fieldName) {
                     URL -> url = xcp.text()
                     else -> {
-                        throw IllegalStateException("Unexpected field: $fieldName, while parsing Chime destination")
+                        error("Unexpected field: $fieldName, while parsing Chime destination")
                     }
                 }
             }
diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/destination/CustomWebhook.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/destination/CustomWebhook.kt
index 288757c7c..ce008b865 100644
--- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/destination/CustomWebhook.kt
+++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/destination/CustomWebhook.kt
@@ -15,7 +15,6 @@ import org.opensearch.common.xcontent.XContentParser
 import org.opensearch.common.xcontent.XContentParser.Token
 import org.opensearch.common.xcontent.XContentParserUtils.ensureExpectedToken
 import java.io.IOException
-import java.lang.IllegalStateException
 
 /**
  * A value object that represents a Custom webhook message. Webhook message will be
@@ -121,7 +120,7 @@ data class CustomWebhook(
                     USERNAME_FIELD -> username = xcp.textOrNull()
                     PASSWORD_FIELD -> password = xcp.textOrNull()
                     else -> {
-                        throw IllegalStateException("Unexpected field: $fieldName, while parsing custom webhook destination")
+                        error("Unexpected field: $fieldName, while parsing custom webhook destination")
                     }
                 }
             }
diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/destination/Slack.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/destination/Slack.kt
index a3db82c97..81c338e64 100644
--- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/destination/Slack.kt
+++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/model/destination/Slack.kt
@@ -15,7 +15,6 @@ import org.opensearch.common.xcontent.XContentParser
 import org.opensearch.common.xcontent.XContentParser.Token
 import org.opensearch.common.xcontent.XContentParserUtils.ensureExpectedToken
 import java.io.IOException
-import java.lang.IllegalStateException
 
 /**
  * A value object that represents a Slack message. Slack message will be
@@ -58,7 +57,7 @@ data class Slack(val url: String) : ToXContent, Writeable {
                 when (fieldName) {
                     URL -> url = xcp.text()
                     else -> {
-                        throw IllegalStateException("Unexpected field: $fieldName, while parsing Slack destination")
+                        error("Unexpected field: $fieldName, while parsing Slack destination")
                     }
                 }
             }
diff --git a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/shrink/AttemptMoveShardsStep.kt b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/shrink/AttemptMoveShardsStep.kt
index 8d2614226..6d1764e4c 100644
--- a/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/shrink/AttemptMoveShardsStep.kt
+++ b/src/main/kotlin/org/opensearch/indexmanagement/indexstatemanagement/step/shrink/AttemptMoveShardsStep.kt
@@ -135,8 +135,8 @@ class AttemptMoveShardsStep(private val action: ShrinkAction) : ShrinkStep(name,
     private fun getOriginalSettings(indexName: String, clusterService: ClusterService): Map<String, String> {
         val indexSettings = clusterService.state().metadata.index(indexName).settings
         val originalSettings = mutableMapOf<String, String>()
-        indexSettings.get(ROUTING_SETTING)?.let { it -> originalSettings.put(ROUTING_SETTING, it) }
-        indexSettings.get(SETTING_BLOCKS_WRITE)?.let { it -> originalSettings.put(SETTING_BLOCKS_WRITE, it) }
+        indexSettings.get(ROUTING_SETTING)?.let { originalSettings.put(ROUTING_SETTING, it) }
+        indexSettings.get(SETTING_BLOCKS_WRITE)?.let { originalSettings.put(SETTING_BLOCKS_WRITE, it) }
         return originalSettings
     }
 
diff --git a/src/main/kotlin/org/opensearch/indexmanagement/rollup/RollupIndexer.kt b/src/main/kotlin/org/opensearch/indexmanagement/rollup/RollupIndexer.kt
index 5e4a7a9ee..6bc35fb0d 100644
--- a/src/main/kotlin/org/opensearch/indexmanagement/rollup/RollupIndexer.kt
+++ b/src/main/kotlin/org/opensearch/indexmanagement/rollup/RollupIndexer.kt
@@ -120,7 +120,7 @@ class RollupIndexer(
                     is InternalMin -> aggResults[it.name] = it.value
                     is InternalValueCount -> aggResults[it.name] = it.value
                     is InternalAvg -> aggResults[it.name] = it.value
-                    else -> throw IllegalStateException("Found aggregation in composite result that is not supported [${it.type} - ${it.name}]")
+                    else -> error("Found aggregation in composite result that is not supported [${it.type} - ${it.name}]")
                 }
             }
             mapOfKeyValues.putAll(aggResults)
diff --git a/src/main/kotlin/org/opensearch/indexmanagement/rollup/RollupRunner.kt b/src/main/kotlin/org/opensearch/indexmanagement/rollup/RollupRunner.kt
index 016d2a27e..9dba61b5c 100644
--- a/src/main/kotlin/org/opensearch/indexmanagement/rollup/RollupRunner.kt
+++ b/src/main/kotlin/org/opensearch/indexmanagement/rollup/RollupRunner.kt
@@ -293,7 +293,7 @@ object RollupRunner :
                 ) {
                     client.suspendUntil { listener: ActionListener<GetRollupResponse> ->
                         execute(GetRollupAction.INSTANCE, GetRollupRequest(updatableJob.id, null, "_local"), listener)
-                    }.rollup ?: throw IllegalStateException("Unable to get rollup job")
+                    }.rollup ?: error("Unable to get rollup job")
                 }
             }
             is RollupResult.Failure -> {
diff --git a/src/main/kotlin/org/opensearch/indexmanagement/rollup/util/RollupUtils.kt b/src/main/kotlin/org/opensearch/indexmanagement/rollup/util/RollupUtils.kt
index 12e090df7..2bbfd6092 100644
--- a/src/main/kotlin/org/opensearch/indexmanagement/rollup/util/RollupUtils.kt
+++ b/src/main/kotlin/org/opensearch/indexmanagement/rollup/util/RollupUtils.kt
@@ -143,7 +143,7 @@ inline fun Rollup.findMatchingMetricField(field: String): String {
             }
         }
     }
-    throw IllegalStateException("Did not find matching rollup metric")
+    error("Did not find matching rollup metric")
find matching rollup metric") } @Suppress("NestedBlockDepth", "ComplexMethod") diff --git a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/SMRunner.kt b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/SMRunner.kt index 3e36ed28d..272f3307d 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/SMRunner.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/SMRunner.kt @@ -62,8 +62,11 @@ object SMRunner : return this } + private const val MAX_NUMBER_OF_RETRIES = 3 + private const val EXPONENTIAL_BACKOFF_MILLIS = 1000L + private val backoffPolicy: BackoffPolicy = BackoffPolicy.exponentialBackoff( - TimeValue.timeValueMillis(1000L), 3 + TimeValue.timeValueMillis(EXPONENTIAL_BACKOFF_MILLIS), MAX_NUMBER_OF_RETRIES ) override fun runJob(job: ScheduledJobParameter, context: JobExecutionContext) { diff --git a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/SMStateMachine.kt b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/SMStateMachine.kt index ae17e25ce..4556030e5 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/SMStateMachine.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/SMStateMachine.kt @@ -166,7 +166,7 @@ class SMStateMachine( val retryCount: Int if (retry == null) { log.warn("Starting to retry state [$currentState], remaining count 3.") - metadataBuilder.setRetry(3) // TODO SM 3 retry count could be customizable + metadataBuilder.setRetry(MAX_NUMBER_OF_RETRIES) // TODO SM 3 retry count could be customizable } else { retryCount = retry.count - 1 if (retryCount > 0) { @@ -218,7 +218,8 @@ class SMStateMachine( // TODO SM save a copy to history } - private val updateMetaDataRetryPolicy = BackoffPolicy.exponentialBackoff(TimeValue.timeValueMillis(250), 3) + + private val updateMetaDataRetryPolicy = BackoffPolicy.exponentialBackoff(TimeValue.timeValueMillis(EXPONENTIAL_BACKOFF_MILLIS), MAX_NUMBER_OF_RETRIES) /** * Handle the policy change before job running @@ -241,4 +242,9 @@ class SMStateMachine( } return this } + + companion object { + private const val MAX_NUMBER_OF_RETRIES = 3 + private const val EXPONENTIAL_BACKOFF_MILLIS = 250L + } } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/creation/CreatingState.kt b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/creation/CreatingState.kt index d2aff8500..bc4b296bd 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/creation/CreatingState.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/creation/CreatingState.kt @@ -44,7 +44,7 @@ object CreatingState : State { if (snapshotName == null) { val getSnapshotsResult = client.getSnapshots( job, job.policyName + "*", metadataBuilder, - log, null, getSnapshotsErrorMessage(), + log, null, SNAPSHOT_ERROR_MESSAGE, ) metadataBuilder = getSnapshotsResult.metadataBuilder if (getSnapshotsResult.failed) { @@ -86,10 +86,10 @@ object CreatingState : State { private fun handleException(ex: Exception, snapshotName: String, metadataBuilder: SMMetadata.Builder, log: Logger): SMResult { if (ex is ConcurrentSnapshotExecutionException) { - log.error(getConcurrentSnapshotMessage(), ex) + log.error(CONCURRENT_SNAPSHOT_MESSAGE, ex) metadataBuilder.setLatestExecution( status = SMMetadata.LatestExecution.Status.RETRYING, - message = getConcurrentSnapshotMessage(), + 
message = CONCURRENT_SNAPSHOT_MESSAGE, ) return SMResult.Stay(metadataBuilder) } @@ -102,10 +102,10 @@ object CreatingState : State { return SMResult.Fail(metadataBuilder, WorkflowType.CREATION) } - fun getConcurrentSnapshotMessage() = "Concurrent snapshot exception happened, retrying..." + const val CONCURRENT_SNAPSHOT_MESSAGE = "Concurrent snapshot exception happened, retrying..." private fun getSnapshotCreationStartedMessage(snapshotName: String) = "Snapshot $snapshotName creation has been started and waiting for completion." - private fun getSnapshotsErrorMessage() = + private const val SNAPSHOT_ERROR_MESSAGE = "Caught exception while getting snapshots to decide if snapshot has been created in previous execution period." private fun getCreateSnapshotErrorMessage(snapshotName: String) = "Caught exception while creating snapshot $snapshotName." diff --git a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/deletion/DeletingState.kt b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/deletion/DeletingState.kt index 2cffb7b5d..0aa9cfdd8 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/deletion/DeletingState.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/engine/states/deletion/DeletingState.kt @@ -86,10 +86,10 @@ object DeletingState : State { private fun handleException(ex: Exception, snapshotsToDelete: List, metadataBuilder: SMMetadata.Builder, log: Logger): SMResult { if (ex is ConcurrentSnapshotExecutionException) { - log.error(CreatingState.getConcurrentSnapshotMessage(), ex) + log.error(CreatingState.CONCURRENT_SNAPSHOT_MESSAGE, ex) metadataBuilder.setLatestExecution( status = SMMetadata.LatestExecution.Status.RETRYING, - message = CreatingState.getConcurrentSnapshotMessage(), + message = CreatingState.CONCURRENT_SNAPSHOT_MESSAGE, ) return SMResult.Stay(metadataBuilder) } diff --git a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/model/SMPolicy.kt b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/model/SMPolicy.kt index 60ccac14b..dbc678ef8 100644 --- a/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/model/SMPolicy.kt +++ b/src/main/kotlin/org/opensearch/indexmanagement/snapshotmanagement/model/SMPolicy.kt @@ -55,9 +55,15 @@ data class SMPolicy( ) : ScheduledJobParameter, Writeable { init { - require(snapshotConfig["repository"] != null && snapshotConfig["repository"] != "") { "Must provide the repository in snapshot config." } - require(creation.schedule.getNextExecutionTime(now()) != null) { "Next execution time from the creation schedule is null, please provide a valid cron expression." } - require(deletion == null || (deletion.schedule.getNextExecutionTime(now()) != null)) { "Next execution time from the deletion schedule is null, please provide a valid cron expression." } + require(snapshotConfig["repository"] != null && snapshotConfig["repository"] != "") { + "Must provide the repository in snapshot config." + } + require(creation.schedule.getNextExecutionTime(now()) != null) { + "Next execution time from the creation schedule is null, please provide a valid cron expression." + } + require(deletion == null || (deletion.schedule.getNextExecutionTime(now()) != null)) { + "Next execution time from the deletion schedule is null, please provide a valid cron expression." 
+        }
     }
 
     // This name is used by the job scheduler and needs to match the id to avoid namespace conflicts with ISM policies sharing the same name
diff --git a/src/main/kotlin/org/opensearch/indexmanagement/transform/TransformValidator.kt b/src/main/kotlin/org/opensearch/indexmanagement/transform/TransformValidator.kt
index bc6c63234..a7ee2586e 100644
--- a/src/main/kotlin/org/opensearch/indexmanagement/transform/TransformValidator.kt
+++ b/src/main/kotlin/org/opensearch/indexmanagement/transform/TransformValidator.kt
@@ -24,7 +24,6 @@ import org.opensearch.indexmanagement.transform.model.TransformValidationResult
 import org.opensearch.indexmanagement.transform.settings.TransformSettings
 import org.opensearch.monitor.jvm.JvmService
 import org.opensearch.transport.RemoteTransportException
-import java.lang.IllegalStateException
 
 @Suppress("SpreadOperator", "ReturnCount", "ThrowsCount")
 class TransformValidator(
@@ -93,9 +92,7 @@ class TransformValidator(
     private suspend fun validateIndex(index: String, transform: Transform): List<String> {
         val request = GetMappingsRequest().indices(index)
         val result: GetMappingsResponse =
-            client.admin().indices().suspendUntil { getMappings(request, it) } ?: throw IllegalStateException(
-                "GetMappingResponse for [$index] was null"
-            )
+            client.admin().indices().suspendUntil { getMappings(request, it) } ?: error("GetMappingResponse for [$index] was null")
         return validateMappingsResponse(index, result, transform)
     }
 
diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/RolloverActionIT.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/RolloverActionIT.kt
index 99149cdd9..7d6d77935 100644
--- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/RolloverActionIT.kt
+++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/action/RolloverActionIT.kt
@@ -237,11 +237,11 @@ class RolloverActionIT : IndexStateManagementRestTestCase() {
 
         // assuming our ingestion is randomly split between the 20 primary shards
        // then 250kb/20 gives around 12.5kb per primary shard which is below our 100kb condition
-        val KB_250 = 250_000
+        val kb250 = 250_000
         var primaryStoreSizeBytes = 0
         var count = 0
         // Ingest data into the test index until the total size of the index is greater than our min primary size condition
-        while (primaryStoreSizeBytes < KB_250) {
+        while (primaryStoreSizeBytes < kb250) {
             // this count should never get as high as 10... if it does just fail the test
             if (count++ > 10) fail("Something is wrong with the data ingestion for testing rollover condition")
             insertSampleData(index = firstIndex, docCount = 20, jsonString = "{ \"test_field\": \"${OpenSearchTestCase.randomAlphaOfLength(7000)}\" }", delay = 0)
@@ -271,12 +271,12 @@
             assertThat("Did not have min size current", minPrimarySize["current"], isA(String::class.java))
         }
 
-        val KB_150 = 150_000
+        val kb150 = 150_000
         var primaryShardSizeBytes = 0
         count = 0
         // Ingest data into the test index using custom routing so it always goes to a single shard until the size of the
         // primary shard is over 150kb
-        while (primaryShardSizeBytes < KB_150) {
+        while (primaryShardSizeBytes < kb150) {
             // this count should never get as high as 10... if it does just fail the test
             if (count++ > 10) fail("Something is wrong with the data ingestion for testing rollover condition")
             insertSampleData(index = firstIndex, docCount = 20, delay = 0, jsonString = "{ \"test_field\": \"${OpenSearchTestCase.randomAlphaOfLength(7000)}\" }", routing = "custom_routing")
diff --git a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/coordinator/ManagedIndexCoordinatorTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/coordinator/ManagedIndexCoordinatorTests.kt
index 5bd6b8da6..d04e7b05f 100644
--- a/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/coordinator/ManagedIndexCoordinatorTests.kt
+++ b/src/test/kotlin/org/opensearch/indexmanagement/indexstatemanagement/coordinator/ManagedIndexCoordinatorTests.kt
@@ -21,7 +21,7 @@ import org.opensearch.indexmanagement.indexstatemanagement.IndexMetadataProvider
 import org.opensearch.indexmanagement.indexstatemanagement.ManagedIndexCoordinator
 import org.opensearch.indexmanagement.indexstatemanagement.MetadataService
 import org.opensearch.indexmanagement.indexstatemanagement.settings.ManagedIndexSettings
-import org.opensearch.indexmanagement.migration.ISMTemplateService
+import org.opensearch.indexmanagement.indexstatemanagement.migration.ISMTemplateService
 import org.opensearch.test.ClusterServiceUtils
 import org.opensearch.test.OpenSearchTestCase
 import org.opensearch.threadpool.Scheduler
diff --git a/src/test/kotlin/org/opensearch/indexmanagement/refreshanalyzer/RefreshSearchAnalyzerResponseTests.kt b/src/test/kotlin/org/opensearch/indexmanagement/refreshanalyzer/RefreshSearchAnalyzerResponseTests.kt
index 3df655682..079cf6cd2 100644
--- a/src/test/kotlin/org/opensearch/indexmanagement/refreshanalyzer/RefreshSearchAnalyzerResponseTests.kt
+++ b/src/test/kotlin/org/opensearch/indexmanagement/refreshanalyzer/RefreshSearchAnalyzerResponseTests.kt
@@ -22,34 +22,34 @@ class RefreshSearchAnalyzerResponseTests : OpenSearchTestCase() {
         val i2s0 = ShardId(index2, "xyz", 0)
         val i2s1 = ShardId(index2, "xyz", 1)
 
-        val response_i1s0 = RefreshSearchAnalyzerShardResponse(i1s0, listOf(syn1, syn2))
-        val response_i1s1 = RefreshSearchAnalyzerShardResponse(i1s1, listOf(syn1, syn2))
-        val response_i2s0 = RefreshSearchAnalyzerShardResponse(i2s0, listOf(syn1))
-        val response_i2s1 = RefreshSearchAnalyzerShardResponse(i2s1, listOf(syn1))
-        val failure_i1s0 = DefaultShardOperationFailedException(index1, 0, Throwable("dummyCause"))
-        val failure_i1s1 = DefaultShardOperationFailedException(index1, 1, Throwable("dummyCause"))
-        val failure_i2s0 = DefaultShardOperationFailedException(index2, 0, Throwable("dummyCause"))
-        val failure_i2s1 = DefaultShardOperationFailedException(index2, 1, Throwable("dummyCause"))
+        val responseI1s0 = RefreshSearchAnalyzerShardResponse(i1s0, listOf(syn1, syn2))
+        val responseI1s1 = RefreshSearchAnalyzerShardResponse(i1s1, listOf(syn1, syn2))
+        val responseI2s0 = RefreshSearchAnalyzerShardResponse(i2s0, listOf(syn1))
+        val responseI2s1 = RefreshSearchAnalyzerShardResponse(i2s1, listOf(syn1))
+        val failureI1s0 = DefaultShardOperationFailedException(index1, 0, Throwable("dummyCause"))
+        val failureI1s1 = DefaultShardOperationFailedException(index1, 1, Throwable("dummyCause"))
+        val failureI2s0 = DefaultShardOperationFailedException(index2, 0, Throwable("dummyCause"))
+        val failureI2s1 = DefaultShardOperationFailedException(index2, 1, Throwable("dummyCause"))
 
         // Case 1: All shards successful
-        var aggregate_response = listOf(response_i1s0, response_i1s1, response_i2s0, response_i2s1)
-        var aggregate_failures = listOf()
-        var refreshSearchAnalyzerResponse = RefreshSearchAnalyzerResponse(4, 4, 0, aggregate_failures, aggregate_response)
+        var aggregateResponse = listOf(responseI1s0, responseI1s1, responseI2s0, responseI2s1)
+        var aggregateFailures = listOf()
+        var refreshSearchAnalyzerResponse = RefreshSearchAnalyzerResponse(4, 4, 0, aggregateFailures, aggregateResponse)
         var successfulIndices = refreshSearchAnalyzerResponse.getSuccessfulRefreshDetails()
         Assert.assertTrue(successfulIndices.containsKey(index1))
         Assert.assertTrue(successfulIndices.containsKey(index2))
 
         // Case 2: All shards failed
-        aggregate_response = listOf()
-        aggregate_failures = listOf(failure_i1s0, failure_i1s1, failure_i2s0, failure_i2s1)
-        refreshSearchAnalyzerResponse = RefreshSearchAnalyzerResponse(4, 0, 4, aggregate_failures, aggregate_response)
+        aggregateResponse = listOf()
+        aggregateFailures = listOf(failureI1s0, failureI1s1, failureI2s0, failureI2s1)
+        refreshSearchAnalyzerResponse = RefreshSearchAnalyzerResponse(4, 0, 4, aggregateFailures, aggregateResponse)
         successfulIndices = refreshSearchAnalyzerResponse.getSuccessfulRefreshDetails()
         Assert.assertTrue(successfulIndices.isEmpty())
 
         // Case 3: Some shards of an index fail, while some others succeed
-        aggregate_response = listOf(response_i1s1, response_i2s0, response_i2s1)
-        aggregate_failures = listOf(failure_i1s0)
-        refreshSearchAnalyzerResponse = RefreshSearchAnalyzerResponse(4, 3, 1, aggregate_failures, aggregate_response)
+        aggregateResponse = listOf(responseI1s1, responseI2s0, responseI2s1)
+        aggregateFailures = listOf(failureI1s0)
+        refreshSearchAnalyzerResponse = RefreshSearchAnalyzerResponse(4, 3, 1, aggregateFailures, aggregateResponse)
         successfulIndices = refreshSearchAnalyzerResponse.getSuccessfulRefreshDetails()
         Assert.assertTrue(successfulIndices.containsKey(index2))
         Assert.assertFalse(successfulIndices.containsKey(index1))