diff --git a/orca-mahe/src/main/groovy/com/netflix/spinnaker/orca/mahe/MaheService.groovy b/orca-mahe/src/main/groovy/com/netflix/spinnaker/orca/mahe/MaheService.groovy
index fb8a7f8c51..e1169bb28e 100644
--- a/orca-mahe/src/main/groovy/com/netflix/spinnaker/orca/mahe/MaheService.groovy
+++ b/orca-mahe/src/main/groovy/com/netflix/spinnaker/orca/mahe/MaheService.groovy
@@ -26,6 +26,9 @@ interface MaheService {
   @POST('/properties/upsert')
   Response upsertProperty(@Body Map property)
 
+  @GET('/properties/find')
+  Response findProperty(@QueryMap Map property)
+
   @GET('/properties/prop')
   Response getPropertyById(@Query('propId') String propId, @Query('env') String env)
 
diff --git a/orca-mahe/src/main/groovy/com/netflix/spinnaker/orca/mahe/cleanup/FastPropertyCleanupListener.groovy b/orca-mahe/src/main/groovy/com/netflix/spinnaker/orca/mahe/cleanup/FastPropertyCleanupListener.groovy
index cab8291b97..c48cd05d3a 100644
--- a/orca-mahe/src/main/groovy/com/netflix/spinnaker/orca/mahe/cleanup/FastPropertyCleanupListener.groovy
+++ b/orca-mahe/src/main/groovy/com/netflix/spinnaker/orca/mahe/cleanup/FastPropertyCleanupListener.groovy
@@ -18,6 +18,7 @@ package com.netflix.spinnaker.orca.mahe.cleanup
 
 import com.fasterxml.jackson.databind.ObjectMapper
 import com.netflix.spinnaker.orca.ExecutionStatus
+import com.netflix.spinnaker.orca.RetrySupport
 import com.netflix.spinnaker.orca.listeners.ExecutionListener
 import com.netflix.spinnaker.orca.listeners.Persister
 import com.netflix.spinnaker.orca.mahe.MaheService
@@ -27,6 +28,7 @@ import com.netflix.spinnaker.orca.pipeline.model.Stage
 import groovy.util.logging.Slf4j
 import org.springframework.beans.factory.annotation.Autowired
 import org.springframework.stereotype.Component
+import retrofit.RetrofitError
 import retrofit.client.Response
 
 @Slf4j
@@ -41,6 +43,7 @@ class FastPropertyCleanupListener implements ExecutionListener {
   }
 
   @Autowired ObjectMapper mapper
+  @Autowired RetrySupport retrySupport
 
   @Override
   void afterExecution(Persister persister,
@@ -55,34 +58,82 @@ class FastPropertyCleanupListener implements ExecutionListener {
     rollbacks.each { stage ->
       switch (stage.context.propertyAction) {
         case PropertyAction.CREATE.toString():
-          stage.context.propertyIdList.each { prop ->
-            log.info("Rolling back the creation of: ${prop.propertyId} on execution ${execution.id} by deleting")
-            Response response = mahe.deleteProperty(prop.propertyId, "spinnaker rollback", extractEnvironment(prop.propertyId))
-            resolveRollbackResponse(response, stage.context.propertyAction.toString(), prop)
+          stage.context.persistedProperties.each { Map prop ->
+            String propertyId = prop.propertyId
+            if (shouldRollback(prop)) {
+              log.info("Rolling back the creation of: ${propertyId} on execution ${execution.id} by deleting")
+              Response response = mahe.deleteProperty(propertyId, "spinnaker rollback", extractEnvironment(propertyId))
+              resolveRollbackResponse(response, stage.context.propertyAction.toString(), prop)
+            } else {
+              log.info("Property has been updated since this execution; not rolling back")
+            }
           }
           break
         case PropertyAction.UPDATE.toString():
-          stage.context.originalProperties.each { prop ->
-            log.info("Rolling back the ${stage.context.propertyAction} of: ${prop.property.propertyId} on execution ${execution.id} by upserting")
-            Response response = mahe.upsertProperty(prop)
-            resolveRollbackResponse(response, stage.context.propertyAction.toString(), prop.property)
+          stage.context.originalProperties.each { Map originalProp ->
+            Map property = originalProp.property
+            Map updatedProperty = (Map) stage.context.persistedProperties.find { it.propertyId == property.propertyId }
+            String propertyId = property.propertyId
+            if (shouldRollback(updatedProperty)) {
+              log.info("Rolling back the update of: ${propertyId} on execution ${execution.id} by upserting")
+              Response response = mahe.upsertProperty(originalProp)
+              resolveRollbackResponse(response, stage.context.propertyAction.toString(), property)
+            } else {
+              log.info("Property has been updated since this execution; not rolling back")
+            }
           }
           break
         case PropertyAction.DELETE.toString():
-          stage.context.originalProperties.each { prop ->
-            if (prop.property.propertyId) {
-              prop.property.remove('propertyId')
-            }
-            log.info("Rolling back the ${stage.context.propertyAction} of: ${prop.property.key}|${prop.property.value} on execution ${execution.id} by re-creating")
+          stage.context.originalProperties.each { Map prop ->
+            Map property = prop.property
+            if (propertyExists(property)) {
+              log.info("Property exists, not restoring to original state after delete.")
+            } else {
+              if (property.propertyId) {
+                property.remove('propertyId')
+              }
+              log.info("Rolling back the delete of: ${property.key}|${property.value} on execution ${execution.id} by re-creating")
 
-            Response response = mahe.upsertProperty(prop)
-            resolveRollbackResponse(response, stage.context.propertyAction.toString(), prop.property)
+              Response response = mahe.upsertProperty(prop)
+              resolveRollbackResponse(response, stage.context.propertyAction.toString(), property)
+            }
           }
         }
       }
     }
   }
 
+  private boolean shouldRollback(Map property) {
+    String propertyId = property.propertyId
+    String env = extractEnvironment(propertyId)
+    try {
+      return retrySupport.retry({
+        Response propertyResponse = mahe.getPropertyById(propertyId, env)
+        Map currentProperty = mapper.readValue(propertyResponse.body.in().text, Map)
+        return currentProperty.ts == property.ts
+      }, 3, 2, false)
+    } catch (RetrofitError error) {
+      if (error.response.status == 404) {
+        return false
+      }
+      throw error
+    }
+  }
+
+  private boolean propertyExists(Map property) {
+    try {
+      return retrySupport.retry({
+        mahe.getPropertyById(property.propertyId, property.env)
+        return true
+      }, 3, 2, false)
+    } catch (RetrofitError error) {
+      if (error.kind == RetrofitError.Kind.HTTP && error.response.status == 404) {
+        return false
+      }
+      throw error
+    }
+  }
+
   private void resolveRollbackResponse(Response response, String initialPropertyAction, def property) {
     if(response.status == 200) {
       log.info("Successful Fast Property rollback for $initialPropertyAction")
diff --git a/orca-mahe/src/main/groovy/com/netflix/spinnaker/orca/mahe/tasks/CreatePropertiesTask.groovy b/orca-mahe/src/main/groovy/com/netflix/spinnaker/orca/mahe/tasks/CreatePropertiesTask.groovy
index 479547fad7..48dd93f87f 100644
--- a/orca-mahe/src/main/groovy/com/netflix/spinnaker/orca/mahe/tasks/CreatePropertiesTask.groovy
+++ b/orca-mahe/src/main/groovy/com/netflix/spinnaker/orca/mahe/tasks/CreatePropertiesTask.groovy
@@ -26,6 +26,7 @@ import com.netflix.spinnaker.orca.pipeline.model.Stage
 import groovy.util.logging.Slf4j
 import org.springframework.beans.factory.annotation.Autowired
 import org.springframework.stereotype.Component
+import retrofit.RetrofitError
 import retrofit.client.Response
 
 import static com.netflix.spinnaker.orca.ExecutionStatus.SUCCEEDED
@@ -44,7 +45,10 @@ class CreatePropertiesTask implements Task {
       context = overrides.find { it.refId == stage.refId } ?: context
     }
     List properties = assemblePersistedPropertyListFromContext(context, context.persistedProperties)
-    List originalProperties = assemblePersistedPropertyListFromContext(context, context.originalProperties)
+    // originalProperties field is only present on ad-hoc property pipelines - not as part of a createProperty stage,
+    // so we'll need to add the original property if found
+    boolean hasOriginalProperties = context.originalProperties
+    List originalProperties = assemblePersistedPropertyListFromContext(context, context.originalProperties ?: [])
     List propertyIdList = []
     PropertyAction propertyAction = PropertyAction.UNKNOWN
 
@@ -56,8 +60,13 @@ class CreatePropertiesTask implements Task {
         propertyAction = PropertyAction.DELETE
       } else {
         log.info("Upserting Property: ${prop} on execution ${stage.execution.id}")
+        Map existingProperty = getExistingProperty(prop)
+        log.info("Property ${prop.key} ${existingProperty ? 'exists' : 'does not exist'}")
         response = maheService.upsertProperty(prop)
-        propertyAction = prop.property.propertyId ? PropertyAction.UPDATE : PropertyAction.CREATE
+        propertyAction = existingProperty ? PropertyAction.UPDATE : PropertyAction.CREATE
+        if (existingProperty && !hasOriginalProperties) {
+          originalProperties << existingProperty
+        }
       }
 
       if (response.status == 200) {
@@ -82,6 +91,16 @@ class CreatePropertiesTask implements Task {
   }
 
+  private Map getExistingProperty(Map prop) {
+    try {
+      return mapper.readValue(maheService.findProperty(prop).body.in().text, Map)
+    } catch (RetrofitError error) {
+      if (error.kind == RetrofitError.Kind.HTTP && error.response.status == 404) {
+        return null
+      }
+      throw error
+    }
+  }
 
   List assemblePersistedPropertyListFromContext(Map context, List propertyList) {
diff --git a/orca-mahe/src/test/groovy/com/netflix/spinnaker/orca/mahe/tasks/CreatePropertiesTaskSpec.groovy b/orca-mahe/src/test/groovy/com/netflix/spinnaker/orca/mahe/tasks/CreatePropertiesTaskSpec.groovy
index 36dd02dd26..ba90739715 100644
--- a/orca-mahe/src/test/groovy/com/netflix/spinnaker/orca/mahe/tasks/CreatePropertiesTaskSpec.groovy
+++ b/orca-mahe/src/test/groovy/com/netflix/spinnaker/orca/mahe/tasks/CreatePropertiesTaskSpec.groovy
@@ -17,31 +17,19 @@ package com.netflix.spinnaker.orca.mahe.tasks
 
 import com.fasterxml.jackson.databind.ObjectMapper
+import com.netflix.spinnaker.orca.RetrySupport
 import com.netflix.spinnaker.orca.mahe.MaheService
 import com.netflix.spinnaker.orca.mahe.pipeline.CreatePropertyStage
 import com.netflix.spinnaker.orca.mahe.pipeline.MonitorCreatePropertyStage
 import com.netflix.spinnaker.orca.pipeline.model.Pipeline
 import com.netflix.spinnaker.orca.pipeline.model.PipelineBuilder
 import com.netflix.spinnaker.orca.pipeline.model.Stage
+import retrofit.RetrofitError
 import retrofit.client.Response
 import retrofit.mime.TypedByteArray
+import retrofit.mime.TypedString
 import spock.lang.Specification
 import spock.lang.Unroll
 
-/*
- * Copyright 2016 Netflix, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the 'License');
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *    http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an 'AS IS' BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
 
 class CreatePropertiesTaskSpec extends Specification {
@@ -50,6 +38,8 @@ class CreatePropertiesTaskSpec extends Specification {
 
   CreatePropertiesTask task = new CreatePropertiesTask(maheService: maheService, mapper: mapper)
 
+  RetrofitError NOT_FOUND = new RetrofitError(null, null, new Response("http://clouddriver", 404, "null", [], null), null, null, RetrofitError.Kind.HTTP, null)
+
   def "assemble the changed property list and original from the context"() {
     given:
     def pipeline = new Pipeline('foo')
@@ -63,7 +53,7 @@ class CreatePropertiesTaskSpec extends Specification {
     List properties = task.assemblePersistedPropertyListFromContext(stage.context, stage.context.persistedProperties)
     List originalProperties = task.assemblePersistedPropertyListFromContext(stage.context, stage.context.originalProperties)
 
-    then: "this is what the property payload the is sent to MAHE needs to look like"
+    then: "this is what the property payload that is sent to MAHE needs to look like"
     properties.size() == 1
     originalProperties.size() == 1
 
@@ -85,6 +75,33 @@ class CreatePropertiesTaskSpec extends Specification {
     }
   }
 
+  def "adds original property to outputs if none present in stage context"() {
+    given:
+    def pipeline = new Pipeline('foo')
+    def scope = createScope()
+    def property = createProperty()
+    def originalProperty = createProperty()
+
+    def stage = createPropertiesStage(pipeline, scope, property, originalProperty )
+    stage.context.remove("originalProperties")
+
+    when:
+    def results = task.execute(stage)
+
+    then:
+    1 * maheService.findProperty(_) >> new Response('', 200, 'OK', [], new TypedString(mapper.writeValueAsString([a:1])))
+    1 * maheService.upsertProperty(_) >> { Map res ->
+      def json = mapper.writeValueAsString([propertyId: 'propertyId'])
+      new Response("http://mahe", 200, "OK", [], new TypedByteArray('application/json', json.bytes))
+    }
+
+    then:
+    with(results.outputs) {
+      originalProperties.size() == 1
+      originalProperties[0].a == 1
+    }
+  }
+
   def "prefer a stage override if present for context"() {
     given:
     def trigger = [ stageOverrides: [] ]
@@ -98,11 +115,11 @@ class CreatePropertiesTaskSpec extends Specification {
 
     pipeline.trigger.stageOverrides << stageOverride.context
 
-
-    when:
+    when:
     def results = task.execute(createPropertiesStage)
 
     then:
+    1 * maheService.findProperty(_) >> { throw NOT_FOUND }
     1 * maheService.upsertProperty(_) >> { Map res ->
       def json = mapper.writeValueAsString([propertyId: 'other'])
       new Response("http://mahe", 200, "OK", [], new TypedByteArray('application/json', json.bytes))
@@ -172,6 +189,7 @@ class CreatePropertiesTaskSpec extends Specification {
     def results = task.execute(createPropertiesStage)
 
     then:
+    1 * maheService.findProperty(_) >> { throw NOT_FOUND }
     1 * maheService.upsertProperty(_) >> { Map res ->
       def json = mapper.writeValueAsString([propertyId: 'propertyId'])
       new Response("http://mahe", 200, "OK", [], new TypedByteArray('application/json', json.bytes))
@@ -236,7 +254,7 @@ class CreatePropertiesTaskSpec extends Specification {
 
   }
 
-  def "create multiple new persistent property"() {
+  def "create multiple new persistent properties"() {
     given:
     def pipeline = new Pipeline('foo')
     def parentStageId = UUID.randomUUID().toString()
@@ -266,6 +284,7 @@ class CreatePropertiesTaskSpec extends Specification {
 
     then:
+    2 * maheService.findProperty(_) >> { throw NOT_FOUND }
     2 * maheService.upsertProperty(_) >> { Map res ->
       captured = res
       String propId = "${res.property.key}|${res.property.value}"
diff --git a/orca-mahe/src/test/groovy/com/netflix/spinnaker/orca/mahe/tasks/PropertyChangeCleanupSpec.groovy b/orca-mahe/src/test/groovy/com/netflix/spinnaker/orca/mahe/tasks/PropertyChangeCleanupSpec.groovy
index e5d1b9383a..b29d87258e 100644
--- a/orca-mahe/src/test/groovy/com/netflix/spinnaker/orca/mahe/tasks/PropertyChangeCleanupSpec.groovy
+++ b/orca-mahe/src/test/groovy/com/netflix/spinnaker/orca/mahe/tasks/PropertyChangeCleanupSpec.groovy
@@ -18,18 +18,22 @@ package com.netflix.spinnaker.orca.mahe.tasks
 
 import com.fasterxml.jackson.databind.ObjectMapper
 import com.netflix.spinnaker.orca.ExecutionStatus
+import com.netflix.spinnaker.orca.RetrySupport
 import com.netflix.spinnaker.orca.mahe.MaheService
 import com.netflix.spinnaker.orca.mahe.PropertyAction
 import com.netflix.spinnaker.orca.mahe.cleanup.FastPropertyCleanupListener
 import com.netflix.spinnaker.orca.pipeline.persistence.ExecutionRepository
+import retrofit.RetrofitError
 import retrofit.client.Response
 import retrofit.mime.TypedByteArray
+import retrofit.mime.TypedString
 import spock.lang.Specification
 import spock.lang.Subject
 import spock.lang.Unroll
 
 import static com.netflix.spinnaker.orca.mahe.PropertyAction.CREATE
 import static com.netflix.spinnaker.orca.mahe.PropertyAction.DELETE
+import static com.netflix.spinnaker.orca.mahe.PropertyAction.UPDATE
 import static com.netflix.spinnaker.orca.mahe.pipeline.CreatePropertyStage.PIPELINE_CONFIG_TYPE
 import static com.netflix.spinnaker.orca.test.model.ExecutionBuilder.pipeline
 import static com.netflix.spinnaker.orca.test.model.ExecutionBuilder.stage
@@ -37,12 +41,16 @@ import static com.netflix.spinnaker.orca.test.model.ExecutionBuilder.stage
 class PropertyChangeCleanupSpec extends Specification {
 
   ObjectMapper mapper = new ObjectMapper()
+  RetrySupport retrySupport = new NoSleepRetry()
   def repository = Stub(ExecutionRepository)
   def mahe = Mock(MaheService)
   @Subject def listener = new FastPropertyCleanupListener(mahe)
 
+  RetrofitError NOT_FOUND = new RetrofitError(null, null, new Response("http://clouddriver", 404, "null", [], null), null, null, RetrofitError.Kind.HTTP, null)
+
   def setup() {
     listener.mapper = mapper
+    listener.retrySupport = retrySupport
   }
 
   @Unroll()
@@ -63,6 +71,7 @@ class PropertyChangeCleanupSpec extends Specification {
     listener.afterExecution(null, pipeline, executionStatus, false)
 
     then:
+    3 * mahe.getPropertyById(propertyId, _) >> { throw NOT_FOUND }
     1 * mahe.upsertProperty(_) >> { Map res ->
       String propId = "${res.property.key}|${res.property.value}"
       def json = mapper.writeValueAsString([propertyId: propId])
@@ -97,6 +106,8 @@ class PropertyChangeCleanupSpec extends Specification {
       new Response("http://mahe", 500, "OK", [], null)
     }
 
+    3 * mahe.getPropertyById(propertyId, propertyEnv) >> { throw NOT_FOUND }
+
     IllegalStateException ex = thrown()
     assert ex.message.contains("Unable to rollback DELETE")
 
@@ -133,9 +144,9 @@ class PropertyChangeCleanupSpec extends Specification {
   }
 
   def "properties marked for rollback are rolled back on a successful execution"() {
-    def createStageContext = [rollback: true, propertyIdList: [[propertyId: propertyId]], originalProperties: [], propertyAction: PropertyAction.CREATE.toString()]
-    def deleteStageContext = [rollback: true, propertyIdList: [[propertyId: propertyId]], originalProperties: [[property: previous]], propertyAction: DELETE.toString()]
-    def retainedStageContext = [propertyIdList: [[propertyId: 'z' + propertyId]], originalProperties: [[property: previous]], propertyAction: CREATE.toString()]
+    def createStageContext = [rollback: true, persistedProperties: [[propertyId: createPropertyId]], originalProperties: [], propertyAction: CREATE.toString()]
+    def deleteStageContext = [rollback: true, originalProperties: [[property: previous]], propertyAction: DELETE.toString()]
+    def retainedStageContext = [originalProperties: [[property: retainedPrevious]], propertyAction: CREATE.toString()]
     def pipeline = pipeline {
       stage {
         type = PIPELINE_CONFIG_TYPE
@@ -159,8 +170,11 @@ class PropertyChangeCleanupSpec extends Specification {
 
     then:
-    1 * mahe.deleteProperty(propertyId, 'spinnaker rollback', propertyEnv) >> { def res ->
-      def json = mapper.writeValueAsString([propertyId: propertyId])
+    1 * mahe.getPropertyById(createPropertyId, _) >> new Response('', 200, 'OK', [], new TypedString(mapper.writeValueAsString([:])))
+    3 * mahe.getPropertyById(deletePropertyId, _) >> { throw NOT_FOUND }
+
+    1 * mahe.deleteProperty(createPropertyId, 'spinnaker rollback', propertyEnv) >> { def res ->
+      def json = mapper.writeValueAsString([propertyId: createPropertyId])
       new Response("http://mahe", 200, "OK", [], new TypedByteArray('application/json', json.bytes))
     }
@@ -172,15 +186,18 @@ class PropertyChangeCleanupSpec extends Specification {
     0 * _
 
     where:
-    propertyId = "test_rfletcher|mahe|test|us-west-1||||asg=mahe-test-v010|cluster=mahe-test"
+    createPropertyId = "a|b|test|us-west-1||||asg=mahe-test-v010|cluster=mahe-test"
+    deletePropertyId = "a|b|test|us-west-1||||asg=mahe-test-v010|cluster=mahe-test"
+    retainPropertyId = "a|b|test|us-west-1||||asg=mahe-test-v010|cluster=mahe-test"
     propertyEnv = "test"
-    previous = createPropertyWithId(propertyId)
+    previous = createPropertyWithId(deletePropertyId)
+    retainedPrevious = createPropertyWithId(retainPropertyId)
   }
 
   @Unroll()
   def "a newly created property should be deleted if the pipeline status is #executionStatus and has matching original property"() {
     given:
-    def propertyContext = [propertyIdList: [[propertyId: propertyId]], originalProperties: [], propertyAction: PropertyAction.CREATE]
+    def propertyContext = [persistedProperties: [[propertyId: propertyId]], originalProperties: [], propertyAction: CREATE]
     def pipeline = pipeline {
       stage {
         type = PIPELINE_CONFIG_TYPE
@@ -195,6 +212,7 @@ class PropertyChangeCleanupSpec extends Specification {
     listener.afterExecution(null, pipeline, executionStatus, false)
 
     then:
+    1 * mahe.getPropertyById(propertyId, _) >> new Response('', 200, 'OK', [], new TypedString(mapper.writeValueAsString([:])))
     1 * mahe.deleteProperty(propertyId, 'spinnaker rollback', propertyEnv) >> { def res ->
       new Response("http://mahe", 200, "OK", [], null)
     }
@@ -208,7 +226,7 @@ class PropertyChangeCleanupSpec extends Specification {
 
   def "failed rollback of delete should throw IllegalStateException"() {
     given:
-    def propertyContext = [propertyIdList: [[propertyId: propertyId]], originalProperties: [], propertyAction: PropertyAction.CREATE]
+    def propertyContext = [persistedProperties: [[propertyId: propertyId]], originalProperties: [], propertyAction: CREATE]
     def pipeline = pipeline {
       stage {
         type = PIPELINE_CONFIG_TYPE
@@ -223,6 +241,7 @@ class PropertyChangeCleanupSpec extends Specification {
     listener.afterExecution(null, pipeline, executionStatus, false)
 
     then:
+    1 * mahe.getPropertyById(propertyId, _) >> new Response('', 200, 'OK', [], new TypedString(mapper.writeValueAsString([:])))
     1 * mahe.deleteProperty(propertyId, 'spinnaker rollback', propertyEnv) >> { def res ->
       new Response("http://mahe", 500, "OK", [] , null)
     }
@@ -240,7 +259,7 @@ class PropertyChangeCleanupSpec extends Specification {
 
   def "a property created by a pipeline stage marked for 'rollback' is cleaned up at the end"() {
     given:
-    def propertyContext = [propertyIdList: [[propertyId: propertyId]], originalProperties: [], rollback: true, propertyAction: PropertyAction.CREATE.toString()]
+    def propertyContext = [persistedProperties: [[propertyId: propertyId]], originalProperties: [], rollback: true, propertyAction: CREATE.toString()]
     def pipeline = pipeline {
       stage {
         type = PIPELINE_CONFIG_TYPE
@@ -254,6 +273,7 @@ class PropertyChangeCleanupSpec extends Specification {
     listener.afterExecution(null, pipeline, null, true)
 
     then:
+    1 * mahe.getPropertyById(propertyId, _) >> new Response('', 200, 'OK', [], new TypedString(mapper.writeValueAsString([:])))
     1 * mahe.deleteProperty(propertyId, 'spinnaker rollback', propertyEnv) >> { def res ->
       def json = mapper.writeValueAsString([propertyId: propertyId])
       new Response("http://mahe", 200, "OK", [], new TypedByteArray('application/json', json.bytes))
@@ -266,7 +286,7 @@ class PropertyChangeCleanupSpec extends Specification {
 
   def "a property updated by a pipeline stage is cleaned up at the end when marked for rollback"() {
     given:
-    def propertyContext = [propertyIdList: [[propertyId: propertyId]], originalProperties: [previous], rollback: true, propertyAction: PropertyAction.UPDATE.toString()]
+    def propertyContext = [persistedProperties: [[propertyId: propertyId]], originalProperties: [previous], rollback: true, propertyAction: PropertyAction.UPDATE.toString()]
     def pipeline = pipeline {
       stage {
         type = PIPELINE_CONFIG_TYPE
@@ -280,6 +300,7 @@ class PropertyChangeCleanupSpec extends Specification {
     listener.afterExecution(null, pipeline, null, true)
 
     then:
+    1 * mahe.getPropertyById(propertyId, _) >> new Response('', 200, 'OK', [], new TypedString(mapper.writeValueAsString([:])))
     1 * mahe.upsertProperty(previous) >> { Map res ->
       String propId = "${res.property.key}|${res.property.value}"
       def json = mapper.writeValueAsString([propertyId: propId])
@@ -318,7 +339,7 @@ class PropertyChangeCleanupSpec extends Specification {
 
   def "rollback a pipeline with multiple create stages"() {
     given:
-    def createStageContext = [propertyIdList: [[propertyId: propertyId]], originalProperties: [], propertyAction: PropertyAction.CREATE.toString()]
+    def createStageContext = [persistedProperties: [[propertyId: propertyId]], originalProperties: [], propertyAction: CREATE.toString()]
     def pipeline = pipeline {
       3.times {
         stage {
@@ -333,7 +354,7 @@ class PropertyChangeCleanupSpec extends Specification {
     listener.afterExecution(null, pipeline, ExecutionStatus.TERMINAL, true)
 
     then:
-
+    3 * mahe.getPropertyById(propertyId, _) >> new Response('', 200, 'OK', [], new TypedString(mapper.writeValueAsString([:])))
     3 * mahe.deleteProperty(propertyId, 'spinnaker rollback', propertyEnv) >> { def res ->
       def json = mapper.writeValueAsString([propertyId: propertyId])
       new Response("http://mahe", 200, "OK", [], new TypedByteArray('application/json', json.bytes))
@@ -348,8 +369,8 @@ class PropertyChangeCleanupSpec extends Specification {
 
   def "rollback a pipeline with a create and a delete stages that are created for a scope update"() {
     given:
-    def createStageContext = [propertyIdList: [[propertyId: propertyId]], originalProperties: [], propertyAction: PropertyAction.CREATE.toString()]
-    def deleteStageContext = [propertyIdList: [[propertyId: propertyId]], originalProperties: [[property: previous]], propertyAction: DELETE.toString()]
+    def createStageContext = [persistedProperties: [[propertyId: propertyId]], originalProperties: [], propertyAction: CREATE.toString()]
+    def deleteStageContext = [originalProperties: [[property: previous]], propertyAction: DELETE.toString()]
     def pipeline = pipeline {
       stage {
         type = PIPELINE_CONFIG_TYPE
@@ -367,7 +388,8 @@ class PropertyChangeCleanupSpec extends Specification {
     listener.afterExecution(null, pipeline, ExecutionStatus.TERMINAL, true)
 
     then:
-
+    1 * mahe.getPropertyById(propertyId, _) >> new Response('', 200, 'OK', [], new TypedString(mapper.writeValueAsString([:])))
+    3 * mahe.getPropertyById(deletedPropertyId, _) >> { throw NOT_FOUND }
     1 * mahe.deleteProperty(propertyId, 'spinnaker rollback', propertyEnv) >> { def res ->
       def json = mapper.writeValueAsString([propertyId: propertyId])
       new Response("http://mahe", 200, "OK", [], new TypedByteArray('application/json', json.bytes))
@@ -381,9 +403,82 @@ class PropertyChangeCleanupSpec extends Specification {
 
     where:
     propertyId = "test_rfletcher|mahe|test|us-west-1||||asg=mahe-test-v010|cluster=mahe-test"
+    deletedPropertyId = "test_rfletcher|mahe|test|us-west-1||||asg=mahe-test-v010|cluster=mahe-test2"
+    propertyEnv = "test"
+    previous = createPropertyWithId(deletedPropertyId)
+
+  }
+
+  def "does not attempt to roll back deleted property if it exists"() {
+    def stageContext = [rollback: true, originalProperties: [[property: previous]], propertyAction: DELETE.toString()]
+    def pipeline = pipeline {
+      stage {
+        type = PIPELINE_CONFIG_TYPE
+        name = PIPELINE_CONFIG_TYPE
+        context = stageContext
+      }
+    }
+
+    when:
+    listener.afterExecution(null, pipeline, ExecutionStatus.SUCCEEDED, true)
+
+    then:
+
+    1 * mahe.getPropertyById(propertyId, _) >> new Response('', 200, 'OK', [], new TypedString(mapper.writeValueAsString([:])))
+    0 * _
+
+    where:
+    propertyId = "a|b|test|us-west-1||||asg=mahe-test-v010|cluster=mahe-test"
     propertyEnv = "test"
     previous = createPropertyWithId(propertyId)
+  }
 
+  def "does not attempt to roll back created property if it has been updated since pipeline created it"() {
+    def stageContext = [rollback: true, persistedProperties: [previous], originalProperties: [], propertyAction: CREATE.toString()]
+    def pipeline = pipeline {
+      stage {
+        type = PIPELINE_CONFIG_TYPE
+        name = PIPELINE_CONFIG_TYPE
+        context = stageContext
+      }
+    }
+
+    when:
+    listener.afterExecution(null, pipeline, ExecutionStatus.SUCCEEDED, true)
+
+    then:
+
+    1 * mahe.getPropertyById(propertyId, _) >> new Response('', 200, 'OK', [], new TypedString(mapper.writeValueAsString([ts: "2018"])))
+    0 * _
+
+    where:
+    propertyId = "a|b|test|us-west-1||||asg=mahe-test-v010|cluster=mahe-test"
+    propertyEnv = "test"
+    previous = createPropertyWithId(propertyId)
+  }
+
+  def "does not attempt to roll back updated property if it has been updated since pipeline created it"() {
+    def stageContext = [rollback: true, persistedProperties: [previous], originalProperties: [[property: previous]], propertyAction: UPDATE.toString()]
+    def pipeline = pipeline {
+      stage {
+        type = PIPELINE_CONFIG_TYPE
+        name = PIPELINE_CONFIG_TYPE
+        context = stageContext
+      }
+    }
+
+    when:
+    listener.afterExecution(null, pipeline, ExecutionStatus.SUCCEEDED, true)
+
+    then:
+
+    1 * mahe.getPropertyById(propertyId, _) >> new Response('', 200, 'OK', [], new TypedString(mapper.writeValueAsString([ts: "2018"])))
+    0 * _
+
+    where:
+    propertyId = "a|b|test|us-west-1||||asg=mahe-test-v010|cluster=mahe-test"
+    propertyEnv = "test"
+    previous = createPropertyWithId(propertyId)
   }
 
@@ -405,4 +500,8 @@ class PropertyChangeCleanupSpec extends Specification {
"createdAsCanary": false ] } + static class NoSleepRetry extends RetrySupport { + void sleep(long time) {} + } + }