Skip to content

Commit

Permalink
fix(mahe): do not clean up properties that have been updated (#1741)
Browse files Browse the repository at this point in the history
  • Loading branch information
anotherchrisberry authored Oct 27, 2017
1 parent 7a8ef54 commit 02c133e
Show file tree
Hide file tree
Showing 5 changed files with 243 additions and 52 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,9 @@ interface MaheService {
@POST('/properties/upsert')
Response upsertProperty(@Body Map property)

@GET('/properties/find')
Response findProperty(@QueryMap Map<String, String> property)

@GET('/properties/prop')
Response getPropertyById(@Query('propId') String propId, @Query('env') String env)

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@ package com.netflix.spinnaker.orca.mahe.cleanup

import com.fasterxml.jackson.databind.ObjectMapper
import com.netflix.spinnaker.orca.ExecutionStatus
import com.netflix.spinnaker.orca.RetrySupport
import com.netflix.spinnaker.orca.listeners.ExecutionListener
import com.netflix.spinnaker.orca.listeners.Persister
import com.netflix.spinnaker.orca.mahe.MaheService
Expand All @@ -27,6 +28,7 @@ import com.netflix.spinnaker.orca.pipeline.model.Stage
import groovy.util.logging.Slf4j
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.stereotype.Component
import retrofit.RetrofitError
import retrofit.client.Response

@Slf4j
Expand All @@ -41,6 +43,7 @@ class FastPropertyCleanupListener implements ExecutionListener {
}

@Autowired ObjectMapper mapper
@Autowired RetrySupport retrySupport

@Override
void afterExecution(Persister persister,
Expand All @@ -55,34 +58,82 @@ class FastPropertyCleanupListener implements ExecutionListener {
rollbacks.each { stage ->
switch (stage.context.propertyAction) {
case PropertyAction.CREATE.toString():
stage.context.propertyIdList.each { prop ->
log.info("Rolling back the creation of: ${prop.propertyId} on execution ${execution.id} by deleting")
Response response = mahe.deleteProperty(prop.propertyId, "spinnaker rollback", extractEnvironment(prop.propertyId))
resolveRollbackResponse(response, stage.context.propertyAction.toString(), prop)
stage.context.persistedProperties.each { Map prop ->
String propertyId = prop.propertyId
if (shouldRollback(prop)) {
log.info("Rolling back the creation of: ${propertyId} on execution ${execution.id} by deleting")
Response response = mahe.deleteProperty(propertyId, "spinnaker rollback", extractEnvironment(propertyId))
resolveRollbackResponse(response, stage.context.propertyAction.toString(), prop)
} else {
log.info("Property has been updated since this execution; not rolling back")
}
}
break
case PropertyAction.UPDATE.toString():
stage.context.originalProperties.each { prop ->
log.info("Rolling back the ${stage.context.propertyAction} of: ${prop.property.propertyId} on execution ${execution.id} by upserting")
Response response = mahe.upsertProperty(prop)
resolveRollbackResponse(response, stage.context.propertyAction.toString(), prop.property)
stage.context.originalProperties.each { Map originalProp ->
Map property = originalProp.property
Map updatedProperty = (Map) stage.context.persistedProperties.find { it.propertyId == property.propertyId }
String propertyId = property.propertyId
if (shouldRollback(updatedProperty)) {
log.info("Rolling back the update of: ${propertyId} on execution ${execution.id} by upserting")
Response response = mahe.upsertProperty(originalProp)
resolveRollbackResponse(response, stage.context.propertyAction.toString(), property)
} else {
log.info("Property has been updated since this execution; not rolling back")
}
}
break
case PropertyAction.DELETE.toString():
stage.context.originalProperties.each { prop ->
if (prop.property.propertyId) {
prop.property.remove('propertyId')
}
log.info("Rolling back the ${stage.context.propertyAction} of: ${prop.property.key}|${prop.property.value} on execution ${execution.id} by re-creating")
stage.context.originalProperties.each { Map prop ->
Map property = prop.property
if (propertyExists(property)) {
log.info("Property exists, not restoring to original state after delete.")
} else {
if (property.propertyId) {
property.remove('propertyId')
}
log.info("Rolling back the delete of: ${property.key}|${property.value} on execution ${execution.id} by re-creating")

Response response = mahe.upsertProperty(prop)
resolveRollbackResponse(response, stage.context.propertyAction.toString(), prop.property)
Response response = mahe.upsertProperty(prop)
resolveRollbackResponse(response, stage.context.propertyAction.toString(), property)
}
}
}
}
}
}

/**
 * Decides whether a property created/updated by this execution is still safe to roll back.
 * We re-fetch the property from MAHE and compare its timestamp ({@code ts}) with the one
 * persisted in the stage context: if they differ, something else has modified the property
 * since this execution ran, and we must NOT clobber that newer change.
 *
 * @param property the persisted property map from the stage context (must contain
 *                 {@code propertyId} and {@code ts})
 * @return true when the remote property's {@code ts} still matches ours (safe to roll back);
 *         false when the property no longer exists (HTTP 404) or its timestamp changed
 * @throws RetrofitError for any non-404 failure after retries are exhausted
 */
private boolean shouldRollback(Map property) {
  String propertyId = property.propertyId
  String env = extractEnvironment(propertyId)
  try {
    // Retry the lookup (3 attempts, 2ms backoff, no exponential growth) to ride out
    // transient MAHE hiccups before giving up.
    return retrySupport.retry({
      Response propertyResponse = mahe.getPropertyById(propertyId, env)
      Map currentProperty = mapper.readValue(propertyResponse.body.in().text, Map)
      return currentProperty.ts == property.ts
    }, 3, 2, false)
  } catch (RetrofitError error) {
    // Check the error kind BEFORE touching error.response: for NETWORK/UNEXPECTED
    // errors the response is null and dereferencing it would throw an NPE that masks
    // the real failure. (Same guard as propertyExists.)
    if (error.kind == RetrofitError.Kind.HTTP && error.response.status == 404) {
      // Property is gone entirely — nothing left to roll back.
      return false
    }
    throw error
  }
}

/**
 * Checks whether a fast property currently exists in MAHE.
 *
 * @param property map carrying the {@code propertyId} and {@code env} to look up
 * @return true if MAHE returns the property; false on an HTTP 404
 * @throws RetrofitError for any other failure once retries are exhausted
 */
private boolean propertyExists(Map<String, String> property) {
  try {
    // Retry transient lookup failures (3 attempts, 2ms delay, non-exponential);
    // any successful response means the property is present.
    return retrySupport.retry({
      mahe.getPropertyById(property.propertyId, property.env)
      return true
    }, 3, 2, false)
  } catch (RetrofitError e) {
    boolean notFound = e.kind == RetrofitError.Kind.HTTP && e.response.status == 404
    if (!notFound) {
      throw e
    }
    // 404 is the expected "does not exist" signal, not an error.
    return false
  }
}

private void resolveRollbackResponse(Response response, String initialPropertyAction, def property) {
if(response.status == 200) {
log.info("Successful Fast Property rollback for $initialPropertyAction")
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@ import com.netflix.spinnaker.orca.pipeline.model.Stage
import groovy.util.logging.Slf4j
import org.springframework.beans.factory.annotation.Autowired
import org.springframework.stereotype.Component
import retrofit.RetrofitError
import retrofit.client.Response
import static com.netflix.spinnaker.orca.ExecutionStatus.SUCCEEDED

Expand All @@ -44,7 +45,10 @@ class CreatePropertiesTask implements Task {
context = overrides.find { it.refId == stage.refId } ?: context
}
List properties = assemblePersistedPropertyListFromContext(context, context.persistedProperties)
List originalProperties = assemblePersistedPropertyListFromContext(context, context.originalProperties)
// originalProperties field is only present on ad-hoc property pipelines - not as part of a createProperty stage,
// so we'll need to add the original property if found
boolean hasOriginalProperties = context.originalProperties
List originalProperties = assemblePersistedPropertyListFromContext(context, context.originalProperties ?: [])
List propertyIdList = []
PropertyAction propertyAction = PropertyAction.UNKNOWN

Expand All @@ -56,8 +60,13 @@ class CreatePropertiesTask implements Task {
propertyAction = PropertyAction.DELETE
} else {
log.info("Upserting Property: ${prop} on execution ${stage.execution.id}")
Map existingProperty = getExistingProperty(prop)
log.info("Property ${prop.key} ${existingProperty ? 'exists' : 'does not exist'}")
response = maheService.upsertProperty(prop)
propertyAction = prop.property.propertyId ? PropertyAction.UPDATE : PropertyAction.CREATE
propertyAction = existingProperty ? PropertyAction.UPDATE : PropertyAction.CREATE
if (existingProperty && !hasOriginalProperties) {
originalProperties << existingProperty
}
}

if (response.status == 200) {
Expand All @@ -82,6 +91,16 @@ class CreatePropertiesTask implements Task {

}

/**
 * Looks up an existing fast property matching the given property payload.
 *
 * @param prop the property payload used as the find criteria
 * @return the existing property deserialized as a Map, or null if MAHE reports 404
 * @throws RetrofitError for any failure other than an HTTP 404
 */
private Map getExistingProperty(Map prop) {
  try {
    Response response = maheService.findProperty(prop)
    String body = response.body.in().text
    return mapper.readValue(body, Map)
  } catch (RetrofitError error) {
    // An HTTP 404 simply means no such property exists yet — report that as null
    // rather than an error; anything else is a genuine failure.
    boolean isHttp404 = error.kind == RetrofitError.Kind.HTTP && error.response.status == 404
    if (isHttp404) {
      return null
    }
    throw error
  }
}


List assemblePersistedPropertyListFromContext(Map<String, Object> context, List propertyList) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -17,31 +17,19 @@
package com.netflix.spinnaker.orca.mahe.tasks

import com.fasterxml.jackson.databind.ObjectMapper
import com.netflix.spinnaker.orca.RetrySupport
import com.netflix.spinnaker.orca.mahe.MaheService
import com.netflix.spinnaker.orca.mahe.pipeline.CreatePropertyStage
import com.netflix.spinnaker.orca.mahe.pipeline.MonitorCreatePropertyStage
import com.netflix.spinnaker.orca.pipeline.model.Pipeline
import com.netflix.spinnaker.orca.pipeline.model.PipelineBuilder
import com.netflix.spinnaker.orca.pipeline.model.Stage
import retrofit.RetrofitError
import retrofit.client.Response
import retrofit.mime.TypedByteArray
import retrofit.mime.TypedString
import spock.lang.Specification
import spock.lang.Unroll
/*
* Copyright 2016 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the 'License');
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an 'AS IS' BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

class CreatePropertiesTaskSpec extends Specification {

Expand All @@ -50,6 +38,8 @@ class CreatePropertiesTaskSpec extends Specification {

CreatePropertiesTask task = new CreatePropertiesTask(maheService: maheService, mapper: mapper)

RetrofitError NOT_FOUND = new RetrofitError(null, null, new Response("http://clouddriver", 404, "null", [], null), null, null, RetrofitError.Kind.HTTP, null)

def "assemble the changed property list and original from the context"() {
given:
def pipeline = new Pipeline('foo')
Expand All @@ -63,7 +53,7 @@ class CreatePropertiesTaskSpec extends Specification {
List properties = task.assemblePersistedPropertyListFromContext(stage.context, stage.context.persistedProperties)
List originalProperties = task.assemblePersistedPropertyListFromContext(stage.context, stage.context.originalProperties)

then: "this is what the property payload the is sent to MAHE needs to look like"
then: "this is what the property payload that is sent to MAHE needs to look like"
properties.size() == 1
originalProperties.size() == 1

Expand All @@ -85,6 +75,33 @@ class CreatePropertiesTaskSpec extends Specification {
}
}

// Verifies that when a stage context has NO originalProperties entry (the normal
// createProperty-stage case, as opposed to an ad-hoc property pipeline), the task
// fetches the pre-existing property from MAHE via findProperty and records it in
// outputs.originalProperties so a later rollback can restore it.
def "adds original property to outputs if none present in stage context"() {
given:
def pipeline = new Pipeline('foo')
def scope = createScope()
def property = createProperty()
def originalProperty = createProperty()

def stage = createPropertiesStage(pipeline, scope, property, originalProperty )
// Simulate a plain createProperty stage: strip the ad-hoc-pipeline-only key so the
// task must discover the original property itself.
stage.context.remove("originalProperties")

when:
def results = task.execute(stage)

then:
// findProperty returns an existing property ([a:1]) — this is what should end up
// in outputs.originalProperties.
1 * maheService.findProperty(_) >> new Response('', 200, 'OK', [], new TypedString(mapper.writeValueAsString([a:1])))
1 * maheService.upsertProperty(_) >> { Map res ->
def json = mapper.writeValueAsString([propertyId: 'propertyId'])
new Response("http://mahe", 200, "OK", [], new TypedByteArray('application/json', json.bytes))
}

then:
// The fetched existing property was added as the single original property.
with(results.outputs) {
originalProperties.size() == 1
originalProperties[0].a == 1
}
}

def "prefer a stage override if present for context"() {
given:
def trigger = [ stageOverrides: [] ]
Expand All @@ -98,11 +115,11 @@ class CreatePropertiesTaskSpec extends Specification {

pipeline.trigger.stageOverrides << stageOverride.context


when:
when:
def results = task.execute(createPropertiesStage)

then:
1 * maheService.findProperty(_) >> { throw NOT_FOUND }
1 * maheService.upsertProperty(_) >> { Map res ->
def json = mapper.writeValueAsString([propertyId: 'other'])
new Response("http://mahe", 200, "OK", [], new TypedByteArray('application/json', json.bytes))
Expand Down Expand Up @@ -172,6 +189,7 @@ class CreatePropertiesTaskSpec extends Specification {
def results = task.execute(createPropertiesStage)

then:
1 * maheService.findProperty(_) >> { throw NOT_FOUND }
1 * maheService.upsertProperty(_) >> { Map res ->
def json = mapper.writeValueAsString([propertyId: 'propertyId'])
new Response("http://mahe", 200, "OK", [], new TypedByteArray('application/json', json.bytes))
Expand Down Expand Up @@ -236,7 +254,7 @@ class CreatePropertiesTaskSpec extends Specification {
}


def "create multiple new persistent property"() {
def "create multiple new persistent properties"() {
given:
def pipeline = new Pipeline('foo')
def parentStageId = UUID.randomUUID().toString()
Expand Down Expand Up @@ -266,6 +284,7 @@ class CreatePropertiesTaskSpec extends Specification {

then:

2 * maheService.findProperty(_) >> { throw NOT_FOUND }
2 * maheService.upsertProperty(_) >> { Map res ->
captured = res
String propId = "${res.property.key}|${res.property.value}"
Expand Down
Loading

0 comments on commit 02c133e

Please sign in to comment.