Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

feat(pipeline_template): Convert to pipeline template endpoint #1615

Merged
merged 2 commits into from
Sep 15, 2017
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions orca-pipelinetemplate/orca-pipelinetemplate.gradle
Original file line number Diff line number Diff line change
Expand Up @@ -20,4 +20,5 @@ dependencies {

testCompile spinnaker.dependency("slf4jSimple")
testCompile 'org.spockframework:spock-unitils:1.1-groovy-2.4-rc-2'
testCompile 'org.codehaus.groovy:groovy-json:2.4.11'
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,171 @@
/*
* Copyright 2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.spinnaker.orca.pipelinetemplate.v1schema.converter;

import com.google.common.base.Charsets;
import com.google.common.io.Files;
import com.google.common.io.Resources;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.yaml.snakeyaml.DumperOptions;
import org.yaml.snakeyaml.Yaml;
import org.yaml.snakeyaml.representer.Representer;

import java.io.File;
import java.io.IOException;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.stream.Collectors;

// Who needs type-checking anyway?
@SuppressWarnings("unchecked")
public class PipelineTemplateConverter {

  private final static Logger log = LoggerFactory.getLogger(PipelineTemplateConverter.class);

  /**
   * Converts a persisted pipeline configuration map into a v1 pipeline template YAML document.
   *
   * <p>The output is a best-effort scaffold: stage IDs, trigger names and notification names are
   * enumerated, and a header comment block is prepended instructing the owner to refine the
   * generated template by hand.
   *
   * @param pipeline the raw pipeline configuration (as deserialized JSON)
   * @return a YAML document string, prefixed with the explanatory header
   */
  public String convertToPipelineTemplate(Map<String, Object> pipeline) {
    Map<String, Object> p = new LinkedHashMap<>();
    p.put("schema", "1");
    // Template id is "<application>-<name-with-non-word-chars-stripped>", with fallbacks
    // ("spinnaker", "generatedTemplate") when either key is missing.
    p.put("id", String.format("%s-%s",
      pipeline.getOrDefault("application", "spinnaker"),
      ((String) pipeline.getOrDefault("name", "generatedTemplate")).replaceAll("\\W", "")));
    p.put("metadata", generateMetadata(pipeline));
    p.put("protect", false);
    p.put("configuration", generateConfiguration(pipeline));
    p.put("variables", new ArrayList<>());
    p.put("stages", convertStages((List) pipeline.get("stages")));

    Representer representer = new Representer();
    DumperOptions options = new DumperOptions();
    options.setIndent(2);
    options.setDefaultFlowStyle(DumperOptions.FlowStyle.BLOCK);
    Yaml yaml = new Yaml(representer, options);

    String output = yaml.dump(p);

    return String.format("%s%s", loadTemplateHeader(), output);
  }

  /** Builds the template metadata section, substituting placeholder text for missing fields. */
  private Map<String, Object> generateMetadata(Map<String, Object> pipeline) {
    Map<String, Object> m = new LinkedHashMap<>();
    m.put("name", pipeline.getOrDefault("name", "GIVE ME A NAME"));
    m.put("description", pipeline.getOrDefault("description", "GIVE ME A DESCRIPTION"));
    m.put("owner", pipeline.get("lastModifiedBy"));
    m.put("scopes", (pipeline.get("application") == null) ? new ArrayList<>() : Collections.singletonList(pipeline.get("application")));
    return m;
  }

  /** Builds the template configuration section (concurrency, triggers, parameters, notifications). */
  private Map<String, Object> generateConfiguration(Map<String, Object> pipeline) {
    Map<String, Object> m = new LinkedHashMap<>();
    Map<String, Object> cm = new LinkedHashMap<>();
    cm.put("limitConcurrent", true);
    m.put("concurrentExecutions", cm);
    m.put("triggers", convertTriggers((List) pipeline.get("triggers")));
    m.put("parameters", pipeline.getOrDefault("parameterConfig", new ArrayList<>()));
    m.put("notifications", convertNotifications((List) pipeline.get("notifications")));
    return m;
  }

  /**
   * Converts pipeline stages to template stage definitions, rewriting numeric
   * {@code requisiteStageRefIds} into template-style {@code dependsOn} stage IDs.
   *
   * @param stages the pipeline's stage list; may be null (treated as no stages)
   */
  private List<Map<String, Object>> convertStages(List<Map<String, Object>> stages) {
    if (stages == null) {
      // Keep parity with convertTriggers/convertNotifications: a pipeline without
      // stages should not NPE.
      return Collections.emptyList();
    }
    return stages.stream()
      .map(s -> {
        List<String> dependsOn = new ArrayList<>();
        if (s.containsKey("requisiteStageRefIds") && !((List) s.get("requisiteStageRefIds")).isEmpty()) {
          dependsOn = buildStageRefIds(stages, (List) s.get("requisiteStageRefIds"));
        }

        Map<String, Object> stage = new LinkedHashMap<>();
        stage.put("id", getStageId((String) s.get("type"), (String) s.get("refId")));
        stage.put("type", s.get("type"));
        stage.put("dependsOn", dependsOn);
        stage.put("name", s.get("name"));
        stage.put("config", scrubStageConfig(s));
        return stage;
      })
      .collect(Collectors.toList());
  }

  /** Returns a copy of the stage config with keys that were hoisted to the template level removed. */
  private static Map<String, Object> scrubStageConfig(Map<String, Object> config) {
    Map<String, Object> working = new LinkedHashMap<>(config);
    working.remove("type");
    working.remove("name");
    working.remove("refId");
    working.remove("requisiteStageRefIds");
    return working;
  }

  /** Maps each requisite refId to its generated stage ID; unresolvable refIds are silently dropped. */
  private static List<String> buildStageRefIds(List<Map<String, Object>> stages, List<String> requisiteStageRefIds) {
    List<String> refIds = new ArrayList<>();
    for (String refId : requisiteStageRefIds) {
      Optional<String> stage = stages.stream()
        .filter(s -> refId.equals(s.get("refId")))
        .map(s -> getStageId((String) s.get("type"), (String) s.get("refId")))
        .findFirst();
      stage.ifPresent(refIds::add);
    }
    return refIds;
  }

  /** Generates a stage ID by concatenating the stage type and refId, e.g. "deploy1". */
  private static String getStageId(String type, String refId) {
    return String.format("%s%s", type, refId);
  }

  /**
   * Assigns enumerated placeholder names ("unnamed0", "unnamed1", ...) to triggers.
   * Works on copies so the caller's pipeline map is not mutated.
   */
  private List<Map<String, Object>> convertTriggers(List<Map<String, Object>> triggers) {
    if (triggers == null) {
      return Collections.emptyList();
    }

    List<Map<String, Object>> ret = new ArrayList<>(triggers.size());

    int i = 0;
    for (Map<String, Object> trigger : triggers) {
      Map<String, Object> copy = new LinkedHashMap<>(trigger);
      copy.put("name", String.format("unnamed%d", i));
      i++;
      ret.add(copy);
    }

    return ret;
  }

  /**
   * Assigns enumerated names ("&lt;type&gt;0", "&lt;type&gt;1", ...) to notifications.
   * Works on copies so the caller's pipeline map is not mutated.
   */
  private List<Map<String, Object>> convertNotifications(List<Map<String, Object>> notifications) {
    if (notifications == null) {
      return Collections.emptyList();
    }

    List<Map<String, Object>> ret = new ArrayList<>(notifications.size());

    int i = 0;
    for (Map<String, Object> notification : notifications) {
      Map<String, Object> copy = new LinkedHashMap<>(notification);
      copy.put("name", String.format("%s%d", copy.get("type"), i));
      i++;
      ret.add(copy);
    }

    return ret;
  }

  /**
   * Loads the explanatory comment header prepended to every generated template.
   * Reads the resource through its URL so it also works when packaged inside a jar;
   * the previous File-based lookup ({@code new File(url.toURI())}) fails there
   * because jar resources have non-file URIs.
   */
  private String loadTemplateHeader() {
    try {
      return Resources.toString(Resources.getResource("pipelineTemplateHeader.txt"), Charsets.UTF_8);
    } catch (IOException e) {
      log.error("Could not load pipeline template header resource", e);
      return "GENERATED BY spinnaker";
    }
  }
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
# GENERATED BY spinnaker
#
# The template generated below should be used as a base for further modifications.
# It does not make assumptions as to what things can be made into variables,
# modules, partials or Jinja expressions. This is your responsibility as the owner
# of the template.
#
# Some recommendations to massage the initial output:
#
# * Give your pipeline template a unique ID. It's best to namespace it by your
# application or team name, so that it does not conflict with other teams,
# e.g. "myteam-myTemplate".
# * Rename the pipeline stage IDs, notifications and trigger names to be more
# meaningful. Enumerated stage IDs are ultimately a detriment for long-term
# maintenance of your template.
# * A best effort is made to order the configuration, but the list of stages
# itself is not ordered: rearrange the stages so that they're roughly
# chronological.
Original file line number Diff line number Diff line change
@@ -0,0 +1,36 @@
/*
* Copyright 2017 Netflix, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.netflix.spinnaker.orca.pipelinetemplate.v1schema.converter

import groovy.json.JsonSlurper
import spock.lang.Specification

class PipelineTemplateConverterSpec extends Specification {

  // Golden-file test: converts a captured pipeline configuration (JSON fixture) and
  // compares the generated YAML byte-for-byte against a checked-in expected template.
  // NOTE(review): the relative paths assume the test runs with the module directory as
  // the working directory (standard for Gradle test tasks) — loading via the classpath
  // would be more robust; confirm before changing.
  def "should convert a pipeline to an ordered pipeline template yaml document"() {
    given:
    def pipeline = new JsonSlurper().parse(new File("src/test/resources/convertedPipelineTemplateSource.json"))

    and:
    String expected = new File("src/test/resources/convertedPipelineTemplate.yml").text

    when:
    String result = new PipelineTemplateConverter().convertToPipelineTemplate(pipeline)

    then:
    expected == result
  }
}
Loading