diff --git a/Jenkinsfile b/Jenkinsfile index 0b2e1cb123..59b716eb68 100644 --- a/Jenkinsfile +++ b/Jenkinsfile @@ -108,7 +108,7 @@ pipeline { stage ('2: run second test suite') { steps { catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') { - sh script: "make -j$NPROC kind/retest TESTS=[bulk-deployment,gitlab,github,bitbucket,python,node-mongodb,elasticsearch,image-cache] BRANCH_NAME=${SAFEBRANCH_NAME}", label: "Running second test suite on kind cluster" + sh script: "make -j$NPROC kind/retest TESTS=[bulk-deployment,gitlab,github,bitbucket,python,node-mongodb,elasticsearch,image-cache,workflows] BRANCH_NAME=${SAFEBRANCH_NAME}", label: "Running second test suite on kind cluster" } sh script: "pkill -f './local-dev/stern'", label: "Closing off test-suite-2 log after test completion" } diff --git a/Makefile b/Makefile index 45b845543c..00fa03d426 100644 --- a/Makefile +++ b/Makefile @@ -202,7 +202,8 @@ services := api \ storage-calculator \ ui \ webhook-handler \ - webhooks2tasks + webhooks2tasks \ + workflows service-images += $(services) @@ -246,6 +247,7 @@ build/local-api-data-watcher-pusher: local-dev/api-data-watcher-pusher/Dockerfil build/local-registry: local-dev/registry/Dockerfile build/local-dbaas-provider: local-dev/dbaas-provider/Dockerfile build/local-mongodb-dbaas-provider: local-dev/mongodb-dbaas-provider/Dockerfile +build/workflows: services/workflows/Dockerfile # Images for local helpers that exist in another folder than the service images localdevimages := local-git \ @@ -511,9 +513,9 @@ KIND_VERSION = v0.12.0 GOJQ_VERSION = v0.12.5 STERN_VERSION = 2.1.17 CHART_TESTING_VERSION = v3.4.0 -KIND_IMAGE = kindest/node:v1.21.10@sha256:84709f09756ba4f863769bdcabe5edafc2ada72d3c8c44d6515fc581b66b029c -TESTS = [nginx,api,features-kubernetes,bulk-deployment,features-kubernetes-2,features-api-variables,active-standby-kubernetes,tasks,drush,drupal-php80,drupal-postgres,python,gitlab,github,bitbucket,node-mongodb,elasticsearch] -CHARTS_TREEISH = main 
+KIND_IMAGE = kindest/node:v1.21.1@sha256:69860bda5563ac81e3c0057d654b5253219618a22ec3a346306239bba8cfa1a6 +TESTS = [nginx,api,features-kubernetes,bulk-deployment,features-kubernetes-2,features-api-variables,active-standby-kubernetes,tasks,drush,drupal-php80,drupal-postgres,python,gitlab,github,bitbucket,node-mongodb,elasticsearch,workflows] +CHARTS_TREEISH = "feature/workflows" # Symlink the installed kubectl client if the correct version is already # installed, otherwise downloads it. @@ -632,7 +634,7 @@ ifeq ($(ARCH), darwin) tcp-listen:32080,fork,reuseaddr tcp-connect:target:32080 endif -KIND_SERVICES = api api-db api-redis auth-server actions-handler broker controllerhandler docker-host drush-alias keycloak keycloak-db logs2s3 webhook-handler webhooks2tasks kubectl-build-deploy-dind local-api-data-watcher-pusher local-git ssh tests ui +KIND_SERVICES = api api-db api-redis auth-server actions-handler broker controllerhandler docker-host drush-alias keycloak keycloak-db logs2s3 webhook-handler webhooks2tasks kubectl-build-deploy-dind local-api-data-watcher-pusher local-git ssh tests ui workflows KIND_TESTS = local-api-data-watcher-pusher local-git tests KIND_TOOLS = kind helm kubectl jq stern @@ -773,8 +775,8 @@ kind/retest: && export IMAGE_REGISTRY="registry.$$(./local-dev/kubectl get nodes -o jsonpath='{.items[0].status.addresses[0].address}').nip.io:32080/library" \ && cd lagoon-charts.kind.lagoon \ && $(MAKE) fill-test-ci-values TESTS=$(TESTS) IMAGE_TAG=$(SAFE_BRANCH_NAME) \ - HELM=$$(cd .. && realpath ./local-dev/helm) KUBECTL=$$(cd .. && realpath ./local-dev/kubectl) \ - JQ=$$(cd .. 
&& realpath ./local-dev/jq) \ + HELM=$$(realpath ../local-dev/helm) KUBECTL=$$(realpath ../local-dev/kubectl) \ + JQ=$$(realpath ../local-dev/jq) \ OVERRIDE_BUILD_DEPLOY_DIND_IMAGE=$$IMAGE_REGISTRY/kubectl-build-deploy-dind:$(SAFE_BRANCH_NAME) \ IMAGE_REGISTRY=$$IMAGE_REGISTRY \ SKIP_ALL_DEPS=true \ @@ -784,7 +786,7 @@ kind/retest: && docker run --rm --network host --name ct-$(CI_BUILD_TAG) \ --volume "$$(pwd)/test-suite-run.ct.yaml:/etc/ct/ct.yaml" \ --volume "$$(pwd):/workdir" \ - --volume "$$(cd .. && realpath ./kubeconfig.kind.$(CI_BUILD_TAG)):/root/.kube/config" \ + --volume "$$(realpath ../kubeconfig.kind.$(CI_BUILD_TAG)):/root/.kube/config" \ --workdir /workdir \ "quay.io/helmpack/chart-testing:$(CHART_TESTING_VERSION)" \ ct install diff --git a/services/api-db/docker-entrypoint-initdb.d/00-tables.sql b/services/api-db/docker-entrypoint-initdb.d/00-tables.sql index c28d50de9d..6f5788a069 100644 --- a/services/api-db/docker-entrypoint-initdb.d/00-tables.sql +++ b/services/api-db/docker-entrypoint-initdb.d/00-tables.sql @@ -307,3 +307,14 @@ CREATE TABLE IF NOT EXISTS notification_webhook ( name varchar(50) UNIQUE, webhook varchar(2000) ); + + +CREATE TABLE IF NOT EXISTS workflow ( + id int NOT NULL auto_increment PRIMARY KEY, + name varchar(50) NOT NULL, + event varchar(300) NOT NULL, + project int NOT NULL REFERENCES project(id), + advanced_task_definition int NOT NULL REFERENCES advanced_task_definition(id), + created timestamp NOT NULL DEFAULT CURRENT_TIMESTAMP, + deleted timestamp NOT NULL DEFAULT '0000-00-00 00:00:00' +); diff --git a/services/api/src/models/workflows.ts b/services/api/src/models/workflows.ts new file mode 100644 index 0000000000..434f661d12 --- /dev/null +++ b/services/api/src/models/workflows.ts @@ -0,0 +1,17 @@ +import { Pool } from 'mariadb'; + + +export interface WorkflowInterface { + id?: number + name: string + project: number + event: string + advancedTaskDefinition: number +} + +export interface WorkflowInputInterface { + name: 
string + project: number + event: string + advanced_task_definition: number +} diff --git a/services/api/src/resolvers.js b/services/api/src/resolvers.js index dfef114122..7f27b33ebc 100644 --- a/services/api/src/resolvers.js +++ b/services/api/src/resolvers.js @@ -72,6 +72,7 @@ const { const { addAdvancedTaskDefinition, + updateAdvancedTaskDefinition, advancedTaskDefinitionById, resolveTasksForEnvironment, getRegisteredTasksByEnvironmentId, @@ -233,6 +234,15 @@ const { deleteEnvVariable, } = require('./resources/env-variables/resolvers'); +const { + addWorkflow, + updateWorkflow, + deleteWorkflow, + resolveWorkflowsForEnvironment, + getWorkflowsByEnvironmentId, + resolveAdvancedTaskDefinitionsForWorkflow, +} = require("./resources/workflow/resolvers"); + const resolvers = { Upload: GraphQLUpload, GroupRole: { @@ -344,6 +354,7 @@ const resolvers = { facts: getFactsByEnvironmentId, openshift: getOpenshiftByEnvironmentId, kubernetes: getOpenshiftByEnvironmentId, + workflows: getWorkflowsByEnvironmentId, }, Fact: { references: getFactReferencesByFactId, @@ -402,6 +413,9 @@ const resolvers = { Restore: { restoreLocation: getRestoreLocation, }, + Workflow: { + advancedTaskDefinition: resolveAdvancedTaskDefinitionsForWorkflow, + }, Query: { me: getMe, lagoonVersion: getLagoonVersion, @@ -434,6 +448,7 @@ const resolvers = { allProblemHarborScanMatchers: getProblemHarborScanMatches, projectsByMetadata: getProjectsByMetadata, projectsByFactSearch: getProjectsByFactSearch, + workflowsForEnvironment: resolveWorkflowsForEnvironment, deployTargetConfigById: getDeployTargetConfigById, deployTargetConfigsByProjectId: getDeployTargetConfigsByProjectId, deployTargetConfigsByDeployTarget: getDeployTargetConfigsByDeployTarget, @@ -517,6 +532,7 @@ const resolvers = { deleteEnvVariable, addTask, addAdvancedTaskDefinition, + updateAdvancedTaskDefinition, deleteAdvancedTaskDefinition, invokeRegisteredTask, taskDrushArchiveDump, @@ -544,6 +560,9 @@ const resolvers = { removeUserFromGroup, 
addGroupsToProject, removeGroupsFromProject, + addWorkflow, + updateWorkflow, + deleteWorkflow, addDeployTargetConfig, deleteDeployTargetConfig, updateDeployTargetConfig, diff --git a/services/api/src/resources/task/models/advancedTaskDefinitionArgument.ts b/services/api/src/resources/task/models/advancedTaskDefinitionArgument.ts index a9f3b488ce..4e125ac82c 100644 --- a/services/api/src/resources/task/models/advancedTaskDefinitionArgument.ts +++ b/services/api/src/resources/task/models/advancedTaskDefinitionArgument.ts @@ -1,14 +1,4 @@ - - -// class AdvancedTaskDefinitionArgument { -// constructor - -import { sqlClientPool } from "../../../clients/sqlClient"; -import { advancedTaskDefinitionArgumentById } from "../task_definition_resolvers"; -import { Helpers as environmentHelpers } from '../../environment/helpers'; -import { Helpers as projectHelpers } from '../../project/helpers'; -import { Helpers } from "../../project/helpers"; -import { isPatchEmpty, query, knex } from '../../../util/db'; +import { query } from '../../../util/db'; import * as R from 'ramda'; export class ArgumentBase { @@ -117,4 +107,4 @@ export const advancedTaskDefinitionTypeFactory = (sqlClientPool, task, environme throw new Error(`Unable to find AdvancedTaskDefinitionType ${name}`); break; } -} \ No newline at end of file +} diff --git a/services/api/src/resources/task/models/taskRegistration.ts b/services/api/src/resources/task/models/taskRegistration.ts index aad75bf00d..87ac27c9f9 100644 --- a/services/api/src/resources/task/models/taskRegistration.ts +++ b/services/api/src/resources/task/models/taskRegistration.ts @@ -23,3 +23,50 @@ export class TaskRegistration { image: string; permission: string; } + +export interface AdvancedTaskDefinitionInterface { + id?: number; + type: string; + name: string; + description?: string; + environment?: number; + project?: number; + groupName?: string; + created: string; + deleted: string; + permission?: string; + command?: string; + service?: 
string; + image?: string; + advancedTaskDefinitionArguments?: Partial; +} + + +export const AdvancedTaskDefinitionType = { + command: 'COMMAND', + image: 'IMAGE' +}; + +export interface AdvancedTaskDefinitionArguments { + name?: string; + type?: string; + range?: string; + advancedTaskDefinition?: number; +}; + +export const getAdvancedTaskDefinitionType = (taskDef:AdvancedTaskDefinitionInterface) => { + if(taskDef.type.toLowerCase() == AdvancedTaskDefinitionType.command.toLowerCase()) { + return AdvancedTaskDefinitionType.command; + } + return AdvancedTaskDefinitionType.image; + } + +export const isAdvancedTaskDefinitionSystemLevelTask = (taskDef:AdvancedTaskDefinitionInterface): boolean => { + return taskDef.project == null && taskDef.environment == null && taskDef.groupName == null; +} + +export const doesAdvancedTaskDefinitionNeedAdminRights = (taskDef:AdvancedTaskDefinitionInterface): boolean => { + return isAdvancedTaskDefinitionSystemLevelTask(taskDef) + || getAdvancedTaskDefinitionType(taskDef) == AdvancedTaskDefinitionType.image + || taskDef.groupName != undefined; +} diff --git a/services/api/src/resources/task/sql.ts b/services/api/src/resources/task/sql.ts index a573873fe3..42bb860acd 100644 --- a/services/api/src/resources/task/sql.ts +++ b/services/api/src/resources/task/sql.ts @@ -129,6 +129,11 @@ export const Sql = { type }) .toString(), + updateAdvancedTaskDefinition: ({ id, patch }: { id: number; patch: { [key: string]: any } }) => + knex('advanced_task_definition') + .where('id', id) + .update(patch) + .toString(), selectAdvancedTaskDefinitionEnvironmentLinkById: (id: number) => knex('task_registration') .where('task_registration.id', '=', id) @@ -160,6 +165,11 @@ export const Sql = { knex('advanced_task_definition_argument') .where('advanced_task_definition_argument.id', '=', id) .toString(), + deleteAdvancedTaskDefinitionArgumentByTaskDef:(advanced_task_definition: number) => + knex('advanced_task_definition_argument') + 
.where('advanced_task_definition_argument.advanced_task_definition', '=', advanced_task_definition) + .del() + .toString(), selectAdvancedTaskDefinitionByName:(name: string) => knex('advanced_task_definition') .where('advanced_task_definition.name', '=', name) @@ -194,12 +204,12 @@ export const Sql = { .where('group_name', 'in', groups) .toString(), deleteAdvancedTaskDefinition:(id: number) => - knex('advanced_task_definition') - .where('id', id) - .del() - .toString(), + knex('advanced_task_definition') + .where('id', id) + .del() + .toString(), deleteAdvancedTaskDefinitionArgumentsForTask:(taskId: number) => knex('advanced_task_definition_argument') - .where('advanced_task_definition', taskId) - .del() - .toString() + .where('advanced_task_definition', taskId) + .del() + .toString(), }; diff --git a/services/api/src/resources/task/task_definition_resolvers.ts b/services/api/src/resources/task/task_definition_resolvers.ts index 21e125135b..1258511e6b 100644 --- a/services/api/src/resources/task/task_definition_resolvers.ts +++ b/services/api/src/resources/task/task_definition_resolvers.ts @@ -1,5 +1,5 @@ import * as R from 'ramda'; -import { query } from '../../util/db'; +import { query, isPatchEmpty } from '../../util/db'; import { Sql } from './sql'; import { Helpers } from './helpers'; import { Helpers as environmentHelpers } from '../environment/helpers'; @@ -7,7 +7,11 @@ import { Helpers as projectHelpers } from '../project/helpers'; import { Validators as envValidators } from '../environment/validators'; import { TaskRegistration, - newTaskRegistrationFromObject + newTaskRegistrationFromObject, + AdvancedTaskDefinitionInterface, + AdvancedTaskDefinitionType, + isAdvancedTaskDefinitionSystemLevelTask, + getAdvancedTaskDefinitionType } from './models/taskRegistration'; import * as advancedTaskArgument from './models/advancedTaskDefinitionArgument' import sql from '../user/sql'; @@ -15,11 +19,6 @@ import convertDateToMYSQLDateTimeFormat from 
'../../util/convertDateToMYSQLDateT import * as advancedTaskToolbox from './advancedtasktoolbox'; import { IKeycloakAuthAttributes, KeycloakUnauthorizedError } from '../../util/auth'; -const AdvancedTaskDefinitionType = { - command: 'COMMAND', - image: 'IMAGE' -}; - enum AdvancedTaskDefinitionTarget { Group, Project, @@ -217,11 +216,10 @@ export const addAdvancedTaskDefinition = async ( { input }, - { sqlClientPool, hasPermission, models } + { sqlClientPool, hasPermission, models, userActivityLogger } ) => { - - let { + const { name, description, image = '', @@ -246,59 +244,9 @@ export const addAdvancedTaskDefinition = async ( project ); - const systemLevelTask = - project == null && environment == null && groupName == null; - const advancedTaskWithImage = type == AdvancedTaskDefinitionType.image; - const needsAdminRightsToCreate = - systemLevelTask || advancedTaskWithImage || groupName; - - if (systemLevelTask) { - //if they pass this, they can do basically anything - //In the first release, we're not actually supporting this - //TODO: add checks once images are officially supported - for now, throw an error - throw Error('Adding Images and System Wide Tasks are not yet supported'); - } else if (advancedTaskWithImage) { - //We're only going to allow administrators to add these for now ... - await hasPermission('advanced_task','create:advanced'); - } else if (groupName) { - const group = await models.GroupModel.loadGroupByIdOrName({ - name: groupName - }); - await hasPermission('group', 'update', { - group: group.id - }); - } else if (projectObj) { - //does the user have permission to actually add to this? - //i.e. are they a maintainer? 
- - await hasPermission('task', `add:development`, { - project: projectObj.id - }); - } + await checkAdvancedTaskPermissions(input, hasPermission, models, projectObj); - // There are two cases, either it's a command, in which case the command + service needs to be part of the definition - // or it's a legit advanced task and we need an image. - - switch (type) { - case AdvancedTaskDefinitionType.image: - if (!image || 0 === image.length) { - throw new Error( - 'Unable to create image based task with no image supplied' - ); - } - break; - case AdvancedTaskDefinitionType.command: - if (!command || 0 === command.length) { - throw new Error('Unable to create Advanced task definition'); - } - break; - default: - throw new Error( - 'Undefined Advanced Task Definition type passed at creation time: ' + - type - ); - break; - } + validateAdvancedTaskDefinitionData(input, image, command, type); //let's see if there's already an advanced task definition with this name ... // Note: this will all be scoped to either System, group, project, or environment @@ -315,7 +263,6 @@ export const addAdvancedTaskDefinition = async ( let taskDef = R.prop(0, rows); if (taskDef) { - // At this point, `taskDefMatchedIncoming` will indicate // whether the incoming details for a similarly named // task _scoped to the system/group/project/environment_ @@ -355,7 +302,7 @@ export const addAdvancedTaskDefinition = async ( description, image, command, - created: null, + created, type, service, project, @@ -380,11 +327,118 @@ export const addAdvancedTaskDefinition = async ( } } + userActivityLogger(`User added advanced task definition '${name}'`, { + project: project, + event: 'api:updateTaskDefinition', + payload: { + taskDef: insertId + } + }); + return await atb.advancedTaskDefinitionById( insertId ); }; +export const updateAdvancedTaskDefinition = async ( + root, + { + input: { + id, + patch, + patch: { + name, + description, + image = '', + type, + service, + command, + project, + groupName, + 
environment, + permission, + advancedTaskDefinitionArguments, + created, + deleted + } + } + }, + { sqlClientPool, hasPermission, models, userActivityLogger } +) => { + if (isPatchEmpty({ patch })) { + throw new Error('Input patch requires at least 1 attribute'); + } + + let projectObj = await getProjectByEnvironmentIdOrProjectId( + sqlClientPool, + environment, + project + ); + + + await checkAdvancedTaskPermissions(patch, hasPermission, models, projectObj); + + validateAdvancedTaskDefinitionData(patch, image, command, type); + + await query( + sqlClientPool, + Sql.updateAdvancedTaskDefinition({ + id, + patch: { + name, + description, + image, + command, + created, + deleted, + type, + service, + project, + environment, + group_name: groupName, + permission, + } + }) + ); + + try { + if (advancedTaskDefinitionArguments) { + //remove current arguments from task defintion before we add new ones + await query( + sqlClientPool, + Sql.deleteAdvancedTaskDefinitionArgumentByTaskDef(id) + ); + + //add advanced task definition arguments + for(let i = 0; i < advancedTaskDefinitionArguments.length; i++) { + await query( + sqlClientPool, + Sql.insertAdvancedTaskDefinitionArgument({ + id: null, + advanced_task_definition: id, + name: advancedTaskDefinitionArguments[i].name, + type: advancedTaskDefinitionArguments[i].type + }) + ); + } + } + + userActivityLogger(`User updated advanced task definition '${id}'`, { + project: project, + event: 'api:updateTaskDefinition', + payload: { + taskDef: id + } + }); + + const atf = advancedTaskToolbox.advancedTaskFunctions(sqlClientPool, models, hasPermission); + return await atf.advancedTaskDefinitionById(id); + } catch (error) { + throw error + } +} + + const getProjectByEnvironmentIdOrProjectId = async ( sqlClientPool, environment, @@ -457,64 +511,63 @@ export const invokeRegisteredTask = async ( }); switch (task.type) { - case TaskRegistration.TYPE_STANDARD: - - let taskCommandEnvs = ''; - - let taskCommand = ""; + case 
TaskRegistration.TYPE_STANDARD: - if(argumentValues && argumentValues.length > 0) { - taskCommandEnvs = R.reduce((acc, val) => { - //@ts-ignore - return `${acc} ${val.advancedTaskDefinitionArgumentName}="${val.value}"` - }, taskCommandEnvs, argumentValues); + let taskCommandEnvs = ''; + let taskCommand = ""; - taskCommand += `${taskCommandEnvs}; `; - } - - taskCommand += `${task.command}`; + if(argumentValues && argumentValues.length > 0) { + taskCommandEnvs = R.reduce((acc, val) => { + //@ts-ignore + return `${acc} ${val.advancedTaskDefinitionArgumentName}="${val.value}"` + }, taskCommandEnvs, argumentValues); - const taskData = await Helpers(sqlClientPool).addTask({ - name: task.name, - environment: environment, - service: task.service, - command: taskCommand, - execute: true - }); - return taskData; - break; - case TaskRegistration.TYPE_ADVANCED: - // the return data here is basically what gets dropped into the DB. - - // get any arguments ready for payload - let payload = {}; - if(argumentValues) { - for(let i = 0; i < argumentValues.length; i++) { - //@ts-ignore - payload[argumentValues[i].advancedTaskDefinitionArgumentName] = argumentValues[i].value; + taskCommand += `${taskCommandEnvs}; `; } - } + taskCommand += `${task.command}`; + + const taskData = await Helpers(sqlClientPool).addTask({ + name: task.name, + environment: environment, + service: task.service, + command: taskCommand, + execute: true + }); + return taskData; + break; + case TaskRegistration.TYPE_ADVANCED: + // the return data here is basically what gets dropped into the DB. 
+ + // get any arguments ready for payload + let payload = {}; + if(argumentValues) { + for(let i = 0; i < argumentValues.length; i++) { + //@ts-ignore + payload[argumentValues[i].advancedTaskDefinitionArgumentName] = argumentValues[i].value; + } + } - const advancedTaskData = await Helpers(sqlClientPool).addAdvancedTask({ - name: task.name, - created: undefined, - started: undefined, - completed: undefined, - environment, - service: task.service || 'cli', - image: task.image, //the return data here is basically what gets dropped into the DB. - payload: payload, - remoteId: undefined, - execute: true - }); - return advancedTaskData; - break; - default: - throw new Error('Cannot find matching task'); - break; - } + const advancedTaskData = await Helpers(sqlClientPool).addAdvancedTask({ + name: task.name, + created: undefined, + started: undefined, + completed: undefined, + environment, + service: task.service || 'cli', + image: task.image, //the return data here is basically what gets dropped into the DB. 
+ payload: payload, + remoteId: undefined, + execute: true + }); + + return advancedTaskData; + break; + default: + throw new Error('Cannot find matching task'); + break; + } }; const getNamedAdvancedTaskForEnvironment = async ( @@ -523,7 +576,7 @@ const getNamedAdvancedTaskForEnvironment = async ( advancedTaskDefinition, environment, models -) => { +):Promise => { let rows = await resolveTasksForEnvironment( {}, { environment }, @@ -536,9 +589,11 @@ const getNamedAdvancedTaskForEnvironment = async ( `Task registration '${advancedTaskDefinition}' could not be found.` ); } - return newTaskRegistrationFromObject(taskDef); + //@ts-ignore + return taskDef; }; + export const deleteAdvancedTaskDefinition = async ( root, { advancedTaskDefinition }, @@ -616,3 +671,74 @@ const getAdvancedTaskTarget = advancedTask => { } }; +// const advancedTaskFunctions = sqlClientPool => { +// return { +// advancedTaskDefinitionById: async function(id) { +// const rows = await query( +// sqlClientPool, +// Sql.selectAdvancedTaskDefinition(id) +// ); +// let taskDef = R.prop(0, rows); +// taskDef.advancedTaskDefinitionArguments = await this.advancedTaskDefinitionArguments( +// taskDef.id +// ); +// return taskDef; +// }, +// advancedTaskDefinitionArguments: async function(task_definition_id) { +// const rows = await query( +// sqlClientPool, +// Sql.selectAdvancedTaskDefinitionArguments(task_definition_id) +// ); +// let taskDefArgs = rows; +// return taskDefArgs; +// } +// }; +// }; + +function validateAdvancedTaskDefinitionData(input: any, image: any, command: any, type: any) { + switch (getAdvancedTaskDefinitionType(input)) { + case AdvancedTaskDefinitionType.image: + if (!image || 0 === image.length) { + throw new Error( + 'Unable to create image based task with no image supplied' + ); + } + break; + case AdvancedTaskDefinitionType.command: + if (!command || 0 === command.length) { + throw new Error('Unable to create Advanced task definition'); + } + break; + default: + throw new 
Error( + 'Undefined Advanced Task Definition type passed at creation time: ' + + type + ); + break; + } +} + +async function checkAdvancedTaskPermissions(input:AdvancedTaskDefinitionInterface, hasPermission: any, models: any, projectObj: any) { + if (isAdvancedTaskDefinitionSystemLevelTask(input)) { + //if they pass this, they can do basically anything + //In the first release, we're not actually supporting this + //TODO: add checks once images are officially supported - for now, throw an error + throw Error('Adding Images and System Wide Tasks are not yet supported'); + } else if (getAdvancedTaskDefinitionType(input) == AdvancedTaskDefinitionType.image) { + //We're only going to allow administrators to add these for now ... + await hasPermission('advanced_task', 'create:advanced'); + } else if (input.groupName) { + const group = await models.GroupModel.loadGroupByIdOrName({ + name: input.groupName + }); + await hasPermission('group', 'update', { + group: group.id + }); + } else if (projectObj) { + //does the user have permission to actually add to this? + //i.e. are they a maintainer? + await hasPermission('task', `add:production`, { + project: projectObj.id + }); + } +} diff --git a/services/api/src/resources/workflow/resolvers.ts b/services/api/src/resources/workflow/resolvers.ts new file mode 100644 index 0000000000..6d1ae17bf1 --- /dev/null +++ b/services/api/src/resources/workflow/resolvers.ts @@ -0,0 +1,180 @@ +import { WorkflowInterface, WorkflowInputInterface } from "../../models/workflows"; +import Sql from "./sql"; +import { query, isPatchEmpty } from '../../util/db'; +import { ResolverFn } from '../'; +import { Helpers as projectHelpers } from '../project/helpers'; + +// Here we abstract permissions in case we want to change the underlying functionality later +// TODO: Question - do we want to handle the failure of perms checks _any other way_ +// than simply throwing exceptions? 
+class WorkflowPermissionHandler { + hasPermissions: any; + constructor(hasPermissions) { + this.hasPermissions = hasPermissions; + } + async canCreateWorkflow() { + return await this.hasPermissions('advanced_task', 'create:advanced'); + } + async canViewWorkflowForProject(projectId: number) { + return true; + } + async canDeleteWorkflow() { + return await this.hasPermissions('advanced_task', 'delete:advanced'); + } +} + + +export const getWorkflowsByEnvironmentId = async ( +{ id }, +{}, +extras +) => { + return await resolveWorkflowsForEnvironment({}, {environment: id}, extras); +} + +export const addWorkflow: ResolverFn = async ( + root, + { input }, + { sqlClientPool, hasPermission, models, userActivityLogger } +) => { + const perms = new WorkflowPermissionHandler(hasPermission); + + perms.canCreateWorkflow(); + + const { insertId } = await query( + sqlClientPool, + Sql.insertWorkflow(input) + ); + + let workflowObj = await query( + sqlClientPool, + Sql.selectWorkflowById(insertId) + ); + + userActivityLogger(`User added a workflow '${insertId}'`, { + project: input.project || '', + event: 'api:addWorkflow', + payload: { + data: { + name: input.name, + event: input.event, + project: input.project, + advanced_task_definition: input.advancedTaskDefinition + } + } + }); + + return workflowObj[0]; +} + +export const updateWorkflow: ResolverFn = async ( + root, + { + input: { + id, + patch, + patch: { + name, + event, + project, + advanced_task_definition: advancedTaskDefinition, + } + } + }: { input: { id: number, patch: WorkflowInputInterface } }, + { sqlClientPool, hasPermission, models, userActivityLogger } +) => { + if (isPatchEmpty({ patch })) { + throw new Error('Input patch requires at least 1 attribute'); + } + + const perms = new WorkflowPermissionHandler(hasPermission); + perms.canCreateWorkflow(); + + await query( + sqlClientPool, + Sql.updateWorkflow({ + id, + patch: { + name, + event, + project, + advanced_task_definition: advancedTaskDefinition, + } + 
}) + ); + + let workflowObj = await query( + sqlClientPool, + Sql.selectWorkflowById(id) + ); + + userActivityLogger(`User updated a workflow '${id}'`, { + project: project || '', + event: 'api:updateWorkflow', + payload: { + id: id, + patch: { + name, + event, + project, + advanced_task_definition: advancedTaskDefinition + } + } + }); + + return workflowObj[0]; +} + +export const deleteWorkflow: ResolverFn = async ( + root, + { input: { id } }, + { sqlClientPool, hasPermission, models, userActivityLogger } +) => { + const perms = new WorkflowPermissionHandler(hasPermission); + perms.canDeleteWorkflow(); + + let workflowObj = await query( + sqlClientPool, + Sql.selectWorkflowById(id) + ); + + if (workflowObj[0] == "undefined" || workflowObj[0] == null) { + throw new Error('Workflow not found'); + } + + try { + await query( + sqlClientPool, + Sql.deleteWorkflow(id) + ); + + return `successfully deleted workflow ${id}`; + } catch (error) { + return `failed to delete workflow: ${error}`; + } +} + +export const resolveAdvancedTaskDefinitionsForWorkflow = async(root, parameters, meta) => { + const { id: workflowId } = root; + const { sqlClientPool, hasPermission, models } = meta; + const perms = new WorkflowPermissionHandler(hasPermission); + let tasks = await query(sqlClientPool, Sql.selectTaskForWorkflow(workflowId)); + return tasks[0]; +} + +export const resolveWorkflowsForEnvironment = async ( + root, + { environment }, + { sqlClientPool, hasPermission, models } + ) => { + + + let project = await projectHelpers(sqlClientPool).getProjectByEnvironmentId(environment); + (new WorkflowPermissionHandler(hasPermission)).canViewWorkflowForProject(project.projectId); + let workflowObjs = await query( + sqlClientPool, + Sql.selectWorkflowsForProject(project.projectId) + ); + + return workflowObjs; + } diff --git a/services/api/src/resources/workflow/sql.ts b/services/api/src/resources/workflow/sql.ts new file mode 100644 index 0000000000..8b8f53f569 --- /dev/null +++ 
b/services/api/src/resources/workflow/sql.ts @@ -0,0 +1,36 @@ +import { WorkflowInterface, WorkflowInputInterface } from '../../models/workflows'; +import { knex } from '../../util/db'; + +export default { + insertWorkflow: (workflow: WorkflowInterface): string => { + return knex("workflow").insert({ + name: workflow.name, + event: workflow.event, + advanced_task_definition: workflow.advancedTaskDefinition, + project: workflow.project + }).toString(); + }, + updateWorkflow: ({ id, patch }: { id: number; patch: WorkflowInputInterface }): string => { + return knex("workflow") + .where('id', id) + .update(patch) + .toString(); + }, + deleteWorkflow: (id: number) => + knex('workflow') + .where('id', id) + .del() + .toString(), + saveWorkflowJobs: (workflow: WorkflowInterface) => { + + }, + selectWorkflowById: (id: number): string => { + return knex("workflow").select("*").where("id",id).toString(); + }, + selectWorkflowsForProject: (project: number): string => { + return knex("workflow").select("*").where("project", project).toString(); + }, + selectTaskForWorkflow: (id: number): string => { + return knex("advanced_task_definition").join('workflow', 'advanced_task_definition.id', '=', 'workflow.advanced_task_definition').select('advanced_task_definition.*').where('workflow.id', id).toString(); + } +}; diff --git a/services/api/src/typeDefs.js b/services/api/src/typeDefs.js index f735677755..cbd6452c7b 100644 --- a/services/api/src/typeDefs.js +++ b/services/api/src/typeDefs.js @@ -180,6 +180,39 @@ const typeDefs = gql` deleted: String } + + type Workflow { + id: Int + name: String + event: String + project: Int + advancedTaskDefinition: AdvancedTaskDefinition + } + + input AddWorkflowInput { + name: String + event: String + project: Int + advancedTaskDefinition: Int + } + + input DeleteWorkflowInput { + id: Int! 
+ } + + input UpdateWorkflowPatchInput { + name: String + event: String + project: Int + advancedTaskDefinition: Int + } + + input UpdateWorkflowInput { + id: Int! + patch: UpdateWorkflowPatchInput! + } + + type Problem { id: Int environment: Environment @@ -799,6 +832,7 @@ const typeDefs = gql` openshiftProjectPattern: String kubernetes: Kubernetes kubernetesNamespacePattern: String + workflows: [Workflow] } type EnvironmentHitsMonth { @@ -1086,6 +1120,12 @@ const typeDefs = gql` Returns a AdvancedTaskDefinitionArgument by Id """ advancedTaskDefinitionArgumentById(id: Int!) : [AdvancedTaskDefinitionArgument] + + """ + Returns all Workflows for an environment + """ + workflowsForEnvironment(environment: Int!) : [Workflow] + """ Returns the DeployTargetConfig by a deployTargetConfig Id """ @@ -1327,6 +1367,25 @@ const typeDefs = gql` advancedTaskDefinitionArguments: [AdvancedTaskDefinitionArgumentInput] } + input UpdateAdvancedTaskDefinitionInput { + id: Int! + patch: UpdateAdvancedTaskDefinitionPatchInput! + } + + input UpdateAdvancedTaskDefinitionPatchInput { + name: String + description: String + image: String + type: AdvancedTaskDefinitionTypes + service: String + command: String + environment: Int + project: Int + groupName: String + permission: TaskPermission + advancedTaskDefinitionArguments: [AdvancedTaskDefinitionArgumentInput] + } + input DeleteTaskInput { id: Int! 
} @@ -1901,8 +1960,12 @@ const typeDefs = gql` deleteEnvVariable(input: DeleteEnvVariableInput!): String addTask(input: TaskInput!): Task addAdvancedTaskDefinition(input: AdvancedTaskDefinitionInput!): AdvancedTaskDefinition + updateAdvancedTaskDefinition(input: UpdateAdvancedTaskDefinitionInput!): AdvancedTaskDefinition invokeRegisteredTask(advancedTaskDefinition: Int!, environment: Int!, argumentValues: [AdvancedTaskDefinitionArgumentValueInput]): Task deleteAdvancedTaskDefinition(advancedTaskDefinition: Int!): String + addWorkflow(input: AddWorkflowInput!): Workflow + updateWorkflow(input: UpdateWorkflowInput): Workflow + deleteWorkflow(input: DeleteWorkflowInput!): String taskDrushArchiveDump(environment: Int!): Task taskDrushSqlDump(environment: Int!): Task taskDrushCacheClear(environment: Int!): Task diff --git a/services/keycloak/start.sh b/services/keycloak/start.sh index bf1ae15bfc..56bbb7cb8c 100755 --- a/services/keycloak/start.sh +++ b/services/keycloak/start.sh @@ -153,7 +153,7 @@ function configure_api_client { PLATFORM_OWNER_ROLE_ID=$(/opt/jboss/keycloak/bin/kcadm.sh get -r lagoon roles/platform-owner --config $CONFIG_PATH | python -c 'import sys, json; print json.load(sys.stdin)["id"]') # Resource Scopes - resource_scope_names=(add add:development add:production addGroup addNoExec addNotification addOrUpdate:development addOrUpdate:production addUser delete delete:development delete:production deleteAll deleteNoExec deploy:development deploy:production drushArchiveDump:development drushArchiveDump:production drushCacheClear:development drushCacheClear:production drushRsync:destination:development drushRsync:destination:production drushRsync:source:development drushRsync:source:production drushSqlDump:development drushSqlDump:production drushSqlSync:destination:development drushSqlSync:destination:production drushSqlSync:source:development drushSqlSync:source:production environment:add:development environment:add:production 
environment:view:development environment:view:production getBySshKey invoke:guest invoke:developer invoke:maintainer create:advanced project:add project:view removeAll removeGroup removeNotification removeUser ssh:development ssh:production storage update update:development update:production view view:token view:user viewAll viewPrivateKey) + resource_scope_names=(add add:development add:production addGroup addNoExec addNotification addOrUpdate:development addOrUpdate:production addUser delete delete:development delete:production deleteAll deleteNoExec deploy:development deploy:production drushArchiveDump:development drushArchiveDump:production drushCacheClear:development drushCacheClear:production drushRsync:destination:development drushRsync:destination:production drushRsync:source:development drushRsync:source:production drushSqlDump:development drushSqlDump:production drushSqlSync:destination:development drushSqlSync:destination:production drushSqlSync:source:development drushSqlSync:source:production environment:add:development environment:add:production environment:view:development environment:view:production getBySshKey invoke:guest invoke:developer invoke:maintainer create:advanced delete:advanced project:add project:view removeAll removeGroup removeNotification removeUser ssh:development ssh:production storage update update:development update:production view view:token view:user viewAll viewPrivateKey) for rsn_key in ${!resource_scope_names[@]}; do echo Creating resource scope ${resource_scope_names[$rsn_key]} /opt/jboss/keycloak/bin/kcadm.sh create clients/$CLIENT_ID/authz/resource-server/scope --config $CONFIG_PATH -r ${KEYCLOAK_REALM:-master} -s name=${resource_scope_names[$rsn_key]} @@ -1600,7 +1600,7 @@ function configure_advanced_task_system { echo Creating resource fact - echo '{"name":"advanced_task","displayName":"advanced_task","scopes":[{"name":"invoke:guest"}, {"name":"invoke:developer"},{"name":"invoke:maintainer"}, 
{"name":"create:advanced"}],"attributes":{},"uris":[],"ownerManagedAccess":""}' | /opt/jboss/keycloak/bin/kcadm.sh create clients/$CLIENT_ID/authz/resource-server/resource --config $CONFIG_PATH -r ${KEYCLOAK_REALM:-master} -f - + echo '{"name":"advanced_task","displayName":"advanced_task","scopes":[{"name":"invoke:guest"}, {"name":"invoke:developer"},{"name":"invoke:maintainer"}, {"name":"create:advanced"}, {"name":"delete:advanced"}],"attributes":{},"uris":[],"ownerManagedAccess":""}' | /opt/jboss/keycloak/bin/kcadm.sh create clients/$CLIENT_ID/authz/resource-server/resource --config $CONFIG_PATH -r ${KEYCLOAK_REALM:-master} -f - # Create new permissions /opt/jboss/keycloak/bin/kcadm.sh create clients/$CLIENT_ID/authz/resource-server/permission/scope --config $CONFIG_PATH -r lagoon -f - < github.com/shreddedbacon/go-mq v0.0.0-20200419104937-b8e9af912ead + +replace github.com/NeowayLabs/wabbit v0.0.0-20200409220312-12e68ab5b0c6 => github.com/shreddedbacon/wabbit v0.0.0-20200419104837-5b7b769d7204 diff --git a/services/workflows/go.sum b/services/workflows/go.sum new file mode 100644 index 0000000000..647c7479a6 --- /dev/null +++ b/services/workflows/go.sum @@ -0,0 +1,292 @@ +bazil.org/fuse v0.0.0-20160811212531-371fbbdaa898/go.mod h1:Xbm+BRKSBEpa4q4hTSxohYNQpsxXPbPry4JJWOB3LB8= +cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw= +github.com/99designs/gqlgen v0.13.0/go.mod h1:NV130r6f4tpRWuAI+zsrSdooO/eWUv+Gyyoi3rEfXIk= +github.com/Azure/go-ansiterm v0.0.0-20170929234023-d6e3b3328b78 h1:w+iIsaOQNcT7OZ575w+acHgRric5iCyQh+xv+KJ4HB8= +github.com/Azure/go-ansiterm v0.0.0-20170929234023-d6e3b3328b78/go.mod h1:LmzpDX56iTiv29bbRTIsUNlaFfuhWRQBWjQdVyAevI8= +github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= +github.com/Khan/genqlient v0.3.0 h1:G35N630mNCW+j0rqSJUsvNkPLoX0bjrllRMnaQTbCak= +github.com/Khan/genqlient v0.3.0/go.mod h1:o9QUG7O7GhCeB3C83scbUQtdp+tdErC8OkVbSxIw1g4= +github.com/Microsoft/go-winio 
v0.4.16-0.20201130162521-d1ffc52c7331/go.mod h1:XB6nPKklQyQ7GC9LdcBEcBl8PF76WugXOPRXwdLnMv0= +github.com/Microsoft/go-winio v0.5.0 h1:Elr9Wn+sGKPlkaBvwu4mTrxtmOp3F3yV9qhaHbXGjwU= +github.com/Microsoft/go-winio v0.5.0/go.mod h1:JPGBdM1cNvN/6ISo+n8V5iA4v8pBzdOpzfwIujj1a84= +github.com/Microsoft/hcsshim v0.8.14 h1:lbPVK25c1cu5xTLITwpUcxoA9vKrKErASPYygvouJns= +github.com/Microsoft/hcsshim v0.8.14/go.mod h1:NtVKoYxQuTLx6gEq0L96c9Ju4JbRJ4nY2ow3VK6a9Lg= +github.com/agnivade/levenshtein v1.0.1/go.mod h1:CURSv5d9Uaml+FovSIICkLbAUZ9S4RqaHDIsdSBg7lM= +github.com/agnivade/levenshtein v1.0.3 h1:M5ZnqLOoZR8ygVq0FfkXsNOKzMCk0xRiow0R5+5VkQ0= +github.com/agnivade/levenshtein v1.0.3/go.mod h1:4SFRZbbXWLF4MU1T9Qg0pGgH3Pjs+t6ie5efyrwRJXs= +github.com/alexflint/go-arg v1.4.2 h1:lDWZAXxpAnZUq4qwb86p/3rIJJ2Li81EoMbTMujhVa0= +github.com/alexflint/go-arg v1.4.2/go.mod h1:9iRbDxne7LcR/GSvEr7ma++GLpdIU1zrghf2y2768kM= +github.com/alexflint/go-scalar v1.0.0 h1:NGupf1XV/Xb04wXskDFzS0KWOLH632W/EO4fAFi+A70= +github.com/alexflint/go-scalar v1.0.0/go.mod h1:GpHzbCOZXEKMEcygYQ5n/aa4Aq84zbxjy3MxYW0gjYw= +github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883/go.mod h1:rCTlJbsFo29Kk6CurOXKm700vrz8f0KW0JNfpkRJY/8= +github.com/arbovm/levenshtein v0.0.0-20160628152529-48b4e1c0c4d0/go.mod h1:t2tdKJDJF9BV14lnkjHmOQgcvEKgtqs5a1N3LNdJhGE= +github.com/bradleyjkemp/cupaloy/v2 v2.6.0/go.mod h1:bm7JXdkRd4BHJk9HpwqAI8BoAY1lps46Enkdqw6aRX0= +github.com/cheekybits/is v0.0.0-20150225183255-68e9c0620927 h1:SKI1/fuSdodxmNNyVBR8d7X/HuLnRpvvFO0AgyQk764= +github.com/cheekybits/is v0.0.0-20150225183255-68e9c0620927/go.mod h1:h/aW8ynjgkuj+NQRlZcDbAbM1ORAbXjXX77sX7T289U= +github.com/cheshir/go-mq v1.2.0 h1:Vj83B1SLsfceWbs+uRXhVf6NAdeiyrIJhFiU7qkF56w= +github.com/cheshir/go-mq v1.2.0/go.mod h1:lAwW/xhfO27t6WSVHFtLdgYioymwJvQxMSH19z00/BY= +github.com/cilium/ebpf v0.0.0-20200110133405-4032b1d8aae3/go.mod h1:MA5e5Lr8slmEg9bt0VpxxWqJlO4iwu3FBdHUzV7wQVg= +github.com/client9/misspell v0.3.4/go.mod 
h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= +github.com/containerd/cgroups v0.0.0-20200531161412-0dbf7f05ba59 h1:qWj4qVYZ95vLWwqyNJCQg7rDsG5wPdze0UaPolH7DUk= +github.com/containerd/cgroups v0.0.0-20200531161412-0dbf7f05ba59/go.mod h1:pA0z1pT8KYB3TCXK/ocprsh7MAkoW8bZVzPdih9snmM= +github.com/containerd/console v0.0.0-20180822173158-c12b1e7919c1/go.mod h1:Tj/on1eG8kiEhd0+fhSDzsPAFESxzBBvdyEgyryXffw= +github.com/containerd/containerd v1.3.2/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA= +github.com/containerd/containerd v1.4.3 h1:ijQT13JedHSHrQGWFcGEwzcNKrAGIiZ+jSD5QQG07SY= +github.com/containerd/containerd v1.4.3/go.mod h1:bC6axHOhabU15QhwfG7w5PipXdVtMXFTttgp+kVtyUA= +github.com/containerd/continuity v0.0.0-20190426062206-aaeac12a7ffc/go.mod h1:GL3xCUCBDV3CZiTSEKksMWbLE66hEyuu9qyDOOqM47Y= +github.com/containerd/continuity v0.0.0-20210208174643-50096c924a4e h1:6JKvHHt396/qabvMhnhUZvWaHZzfVfldxE60TK8YLhg= +github.com/containerd/continuity v0.0.0-20210208174643-50096c924a4e/go.mod h1:EXlVlkqNba9rJe3j7w3Xa924itAMLgZH4UD/Q4PExuQ= +github.com/containerd/fifo v0.0.0-20190226154929-a9fb20d87448/go.mod h1:ODA38xgv3Kuk8dQz2ZQXpnv/UZZUHUCL7pnLehbXgQI= +github.com/containerd/go-runc v0.0.0-20180907222934-5a6d9f37cfa3/go.mod h1:IV7qH3hrUgRmyYrtgEeGWJfWbgcHL9CSRruz2Vqcph0= +github.com/containerd/ttrpc v0.0.0-20190828154514-0e0f228740de/go.mod h1:PvCDdDGpgqzQIzDW1TphrGLssLDZp2GuS+X5DkEJB8o= +github.com/containerd/typeurl v0.0.0-20180627222232-a93fcdb778cd/go.mod h1:Cm3kwCdlkCfMSHURc+r6fwoGH6/F1hH3S4sg0rLFWPc= +github.com/coreos/go-systemd/v22 v22.0.0/go.mod h1:xO0FLkIi5MaZafQlIrOotqXZ90ih+1atmu1JpKERPPk= +github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= +github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU= +github.com/creack/pty v1.1.11 h1:07n33Z8lZxZ2qwegKbObQohDhXDQxiMMz1NOUGYlesw= +github.com/creack/pty v1.1.11/go.mod 
h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= +github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= +github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/dgrijalva/jwt-go v3.2.0+incompatible h1:7qlOGliEKZXTDg6OTjfoBKDXWrumCAMpl/TFQ4/5kLM= +github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ= +github.com/dgryski/trifles v0.0.0-20190318185328-a8d75aae118c/go.mod h1:if7Fbed8SFyPtHLHbg49SI7NAdJiC5WIA09pe59rfAA= +github.com/docker/docker v20.10.7+incompatible h1:Z6O9Nhsjv+ayUEeI1IojKbYcsGdgYSNqxe1s2MYzUhQ= +github.com/docker/docker v20.10.7+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk= +github.com/docker/go-connections v0.4.0 h1:El9xVISelRB7BuFusrZozjnkIM5YnzCViNKohAFqRJQ= +github.com/docker/go-connections v0.4.0/go.mod h1:Gbd7IOopHjR8Iph03tsViu4nIes5XhDvyHbTtUxmeec= +github.com/docker/go-units v0.4.0 h1:3uh0PgVws3nIA0Q+MwDC8yjEPf9zjRfZZWXZYDct3Tw= +github.com/docker/go-units v0.4.0/go.mod h1:fgPhTUdO+D/Jk86RDLlptpiXQzgHJF7gydDDbaIK4Dk= +github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk= +github.com/fsouza/go-dockerclient v1.7.3 h1:i6iMcktl688vsKUEExA6gU1UjPgIvmGtJeQ0mbuFqZo= +github.com/fsouza/go-dockerclient v1.7.3/go.mod h1:8xfZB8o9SptLNJ13VoV5pMiRbZGWkU/Omu5VOu/KC9Y= +github.com/go-chi/chi v3.3.2+incompatible/go.mod h1:eB3wogJHnLi3x/kFX2A+IbTBlXxmMeXJVKy9tTv1XzQ= +github.com/godbus/dbus/v5 v5.0.3/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA= +github.com/gogo/protobuf v1.0.0/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ= +github.com/gogo/protobuf v1.3.1/go.mod h1:SlYgWuQ5SjCEi6WLHjHCa1yvBfUnHcTbrrZtXPKa29o= +github.com/gogo/protobuf v1.3.2 h1:Ov1cvc58UF3b5XjBnZv7+opcTcQFZebYjWzi34vdm4Q= +github.com/gogo/protobuf v1.3.2/go.mod h1:P1XiOD3dCwIKUDQYPy72D8LYyHL2YPYrpS2s69NZV8Q= 
+github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q= +github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= +github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/golang/protobuf v1.3.2 h1:6nsPYzhq5kReh6QImI3k5qWzO4PEbvbIW2cwSfR/6xs= +github.com/golang/protobuf v1.3.2/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U= +github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M= +github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU= +github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/go-cmp v0.5.6 h1:BKbKCqvP6I+rmFHt06ZmyQtvB8xAkWdhFyr0ZUNZcxQ= +github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE= +github.com/google/uuid v1.0.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/google/uuid v1.1.1 h1:Gkbcsh/GbpXz7lPftLA3P6TYMwjCLYm83jiFQZF/3gY= +github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= +github.com/gorilla/context v0.0.0-20160226214623-1ea25387ff6f/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg= +github.com/gorilla/mux v1.6.1/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs= +github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So= +github.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= +github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/hashicorp/golang-lru v0.5.1 h1:0hERBMJE1eitiLkihrMvRVBYAkpHzc/J3QdDN+dAcgU= +github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= +github.com/inconshreveable/mousetrap v1.0.0/go.mod h1:PxqpIevigyE2G7u3NXJIT2ANytuPF1OarO4DADm73n8= 
+github.com/kisielk/errcheck v1.2.0/go.mod h1:/BMXB+zMLi60iA8Vv6Ksmxu/1UDYcXs4uQLJ+jE2L00= +github.com/kisielk/errcheck v1.5.0/go.mod h1:pFxgyoBC7bSaBwPgfKdkLd5X25qrDl4LWUI2bnpBCr8= +github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= +github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ= +github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo= +github.com/kr/pretty v0.2.1 h1:Fmg33tUaq4/8ym9TJN1x7sLJnHVwhP33CNkpYV/7rwI= +github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI= +github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= +github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE= +github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= +github.com/logrusorgru/aurora v0.0.0-20200102142835-e9ef32dff381/go.mod h1:7rIyQOR62GCctdiQpZ/zOJlFyk6y+94wXzv6RNZgaR4= +github.com/machinebox/graphql v0.2.3-0.20181106130121-3a9253180225 h1:guHWmqIKr4G+gQ4uYU5vcZjsUhhklRA2uOcGVfcfqis= +github.com/machinebox/graphql v0.2.3-0.20181106130121-3a9253180225/go.mod h1:F+kbVMHuwrQ5tYgU9JXlnskM8nOaFxCAEolaQybkjWA= +github.com/matryer/is v1.4.0 h1:sosSmIWwkYITGrxZ25ULNDeKiMNzFSr4V/eqBQP0PeE= +github.com/matryer/is v1.4.0/go.mod h1:8I/i5uYgLzgsgEloJE1U6xx5HkBQpAZvepWuujKwMRU= +github.com/matryer/moq v0.0.0-20200106131100-75d0ddfc0007/go.mod h1:9ELz6aaclSIGnZBoaSLZ3NAl1VTufbOrXBPvtcy6WiQ= +github.com/matryer/try v0.0.0-20161228173917-9ac251b645a2 h1:JAEbJn3j/FrhdWA9jW8B5ajsLIjeuEHLi8xE4fk997o= +github.com/matryer/try v0.0.0-20161228173917-9ac251b645a2/go.mod h1:0KeJpeMD6o+O4hW7qJOT7vyQPKrWmj26uf5wMc/IiIs= +github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE= +github.com/mattn/go-isatty v0.0.8/go.mod 
h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s= +github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU= +github.com/mitchellh/mapstructure v0.0.0-20180203102830-a4e142e9c047/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y= +github.com/moby/sys/mount v0.2.0 h1:WhCW5B355jtxndN5ovugJlMFJawbUODuW8fSnEH6SSM= +github.com/moby/sys/mount v0.2.0/go.mod h1:aAivFE2LB3W4bACsUXChRHQ0qKWsetY4Y9V7sxOougM= +github.com/moby/sys/mountinfo v0.4.0 h1:1KInV3Huv18akCu58V7lzNlt+jFmqlu1EaErnEHE/VM= +github.com/moby/sys/mountinfo v0.4.0/go.mod h1:rEr8tzG/lsIZHBtN/JjGG+LMYx9eXgW2JI+6q0qou+A= +github.com/moby/term v0.0.0-20201216013528-df9cb8a40635 h1:rzf0wL0CHVc8CEsgyygG0Mn9CNCCPZqOPaz8RiiHYQk= +github.com/moby/term v0.0.0-20201216013528-df9cb8a40635/go.mod h1:FBS0z0QWA44HXygs7VXDUOGoN/1TV3RuWkLO04am3wc= +github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A= +github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc= +github.com/opencontainers/go-digest v0.0.0-20180430190053-c9281466c8b2/go.mod h1:cMLVZDEM3+U2I4VmLI6N8jQYUd2OVphdqWwCJHrFt2s= +github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= +github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= +github.com/opencontainers/image-spec v1.0.1 h1:JMemWkRwHx4Zj+fVxWoMCFm/8sYGGrUVojFA6h/TRcI= +github.com/opencontainers/image-spec v1.0.1/go.mod h1:BtxoFyWECRxE4U/7sNtV5W15zMzWCbyJoFRP3s7yZA0= +github.com/opencontainers/runc v0.0.0-20190115041553-12f6a991201f/go.mod h1:qT5XzbpPznkRYVz/mWwUaVBUv2rmF59PVA73FjuZG0U= +github.com/opencontainers/runc v0.1.1 h1:GlxAyO6x8rfZYN9Tt0Kti5a/cP41iuiO2yYT0IJGY8Y= +github.com/opencontainers/runc v0.1.1/go.mod h1:qT5XzbpPznkRYVz/mWwUaVBUv2rmF59PVA73FjuZG0U= +github.com/opencontainers/runtime-spec v1.0.2/go.mod h1:jwyrGlmzljRJv/Fgzds9SsS/C5hL+LL3ko9hs6T5lQ0= +github.com/opentracing/basictracer-go v1.0.0/go.mod 
h1:QfBfYuafItcjQuMwinw9GhYKwFXS9KnPs5lxoYwgW74= +github.com/opentracing/opentracing-go v1.0.2/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o= +github.com/pborman/uuid v1.2.0 h1:J7Q5mO4ysT1dv8hyrUGHb9+ooztCXu1D8MY8DZYsu3g= +github.com/pborman/uuid v1.2.0/go.mod h1:X/NO0urCmaxf9VXbdlT7C2Yzkj2IKimNn4k+gtPdI/k= +github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4= +github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= +github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= +github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= +github.com/prometheus/procfs v0.0.0-20180125133057-cb4147076ac7/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk= +github.com/rs/cors v1.6.0/go.mod h1:gFx+x8UowdsKA9AchylcLynDq+nNFfI8FkUZdN/jGCU= +github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM= +github.com/sergi/go-diff v1.1.0/go.mod h1:STckp+ISIX8hZLjrqAeVduY0gWCT9IjLuqbuNXdaHfM= +github.com/shreddedbacon/go-mq v0.0.0-20200419104937-b8e9af912ead h1:brBqfI3SWHkBhydQ4zdYbeQj/4Rq68GHO+Me8W7Fji8= +github.com/shreddedbacon/go-mq v0.0.0-20200419104937-b8e9af912ead/go.mod h1:lAwW/xhfO27t6WSVHFtLdgYioymwJvQxMSH19z00/BY= +github.com/shreddedbacon/wabbit v0.0.0-20200419104837-5b7b769d7204 h1:jXml7E4X/d9v6LATMXFPCF1yW6TKrs+o5wMtYTaBdTw= +github.com/shreddedbacon/wabbit v0.0.0-20200419104837-5b7b769d7204/go.mod h1:l7t6U3j3uZUYroWctp1FvWEktRMuGqx2zCcb5cd8cS8= +github.com/shurcooL/httpfs v0.0.0-20171119174359-809beceb2371/go.mod h1:ZY1cvUeJuFPAdZ/B6v7RHavJWZn2YPVFQ1OSXhCGOkg= +github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc= +github.com/shurcooL/vfsgen v0.0.0-20180121065927-ffb13db8def0/go.mod h1:TrYk7fJVaAttu97ZZKrO9UbRa8izdowaMIZcxYMbVaw= +github.com/sirupsen/logrus v1.4.1/go.mod 
h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q= +github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= +github.com/sirupsen/logrus v1.7.0 h1:ShrD1U9pZB12TX0cVy0DtePoCH97K8EtX+mg7ZARUtM= +github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0= +github.com/spf13/cobra v0.0.2-0.20171109065643-2da4a54c5cee/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ= +github.com/spf13/pflag v1.0.1-0.20171106142849-4c012f6dcd95/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= +github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4= +github.com/streadway/amqp v0.0.0-20200108173154-1c71cc93ed71 h1:2MR0pKUzlP3SGgj5NYJe/zRYDwOu9ku6YHy+Iw7l5DM= +github.com/streadway/amqp v0.0.0-20200108173154-1c71cc93ed71/go.mod h1:AZpEONHx3DKn8O/DFsRAY58/XVQiIPMTMB1SddzLXVw= +github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME= +github.com/stretchr/testify v1.2.1/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs= +github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk= +github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4= +github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg= +github.com/tiago4orion/conjure v0.0.0-20150908101743-93cb30b9d218 h1:tOESt7J50fPC9NqR0VdU1Zxk2zo5QYH70ap5TsU1bt4= +github.com/tiago4orion/conjure v0.0.0-20150908101743-93cb30b9d218/go.mod h1:GQei++1WClbEC7AN1B9ipY1jCjzllM/7UNg0okAh/Z4= +github.com/urfave/cli v1.22.2/go.mod h1:Gos4lmkARVdJ6EkW0WaNv/tZAAMe9V7XWyB60NtXRu0= +github.com/urfave/cli/v2 v2.1.1/go.mod h1:SE9GqnLQmjVa0iPEY0f1w3ygNIYcIJ0OKPMoW2caLfQ= 
+github.com/vektah/dataloaden v0.2.1-0.20190515034641-a19b9a6e7c9e/go.mod h1:/HUdMve7rvxZma+2ZELQeNh88+003LL7Pf/CZ089j8U= +github.com/vektah/gqlparser/v2 v2.1.0 h1:uiKJ+T5HMGGQM2kRKQ8Pxw8+Zq9qhhZhz/lieYvCMns= +github.com/vektah/gqlparser/v2 v2.1.0/go.mod h1:SyUiHgLATUR8BiYURfTirrTcGpcE+4XkV2se04Px1Ms= +github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.2.1/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= +github.com/yuin/goldmark v1.3.5/go.mod h1:mwnBkeHKe2W/ZEtQ+71ViKU8L12m81fl3OWwC1Zlc8k= +go.opencensus.io v0.22.0 h1:C9hSCOW830chIVkdja34wa6Ky+IzWllkUinR+BtRZd4= +go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= +golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w= +golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= +golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= +golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= +golang.org/x/lint v0.0.0-20190227174305-5b3e6a55c961/go.mod h1:wehouNa3lNwaWXcvxsM5YxQ5yQlVC4a0KAMCusXpPoU= +golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc= +golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg= +golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/mod v0.4.2 h1:Gz96sIWK3OalVv/I/qNygP42zyoKp3xptRVCWRFEBvo= +golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= +golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod 
h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190213061140-3a22650c66bd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= +golang.org/x/net v0.0.0-20190311183353-d8887717615a/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg= +golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20191004110552-13f9640d40b9/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20200226121028-0de0cce0169b/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s= +golang.org/x/net v0.0.0-20201021035429-f5854403a974/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU= +golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM= +golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= +golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20190911185100-cd5d95a43a6e/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9/go.mod 
h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sync v0.0.0-20210220032951-036812b2e83c/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= +golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= +golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191022100944-742c48ecaeb7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20191026070338-33540a1f6037/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200120151820-655fe14d7479/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200831180312-196b9ba8737a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200909081042-eff7692f9009/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200922070232-aee5d888a860/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= 
+golang.org/x/sys v0.0.0-20201201145000-ef89a241ccb3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210216224549-f992740a1bac h1:9glrpwtNjBYgRpb67AZJKHfzj1stG/8BL5H7In2oTC4= +golang.org/x/sys v0.0.0-20210216224549-f992740a1bac/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= +golang.org/x/sys v0.0.0-20210510120138-977fb7262007 h1:gG67DSER+11cZvqIMb8S8bt0vZtiN6xWYARwirrOSfE= +golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= +golang.org/x/term v0.0.0-20201113234701-d7a72108b828 h1:htWEtQEuEVJ4tU/Ngx7Cd/4Q7e3A5Up1owgyBtVsTwk= +golang.org/x/term v0.0.0-20201113234701-d7a72108b828/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw= +golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= +golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= +golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk= +golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= +golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20181030221726-6c7e314b6563/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190125232054-d66bd3c5d5a6/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ= +golang.org/x/tools v0.0.0-20190226205152-f727befe758c/go.mod h1:9Yl7xja0Znq3iFh3HoIrodX9oNMXvdceNzlUR8zjMvY= +golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs= +golang.org/x/tools 
v0.0.0-20190515012406-7d7faa4812bd/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190524140312-2c0ae7006135/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q= +golang.org/x/tools v0.0.0-20190624222133-a101b041ded4/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc= +golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo= +golang.org/x/tools v0.0.0-20200114235610-7ae403b6b589/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28= +golang.org/x/tools v0.0.0-20200619180055-7c47624df98f/go.mod h1:EkVYQZoAsY45+roYkvgYkIh4xh/qjgUK9TdY2XT94GE= +golang.org/x/tools v0.0.0-20210106214847-113979e3529a/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA= +golang.org/x/tools v0.1.5 h1:ouewzE6p+/VEB31YYnTbEJdi8pFqKp4P4n85vwo3DHA= +golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk= +golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE= +golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= +google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM= +google.golang.org/appengine v1.4.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4= +google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc= +google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= +google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE= 
+google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c= +google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38= +google.golang.org/grpc v1.23.1/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg= +gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk= +gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= +gopkg.in/yaml.v1 v1.0.0-20140924161607-9f9df34309c0 h1:POO/ycCATvegFmVuPpQzZFJ+pGZeX22Ufu6fibxDVjU= +gopkg.in/yaml.v1 v1.0.0-20140924161607-9f9df34309c0/go.mod h1:WDnlLJ4WF5VGsH/HVa3CI79GS0ol3YnhVnKP89i0kNg= +gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.2.8 h1:obN1ZagJSUGI0Ek/LBmuj4SNLPfIny3KsKFopxRdj10= +gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= +gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY= +gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ= +gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +gotest.tools v2.2.0+incompatible h1:VsBPFP1AI068pPrMxtb/S8Zkgf9xEmTLJjfM+P5UIEo= +gotest.tools v2.2.0+incompatible/go.mod h1:DsYFclhRJ6vuDpmuTbkuFWG+y2sxOXAzmJt81HFBacw= +gotest.tools/v3 v3.0.2/go.mod h1:3SzNCllyD9/Y+b5r9JIKQ474KzkZyqLqEfYqMsX94Bk= +gotest.tools/v3 v3.0.3 h1:4AuOwCGf4lLR9u3YOe2awrHygurzhO/HeQ6laiA6Sx0= +gotest.tools/v3 v3.0.3/go.mod h1:Z7Lb0S5l+klDB31fvDQX8ss/FlKDxtlFlw3Oa8Ymbl8= 
+honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= +sourcegraph.com/sourcegraph/appdash v0.0.0-20180110180208-2cc67fd64755/go.mod h1:hI742Nqp5OhwiqlzhgfbWU4mW4yO10fP+LoT9WOswdU= +sourcegraph.com/sourcegraph/appdash-data v0.0.0-20151005221446-73f23eafcf67/go.mod h1:L5q+DGLGOQFpo1snNEkLOJT2d1YTW66rWNzatr3He1k= diff --git a/services/workflows/internal/handler/README.md b/services/workflows/internal/handler/README.md new file mode 100644 index 0000000000..9e39f7c61b --- /dev/null +++ b/services/workflows/internal/handler/README.md @@ -0,0 +1,3 @@ +# handler + +Any messages that come in on a message queue are handled by this handler \ No newline at end of file diff --git a/services/workflows/internal/handler/main.go b/services/workflows/internal/handler/main.go new file mode 100644 index 0000000000..0abecbf2ac --- /dev/null +++ b/services/workflows/internal/handler/main.go @@ -0,0 +1,226 @@ +package handler + +import ( + "context" + "encoding/json" + "fmt" + "github.com/Khan/genqlient/graphql" + "github.com/cheshir/go-mq" + "github.com/matryer/try" + "github.com/uselagoon/lagoon/services/actions-handler/internal/lagoon/jwt" + "github.com/uselagoon/lagoon/services/actions-handler/internal/lagoonclient" + "log" + "net/http" + "time" + + //"github.com/uselagoon/lagoon/services/actions-handler/internal/lagoon" + //lclient "github.com/uselagoon/lagoon/services/actions-handler/internal/lagoon/client" + //"github.com/uselagoon/lagoon/services/actions-handler/internal/lagoon/jwt" + //"github.com/uselagoon/lagoon/services/actions-handler/internal/schema" +) + +// RabbitBroker . 
+type RabbitBroker struct {
+	Hostname string `json:"hostname"`
+	Port string `json:"port"`
+	Username string `json:"username,omitempty"`
+	Password string `json:"password,omitempty"`
+	QueueName string `json:"queueName"`
+	ExchangeName string `json:"exchangeName"`
+}
+
+// LagoonAPI .
+type LagoonAPI struct {
+	Endpoint string `json:"endpoint"`
+	JWTAudience string `json:"audience"`
+	TokenSigningKey string `json:"tokenSigningKey"`
+	JWTSubject string `json:"subject"`
+	JWTIssuer string `json:"issuer"`
+}
+
+// Action is the structure of an action that is received via the message queue.
+type Action struct {
+	Type string `json:"type"` // defines the action type
+	EventType string `json:"eventType"` // defines the eventtype field in the event notification
+	Data map[string]interface{} `json:"data"` // contains the payload for the action, this could be any json so using a map
+}
+
+type LagoonLogMeta struct {
+	BranchName string `json:"branchName,omitempty"`
+	BuildName string `json:"buildName,omitempty"`
+	BuildPhase string `json:"buildPhase,omitempty"`
+	EndTime string `json:"endTime,omitempty"`
+	Environment string `json:"environment,omitempty"`
+	EnvironmentID *uint `json:"environmentId,omitempty"`
+	JobName string `json:"jobName,omitempty"`
+	JobStatus string `json:"jobStatus,omitempty"`
+	LogLink string `json:"logLink,omitempty"`
+	MonitoringURLs []string `json:"monitoringUrls,omitempty"`
+	Project string `json:"project,omitempty"`
+	ProjectID *uint `json:"projectId,omitempty"`
+	ProjectName string `json:"projectName,omitempty"`
+	RemoteID string `json:"remoteId,omitempty"`
+	Route string `json:"route,omitempty"`
+	Routes []string `json:"routes,omitempty"`
+	StartTime string `json:"startTime,omitempty"`
+	Services []string `json:"services,omitempty"`
+	Key string `json:"key,omitempty"`
+	AdvancedData string `json:"advancedData,omitempty"`
+	Cluster string `json:"clusterName,omitempty"`
+}
+
+type LagoonLog struct {
+	Severity string `json:"severity,omitempty"`
+ Project string `json:"project,omitempty"` + UUID string `json:"uuid,omitempty"` + Event string `json:"event,omitempty"` + Meta *LagoonLogMeta `json:"meta,omitempty"` + Message string `json:"message,omitempty"` +} + +type messaging interface { + Consumer() + Publish(string, []byte) +} + +// Messaging is used for the config and client information for the messaging queue. +type Messaging struct { + Config mq.Config + LagoonAPI LagoonAPI + ConnectionAttempts int + ConnectionRetryInterval int + EnableDebug bool +} + +// NewMessaging returns a messaging with config +func NewMessaging(config mq.Config, lagoonAPI LagoonAPI, startupAttempts int, startupInterval int, enableDebug bool) *Messaging { + return &Messaging{ + Config: config, + LagoonAPI: lagoonAPI, + ConnectionAttempts: startupAttempts, + ConnectionRetryInterval: startupInterval, + EnableDebug: enableDebug, + } +} + +// Consumer handles consuming messages sent to the queue that this action handler is connected to and processes them accordingly +func (h *Messaging) Consumer() { + + var messageQueue mq.MQ + // if no mq is found when the goroutine starts, retry a few times before exiting + // default is 10 retry with 30 second delay = 5 minutes + err := try.Do(func(attempt int) (bool, error) { + var err error + messageQueue, err = mq.New(h.Config) + if err != nil { + log.Println(err, + fmt.Sprintf( + "Failed to initialize message queue manager, retrying in %d seconds, attempt %d/%d", + h.ConnectionRetryInterval, + attempt, + h.ConnectionAttempts, + ), + ) + time.Sleep(time.Duration(h.ConnectionRetryInterval) * time.Second) + } + return attempt < h.ConnectionAttempts, err + }) + if err != nil { + log.Fatalf("Finally failed to initialize message queue manager: %v", err) + } + defer messageQueue.Close() + + go func() { + for err := range messageQueue.Error() { + log.Println(fmt.Sprintf("Caught error from message queue: %v", err)) + } + }() + + forever := make(chan bool) + + // Handle any tasks that go to the queue + 
log.Println("Listening for messages in queue lagoon-logs:workflows") + err = messageQueue.SetConsumerHandler("items-queue", processingIncomingMessageQueueFactory(h)) + if err != nil { + log.Println(fmt.Sprintf("Failed to set handler to consumer `%s`: %v", "items-queue", err)) + } + <-forever +} + +type authedTransport struct { + wrapped http.RoundTripper + h *Messaging +} + +func (t *authedTransport) RoundTrip(req *http.Request) (*http.Response, error) { + //grab events for project + token, err := jwt.OneMinuteAdminToken(t.h.LagoonAPI.TokenSigningKey, t.h.LagoonAPI.JWTAudience, t.h.LagoonAPI.JWTSubject, t.h.LagoonAPI.JWTIssuer) + if err != nil { + // the token wasn't generated + if t.h.EnableDebug { + log.Println("Could not get bearer token") + log.Println(err) + } + return nil, err + } + req.Header.Set("Authorization", "bearer "+token) + return t.wrapped.RoundTrip(req) +} + +func processingIncomingMessageQueueFactory(h *Messaging) func(mq.Message) { + return func(message mq.Message) { + incoming := &LagoonLog{} + err := json.Unmarshal(message.Body(), incoming) + if err != nil { + fmt.Println("could not unmarshall") + } + + + //Ahhh, the issue is that there is no environment name passed thought ... 
+ environmentName := incoming.Meta.Environment + if incoming.Meta.ProjectID != nil && incoming.Meta.EnvironmentID != nil { + fmt.Println("Connecting to " + h.LagoonAPI.Endpoint) + client := graphql.NewClient(h.LagoonAPI.Endpoint, + &http.Client{Transport: &authedTransport{wrapped: http.DefaultTransport, h: h}}) + projectId := int(*incoming.Meta.ProjectID) + environmentWorkflows, err := lagoonclient.GetEnvironmentWorkflowsByEnvironmentId(context.TODO(), client, int(*incoming.Meta.EnvironmentID)) + if err != nil { + log.Println(err) + return + } + for _, wf := range environmentWorkflows { + if lagoonclient.IsEventOfType(incoming.Event, wf.AdvancedTaskDetails) { + log.Printf("Found event of type %v for project:%v and environment %v - invoking.\n", + incoming.Event, projectId, environmentName) + result, err := lagoonclient.InvokeWorkflowOnEnvironment(context.TODO(), client, wf.EnvironmentId, wf.AdvancedTaskId) + if err != nil { + log.Println(err) + //TODO: do we need some kind of retry logic here? 
+ message.Ack(false) // ack to remove from queue + return + } + log.Printf("Invocation result of %v for project:%v and environment %v - %v.\n", + incoming.Event, projectId, environmentName, result) + } + + } + } + message.Ack(false) // ack to remove from queue + } +} + +// toLagoonLogs sends logs to the lagoon-logs message queue +func (h *Messaging) toLagoonLogs(messageQueue mq.MQ, message map[string]interface{}) { + msgBytes, err := json.Marshal(message) + if err != nil { + if h.EnableDebug { + log.Println(err, "Unable to encode message as JSON") + } + } + producer, err := messageQueue.AsyncProducer("lagoon-logs") + if err != nil { + log.Println(fmt.Sprintf("Failed to get async producer: %v", err)) + return + } + producer.Produce(msgBytes) +} diff --git a/services/workflows/internal/handler/main_test.go b/services/workflows/internal/handler/main_test.go new file mode 100644 index 0000000000..250219d89f --- /dev/null +++ b/services/workflows/internal/handler/main_test.go @@ -0,0 +1,46 @@ +package handler + +import ( + "bytes" + "github.com/cheshir/go-mq" + "reflect" + "testing" +) + +func checkEqual(t *testing.T, got, want interface{}, msgs ...interface{}) { + if !reflect.DeepEqual(got, want) { + buf := bytes.Buffer{} + buf.WriteString("got:\n[%v]\nwant:\n[%v]\n") + for _, v := range msgs { + buf.WriteString(v.(string)) + } + t.Errorf(buf.String(), got, want) + } +} + +type MqMessageFake struct { +} + +func (f MqMessageFake) Ack(Multiple bool) error { + return nil +} + +func (f MqMessageFake) Nack(Multiple bool, request bool) error { + return nil +} + +func Test_processingIncomingMessageQueue(t *testing.T) { + type args struct { + message mq.Message + } + tests := []struct { + name string + args args + }{ + // TODO: Add test cases. 
+	}
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+		})
+	}
+}
\ No newline at end of file
diff --git a/services/workflows/internal/handler/testassets/testevent1.json b/services/workflows/internal/handler/testassets/testevent1.json
new file mode 100644
index 0000000000..dd99fcd81b
--- /dev/null
+++ b/services/workflows/internal/handler/testassets/testevent1.json
@@ -0,0 +1,8 @@
+{
+  "event": "task:builddeploy-kubernetes:complete",
+  "Meta": {
+    "ProjectId": 18,
+    "EnvironmentId": 1,
+    "Environment": "Master"
+  }
+}
\ No newline at end of file
diff --git a/services/workflows/internal/handler/workflow_matcher.go b/services/workflows/internal/handler/workflow_matcher.go
new file mode 100644
index 0000000000..3668a062af
--- /dev/null
+++ b/services/workflows/internal/handler/workflow_matcher.go
@@ -0,0 +1,12 @@
+package handler
+
+import "github.com/uselagoon/lagoon/services/actions-handler/internal/schema"
+
+func matchWorkflows(notification schema.Notification, workflows []schema.Workflow) schema.Workflow {
+	for _, workflow := range workflows {
+		if(notification.Event == workflow.Event) {
+			return workflow
+		}
+	}
+	return schema.Workflow{}
+}
\ No newline at end of file
diff --git a/services/workflows/internal/lagoon/README.md b/services/workflows/internal/lagoon/README.md
new file mode 100644
index 0000000000..b705515390
--- /dev/null
+++ b/services/workflows/internal/lagoon/README.md
@@ -0,0 +1,3 @@
+# lagoon
+
+This is an interim lagoon client package that will eventually be replaced by a standalone lagoon client library
\ No newline at end of file
diff --git a/services/workflows/internal/lagoon/client/_lgraphql/deployEnvironmentLatest.graphql b/services/workflows/internal/lagoon/client/_lgraphql/deployEnvironmentLatest.graphql
new file mode 100644
index 0000000000..89e47db8a3
--- /dev/null
+++ b/services/workflows/internal/lagoon/client/_lgraphql/deployEnvironmentLatest.graphql
@@ -0,0 +1,13 @@
+mutation (
+  $environment: EnvironmentInput!
+ $bulkId: String + $priority: Int + ) { + deployEnvironmentLatest(input: { + environment: $environment + bulkId: $bulkId + priority: $priority + returnData: true + } + ) +} \ No newline at end of file diff --git a/services/workflows/internal/lagoon/client/_lgraphql/getWorkflowsForEnvironment.graphql b/services/workflows/internal/lagoon/client/_lgraphql/getWorkflowsForEnvironment.graphql new file mode 100644 index 0000000000..e69de29bb2 diff --git a/services/workflows/internal/lagoon/client/client.go b/services/workflows/internal/lagoon/client/client.go new file mode 100644 index 0000000000..8cdf1ac083 --- /dev/null +++ b/services/workflows/internal/lagoon/client/client.go @@ -0,0 +1,93 @@ +//go:generate go-bindata -pkg lgraphql -o lgraphql/lgraphql.go -nometadata _lgraphql/ + +// Package client implements the interfaces required by the parent lagoon +// package. +package client + +import ( + "encoding/json" + "fmt" + "log" + "os" + + "github.com/machinebox/graphql" + "github.com/uselagoon/lagoon/services/actions-handler/internal/lagoon/client/lgraphql" +) + +// Client implements the lagoon package interfaces for the Lagoon GraphQL API. +type Client struct { + userAgent string + token string + client *graphql.Client +} + +// New creates a new Client for the given endpoint. +func New(endpoint, token, userAgent string, debug bool) *Client { + if debug { + return &Client{ + userAgent: userAgent, + token: token, + client: graphql.NewClient(endpoint, + // enable debug logging to stderr + func(c *graphql.Client) { + l := log.New(os.Stderr, "graphql", 0) + c.Log = func(s string) { + l.Println(s) + } + }), + } + } + return &Client{ + userAgent: userAgent, + token: token, + client: graphql.NewClient(endpoint), + } +} + +// newRequest constructs a graphql request. +// assetName is the name of the graphql query template in _graphql/. +// varStruct is converted to a map of variables for the template. 
+func (c *Client) newRequest( + assetName string, varStruct interface{}) (*graphql.Request, error) { + + q, err := lgraphql.Asset(assetName) + if err != nil { + return nil, fmt.Errorf("couldn't get asset: %w", err) + } + + return c.doRequest(string(q), varStruct) +} + +func (c *Client) doRequest(query string, varStruct interface{}) (*graphql.Request, error) { + vars, err := structToVarMap(varStruct) + if err != nil { + return nil, fmt.Errorf("couldn't convert struct to map: %w", err) + } + + req := graphql.NewRequest(query) + for key, value := range vars { + req.Var(key, value) + } + + headers := map[string]string{ + "User-Agent": c.userAgent, + "Authorization": fmt.Sprintf("Bearer %s", c.token), + } + for key, value := range headers { + req.Header.Set(key, value) + } + + return req, nil +} + +// structToVarMap encodes the given struct to a map. The idea is that by +// round-tripping through Marshal/Unmarshal, omitempty is applied to the +// zero-valued fields. +func structToVarMap( + varStruct interface{}) (vars map[string]interface{}, err error) { + data, err := json.Marshal(varStruct) + if err != nil { + return vars, err + } + return vars, json.Unmarshal(data, &vars) +} diff --git a/services/workflows/internal/lagoon/client/client_test.go b/services/workflows/internal/lagoon/client/client_test.go new file mode 100644 index 0000000000..2c842a4a93 --- /dev/null +++ b/services/workflows/internal/lagoon/client/client_test.go @@ -0,0 +1,81 @@ +package client_test + +import ( + "reflect" + "testing" + + "github.com/uselagoon/lagoon/services/actions-handler/internal/lagoon/client" +) + +type testStruct0 struct { + Foo string `json:"foo"` + Bar uint `json:"bar"` + Baz string `json:"baz,omitempty"` + Quux uint `json:"quux,omitempty"` +} + +func TestStructToVarMap(t *testing.T) { + var testCases = map[string]struct { + input testStruct0 + expect map[string]interface{} + }{ + "simple struct": { + input: testStruct0{ + Foo: "abc", + Bar: 8, + }, + expect: 
map[string]interface{}{ + "foo": "abc", + "bar": float64(8), + }, + }, + "keep zero values": { + input: testStruct0{ + Foo: "abc", + Bar: 0, + }, + expect: map[string]interface{}{ + "foo": "abc", + "bar": float64(0), + }, + }, + "omit zero values": { + input: testStruct0{ + Foo: "abc", + Bar: 0, + Baz: "", + Quux: 0, + }, + expect: map[string]interface{}{ + "foo": "abc", + "bar": float64(0), + }, + }, + "keep non-zero values": { + input: testStruct0{ + Foo: "abc", + Bar: 0, + Baz: "hi", + Quux: 9, + }, + expect: map[string]interface{}{ + "foo": "abc", + "bar": float64(0), + "baz": "hi", + "quux": float64(9), + }, + }, + } + for name, tc := range testCases { + t.Run(name, func(tt *testing.T) { + vars, err := client.StructToVarMap(&tc.input) + if err != nil { + tt.Error(err) + } + if !reflect.DeepEqual(vars, tc.expect) { + tt.Logf("result:\n%s\nexpected:\n%s", vars, tc.expect) + tt.Errorf("result does not match expected") + } + }) + } +} diff --git a/services/workflows/internal/lagoon/client/helper_test.go b/services/workflows/internal/lagoon/client/helper_test.go new file mode 100644 index 0000000000..39591e0080 --- /dev/null +++ b/services/workflows/internal/lagoon/client/helper_test.go @@ -0,0 +1,6 @@ +package client + +// StructToVarMap exposes the private client.structToVarMap for tests. +func StructToVarMap(varStruct interface{}) (map[string]interface{}, error) { + return structToVarMap(varStruct) +} diff --git a/services/workflows/internal/lagoon/client/lgraphql/lgraphql.go b/services/workflows/internal/lagoon/client/lgraphql/lgraphql.go new file mode 100644 index 0000000000..21a4b8131f --- /dev/null +++ b/services/workflows/internal/lagoon/client/lgraphql/lgraphql.go @@ -0,0 +1,247 @@ +// Code generated by go-bindata. (@generated) DO NOT EDIT. 
+ + //Package lgraphql generated by go-bindata.// sources: +// _lgraphql/deployEnvironmentLatest.graphql +package lgraphql + +import ( + "bytes" + "compress/gzip" + "fmt" + "io" + "io/ioutil" + "os" + "path/filepath" + "strings" + "time" +) + +func bindataRead(data []byte, name string) ([]byte, error) { + gz, err := gzip.NewReader(bytes.NewBuffer(data)) + if err != nil { + return nil, fmt.Errorf("read %q: %v", name, err) + } + + var buf bytes.Buffer + _, err = io.Copy(&buf, gz) + clErr := gz.Close() + + if err != nil { + return nil, fmt.Errorf("read %q: %v", name, err) + } + if clErr != nil { + return nil, err + } + + return buf.Bytes(), nil +} + +type asset struct { + bytes []byte + info os.FileInfo +} + +type bindataFileInfo struct { + name string + size int64 + mode os.FileMode + modTime time.Time +} + +// Name return file name +func (fi bindataFileInfo) Name() string { + return fi.name +} + +// Size return file size +func (fi bindataFileInfo) Size() int64 { + return fi.size +} + +// Mode return file mode +func (fi bindataFileInfo) Mode() os.FileMode { + return fi.mode +} + +// ModTime return file modify time +func (fi bindataFileInfo) ModTime() time.Time { + return fi.modTime +} + +// IsDir return file whether a directory +func (fi bindataFileInfo) IsDir() bool { + return fi.mode&os.ModeDir != 0 +} + +// Sys return file is sys mode +func (fi bindataFileInfo) Sys() interface{} { + return nil +} + +var __lgraphqlDeployenvironmentlatestGraphql = 
[]byte("\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x54\x8e\xc1\x0a\xc2\x30\x10\x44\xef\xf9\x8a\x11\x3c\xd4\x5f\xc8\x59\x0f\x05\x6f\x7e\x41\xa4\x41\x16\xdb\x4d\x58\x27\x42\x91\xfe\xbb\x44\xa2\xad\xc7\x7d\x3b\xcc\x9b\xa9\x30\x50\x92\xa2\x73\xc0\x3e\xea\x53\x2c\xe9\x14\x95\x1e\xa7\xf5\xe8\x35\x17\xee\x6a\xe2\x5a\xc6\x7b\x3f\x78\x5c\x68\xa2\xb7\x4a\xb2\x49\x32\xe1\xec\xd1\x2b\x1d\x70\xc0\xcb\x01\xc0\x10\xf3\x98\xe6\x4d\xc9\x39\x30\x3e\xd8\x49\xed\xf2\x2d\x04\xfc\x29\xb7\x03\xda\xff\x2b\x6c\xe6\x46\x57\xe9\xcf\xdf\x3e\x16\x59\x4c\x8f\x81\xc1\x83\x56\xe2\x07\x2f\x75\x97\x5b\xde\x01\x00\x00\xff\xff\x49\x2e\xd7\x81\xed\x00\x00\x00") + +func _lgraphqlDeployenvironmentlatestGraphqlBytes() ([]byte, error) { + return bindataRead( + __lgraphqlDeployenvironmentlatestGraphql, + "_lgraphql/deployEnvironmentLatest.graphql", + ) +} + +func _lgraphqlDeployenvironmentlatestGraphql() (*asset, error) { + bytes, err := _lgraphqlDeployenvironmentlatestGraphqlBytes() + if err != nil { + return nil, err + } + + info := bindataFileInfo{name: "_lgraphql/deployEnvironmentLatest.graphql", size: 0, mode: os.FileMode(0), modTime: time.Unix(0, 0)} + a := &asset{bytes: bytes, info: info} + return a, nil +} + +// Asset loads and returns the asset for the given name. +// It returns an error if the asset could not be found or +// could not be loaded. +func Asset(name string) ([]byte, error) { + cannonicalName := strings.Replace(name, "\\", "/", -1) + if f, ok := _bindata[cannonicalName]; ok { + a, err := f() + if err != nil { + return nil, fmt.Errorf("Asset %s can't read by error: %v", name, err) + } + return a.bytes, nil + } + return nil, fmt.Errorf("Asset %s not found", name) +} + +// MustAsset is like Asset but panics when Asset would return an error. +// It simplifies safe initialization of global variables. 
+func MustAsset(name string) []byte { + a, err := Asset(name) + if err != nil { + panic("asset: Asset(" + name + "): " + err.Error()) + } + + return a +} + +// AssetInfo loads and returns the asset info for the given name. +// It returns an error if the asset could not be found or +// could not be loaded. +func AssetInfo(name string) (os.FileInfo, error) { + cannonicalName := strings.Replace(name, "\\", "/", -1) + if f, ok := _bindata[cannonicalName]; ok { + a, err := f() + if err != nil { + return nil, fmt.Errorf("AssetInfo %s can't read by error: %v", name, err) + } + return a.info, nil + } + return nil, fmt.Errorf("AssetInfo %s not found", name) +} + +// AssetNames returns the names of the assets. +func AssetNames() []string { + names := make([]string, 0, len(_bindata)) + for name := range _bindata { + names = append(names, name) + } + return names +} + +// _bindata is a table, holding each asset generator, mapped to its name. +var _bindata = map[string]func() (*asset, error){ + "_lgraphql/deployEnvironmentLatest.graphql": _lgraphqlDeployenvironmentlatestGraphql, +} + +// AssetDir returns the file names below a certain +// directory embedded in the file by go-bindata. +// For example if you run go-bindata on data/... and data contains the +// following hierarchy: +// data/ +// foo.txt +// img/ +// a.png +// b.png +// then AssetDir("data") would return []string{"foo.txt", "img"} +// AssetDir("data/img") would return []string{"a.png", "b.png"} +// AssetDir("foo.txt") and AssetDir("notexist") would return an error +// AssetDir("") will return []string{"data"}. 
+func AssetDir(name string) ([]string, error) { + node := _bintree + if len(name) != 0 { + cannonicalName := strings.Replace(name, "\\", "/", -1) + pathList := strings.Split(cannonicalName, "/") + for _, p := range pathList { + node = node.Children[p] + if node == nil { + return nil, fmt.Errorf("Asset %s not found", name) + } + } + } + if node.Func != nil { + return nil, fmt.Errorf("Asset %s not found", name) + } + rv := make([]string, 0, len(node.Children)) + for childName := range node.Children { + rv = append(rv, childName) + } + return rv, nil +} + +type bintree struct { + Func func() (*asset, error) + Children map[string]*bintree +} + +var _bintree = &bintree{nil, map[string]*bintree{ + "_lgraphql": &bintree{nil, map[string]*bintree{ + "deployEnvironmentLatest.graphql": &bintree{_lgraphqlDeployenvironmentlatestGraphql, map[string]*bintree{}}, + }}, +}} + +// RestoreAsset restores an asset under the given directory +func RestoreAsset(dir, name string) error { + data, err := Asset(name) + if err != nil { + return err + } + info, err := AssetInfo(name) + if err != nil { + return err + } + err = os.MkdirAll(_filePath(dir, filepath.Dir(name)), os.FileMode(0755)) + if err != nil { + return err + } + err = ioutil.WriteFile(_filePath(dir, name), data, info.Mode()) + if err != nil { + return err + } + err = os.Chtimes(_filePath(dir, name), info.ModTime(), info.ModTime()) + if err != nil { + return err + } + return nil +} + +// RestoreAssets restores an asset under the given directory recursively +func RestoreAssets(dir, name string) error { + children, err := AssetDir(name) + // File + if err != nil { + return RestoreAsset(dir, name) + } + // Dir + for _, child := range children { + err = RestoreAssets(dir, filepath.Join(name, child)) + if err != nil { + return err + } + } + return nil +} + +func _filePath(dir, name string) string { + cannonicalName := strings.Replace(name, "\\", "/", -1) + return filepath.Join(append([]string{dir}, strings.Split(cannonicalName, 
"/")...)...) +} diff --git a/services/workflows/internal/lagoon/client/mutation.go b/services/workflows/internal/lagoon/client/mutation.go new file mode 100644 index 0000000000..cc7db0ea08 --- /dev/null +++ b/services/workflows/internal/lagoon/client/mutation.go @@ -0,0 +1,17 @@ +package client + +import ( + "context" + + "github.com/uselagoon/lagoon/services/actions-handler/internal/schema" +) + +// DeployEnvironmentLatest deploys a latest environment. +func (c *Client) DeployEnvironmentLatest(ctx context.Context, + in *schema.DeployEnvironmentLatestInput, out *schema.DeployEnvironmentLatest) error { + req, err := c.newRequest("_lgraphql/deployEnvironmentLatest.graphql", in) + if err != nil { + return err + } + return c.client.Run(ctx, req, &out) +} diff --git a/services/workflows/internal/lagoon/deployment.go b/services/workflows/internal/lagoon/deployment.go new file mode 100644 index 0000000000..72223f5a19 --- /dev/null +++ b/services/workflows/internal/lagoon/deployment.go @@ -0,0 +1,20 @@ +// Package lagoon implements high-level functions for interacting with the +// Lagoon API. +package lagoon + +import ( + "context" + + "github.com/uselagoon/lagoon/services/actions-handler/internal/schema" +) + +// Deploy interface contains methods for deploying branches and environments in lagoon. +type Deploy interface { + DeployEnvironmentLatest(ctx context.Context, deploy *schema.DeployEnvironmentLatestInput, result *schema.DeployEnvironmentLatest) error +} + +// DeployLatest deploys the latest environment. 
+func DeployLatest(ctx context.Context, deploy *schema.DeployEnvironmentLatestInput, m Deploy) (*schema.DeployEnvironmentLatest, error) { + result := schema.DeployEnvironmentLatest{} + return &result, m.DeployEnvironmentLatest(ctx, deploy, &result) +} diff --git a/services/workflows/internal/lagoon/jwt/jwt.go b/services/workflows/internal/lagoon/jwt/jwt.go new file mode 100644 index 0000000000..9219e76818 --- /dev/null +++ b/services/workflows/internal/lagoon/jwt/jwt.go @@ -0,0 +1,30 @@ +package jwt + +import ( + "time" + + "github.com/dgrijalva/jwt-go" +) + +// LagoonClaims is a set of JWT claims used by Lagoon. +type LagoonClaims struct { + Role string `json:"role"` + jwt.StandardClaims +} + +// OneMinuteAdminToken returns a JWT admin token valid for one minute. +func OneMinuteAdminToken(secret, audience, subject, issuer string) (string, error) { + now := time.Now() + claims := LagoonClaims{ + Role: "admin", + StandardClaims: jwt.StandardClaims{ + Audience: audience, + ExpiresAt: now.Unix() + 60, + IssuedAt: now.Unix(), + Subject: subject, + Issuer: issuer, + }, + } + token := jwt.NewWithClaims(jwt.SigningMethodHS256, claims) + return token.SignedString([]byte(secret)) +} diff --git a/services/workflows/internal/lagoonclient/eventClassification.go b/services/workflows/internal/lagoonclient/eventClassification.go new file mode 100644 index 0000000000..3fbf24928d --- /dev/null +++ b/services/workflows/internal/lagoonclient/eventClassification.go @@ -0,0 +1,79 @@ +package lagoonclient + + +var Events = map[string][]string { + "mergeRequestOpened": []string{ + "github:pull_request:opened:handled", + "gitlab:merge_request:opened:handled", + "bitbucket:pullrequest:created:opened:handled", + "bitbucket:pullrequest:created:handled", + }, + "mergeRequestUpdated": []string{ + "github:pull_request:synchronize:handled", + "gitlab:merge_request:updated:handled", + "bitbucket:pullrequest:updated:opened:handled", + "bitbucket:pullrequest:updated:handled", + }, + 
"mergeRequestClosed": []string{ + "github:pull_request:closed:handled", + "bitbucket:pullrequest:fulfilled:handled", + "bitbucket:pullrequest:rejected:handled", + "gitlab:merge_request:closed:handled", + }, + "deleteEnvironment": []string{ + "github:delete:handled", + "gitlab:remove:handled", + "bitbucket:delete:handled", + "api:deleteEnvironment", + }, + "repoPushHandled": []string{ + "github:push:handled", + "bitbucket:repo:push:handled", + "gitlab:push:handled", + }, + "repoPushSkipped": []string{ + "github:push:skipped", + "gitlab:push:skipped", + "bitbucket:push:skipped", + }, + "deployEnvironment": []string{ + "api:deployEnvironmentLatest", + "api:deployEnvironmentBranch", + }, + "deployFinished": []string{ + "task:deploy-openshift:finished", + "task:remove-openshift-resources:finished", + "task:builddeploy-openshift:complete", + "task:builddeploy-kubernetes:complete", + }, + "removeFinished": []string{ + "task:remove-openshift:finished", + "task:remove-kubernetes:finished", + }, + "deployError": []string{ + "task:remove-openshift:error", + "task:remove-kubernetes:error", + "task:builddeploy-kubernetes:failed", + "task:builddeploy-openshift:failed", + }, + "notDeleted": []string{ + "github:pull_request:closed:CannotDeleteProductionEnvironment", + "github:push:CannotDeleteProductionEnvironment", + "bitbucket:repo:push:CannotDeleteProductionEnvironment", + "gitlab:push:CannotDeleteProductionEnvironment", + }, + "testing": []string{ + "testing", + }, +} + +func IsEventOfType(eventName string, eventType string) bool { + if eventTypes, ok := Events[eventType]; ok { + for _, a := range eventTypes { + if a == eventName { + return true + } + } + } + return false +} diff --git a/services/workflows/internal/lagoonclient/eventClassification_test.go b/services/workflows/internal/lagoonclient/eventClassification_test.go new file mode 100644 index 0000000000..4d056e09cc --- /dev/null +++ b/services/workflows/internal/lagoonclient/eventClassification_test.go @@ -0,0 +1,39 
@@ +package lagoonclient + +import "testing" + +func TestIsEventOfType(t *testing.T) { + type args struct { + eventName string + eventType string + } + tests := []struct { + name string + args args + want bool + }{ + { + name: "Event does match", + args: args{ + eventName: "github:pull_request:closed:CannotDeleteProductionEnvironment", + eventType: "notDeleted", + }, + want: true, + }, + { + name: "Event does not match", + args: args{ + eventName: "github:pull_request:closed:CannotDeleteProductionEnvironment", + eventType: "deployEnvironment", + }, + want: false, + }, + } + for _, tt := range tests { + t.Run(tt.name, func(t *testing.T) { + if got := IsEventOfType(tt.args.eventName, tt.args.eventType); got != tt.want { + t.Errorf("IsEventOfType() = %v, want %v", got, tt.want) + } + }) + } +} \ No newline at end of file diff --git a/services/workflows/internal/lagoonclient/generated.go b/services/workflows/internal/lagoonclient/generated.go new file mode 100644 index 0000000000..1ceb7f5348 --- /dev/null +++ b/services/workflows/internal/lagoonclient/generated.go @@ -0,0 +1,539 @@ +package lagoonclient + +// Code generated by github.com/Khan/genqlient, DO NOT EDIT. + +import ( + "context" + "encoding/json" + "fmt" + + "github.com/Khan/genqlient/graphql" +) + +// __getEnvironmentByIdWorkflowsInput is used internally by genqlient +type __getEnvironmentByIdWorkflowsInput struct { + EnvironmentId int `json:"environmentId"` +} + +// __getEnvironmentWorkflowsInput is used internally by genqlient +type __getEnvironmentWorkflowsInput struct { + Project int `json:"project"` + Name string `json:"name"` +} + +// __invokeCustomTaskInput is used internally by genqlient +type __invokeCustomTaskInput struct { + EnvironmentId int `json:"environmentId"` + AdvancedTaskDefinitionId int `json:"advancedTaskDefinitionId"` +} + +// getEnvironmentByIdWorkflowsEnvironmentByIdEnvironment includes the requested fields of the GraphQL type Environment. 
+// The GraphQL type's documentation follows. +// +// Lagoon Environment (for each branch, pullrequest there is an individual environment) +type getEnvironmentByIdWorkflowsEnvironmentByIdEnvironment struct { + // Internal ID of this Environment + Id int `json:"id"` + // Name of this Environment + Name string `json:"name"` + Workflows []getEnvironmentByIdWorkflowsEnvironmentByIdEnvironmentWorkflowsWorkflow `json:"workflows"` +} + +// getEnvironmentByIdWorkflowsEnvironmentByIdEnvironmentWorkflowsWorkflow includes the requested fields of the GraphQL type Workflow. +type getEnvironmentByIdWorkflowsEnvironmentByIdEnvironmentWorkflowsWorkflow struct { + Id int `json:"id"` + Event string `json:"event"` + AdvancedTaskDefinition getEnvironmentByIdWorkflowsEnvironmentByIdEnvironmentWorkflowsWorkflowAdvancedTaskDefinition `json:"-"` +} + +func (v *getEnvironmentByIdWorkflowsEnvironmentByIdEnvironmentWorkflowsWorkflow) UnmarshalJSON(b []byte) error { + + if string(b) == "null" { + return nil + } + + var firstPass struct { + *getEnvironmentByIdWorkflowsEnvironmentByIdEnvironmentWorkflowsWorkflow + AdvancedTaskDefinition json.RawMessage `json:"advancedTaskDefinition"` + graphql.NoUnmarshalJSON + } + firstPass.getEnvironmentByIdWorkflowsEnvironmentByIdEnvironmentWorkflowsWorkflow = v + + err := json.Unmarshal(b, &firstPass) + if err != nil { + return err + } + + { + dst := &v.AdvancedTaskDefinition + src := firstPass.AdvancedTaskDefinition + if len(src) != 0 && string(src) != "null" { + err = __unmarshalgetEnvironmentByIdWorkflowsEnvironmentByIdEnvironmentWorkflowsWorkflowAdvancedTaskDefinition( + src, dst) + if err != nil { + return fmt.Errorf( + "Unable to unmarshal getEnvironmentByIdWorkflowsEnvironmentByIdEnvironmentWorkflowsWorkflow.AdvancedTaskDefinition: %w", err) + } + } + } + return nil +} + +type __premarshalgetEnvironmentByIdWorkflowsEnvironmentByIdEnvironmentWorkflowsWorkflow struct { + Id int `json:"id"` + + Event string `json:"event"` + + AdvancedTaskDefinition 
json.RawMessage `json:"advancedTaskDefinition"` +} + +func (v *getEnvironmentByIdWorkflowsEnvironmentByIdEnvironmentWorkflowsWorkflow) MarshalJSON() ([]byte, error) { + premarshaled, err := v.__premarshalJSON() + if err != nil { + return nil, err + } + return json.Marshal(premarshaled) +} + +func (v *getEnvironmentByIdWorkflowsEnvironmentByIdEnvironmentWorkflowsWorkflow) __premarshalJSON() (*__premarshalgetEnvironmentByIdWorkflowsEnvironmentByIdEnvironmentWorkflowsWorkflow, error) { + var retval __premarshalgetEnvironmentByIdWorkflowsEnvironmentByIdEnvironmentWorkflowsWorkflow + + retval.Id = v.Id + retval.Event = v.Event + { + + dst := &retval.AdvancedTaskDefinition + src := v.AdvancedTaskDefinition + var err error + *dst, err = __marshalgetEnvironmentByIdWorkflowsEnvironmentByIdEnvironmentWorkflowsWorkflowAdvancedTaskDefinition( + &src) + if err != nil { + return nil, fmt.Errorf( + "Unable to marshal getEnvironmentByIdWorkflowsEnvironmentByIdEnvironmentWorkflowsWorkflow.AdvancedTaskDefinition: %w", err) + } + } + return &retval, nil +} + +// getEnvironmentByIdWorkflowsEnvironmentByIdEnvironmentWorkflowsWorkflowAdvancedTaskDefinition includes the requested fields of the GraphQL interface AdvancedTaskDefinition. +// +// getEnvironmentByIdWorkflowsEnvironmentByIdEnvironmentWorkflowsWorkflowAdvancedTaskDefinition is implemented by the following types: +// getEnvironmentByIdWorkflowsEnvironmentByIdEnvironmentWorkflowsWorkflowAdvancedTaskDefinitionAdvancedTaskDefinitionImage +// getEnvironmentByIdWorkflowsEnvironmentByIdEnvironmentWorkflowsWorkflowAdvancedTaskDefinitionAdvancedTaskDefinitionCommand +type getEnvironmentByIdWorkflowsEnvironmentByIdEnvironmentWorkflowsWorkflowAdvancedTaskDefinition interface { + implementsGraphQLInterfacegetEnvironmentByIdWorkflowsEnvironmentByIdEnvironmentWorkflowsWorkflowAdvancedTaskDefinition() + // GetTypename returns the receiver's concrete GraphQL type-name (see interface doc for possible values). 
+ GetTypename() string +} + +func (v *getEnvironmentByIdWorkflowsEnvironmentByIdEnvironmentWorkflowsWorkflowAdvancedTaskDefinitionAdvancedTaskDefinitionImage) implementsGraphQLInterfacegetEnvironmentByIdWorkflowsEnvironmentByIdEnvironmentWorkflowsWorkflowAdvancedTaskDefinition() { +} + +// GetTypename is a part of, and documented with, the interface getEnvironmentByIdWorkflowsEnvironmentByIdEnvironmentWorkflowsWorkflowAdvancedTaskDefinition. +func (v *getEnvironmentByIdWorkflowsEnvironmentByIdEnvironmentWorkflowsWorkflowAdvancedTaskDefinitionAdvancedTaskDefinitionImage) GetTypename() string { + return v.Typename +} + +func (v *getEnvironmentByIdWorkflowsEnvironmentByIdEnvironmentWorkflowsWorkflowAdvancedTaskDefinitionAdvancedTaskDefinitionCommand) implementsGraphQLInterfacegetEnvironmentByIdWorkflowsEnvironmentByIdEnvironmentWorkflowsWorkflowAdvancedTaskDefinition() { +} + +// GetTypename is a part of, and documented with, the interface getEnvironmentByIdWorkflowsEnvironmentByIdEnvironmentWorkflowsWorkflowAdvancedTaskDefinition. 
+func (v *getEnvironmentByIdWorkflowsEnvironmentByIdEnvironmentWorkflowsWorkflowAdvancedTaskDefinitionAdvancedTaskDefinitionCommand) GetTypename() string { + return v.Typename +} + +func __unmarshalgetEnvironmentByIdWorkflowsEnvironmentByIdEnvironmentWorkflowsWorkflowAdvancedTaskDefinition(b []byte, v *getEnvironmentByIdWorkflowsEnvironmentByIdEnvironmentWorkflowsWorkflowAdvancedTaskDefinition) error { + if string(b) == "null" { + return nil + } + + var tn struct { + TypeName string `json:"__typename"` + } + err := json.Unmarshal(b, &tn) + if err != nil { + return err + } + + switch tn.TypeName { + case "AdvancedTaskDefinitionImage": + *v = new(getEnvironmentByIdWorkflowsEnvironmentByIdEnvironmentWorkflowsWorkflowAdvancedTaskDefinitionAdvancedTaskDefinitionImage) + return json.Unmarshal(b, *v) + case "AdvancedTaskDefinitionCommand": + *v = new(getEnvironmentByIdWorkflowsEnvironmentByIdEnvironmentWorkflowsWorkflowAdvancedTaskDefinitionAdvancedTaskDefinitionCommand) + return json.Unmarshal(b, *v) + case "": + return fmt.Errorf( + "Response was missing AdvancedTaskDefinition.__typename") + default: + return fmt.Errorf( + `Unexpected concrete type for getEnvironmentByIdWorkflowsEnvironmentByIdEnvironmentWorkflowsWorkflowAdvancedTaskDefinition: "%v"`, tn.TypeName) + } +} + +func __marshalgetEnvironmentByIdWorkflowsEnvironmentByIdEnvironmentWorkflowsWorkflowAdvancedTaskDefinition(v *getEnvironmentByIdWorkflowsEnvironmentByIdEnvironmentWorkflowsWorkflowAdvancedTaskDefinition) ([]byte, error) { + + var typename string + switch v := (*v).(type) { + case *getEnvironmentByIdWorkflowsEnvironmentByIdEnvironmentWorkflowsWorkflowAdvancedTaskDefinitionAdvancedTaskDefinitionImage: + typename = "AdvancedTaskDefinitionImage" + + result := struct { + TypeName string `json:"__typename"` + *getEnvironmentByIdWorkflowsEnvironmentByIdEnvironmentWorkflowsWorkflowAdvancedTaskDefinitionAdvancedTaskDefinitionImage + }{typename, v} + return json.Marshal(result) + case 
*getEnvironmentByIdWorkflowsEnvironmentByIdEnvironmentWorkflowsWorkflowAdvancedTaskDefinitionAdvancedTaskDefinitionCommand: + typename = "AdvancedTaskDefinitionCommand" + + result := struct { + TypeName string `json:"__typename"` + *getEnvironmentByIdWorkflowsEnvironmentByIdEnvironmentWorkflowsWorkflowAdvancedTaskDefinitionAdvancedTaskDefinitionCommand + }{typename, v} + return json.Marshal(result) + case nil: + return []byte("null"), nil + default: + return nil, fmt.Errorf( + `Unexpected concrete type for getEnvironmentByIdWorkflowsEnvironmentByIdEnvironmentWorkflowsWorkflowAdvancedTaskDefinition: "%T"`, v) + } +} + +// getEnvironmentByIdWorkflowsEnvironmentByIdEnvironmentWorkflowsWorkflowAdvancedTaskDefinitionAdvancedTaskDefinitionCommand includes the requested fields of the GraphQL type AdvancedTaskDefinitionCommand. +type getEnvironmentByIdWorkflowsEnvironmentByIdEnvironmentWorkflowsWorkflowAdvancedTaskDefinitionAdvancedTaskDefinitionCommand struct { + Typename string `json:"__typename"` + Id int `json:"id"` + Command string `json:"command"` +} + +// getEnvironmentByIdWorkflowsEnvironmentByIdEnvironmentWorkflowsWorkflowAdvancedTaskDefinitionAdvancedTaskDefinitionImage includes the requested fields of the GraphQL type AdvancedTaskDefinitionImage. +type getEnvironmentByIdWorkflowsEnvironmentByIdEnvironmentWorkflowsWorkflowAdvancedTaskDefinitionAdvancedTaskDefinitionImage struct { + Typename string `json:"__typename"` + Id int `json:"id"` + Image string `json:"image"` +} + +// getEnvironmentByIdWorkflowsResponse is returned by getEnvironmentByIdWorkflows on success. +type getEnvironmentByIdWorkflowsResponse struct { + EnvironmentById getEnvironmentByIdWorkflowsEnvironmentByIdEnvironment `json:"environmentById"` +} + +// getEnvironmentWorkflowsEnvironmentByNameEnvironment includes the requested fields of the GraphQL type Environment. +// The GraphQL type's documentation follows. 
+// +// Lagoon Environment (for each branch, pullrequest there is an individual environment) +type getEnvironmentWorkflowsEnvironmentByNameEnvironment struct { + // Internal ID of this Environment + Id int `json:"id"` + // Name of this Environment + Name string `json:"name"` + Workflows []getEnvironmentWorkflowsEnvironmentByNameEnvironmentWorkflowsWorkflow `json:"workflows"` +} + +// getEnvironmentWorkflowsEnvironmentByNameEnvironmentWorkflowsWorkflow includes the requested fields of the GraphQL type Workflow. +type getEnvironmentWorkflowsEnvironmentByNameEnvironmentWorkflowsWorkflow struct { + Id int `json:"id"` + Event string `json:"event"` + AdvancedTaskDefinition getEnvironmentWorkflowsEnvironmentByNameEnvironmentWorkflowsWorkflowAdvancedTaskDefinition `json:"-"` +} + +func (v *getEnvironmentWorkflowsEnvironmentByNameEnvironmentWorkflowsWorkflow) UnmarshalJSON(b []byte) error { + + if string(b) == "null" { + return nil + } + + var firstPass struct { + *getEnvironmentWorkflowsEnvironmentByNameEnvironmentWorkflowsWorkflow + AdvancedTaskDefinition json.RawMessage `json:"advancedTaskDefinition"` + graphql.NoUnmarshalJSON + } + firstPass.getEnvironmentWorkflowsEnvironmentByNameEnvironmentWorkflowsWorkflow = v + + err := json.Unmarshal(b, &firstPass) + if err != nil { + return err + } + + { + dst := &v.AdvancedTaskDefinition + src := firstPass.AdvancedTaskDefinition + if len(src) != 0 && string(src) != "null" { + err = __unmarshalgetEnvironmentWorkflowsEnvironmentByNameEnvironmentWorkflowsWorkflowAdvancedTaskDefinition( + src, dst) + if err != nil { + return fmt.Errorf( + "Unable to unmarshal getEnvironmentWorkflowsEnvironmentByNameEnvironmentWorkflowsWorkflow.AdvancedTaskDefinition: %w", err) + } + } + } + return nil +} + +type __premarshalgetEnvironmentWorkflowsEnvironmentByNameEnvironmentWorkflowsWorkflow struct { + Id int `json:"id"` + + Event string `json:"event"` + + AdvancedTaskDefinition json.RawMessage `json:"advancedTaskDefinition"` +} + +func (v 
*getEnvironmentWorkflowsEnvironmentByNameEnvironmentWorkflowsWorkflow) MarshalJSON() ([]byte, error) { + premarshaled, err := v.__premarshalJSON() + if err != nil { + return nil, err + } + return json.Marshal(premarshaled) +} + +func (v *getEnvironmentWorkflowsEnvironmentByNameEnvironmentWorkflowsWorkflow) __premarshalJSON() (*__premarshalgetEnvironmentWorkflowsEnvironmentByNameEnvironmentWorkflowsWorkflow, error) { + var retval __premarshalgetEnvironmentWorkflowsEnvironmentByNameEnvironmentWorkflowsWorkflow + + retval.Id = v.Id + retval.Event = v.Event + { + + dst := &retval.AdvancedTaskDefinition + src := v.AdvancedTaskDefinition + var err error + *dst, err = __marshalgetEnvironmentWorkflowsEnvironmentByNameEnvironmentWorkflowsWorkflowAdvancedTaskDefinition( + &src) + if err != nil { + return nil, fmt.Errorf( + "Unable to marshal getEnvironmentWorkflowsEnvironmentByNameEnvironmentWorkflowsWorkflow.AdvancedTaskDefinition: %w", err) + } + } + return &retval, nil +} + +// getEnvironmentWorkflowsEnvironmentByNameEnvironmentWorkflowsWorkflowAdvancedTaskDefinition includes the requested fields of the GraphQL interface AdvancedTaskDefinition. +// +// getEnvironmentWorkflowsEnvironmentByNameEnvironmentWorkflowsWorkflowAdvancedTaskDefinition is implemented by the following types: +// getEnvironmentWorkflowsEnvironmentByNameEnvironmentWorkflowsWorkflowAdvancedTaskDefinitionAdvancedTaskDefinitionImage +// getEnvironmentWorkflowsEnvironmentByNameEnvironmentWorkflowsWorkflowAdvancedTaskDefinitionAdvancedTaskDefinitionCommand +type getEnvironmentWorkflowsEnvironmentByNameEnvironmentWorkflowsWorkflowAdvancedTaskDefinition interface { + implementsGraphQLInterfacegetEnvironmentWorkflowsEnvironmentByNameEnvironmentWorkflowsWorkflowAdvancedTaskDefinition() + // GetTypename returns the receiver's concrete GraphQL type-name (see interface doc for possible values). 
+ GetTypename() string +} + +func (v *getEnvironmentWorkflowsEnvironmentByNameEnvironmentWorkflowsWorkflowAdvancedTaskDefinitionAdvancedTaskDefinitionImage) implementsGraphQLInterfacegetEnvironmentWorkflowsEnvironmentByNameEnvironmentWorkflowsWorkflowAdvancedTaskDefinition() { +} + +// GetTypename is a part of, and documented with, the interface getEnvironmentWorkflowsEnvironmentByNameEnvironmentWorkflowsWorkflowAdvancedTaskDefinition. +func (v *getEnvironmentWorkflowsEnvironmentByNameEnvironmentWorkflowsWorkflowAdvancedTaskDefinitionAdvancedTaskDefinitionImage) GetTypename() string { + return v.Typename +} + +func (v *getEnvironmentWorkflowsEnvironmentByNameEnvironmentWorkflowsWorkflowAdvancedTaskDefinitionAdvancedTaskDefinitionCommand) implementsGraphQLInterfacegetEnvironmentWorkflowsEnvironmentByNameEnvironmentWorkflowsWorkflowAdvancedTaskDefinition() { +} + +// GetTypename is a part of, and documented with, the interface getEnvironmentWorkflowsEnvironmentByNameEnvironmentWorkflowsWorkflowAdvancedTaskDefinition. 
+func (v *getEnvironmentWorkflowsEnvironmentByNameEnvironmentWorkflowsWorkflowAdvancedTaskDefinitionAdvancedTaskDefinitionCommand) GetTypename() string { + return v.Typename +} + +func __unmarshalgetEnvironmentWorkflowsEnvironmentByNameEnvironmentWorkflowsWorkflowAdvancedTaskDefinition(b []byte, v *getEnvironmentWorkflowsEnvironmentByNameEnvironmentWorkflowsWorkflowAdvancedTaskDefinition) error { + if string(b) == "null" { + return nil + } + + var tn struct { + TypeName string `json:"__typename"` + } + err := json.Unmarshal(b, &tn) + if err != nil { + return err + } + + switch tn.TypeName { + case "AdvancedTaskDefinitionImage": + *v = new(getEnvironmentWorkflowsEnvironmentByNameEnvironmentWorkflowsWorkflowAdvancedTaskDefinitionAdvancedTaskDefinitionImage) + return json.Unmarshal(b, *v) + case "AdvancedTaskDefinitionCommand": + *v = new(getEnvironmentWorkflowsEnvironmentByNameEnvironmentWorkflowsWorkflowAdvancedTaskDefinitionAdvancedTaskDefinitionCommand) + return json.Unmarshal(b, *v) + case "": + return fmt.Errorf( + "Response was missing AdvancedTaskDefinition.__typename") + default: + return fmt.Errorf( + `Unexpected concrete type for getEnvironmentWorkflowsEnvironmentByNameEnvironmentWorkflowsWorkflowAdvancedTaskDefinition: "%v"`, tn.TypeName) + } +} + +func __marshalgetEnvironmentWorkflowsEnvironmentByNameEnvironmentWorkflowsWorkflowAdvancedTaskDefinition(v *getEnvironmentWorkflowsEnvironmentByNameEnvironmentWorkflowsWorkflowAdvancedTaskDefinition) ([]byte, error) { + + var typename string + switch v := (*v).(type) { + case *getEnvironmentWorkflowsEnvironmentByNameEnvironmentWorkflowsWorkflowAdvancedTaskDefinitionAdvancedTaskDefinitionImage: + typename = "AdvancedTaskDefinitionImage" + + result := struct { + TypeName string `json:"__typename"` + *getEnvironmentWorkflowsEnvironmentByNameEnvironmentWorkflowsWorkflowAdvancedTaskDefinitionAdvancedTaskDefinitionImage + }{typename, v} + return json.Marshal(result) + case 
*getEnvironmentWorkflowsEnvironmentByNameEnvironmentWorkflowsWorkflowAdvancedTaskDefinitionAdvancedTaskDefinitionCommand: + typename = "AdvancedTaskDefinitionCommand" + + result := struct { + TypeName string `json:"__typename"` + *getEnvironmentWorkflowsEnvironmentByNameEnvironmentWorkflowsWorkflowAdvancedTaskDefinitionAdvancedTaskDefinitionCommand + }{typename, v} + return json.Marshal(result) + case nil: + return []byte("null"), nil + default: + return nil, fmt.Errorf( + `Unexpected concrete type for getEnvironmentWorkflowsEnvironmentByNameEnvironmentWorkflowsWorkflowAdvancedTaskDefinition: "%T"`, v) + } +} + +// getEnvironmentWorkflowsEnvironmentByNameEnvironmentWorkflowsWorkflowAdvancedTaskDefinitionAdvancedTaskDefinitionCommand includes the requested fields of the GraphQL type AdvancedTaskDefinitionCommand. +type getEnvironmentWorkflowsEnvironmentByNameEnvironmentWorkflowsWorkflowAdvancedTaskDefinitionAdvancedTaskDefinitionCommand struct { + Typename string `json:"__typename"` + Id int `json:"id"` + Command string `json:"command"` +} + +// getEnvironmentWorkflowsEnvironmentByNameEnvironmentWorkflowsWorkflowAdvancedTaskDefinitionAdvancedTaskDefinitionImage includes the requested fields of the GraphQL type AdvancedTaskDefinitionImage. +type getEnvironmentWorkflowsEnvironmentByNameEnvironmentWorkflowsWorkflowAdvancedTaskDefinitionAdvancedTaskDefinitionImage struct { + Typename string `json:"__typename"` + Id int `json:"id"` + Image string `json:"image"` +} + +// getEnvironmentWorkflowsResponse is returned by getEnvironmentWorkflows on success. +type getEnvironmentWorkflowsResponse struct { + EnvironmentByName getEnvironmentWorkflowsEnvironmentByNameEnvironment `json:"environmentByName"` +} + +// invokeCustomTaskInvokeRegisteredTask includes the requested fields of the GraphQL type Task. 
+type invokeCustomTaskInvokeRegisteredTask struct { + Id int `json:"id"` + Status string `json:"status"` +} + +// invokeCustomTaskResponse is returned by invokeCustomTask on success. +type invokeCustomTaskResponse struct { + InvokeRegisteredTask invokeCustomTaskInvokeRegisteredTask `json:"invokeRegisteredTask"` +} + +func getEnvironmentWorkflows( + ctx context.Context, + client graphql.Client, + project int, + name string, +) (*getEnvironmentWorkflowsResponse, error) { + __input := __getEnvironmentWorkflowsInput{ + Project: project, + Name: name, + } + var err error + + var retval getEnvironmentWorkflowsResponse + err = client.MakeRequest( + ctx, + "getEnvironmentWorkflows", + ` +query getEnvironmentWorkflows ($project: Int!, $name: String!) { + environmentByName(project: $project, name: $name) { + id + name + workflows { + id + event + advancedTaskDefinition { + __typename + ... on AdvancedTaskDefinitionCommand { + id + command + } + ... on AdvancedTaskDefinitionImage { + id + image + } + } + } + } +} +`, + &retval, + &__input, + ) + return &retval, err +} + +func getEnvironmentByIdWorkflows( + ctx context.Context, + client graphql.Client, + environmentId int, +) (*getEnvironmentByIdWorkflowsResponse, error) { + __input := __getEnvironmentByIdWorkflowsInput{ + EnvironmentId: environmentId, + } + var err error + + var retval getEnvironmentByIdWorkflowsResponse + err = client.MakeRequest( + ctx, + "getEnvironmentByIdWorkflows", + ` +query getEnvironmentByIdWorkflows ($environmentId: Int!) { + environmentById(id: $environmentId) { + id + name + workflows { + id + event + advancedTaskDefinition { + __typename + ... on AdvancedTaskDefinitionCommand { + id + command + } + ... 
on AdvancedTaskDefinitionImage { + id + image + } + } + } + } +} +`, + &retval, + &__input, + ) + return &retval, err +} + +func invokeCustomTask( + ctx context.Context, + client graphql.Client, + environmentId int, + advancedTaskDefinitionId int, +) (*invokeCustomTaskResponse, error) { + __input := __invokeCustomTaskInput{ + EnvironmentId: environmentId, + AdvancedTaskDefinitionId: advancedTaskDefinitionId, + } + var err error + + var retval invokeCustomTaskResponse + err = client.MakeRequest( + ctx, + "invokeCustomTask", + ` +mutation invokeCustomTask ($environmentId: Int!, $advancedTaskDefinitionId: Int!) { + invokeRegisteredTask(environment: $environmentId, advancedTaskDefinition: $advancedTaskDefinitionId) { + id + status + } +} +`, + &retval, + &__input, + ) + return &retval, err +} diff --git a/services/workflows/internal/lagoonclient/genqlient.graphql b/services/workflows/internal/lagoonclient/genqlient.graphql new file mode 100644 index 0000000000..9ec678229c --- /dev/null +++ b/services/workflows/internal/lagoonclient/genqlient.graphql @@ -0,0 +1,48 @@ +query getEnvironmentWorkflows($project: Int!, $name: String!) { + environmentByName(project: $project, name: $name) { + id + name + workflows { + id + event + advancedTaskDefinition { + ... on AdvancedTaskDefinitionCommand { + id + command + } + ... on AdvancedTaskDefinitionImage { + id + image + } + } + } + } +} + +query getEnvironmentByIdWorkflows($environmentId: Int!) { + environmentById(id: $environmentId) { + id + name + workflows { + id + event + advancedTaskDefinition { + ... on AdvancedTaskDefinitionCommand { + id + command + } + ... on AdvancedTaskDefinitionImage { + id + image + } + } + } + } +} + +mutation invokeCustomTask($environmentId: Int!, $advancedTaskDefinitionId: Int!) 
{ + invokeRegisteredTask(environment: $environmentId, advancedTaskDefinition: $advancedTaskDefinitionId) { + id + status + } +} \ No newline at end of file diff --git a/services/workflows/internal/lagoonclient/genqlient.yaml b/services/workflows/internal/lagoonclient/genqlient.yaml new file mode 100644 index 0000000000..bfc5f265b3 --- /dev/null +++ b/services/workflows/internal/lagoonclient/genqlient.yaml @@ -0,0 +1,6 @@ +# Default genqlient config; for full documentation see: +# https://github.com/Khan/genqlient/blob/main/docs/genqlient.yaml +schema: schema.graphql +operations: +- genqlient.graphql +generated: generated.go diff --git a/services/workflows/internal/lagoonclient/schema.graphql b/services/workflows/internal/lagoonclient/schema.graphql new file mode 100644 index 0000000000..23d5b1f9d7 --- /dev/null +++ b/services/workflows/internal/lagoonclient/schema.graphql @@ -0,0 +1,2203 @@ +input AddBackupInput { + id: Int + environment: Int! + source: String! + backupId: String! + created: String! +} + +input AddBillingModifierInput { + """ + The existing billing group for this modifier + """ + group: GroupInput! + + """ + The date this modifier should start to be applied - Format: YYYY-MM-DD + """ + startDate: String! + + """ + The date this modifer will expire - Format: YYYY-MM-DD + """ + endDate: String! 
+ + """ + The amount that the total monthly bill should be discounted - Format (Float) + """ + discountFixed: Float + + """ + The percentage the total monthly bill should be discounted - Format (0-100) + """ + discountPercentage: Float + + """ + The amount of exta cost that should be added to the total- Format (Float) + """ + extraFixed: Float + + """ + The percentage the total monthly bill should be added - Format (0-100) + """ + extraPercentage: Float + + """ + The minimum amount of the invoice applied to the total- Format (Float) + """ + min: Float + + """ + The maximum amount of the invoice applied to the total- Format (Float) + """ + max: Float + + """ + Customer comments are visible to the customer + """ + customerComments: String + + """ + Admin comments will not be visible to the customer. + """ + adminComments: String! + + """ + The order this modifer should be applied + """ + weight: Int +} + +input AddDeploymentInput { + id: Int + name: String! + status: DeploymentStatusType! + created: String! + started: String + completed: String + environment: Int! + remoteId: String +} + +input AddDeployTargetConfigInput { + id: Int + project: Int! + weight: Int + branches: String + pullrequests: String + deployTarget: Int + deployTargetProjectPattern: String +} + +input AddEnvironmentInput { + id: Int + name: String! + project: Int! + deployType: DeployType! + deployBaseRef: String! + deployHeadRef: String + deployTitle: String + environmentType: EnvType! + openshiftProjectName: String + kubernetesNamespaceName: String + openshift: Int + openshiftProjectPattern: String + kubernetes: Int + kubernetesNamespacePattern: String +} + +input AddFactInput { + id: Int + environment: Int! + name: String! + value: String! + source: String! + description: String! + keyFact: Boolean + type: FactType + category: String +} + +input AddFactReferenceInput { + fid: Int! + name: String! +} + +input AddFactsInput { + facts: [AddFactInput]! +} + +input AddGroupInput { + name: String! 
+ parentGroup: GroupInput +} + +input AddKubernetesInput { + id: Int + name: String! + consoleUrl: String! + token: String + routerPattern: String + projectUser: String + sshHost: String + sshPort: String + monitoringConfig: JSON +} + +input AddNotificationEmailInput { + name: String! + emailAddress: String! +} + +input AddNotificationMicrosoftTeamsInput { + name: String! + webhook: String! +} + +input AddNotificationRocketChatInput { + name: String! + webhook: String! + channel: String! +} + +input AddNotificationSlackInput { + name: String! + webhook: String! + channel: String! +} + +input AddNotificationToProjectInput { + project: String! + notificationType: NotificationType! + notificationName: String! + contentType: NotificationContentType + notificationSeverityThreshold: ProblemSeverityRating +} + +input AddNotificationWebhookInput { + name: String! + webhook: String! +} + +input AddOpenshiftInput { + id: Int + name: String! + consoleUrl: String! + token: String + routerPattern: String + projectUser: String + sshHost: String + sshPort: String + monitoringConfig: JSON +} + +input AddOrUpdateEnvironmentStorageInput { + environment: Int! + persistentStorageClaim: String! + bytesUsed: Int! + + """ + Date in format 'YYYY-MM-DD' + """ + updated: String +} + +input AddProblemHarborScanMatchInput { + name: String! + description: String! + defaultLagoonProject: String + defaultLagoonEnvironment: String + defaultLagoonService: String + regex: String! +} + +input AddProblemInput { + id: Int + environment: Int! + severity: ProblemSeverityRating + severityScore: SeverityScore + identifier: String! + service: String + source: String! + associatedPackage: String + description: String + links: String + version: String + fixedVersion: String + data: String! + created: String +} + +input AddProjectInput { + id: Int + name: String! + gitUrl: String! 
+ subfolder: String + routerPattern: String + openshift: Int + openshiftProjectPattern: String + kubernetes: Int + kubernetesNamespacePattern: String + activeSystemsDeploy: String + activeSystemsPromote: String + activeSystemsRemove: String + activeSystemsTask: String + activeSystemsMisc: String + branches: String + pullrequests: String + productionEnvironment: String! + productionRoutes: String + productionAlias: String + standbyProductionEnvironment: String + standbyRoutes: String + standbyAlias: String + availability: ProjectAvailability + autoIdle: Int + storageCalc: Int + developmentEnvironmentsLimit: Int + privateKey: String + problemsUi: Int + factsUi: Int + deploymentsDisabled: Int +} + +input AddRestoreInput { + id: Int + status: RestoreStatusType + restoreLocation: String + created: String + execute: Boolean + backupId: String! +} + +input AddSshKeyInput { + id: Int + name: String! + keyValue: String! + keyType: SshKeyType! + user: UserInput! +} + +input AddUserInput { + email: String! 
+ firstName: String + lastName: String + comment: String + gitlabId: Int +} + +input AddWorkflowInput { + event: String + project: Int + advancedTaskDefinition: Int +} + +type AdvancedTask { + id: Int + name: String + status: String + created: String + started: String + completed: String + environment: Environment + service: String + advancedTask: String + remoteId: String + logs: String + files: [File] +} + +input AdvancedTaskArgumentInput { + name: String + value: String +} + +union AdvancedTaskDefinition = + AdvancedTaskDefinitionImage + | AdvancedTaskDefinitionCommand + +type AdvancedTaskDefinitionArgument { + id: Int + name: String + type: String + advancedTaskDefinition: AdvancedTaskDefinition +} + +input AdvancedTaskDefinitionArgumentInput { + name: String + type: AdvancedTaskDefinitionArgumentTypes +} + +enum AdvancedTaskDefinitionArgumentTypes { + NUMERIC + STRING +} + +type AdvancedTaskDefinitionCommand { + id: Int + name: String + description: String + type: AdvancedTaskDefinitionTypes + service: String + command: String + groupName: String + environment: Int + project: Int + permission: TaskPermission + created: String + deleted: String +} + +type AdvancedTaskDefinitionImage { + id: Int + name: String + description: String + type: AdvancedTaskDefinitionTypes + image: String + service: String + groupName: String + environment: Int + project: Int + permission: TaskPermission + advancedTaskDefinitionArguments: [AdvancedTaskDefinitionArgument] + created: String + deleted: String +} + +input AdvancedTaskDefinitionInput { + name: String + description: String + image: String + type: AdvancedTaskDefinitionTypes + service: String + command: String + environment: Int + project: Int + groupName: String + permission: TaskPermission + advancedTaskDefinitionArguments: [AdvancedTaskDefinitionArgumentInput] +} + +enum AdvancedTaskDefinitionTypes { + COMMAND + IMAGE +} + +type Backup { + id: Int + environment: Environment + source: String + backupId: String + created: 
String + deleted: String + restore: Restore +} + +type BillingGroup implements GroupInterface { + id: String + name: String + type: String + groups: [GroupInterface] + members: [GroupMembership] + projects: [Project] + currency: String + billingSoftware: String + modifiers: [BillingModifier] + uptimeRobotStatusPageId: String +} + +input BillingGroupInput { + name: String! + currency: Currency! + billingSoftware: String + uptimeRobotStatusPageId: String +} + +type BillingModifier { + id: Int + group: BillingGroup + startDate: String + endDate: String + discountFixed: Float + discountPercentage: Float + extraFixed: Float + extraPercentage: Float + min: Float + max: Float + customerComments: String + adminComments: String + weight: Int +} + +input BillingModifierPatchInput { + group: GroupInput + startDate: String + endDate: String + discountFixed: Float + discountPercentage: Float + extraFixed: Float + extraPercentage: Float + min: Float + max: Float + customerComments: String + adminComments: String + weight: Int +} + +input BulkProblem { + severity: ProblemSeverityRating + severityScore: SeverityScore + identifier: String + data: String +} + +input CancelDeploymentInput { + deployment: DeploymentInput! +} + +enum Currency { + AUD + EUR + GBP + USD + CHF + ZAR +} + +scalar Date + +input DeleteBackupInput { + backupId: String! +} + +input DeleteBillingModifierInput { + id: Int! +} + +input DeleteDeploymentInput { + id: Int! +} + +input DeleteDeployTargetConfigInput { + id: Int! + project: Int! + execute: Boolean +} + +input DeleteEnvironmentInput { + name: String! + project: String! + execute: Boolean +} + +input DeleteEnvVariableInput { + id: Int! +} + +input DeleteFactInput { + environment: Int! + name: String! +} + +input DeleteFactReferenceInput { + factName: String! + referenceName: String! + eid: Int! +} + +input DeleteFactReferencesByFactIdInput { + fid: Int! +} + +input DeleteFactsFromSourceInput { + environment: Int! + source: String! 
+} + +input DeleteFilesForTaskInput { + id: Int! +} + +input DeleteGroupInput { + group: GroupInput! +} + +input DeleteKubernetesInput { + name: String! +} + +input DeleteNotificationEmailInput { + name: String! +} + +input DeleteNotificationMicrosoftTeamsInput { + name: String! +} + +input DeleteNotificationRocketChatInput { + name: String! +} + +input DeleteNotificationSlackInput { + name: String! +} + +input DeleteNotificationWebhookInput { + name: String! +} + +input DeleteOpenshiftInput { + name: String! +} + +input DeleteProblemHarborScanMatchInput { + id: Int! +} + +input DeleteProblemInput { + environment: Int! + identifier: String! +} + +input DeleteProblemsFromSourceInput { + environment: Int! + source: String! + service: String! +} + +input DeleteProjectInput { + project: String! +} + +input DeleteSshKeyByIdInput { + id: Int! +} + +input DeleteSshKeyInput { + name: String! +} + +input DeleteTaskInput { + id: Int! +} + +input DeleteUserInput { + user: UserInput! +} + +input DeployEnvironmentBranchInput { + project: ProjectInput! + branchName: String! + branchRef: String +} + +input DeployEnvironmentLatestInput { + environment: EnvironmentInput! +} + +input DeployEnvironmentPromoteInput { + sourceEnvironment: EnvironmentInput! + project: ProjectInput! + destinationEnvironment: String! +} + +input DeployEnvironmentPullrequestInput { + project: ProjectInput! + number: Int! + title: String! + baseBranchName: String! + baseBranchRef: String! + headBranchName: String! + headBranchRef: String! 
+} + +type Deployment { + id: Int + name: String + status: String + created: String + started: String + completed: String + environment: Environment + remoteId: String + buildLog: String + + """ + The Lagoon URL + """ + uiLink: String +} + +""" +Must provide id OR name and environment +""" +input DeploymentInput { + id: Int + name: String + environment: EnvironmentInput +} + +enum DeploymentStatusType { + NEW + PENDING + RUNNING + CANCELLED + ERROR + FAILED + COMPLETE +} + +type DeployTargetConfig { + id: Int + project: Project + weight: Int + branches: String + pullrequests: String + deployTarget: Openshift + deployTargetProjectPattern: String +} + +enum DeployType { + BRANCH + PULLREQUEST + PROMOTE +} + +""" +Lagoon Environment (for each branch, pullrequest there is an individual environment) +""" +type Environment { + """ + Internal ID of this Environment + """ + id: Int + + """ + Name of this Environment + """ + name: String + + """ + Reference to the Project Object + """ + project: Project + + """ + Which Deployment Type this environment is, can be `branch`, `pullrequest`, `promote` + """ + deployType: String + + """ + The version control base ref for deployments (e.g., branch name, tag, or commit id) + """ + deployBaseRef: String + + """ + The version control head ref for deployments (e.g., branch name, tag, or commit id) + """ + deployHeadRef: String + + """ + The title of the last deployment (PR title) + """ + deployTitle: String + + """ + Should this environment have auto idling enabled (`1` or `0`) + """ + autoIdle: Int + + """ + Which Environment Type this environment is, can be `production`, `development` + """ + environmentType: String + + """ + Name of the OpenShift Project/Namespace this environment is deployed into + """ + openshiftProjectName: String + + """ + Name of the Kubernetes Namespace this environment is deployed into + """ + kubernetesNamespaceName: String + + """ + Unix Timestamp of the last time this environment has been updated + """ + 
updated: String
+
+  """
+  Unix Timestamp of the creation time
+  """
+  created: String
+
+  """
+  Unix Timestamp of when this project has been deleted
+  """
+  deleted: String
+
+  """
+  Reference to EnvironmentHoursMonth API Object, which returns how many hours this environment ran in a specific month
+  """
+  hoursMonth(month: Date): EnvironmentHoursMonth
+
+  """
+  Reference to EnvironmentStorage API Object, which shows the Storage consumption of this environment per day
+  """
+  storages: [EnvironmentStorage]
+
+  """
+  Reference to EnvironmentStorageMonth API Object, which returns how many
+  storage per day this environment used in a specific month
+  """
+  storageMonth(month: Date): EnvironmentStorageMonth
+
+  """
+  Reference to EnvironmentHitsMonth API Object, which returns how many hits this environment generated in a specific month
+  """
+  hitsMonth(month: Date): EnvironmentHitsMonth
+
+  """
+  Environment variables available during build-time and run-time
+  """
+  envVariables: [EnvKeyValue]
+  route: String
+  routes: String
+  monitoringUrls: String
+  deployments(name: String, limit: Int): [Deployment]
+  backups(includeDeleted: Boolean, limit: Int): [Backup]
+  tasks(id: Int, limit: Int): [Task]
+  advancedTasks: [AdvancedTaskDefinition]
+  services: [EnvironmentService]
+  problems(severity: [ProblemSeverityRating], source: [String]): [Problem]
+  facts(keyFacts: Boolean, limit: Int): [Fact]
+  openshift: Openshift
+  openshiftProjectPattern: String
+  kubernetes: Kubernetes
+  kubernetesNamespacePattern: String
+  workflows: [Workflow]
+}
+
+type EnvironmentFactSearchResults {
+  count: Int
+  environments: [Environment]
+}
+
+type EnvironmentHitsMonth {
+  total: Int
+}
+
+type EnvironmentHoursMonth {
+  month: String
+  hours: Int
+}
+
+"""
+Must provide id OR name and project
+"""
+input EnvironmentInput {
+  id: Int
+  name: String
+  project: ProjectInput
+}
+
+type EnvironmentService {
+  id: Int
+  name: String
+}
+
+type EnvironmentStorage {
+  id: Int
+  environment: 
Environment + persistentStorageClaim: String + bytesUsed: Float + updated: String +} + +type EnvironmentStorageMonth { + month: String + bytesUsed: Float +} + +type EnvKeyValue { + id: Int + scope: String + name: String + value: String +} + +enum EnvOrderType { + NAME + UPDATED +} + +enum EnvType { + PRODUCTION + DEVELOPMENT +} + +input EnvVariableInput { + id: Int + type: EnvVariableType + typeId: Int! + scope: EnvVariableScope + name: String! + value: String! +} + +enum EnvVariableScope { + BUILD + RUNTIME + GLOBAL + CONTAINER_REGISTRY + INTERNAL_CONTAINER_REGISTRY +} + +enum EnvVariableType { + PROJECT + ENVIRONMENT +} + +type Fact { + id: Int + environment: Environment + name: String + value: String + source: String + description: String + keyFact: Boolean + type: FactType + category: String + references: [FactReference] +} + +input FactFilterAtom { + lhsTarget: FactFilterLHSTarget + name: String! + contains: String! +} + +enum FactFilterConnective { + OR + AND +} + +input FactFilterInput { + filterConnective: FactFilterConnective + filters: [FactFilterAtom] + skip: Int + take: Int + orderBy: String +} + +enum FactFilterLHSTarget { + FACT + ENVIRONMENT + PROJECT +} + +type FactReference { + id: Int + fid: Int + name: String +} + +enum FactType { + TEXT + URL + SEMVER +} + +type File { + id: Int + filename: String + download: String + created: String +} + +type Group implements GroupInterface { + id: String + name: String + type: String + groups: [GroupInterface] + members: [GroupMembership] + projects: [Project] +} + +input GroupInput { + id: String + name: String +} + +interface GroupInterface { + id: String + name: String + type: String + groups: [GroupInterface] + members: [GroupMembership] + projects: [Project] +} + +type GroupMembership { + user: User + role: GroupRole +} + +enum GroupRole { + GUEST + REPORTER + DEVELOPER + MAINTAINER + OWNER +} + +scalar JSON + +type Kubernetes { + id: Int + name: String + consoleUrl: String + token: String + 
routerPattern: String + projectUser: String + sshHost: String + sshPort: String + created: String + monitoringConfig: JSON +} + +input MetadataKeyValue { + key: String! + value: String +} + +type Mutation { + """ + Add Environment or update if it is already existing + """ + addOrUpdateEnvironment(input: AddEnvironmentInput!): Environment + updateEnvironment(input: UpdateEnvironmentInput!): Environment + deleteEnvironment(input: DeleteEnvironmentInput!): String + deleteAllEnvironments: String + + """ + Add or update Storage Information for Environment + """ + addOrUpdateEnvironmentStorage( + input: AddOrUpdateEnvironmentStorageInput! + ): EnvironmentStorage + addNotificationSlack(input: AddNotificationSlackInput!): NotificationSlack + updateNotificationSlack( + input: UpdateNotificationSlackInput! + ): NotificationSlack + deleteNotificationSlack(input: DeleteNotificationSlackInput!): String + deleteAllNotificationSlacks: String + addNotificationRocketChat( + input: AddNotificationRocketChatInput! + ): NotificationRocketChat + updateNotificationRocketChat( + input: UpdateNotificationRocketChatInput! + ): NotificationRocketChat + deleteNotificationRocketChat( + input: DeleteNotificationRocketChatInput! + ): String + deleteAllNotificationRocketChats: String + addNotificationMicrosoftTeams( + input: AddNotificationMicrosoftTeamsInput! + ): NotificationMicrosoftTeams + updateNotificationMicrosoftTeams( + input: UpdateNotificationMicrosoftTeamsInput! + ): NotificationMicrosoftTeams + deleteNotificationMicrosoftTeams( + input: DeleteNotificationMicrosoftTeamsInput! + ): String + deleteAllNotificationMicrosoftTeams: String + addNotificationWebhook( + input: AddNotificationWebhookInput! + ): NotificationWebhook + updateNotificationWebhook( + input: UpdateNotificationWebhookInput! 
+ ): NotificationWebhook + deleteNotificationWebhook(input: DeleteNotificationWebhookInput!): String + deleteAllNotificationWebhook: String + addNotificationEmail(input: AddNotificationEmailInput!): NotificationEmail + updateNotificationEmail( + input: UpdateNotificationEmailInput! + ): NotificationEmail + deleteNotificationEmail(input: DeleteNotificationEmailInput!): String + deleteAllNotificationEmails: String + + """ + Connect previous created Notification to a Project + """ + addNotificationToProject(input: AddNotificationToProjectInput!): Project + removeNotificationFromProject( + input: RemoveNotificationFromProjectInput! + ): Project + removeAllNotificationsFromAllProjects: String + addOpenshift(input: AddOpenshiftInput!): Openshift + updateOpenshift(input: UpdateOpenshiftInput!): Openshift + deleteOpenshift(input: DeleteOpenshiftInput!): String + deleteAllOpenshifts: String + addKubernetes(input: AddKubernetesInput!): Kubernetes + updateKubernetes(input: UpdateKubernetesInput!): Kubernetes + deleteKubernetes(input: DeleteKubernetesInput!): String + deleteAllKubernetes: String + addProject(input: AddProjectInput!): Project + updateProject(input: UpdateProjectInput!): Project + deleteProject(input: DeleteProjectInput!): String + deleteAllProjects: String + addSshKey(input: AddSshKeyInput!): SshKey + updateSshKey(input: UpdateSshKeyInput!): SshKey + deleteSshKey(input: DeleteSshKeyInput!): String + deleteSshKeyById(input: DeleteSshKeyByIdInput!): String + deleteAllSshKeys: String + removeAllSshKeysFromAllUsers: String + addUser(input: AddUserInput!): User + updateUser(input: UpdateUserInput!): User + deleteUser(input: DeleteUserInput!): String + deleteAllUsers: String + addDeployment(input: AddDeploymentInput!): Deployment + deleteDeployment(input: DeleteDeploymentInput!): String + updateDeployment(input: UpdateDeploymentInput): Deployment + cancelDeployment(input: CancelDeploymentInput!): String + addBackup(input: AddBackupInput!): Backup + addProblem(input: 
AddProblemInput!): Problem + addProblemHarborScanMatch( + input: AddProblemHarborScanMatchInput! + ): ProblemHarborScanMatch + deleteProblem(input: DeleteProblemInput!): String + deleteProblemsFromSource(input: DeleteProblemsFromSourceInput!): String + deleteProblemHarborScanMatch( + input: DeleteProblemHarborScanMatchInput! + ): String + addFact(input: AddFactInput!): Fact + addFacts(input: AddFactsInput!): [Fact] + deleteFact(input: DeleteFactInput!): String + deleteFactsFromSource(input: DeleteFactsFromSourceInput!): String + addFactReference(input: AddFactReferenceInput!): FactReference + deleteFactReference(input: DeleteFactReferenceInput!): String + deleteAllFactReferencesByFactId( + input: DeleteFactReferencesByFactIdInput! + ): String + deleteBackup(input: DeleteBackupInput!): String + deleteAllBackups: String + addRestore(input: AddRestoreInput!): Restore + updateRestore(input: UpdateRestoreInput!): Restore + addEnvVariable(input: EnvVariableInput!): EnvKeyValue + deleteEnvVariable(input: DeleteEnvVariableInput!): String + addTask(input: TaskInput!): Task + addAdvancedTaskDefinition( + input: AdvancedTaskDefinitionInput! + ): AdvancedTaskDefinition + invokeRegisteredTask(advancedTaskDefinition: Int!, environment: Int!): Task + deleteAdvancedTaskDefinition(advancedTaskDefinition: Int!): String + addWorkflow(input: AddWorkflowInput!): Workflow + taskDrushArchiveDump(environment: Int!): Task + taskDrushSqlDump(environment: Int!): Task + taskDrushCacheClear(environment: Int!): Task + taskDrushCron(environment: Int!): Task + taskDrushSqlSync(sourceEnvironment: Int!, destinationEnvironment: Int!): Task + taskDrushRsyncFiles( + sourceEnvironment: Int! + destinationEnvironment: Int! + ): Task + taskDrushUserLogin(environment: Int!): Task + deleteTask(input: DeleteTaskInput!): String + updateTask(input: UpdateTaskInput): Task + setEnvironmentServices( + input: SetEnvironmentServicesInput! 
+ ): [EnvironmentService] + uploadFilesForTask(input: UploadFilesForTaskInput!): Task + deleteFilesForTask(input: DeleteFilesForTaskInput!): String + deployEnvironmentLatest(input: DeployEnvironmentLatestInput!): String + deployEnvironmentBranch(input: DeployEnvironmentBranchInput!): String + deployEnvironmentPullrequest( + input: DeployEnvironmentPullrequestInput! + ): String + deployEnvironmentPromote(input: DeployEnvironmentPromoteInput!): String + switchActiveStandby(input: switchActiveStandbyInput!): Task + addGroup(input: AddGroupInput!): GroupInterface + updateGroup(input: UpdateGroupInput!): GroupInterface + deleteGroup(input: DeleteGroupInput!): String + deleteAllGroups: String + addUserToGroup(input: UserGroupRoleInput!): GroupInterface + removeUserFromGroup(input: UserGroupInput!): GroupInterface + addGroupsToProject(input: ProjectGroupsInput): Project + addBillingGroup(input: BillingGroupInput!): BillingGroup + updateBillingGroup(input: UpdateBillingGroupInput!): BillingGroup + deleteBillingGroup(input: DeleteGroupInput!): String + addProjectToBillingGroup(input: ProjectBillingGroupInput): Project + updateProjectBillingGroup(input: ProjectBillingGroupInput): Project + removeProjectFromBillingGroup(input: ProjectBillingGroupInput): Project + removeGroupsFromProject(input: ProjectGroupsInput!): Project + updateProjectMetadata(input: UpdateMetadataInput!): Project + removeProjectMetadataByKey(input: RemoveMetadataInput!): Project + addBillingModifier(input: AddBillingModifierInput!): BillingModifier + updateBillingModifier(input: UpdateBillingModifierInput!): BillingModifier + deleteBillingModifier(input: DeleteBillingModifierInput!): String + deleteAllBillingModifiersByBillingGroup(input: GroupInput!): String + addDeployTargetConfig(input: AddDeployTargetConfigInput!): DeployTargetConfig + @deprecated( + reason: "Unstable API, subject to breaking changes in any release. 
Use at your own risk" + ) + updateDeployTargetConfig( + input: UpdateDeployTargetConfigInput! + ): DeployTargetConfig + @deprecated( + reason: "Unstable API, subject to breaking changes in any release. Use at your own risk" + ) + deleteDeployTargetConfig(input: DeleteDeployTargetConfigInput!): String + @deprecated( + reason: "Unstable API, subject to breaking changes in any release. Use at your own risk" + ) + deleteAllDeployTargetConfigs: String + @deprecated( + reason: "Unstable API, subject to breaking changes in any release. Use at your own risk" + ) +} + +union Notification = + NotificationRocketChat + | NotificationSlack + | NotificationMicrosoftTeams + | NotificationEmail + | NotificationWebhook + +enum NotificationContentType { + DEPLOYMENT + PROBLEM +} + +type NotificationEmail { + id: Int + name: String + emailAddress: String + contentType: String + notificationSeverityThreshold: ProblemSeverityRating +} + +type NotificationMicrosoftTeams { + id: Int + name: String + webhook: String + contentType: String + notificationSeverityThreshold: ProblemSeverityRating +} + +type NotificationRocketChat { + id: Int + name: String + webhook: String + channel: String + contentType: String + notificationSeverityThreshold: ProblemSeverityRating +} + +type NotificationSlack { + id: Int + name: String + webhook: String + channel: String + contentType: String + notificationSeverityThreshold: ProblemSeverityRating +} + +enum NotificationType { + SLACK + ROCKETCHAT + MICROSOFTTEAMS + EMAIL + WEBHOOK +} + +type NotificationWebhook { + id: Int + name: String + webhook: String + contentType: String + notificationSeverityThreshold: ProblemSeverityRating +} + +type Openshift { + id: Int + name: String + consoleUrl: String + token: String + routerPattern: String + projectUser: String + sshHost: String + sshPort: String + created: String + monitoringConfig: JSON +} + +type Problem { + id: Int + environment: Environment + severity: ProblemSeverityRating + severityScore: SeverityScore 
+  identifier: String
+  service: String
+  source: String
+  associatedPackage: String
+  description: String
+  links: String
+  version: String
+  fixedVersion: String
+  data: String
+  created: String
+  deleted: String
+}
+
+type ProblemHarborScanMatch {
+  id: Int
+  name: String
+  description: String
+  defaultLagoonProject: String
+  defaultLagoonEnvironment: String
+  defaultLagoonService: String
+  regex: String
+}
+
+enum ProblemSeverityRating {
+  NONE
+  UNKNOWN
+  NEGLIGIBLE
+  LOW
+  MEDIUM
+  HIGH
+  CRITICAL
+}
+
+"""
+Lagoon Project (like a git repository)
+"""
+type Project {
+  """
+  ID of project
+  """
+  id: Int
+
+  """
+  Name of project
+  """
+  name: String
+
+  """
+  Git URL, needs to be SSH Git URL in one of these two formats
+  - git@172.17.0.1/project1.git
+  - ssh://git@172.17.0.1:2222/project1.git
+  """
+  gitUrl: String
+
+  """
+  Project Availability STANDARD|HIGH
+  """
+  availability: ProjectAvailability
+
+  """
+  SSH Private Key for Project
+  Will be used to authenticate against the Git Repo of the Project
+  Needs to be in single string separated by `
+  `, example:
+  ```
+  -----BEGIN RSA PRIVATE KEY-----
+  MIIJKQIBAAKCAgEA+o[...]P0yoL8BoQQG2jCvYfWh6vyglQdrDYx/o6/8ecTwXokKKh6fg1q
+  -----END RSA PRIVATE KEY-----
+  ```
+  """
+  privateKey: String
+
+  """
+  Set if the .lagoon.yml should be found in a subfolder
+  Useful if you have multiple Lagoon projects per Git Repository
+  """
+  subfolder: String
+
+  """
+  Set if the project should use a routerPattern that is different from the deploy target default
+  """
+  routerPattern: String
+
+  """
+  Notifications that should be sent for this project
+  """
+  notifications(
+    type: NotificationType
+    contentType: NotificationContentType
+    notificationSeverityThreshold: ProblemSeverityRating
+  ): [Notification]
+
+  """
+  Which internal Lagoon System is responsible for deploying
+  Currently only 'lagoon_controllerBuildDeploy' exists
+  """
+  activeSystemsDeploy: String
+
+  """
+  Which internal Lagoon System is responsible for 
promoting
+  Currently only 'lagoon_controllerBuildDeploy' exists
+  """
+  activeSystemsPromote: String
+
+  """
+  Which internal Lagoon System is responsible for removing
+  Currently only 'lagoon_controllerRemove' exists
+  """
+  activeSystemsRemove: String
+
+  """
+  Which internal Lagoon System is responsible for tasks
+  Currently only 'lagoon_controllerJob' exists
+  """
+  activeSystemsTask: String
+
+  """
+  Which internal Lagoon System is responsible for miscellaneous tasks
+  Currently only 'lagoon_controllerMisc' exists
+  """
+  activeSystemsMisc: String
+
+  """
+  Which branches should be deployed, can be one of:
+  - `true` - all branches are deployed
+  - `false` - no branches are deployed
+  - REGEX - regex of all branches that should be deployed, example: `^(main|staging)$`
+  """
+  branches: String
+
+  """
+  Which Pull Requests should be deployed, can be one of:
+  - `true` - all pull requests are deployed
+  - `false` - no pull requests are deployed
+  - REGEX - regex of all Pull Request titles that should be deployed, example: `[BUILD]`
+  """
+  pullrequests: String
+
+  """
+  Which environment(the name) should be marked as the production environment.
+  *Important:* If you change this, you need to deploy both environments (the
+  current and previous one) that are affected in order for the change to
+  propagate correctly
+  """
+  productionEnvironment: String
+
+  """
+  Routes that are attached to the active environment
+  """
+  productionRoutes: String
+
+  """
+  The drush alias to use for the active production environment
+  *Important:* This is mainly used for drupal, but could be used for other services potentially
+  """
+  productionAlias: String
+
+  """
+  Which environment(the name) should be marked as the production standby environment. 
+  *Important:* This is used to determine which environment should be marked as the standby production environment
+  """
+  standbyProductionEnvironment: String
+
+  """
+  Routes that are attached to the standby environment
+  """
+  standbyRoutes: String
+
+  """
+  The drush alias to use for the standby production environment
+  *Important:* This is mainly used for drupal, but could be used for other services potentially
+  """
+  standbyAlias: String
+
+  """
+  Should this project have auto idling enabled (`1` or `0`)
+  """
+  autoIdle: Int
+
+  """
+  Should storage for this environment be calculated (`1` or `0`)
+  """
+  storageCalc: Int
+
+  """
+  Should the Problems UI be available for this Project (`1` or `0`)
+  """
+  problemsUi: Int
+
+  """
+  Should the Facts UI be available for this Project (`1` or `0`)
+  """
+  factsUi: Int
+
+  """
+  Should the ability to deploy environments be disabled for this Project (`1` or `0`)
+  """
+  deploymentsDisabled: Int
+
+  """
+  Reference to OpenShift Object this Project should be deployed to
+  """
+  openshift: Openshift
+
+  """
+  Pattern of OpenShift Project/Namespace that should be generated, default: `${project}-${environmentname}`
+  """
+  openshiftProjectPattern: String
+
+  """
+  Reference to Kubernetes Object this Project should be deployed to
+  """
+  kubernetes: Kubernetes
+
+  """
+  Pattern of Kubernetes Namespace that should be generated, default: `${project}-${environmentname}`
+  """
+  kubernetesNamespacePattern: String
+
+  """
+  How many environments can be deployed at one time
+  """
+  developmentEnvironmentsLimit: Int
+
+  """
+  Name of the OpenShift Project/Namespace
+  """
+  openshiftProjectName: String
+
+  """
+  Deployed Environments for this Project
+  """
+  environments(
+    """
+    Filter by Environment Type
+    """
+    type: EnvType
+
+    """
+    Include deleted Environments (by default deleted environment are hidden)
+    """
+    includeDeleted: Boolean
+
+    """
+    Filter environments by fact matching
+    """
+    factFilter: FactFilterInput
+  ): 
[Environment] + + """ + Creation Timestamp of Project + """ + created: String + + """ + Environment variables available during build-time and run-time + """ + envVariables: [EnvKeyValue] + + """ + Which groups are directly linked to project + """ + groups: [GroupInterface] + + """ + Metadata key/values stored against a project + """ + metadata: JSON + + """ + DeployTargetConfigs are a way to define which deploy targets are used for a project + """ + deployTargetConfigs: [DeployTargetConfig] + @deprecated( + reason: "Unstable API, subject to breaking changes in any release. Use at your own risk" + ) +} + +enum ProjectAvailability { + STANDARD + HIGH + POLYSITE +} + +input ProjectBillingGroupInput { + group: GroupInput! + project: ProjectInput! +} + +type ProjectFactSearchResults { + count: Int + projects: [Project] +} + +input ProjectGroupsInput { + project: ProjectInput! + groups: [GroupInput!]! +} + +""" +Must provide id OR name +""" +input ProjectInput { + id: Int + name: String +} + +enum ProjectOrderType { + NAME + CREATED +} + +type Query { + """ + Returns the current user + """ + me: User + + """ + Returns User Object by a given sshKey + """ + userBySshKey(sshKey: String!): User + + """ + Returns Project Object by a given name + """ + projectByName(name: String!): Project + + """ + Returns Group Object by a given name + """ + groupByName(name: String!): GroupInterface + + """ + Returns Project Object by a given gitUrl (only the first one if there are multiple) + """ + projectByGitUrl(gitUrl: String!): Project + environmentByName( + name: String! + project: Int! + includeDeleted: Boolean + ): Environment + environmentById(id: Int!): Environment + + """ + Returns Environment Object by a given openshiftProjectName + """ + environmentByOpenshiftProjectName(openshiftProjectName: String!): Environment + + """ + Returns Environment Object by a given kubernetesNamespaceName + """ + environmentByKubernetesNamespaceName( + kubernetesNamespaceName: String! 
+ ): Environment + + """ + Return projects from a fact-based search + """ + projectsByFactSearch(input: FactFilterInput): ProjectFactSearchResults + + """ + Return environments from a fact-based search + """ + environmentsByFactSearch(input: FactFilterInput): EnvironmentFactSearchResults + userCanSshToEnvironment( + openshiftProjectName: String + kubernetesNamespaceName: String + ): Environment + deploymentByRemoteId(id: String): Deployment + taskByRemoteId(id: String): Task + taskById(id: Int): Task + + """ + Returns all Project Objects matching given filters (all if no filter defined) + """ + allProjects( + createdAfter: String + gitUrl: String + order: ProjectOrderType + ): [Project] + + """ + Returns all Project Objects matching metadata filters + """ + projectsByMetadata(metadata: [MetadataKeyValue]): [Project] + + """ + Returns all OpenShift Objects + """ + allOpenshifts: [Openshift] + + """ + Returns all Kubernetes Objects + """ + allKubernetes: [Kubernetes] + + """ + Returns all Environments matching given filter (all if no filter defined) + """ + allEnvironments( + createdAfter: String + type: EnvType + order: EnvOrderType + ): [Environment] + + """ + Returns all Problems matching given filter (all if no filter defined) + """ + allProblems( + source: [String] + project: Int + environment: Int + envType: [EnvType] + identifier: String + severity: [ProblemSeverityRating] + ): [Problem] + problemSources: [String] + + """ + Returns all Groups matching given filter (all if no filter defined) + """ + allGroups(name: String, type: String): [GroupInterface] + + """ + Returns all projects in a given group + """ + allProjectsInGroup(input: GroupInput): [Project] + + """ + Returns the costs for a given billing group + """ + billingGroupCost(input: GroupInput, month: String!): JSON + + """ + Returns the costs for all billing groups + """ + allBillingGroupsCost(month: String!): JSON + + """ + Returns the Billing Group Modifiers for a given Billing Group (all modifiers 
+ for the Billing Group will be returned if the month is not provided) + """ + allBillingModifiers(input: GroupInput!, month: String): [BillingModifier] + + """ + Returns LAGOON_VERSION + """ + lagoonVersion: JSON + + """ + Returns all ProblemHarborScanMatchers + """ + allProblemHarborScanMatchers: [ProblemHarborScanMatch] + + """ + Returns all AdvancedTaskDefinitions + """ + allAdvancedTaskDefinitions: [AdvancedTaskDefinition] + + """ + Returns a single AdvancedTaskDefinition given an id + """ + advancedTaskDefinitionById(id: Int!): AdvancedTaskDefinition + + """ + Returns a AdvancedTaskDefinitions applicable for an environment + """ + advancedTasksForEnvironment(environment: Int!): [AdvancedTaskDefinition] + + """ + Returns a AdvancedTaskDefinitionArgument by Id + """ + advancedTaskDefinitionArgumentById(id: Int!): [AdvancedTaskDefinitionArgument] + + """ + Returns all Workflows for an environment + """ + workflowsForEnvironment(environment: Int!): [Workflow] + + """ + Returns the DeployTargetConfig by a deployTargetConfig Id + """ + deployTargetConfigById(id: Int!): DeployTargetConfig + @deprecated( + reason: "Unstable API, subject to breaking changes in any release. Use at your own risk" + ) + + """ + Returns all DeployTargetConfig by a project Id + """ + deployTargetConfigsByProjectId(project: Int!): [DeployTargetConfig] + @deprecated( + reason: "Unstable API, subject to breaking changes in any release. Use at your own risk" + ) + + """ + Returns all DeployTargetConfig by a deployTarget Id (aka: Openshift Id) + """ + deployTargetConfigsByDeployTarget(deployTarget: Int!): [DeployTargetConfig] + @deprecated( + reason: "Unstable API, subject to breaking changes in any release. Use at your own risk" + ) + allDeployTargetConfigs: [DeployTargetConfig] + @deprecated( + reason: "Unstable API, subject to breaking changes in any release. Use at your own risk" + ) +} + +input RemoveMetadataInput { + id: Int! + key: String! 
+} + +input RemoveNotificationFromProjectInput { + project: String! + notificationType: NotificationType! + notificationName: String! +} + +type Restore { + id: Int + backupId: String + status: String + restoreLocation: String + created: String +} + +enum RestoreStatusType { + PENDING + SUCCESSFUL + FAILED +} + +input SetEnvironmentServicesInput { + environment: Int! + services: [String]! +} + +""" +Severity score is a numeric measure (0-1) of a problems severity +""" +scalar SeverityScore + +type SshKey { + id: Int + name: String + keyValue: String + keyType: String + keyFingerprint: String + created: String +} + +enum SshKeyType { + SSH_RSA + SSH_ED25519 +} + +type Subscription { + backupChanged(environment: Int!): Backup + deploymentChanged(environment: Int!): Deployment + taskChanged(environment: Int!): Task +} + +input switchActiveStandbyInput { + project: ProjectInput! +} + +type Task { + id: Int + name: String + status: String + created: String + started: String + completed: String + environment: Environment + service: String + command: String + remoteId: String + logs: String + files: [File] +} + +input TaskInput { + id: Int + name: String! + status: TaskStatusType + created: String + started: String + completed: String + environment: Int! + service: String + command: String + remoteId: String + execute: Boolean +} + +enum TaskPermission { + MAINTAINER + DEVELOPER + GUEST +} + +type TaskRegistration { + id: Int + type: String + name: String + description: String + groupName: String + environment: Int + project: Int + command: String + service: String + permission: TaskPermission + created: String + deleted: String +} + +enum TaskStatusType { + ACTIVE + SUCCEEDED + FAILED +} + +type UnassignedNotification { + id: Int + name: String + type: String + contentType: String + notificationSeverityThreshold: ProblemSeverityRating +} + +input UpdateBillingGroupInput { + group: GroupInput! + patch: UpdateBillingGroupPatchInput! 
+} + +input UpdateBillingGroupPatchInput { + name: String! + currency: Currency + billingSoftware: String + uptimeRobotStatusPageId: String +} + +input UpdateBillingModifierInput { + id: Int! + patch: BillingModifierPatchInput! +} + +input UpdateDeploymentInput { + id: Int! + patch: UpdateDeploymentPatchInput! +} + +input UpdateDeploymentPatchInput { + name: String + status: DeploymentStatusType + created: String + started: String + completed: String + environment: Int + remoteId: String +} + +input UpdateDeployTargetConfigInput { + id: Int! + patch: UpdateDeployTargetConfigPatchInput +} + +input UpdateDeployTargetConfigPatchInput { + weight: Int + branches: String + pullrequests: String + deployTarget: Int + deployTargetProjectPattern: String +} + +input UpdateEnvironmentInput { + id: Int! + patch: UpdateEnvironmentPatchInput +} + +input UpdateEnvironmentPatchInput { + project: Int + deployType: DeployType + deployBaseRef: String + deployHeadRef: String + deployTitle: String + environmentType: EnvType + openshiftProjectName: String + kubernetesNamespaceName: String + route: String + routes: String + monitoringUrls: String + autoIdle: Int + openshift: Int + openshiftProjectPattern: String + kubernetes: Int + kubernetesNamespacePattern: String + + """ + Timestamp in format 'YYYY-MM-DD hh:mm:ss' + """ + created: String +} + +input UpdateFactInput { + environment: Int! + patch: UpdateFactInputValue! +} + +input UpdateFactInputValue { + environment: Int! + name: String! + value: String! + source: String! + description: String + keyFact: Boolean + type: FactType + category: String +} + +input UpdateFactReferenceInput { + fid: Int! + patch: UpdateFactReferenceInputValue! +} + +input UpdateFactReferenceInputValue { + fid: Int! + name: String +} + +input UpdateGroupInput { + group: GroupInput! + patch: UpdateGroupPatchInput! +} + +input UpdateGroupPatchInput { + name: String +} + +input UpdateKubernetesInput { + id: Int! + patch: UpdateKubernetesPatchInput! 
+} + +input UpdateKubernetesPatchInput { + name: String + consoleUrl: String + token: String + routerPattern: String + projectUser: String + sshHost: String + sshPort: String + monitoringConfig: JSON +} + +input UpdateMetadataInput { + id: Int! + patch: MetadataKeyValue! +} + +input UpdateNotificationEmailInput { + name: String! + patch: UpdateNotificationEmailPatchInput +} + +input UpdateNotificationEmailPatchInput { + name: String + emailAddress: String +} + +input UpdateNotificationMicrosoftTeamsInput { + name: String! + patch: UpdateNotificationMicrosoftTeamsPatchInput +} + +input UpdateNotificationMicrosoftTeamsPatchInput { + name: String + webhook: String + channel: String +} + +input UpdateNotificationRocketChatInput { + name: String! + patch: UpdateNotificationRocketChatPatchInput +} + +input UpdateNotificationRocketChatPatchInput { + name: String + webhook: String + channel: String +} + +input UpdateNotificationSlackInput { + name: String! + patch: UpdateNotificationSlackPatchInput +} + +input UpdateNotificationSlackPatchInput { + name: String + webhook: String + channel: String +} + +input UpdateNotificationWebhookInput { + name: String! + patch: UpdateNotificationWebhookPatchInput +} + +input UpdateNotificationWebhookPatchInput { + name: String + webhook: String +} + +input UpdateOpenshiftInput { + id: Int! + patch: UpdateOpenshiftPatchInput! +} + +input UpdateOpenshiftPatchInput { + name: String + consoleUrl: String + token: String + routerPattern: String + projectUser: String + sshHost: String + sshPort: String + monitoringConfig: JSON +} + +input UpdateProjectInput { + id: Int! + patch: UpdateProjectPatchInput! 
+} + +input UpdateProjectPatchInput { + name: String + gitUrl: String + availability: ProjectAvailability + privateKey: String + subfolder: String + routerPattern: String + activeSystemsDeploy: String + activeSystemsRemove: String + activeSystemsTask: String + activeSystemsMisc: String + activeSystemsPromote: String + branches: String + productionEnvironment: String + productionRoutes: String + productionAlias: String + standbyProductionEnvironment: String + standbyRoutes: String + standbyAlias: String + autoIdle: Int + storageCalc: Int + pullrequests: String + openshift: Int + openshiftProjectPattern: String + kubernetes: Int + kubernetesNamespacePattern: String + developmentEnvironmentsLimit: Int + problemsUi: Int + factsUi: Int + deploymentsDisabled: Int +} + +input UpdateRestoreInput { + backupId: String! + patch: UpdateRestorePatchInput! +} + +input UpdateRestorePatchInput { + status: RestoreStatusType + created: String + restoreLocation: String +} + +input UpdateSshKeyInput { + id: Int! + patch: UpdateSshKeyPatchInput! +} + +input UpdateSshKeyPatchInput { + name: String + keyValue: String + keyType: SshKeyType +} + +input UpdateTaskInput { + id: Int! + patch: UpdateTaskPatchInput! +} + +input UpdateTaskPatchInput { + name: String + status: TaskStatusType + created: String + started: String + completed: String + environment: Int + service: String + command: String + remoteId: String +} + +input UpdateUserInput { + user: UserInput! + patch: UpdateUserPatchInput! +} + +input UpdateUserPatchInput { + email: String + firstName: String + lastName: String + comment: String + gitlabId: Int +} + +""" +The `Upload` scalar type represents a file upload. +""" +scalar Upload + +input UploadFilesForTaskInput { + task: Int! + files: [Upload]! +} + +type User { + id: String + email: String + firstName: String + lastName: String + comment: String + gitlabId: Int + sshKeys: [SshKey] + groups: [GroupInterface] +} + +input UserGroupInput { + user: UserInput! + group: GroupInput! 
+} + +input UserGroupRoleInput { + user: UserInput! + group: GroupInput! + role: GroupRole! +} + +input UserInput { + id: String + email: String +} + +type Workflow { + id: Int + event: String + project: Int + advancedTaskDefinition: AdvancedTaskDefinition +} diff --git a/services/workflows/internal/lagoonclient/testassets/TestGetEnvironmentWorkflows.response.json b/services/workflows/internal/lagoonclient/testassets/TestGetEnvironmentWorkflows.response.json new file mode 100644 index 0000000000..80fd5dc434 --- /dev/null +++ b/services/workflows/internal/lagoonclient/testassets/TestGetEnvironmentWorkflows.response.json @@ -0,0 +1,19 @@ +{ + "data": { + "environmentByName": { + "id": 3, + "name": "Master", + "workflows": [ + { + "id": 1, + "event": "testevent", + "advancedTaskDefinition": { + "id": 1, + "command": "env | sort", + "__typename": "AdvancedTaskDefinitionCommand" + } + } + ] + } + } +} \ No newline at end of file diff --git a/services/workflows/internal/lagoonclient/testassets/TestInvokeWorkflowOnEnvironment.response.json b/services/workflows/internal/lagoonclient/testassets/TestInvokeWorkflowOnEnvironment.response.json new file mode 100644 index 0000000000..b28ef03323 --- /dev/null +++ b/services/workflows/internal/lagoonclient/testassets/TestInvokeWorkflowOnEnvironment.response.json @@ -0,0 +1,8 @@ +{ + "data": { + "invokeRegisteredTask": { + "id": 4, + "status": "active" + } + } +} \ No newline at end of file diff --git a/services/workflows/internal/lagoonclient/workflows.go b/services/workflows/internal/lagoonclient/workflows.go new file mode 100644 index 0000000000..f77a8f28a0 --- /dev/null +++ b/services/workflows/internal/lagoonclient/workflows.go @@ -0,0 +1,64 @@ +package lagoonclient + +import ( + "context" + "github.com/Khan/genqlient/graphql" +) + +type Workflow struct { + Id int + AdvancedTaskId int + AdvancedTaskDetails string + EnvironmentId int + EnvironmentName string +} + + +func InvokeWorkflowOnEnvironment(ctx context.Context, client 
graphql.Client, environmentId int, advancedTaskDefinition int) (string, error) { + resp, err := invokeCustomTask(ctx, client, environmentId, advancedTaskDefinition) + if err != nil { + return "", err + } + return resp.InvokeRegisteredTask.Status, nil +} + +func GetEnvironmentWorkflowsByEnvironmentId(ctx context.Context, client graphql.Client, environmentId int) ([]Workflow, error) { + var ret []Workflow + resp, err := getEnvironmentByIdWorkflows(ctx, client, environmentId) + + if err != nil { + return ret, err + } + for _, workflow := range resp.EnvironmentById.Workflows { + newWorkflow := Workflow{Id: workflow.Id, AdvancedTaskDetails: workflow.Event, EnvironmentName: resp.EnvironmentById.Name, EnvironmentId: resp.EnvironmentById.Id} + if commandTask, ok := workflow.AdvancedTaskDefinition.(*getEnvironmentByIdWorkflowsEnvironmentByIdEnvironmentWorkflowsWorkflowAdvancedTaskDefinitionAdvancedTaskDefinitionCommand); ok { + newWorkflow.AdvancedTaskId = commandTask.Id + } else if imageTask, ok := workflow.AdvancedTaskDefinition.(*getEnvironmentByIdWorkflowsEnvironmentByIdEnvironmentWorkflowsWorkflowAdvancedTaskDefinitionAdvancedTaskDefinitionImage); ok { + newWorkflow.AdvancedTaskId = imageTask.Id + } + ret = append(ret, newWorkflow) + } + + return ret, nil +} + + +func GetEnvironmentWorkflows(ctx context.Context, client graphql.Client, projectId int, environmentName string) ([]Workflow, error) { + var ret []Workflow + resp, err := getEnvironmentWorkflows(ctx, client, projectId, environmentName) + + if err != nil { + return ret, err + } + for _, workflow := range resp.EnvironmentByName.Workflows { + newWorkflow := Workflow{Id: workflow.Id, AdvancedTaskDetails: workflow.Event, EnvironmentName: resp.EnvironmentByName.Name, EnvironmentId: resp.EnvironmentByName.Id} + if commandTask, ok := workflow.AdvancedTaskDefinition.(*getEnvironmentWorkflowsEnvironmentByNameEnvironmentWorkflowsWorkflowAdvancedTaskDefinitionAdvancedTaskDefinitionCommand); ok { + 
newWorkflow.AdvancedTaskId = commandTask.Id
+		} else if imageTask, ok := workflow.AdvancedTaskDefinition.(*getEnvironmentWorkflowsEnvironmentByNameEnvironmentWorkflowsWorkflowAdvancedTaskDefinitionAdvancedTaskDefinitionImage); ok {
+			newWorkflow.AdvancedTaskId = imageTask.Id
+		}
+		ret = append(ret, newWorkflow)
+	}
+
+	return ret, nil
+}
diff --git a/services/workflows/internal/lagoonclient/workflows_test.go b/services/workflows/internal/lagoonclient/workflows_test.go
new file mode 100644
index 0000000000..1205fae58e
--- /dev/null
+++ b/services/workflows/internal/lagoonclient/workflows_test.go
@@ -0,0 +1,75 @@
+package lagoonclient
+
+import (
+	"context"
+	"github.com/Khan/genqlient/graphql"
+	"io/ioutil"
+	"net/http"
+	"net/http/httptest"
+	"testing"
+)
+
+
+func TestGetEnvironmentWorkflows(t *testing.T) {
+
+	testResponse, err := ioutil.ReadFile("./testassets/TestGetEnvironmentWorkflows.response.json")
+	if err != nil {
+		t.Fatalf("Could not open file" )
+	}
+
+	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+
+		if r.URL.Path != "/" {
+			t.Errorf("Expected to request '/', got: %s", r.URL.Path)
+		}
+		w.WriteHeader(http.StatusOK)
+		w.Write(testResponse)
+	}))
+	defer server.Close()
+
+	client := graphql.NewClient(server.URL, http.DefaultClient)
+	w, err := GetEnvironmentWorkflows(context.TODO(), client, 1, "test")
+
+	if err != nil {
+		t.Errorf("GetEnvironmentWorkflows() error = %v", err)
+		return
+	}
+
+	if w[0].Id != 1 {
+		t.Errorf("GetEnvironmentWorkflows() error = %v", "Incorrect id returned" )
+		return
+	}
+
+}
+
+
+func TestInvokeWorkflowOnEnvironment(t *testing.T) {
+	testResponse, err := ioutil.ReadFile("./testassets/TestInvokeWorkflowOnEnvironment.response.json")
+	if err != nil {
+		t.Fatalf("Could not open file" )
+	}
+
+	server := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+
+		if r.URL.Path != "/" {
+			t.Errorf("Expected to request '/', got: %s",
r.URL.Path)
+		}
+		w.WriteHeader(http.StatusOK)
+		w.Write(testResponse)
+	}))
+	defer server.Close()
+
+	client := graphql.NewClient(server.URL, http.DefaultClient)
+	status, err := InvokeWorkflowOnEnvironment(context.TODO(), client, 1, 1)
+
+	if err != nil {
+		t.Errorf("InvokeWorkflowOnEnvironment() error = %v", err)
+		return
+	}
+
+	if status == "" {
+		t.Errorf("InvokeWorkflowOnEnvironment not returning status")
+		return
+	}
+
+}
diff --git a/services/workflows/internal/schema/README.md b/services/workflows/internal/schema/README.md
new file mode 100644
index 0000000000..a7fdb8c9fd
--- /dev/null
+++ b/services/workflows/internal/schema/README.md
@@ -0,0 +1,3 @@
+# schema
+
+This is an interim schema package that will eventually be replaced by one in a standalone lagoon client library
\ No newline at end of file
diff --git a/services/workflows/internal/schema/deployment.go b/services/workflows/internal/schema/deployment.go
new file mode 100644
index 0000000000..0f2128f9f2
--- /dev/null
+++ b/services/workflows/internal/schema/deployment.go
@@ -0,0 +1,13 @@
+package schema
+
+// DeployEnvironmentLatestInput is used as the input for deploying an environment.
+type DeployEnvironmentLatestInput struct {
+	Environment EnvironmentInput `json:"environment"`
+	BulkID string `json:"bulkId"`
+	Priority int `json:"priority"`
+}
+
+// DeployEnvironmentLatest is the response.
+type DeployEnvironmentLatest struct {
+	DeployEnvironmentLatest string `json:"deployEnvironmentLatest"`
+}
diff --git a/services/workflows/internal/schema/environment.go b/services/workflows/internal/schema/environment.go
new file mode 100644
index 0000000000..91e246da4d
--- /dev/null
+++ b/services/workflows/internal/schema/environment.go
@@ -0,0 +1,8 @@
+package schema
+
+// EnvironmentInput is based on the Lagoon API type.
+type EnvironmentInput struct { + ID int `json:"id,omitempty"` + Name string `json:"name,omitempty"` + Project ProjectInput `json:"project,omitempty"` +} diff --git a/services/workflows/internal/schema/project.go b/services/workflows/internal/schema/project.go new file mode 100644 index 0000000000..e7f0d0e475 --- /dev/null +++ b/services/workflows/internal/schema/project.go @@ -0,0 +1,7 @@ +package schema + +// ProjectInput is based on the Lagoon API type. +type ProjectInput struct { + ID uint `json:"id,omitempty"` + Name string `json:"name,omitempty"` +} \ No newline at end of file diff --git a/services/workflows/internal/schema/workflow.go b/services/workflows/internal/schema/workflow.go new file mode 100644 index 0000000000..177693b0ec --- /dev/null +++ b/services/workflows/internal/schema/workflow.go @@ -0,0 +1,66 @@ +package schema + +type Workflow struct { + Id int `json:"id"` + Event string `json:"event"` + Project int `json:"project"` + AdvancedTaskDefinition AdvancedTaskDefinition `json:"AdvancedTaskDefinition"` +} + +type AdvancedTaskDefinition struct { + Id int `json:"id"` + Name string `json:"name"` +} + + +// Notification . 
+type Notification struct {
+	Severity string `json:"severity"`
+	Project string `json:"project"`
+	UUID string `json:"uuid"`
+	Event string `json:"event"`
+	Meta struct {
+		User struct {
+			ID string `json:"id"`
+			PreferredUsername string `json:"preferred_username"`
+			Email string `json:"email"`
+		} `json:"user"`
+		Headers struct {
+			UserAgent string `json:"user-agent"`
+			ContentType string `json:"content-type"`
+			ContentLength string `json:"content-length"`
+			Host string `json:"host"`
+			IPAddress string `json:"ipAddress"`
+		} `json:"headers"`
+		Project string `json:"project"`
+		ProjectName string `json:"projectName"`
+		BranchName string `json:"branchName"`
+		Event string `json:"event"`
+		Level string `json:"level"`
+		Message string `json:"message"`
+		Timestamp string `json:"timestamp"`
+		ShortSha string `json:"shortSha"`
+		BuildName string `json:"buildName"`
+		CommitURL string `json:"commitUrl"`
+		Environment string `json:"environment"`
+		EnvironmentID string `json:"environmentId"`
+		EnvironmentName string `json:"environmentName"`
+		Error string `json:"error"`
+		JobName string `json:"jobName"`
+		LogLink string `json:"logLink"`
+		Name string `json:"name"`
+		OpenshiftProject string `json:"openshiftProject"`
+		PromoteSourceEnvironment string `json:"promoteSourceEnvironment"`
+		PullrequestNumber string `json:"pullrequestNumber"`
+		PullrequestTitle string `json:"pullrequestTitle"`
+		PullrequestURL string `json:"pullrequestUrl"`
+		RemoteID string `json:"remoteId"`
+		RepoFullName string `json:"repoFullName"`
+		RepoName string `json:"repoName"`
+		RepoURL string `json:"repoUrl"`
+		Route string `json:"route"`
+		Routes string `json:"routes"`
+		Task string `json:"task"`
+	} `json:"meta"`
+	Message string `json:"message"`
+}
diff --git a/services/workflows/main.go b/services/workflows/main.go
new file mode 100644
index 0000000000..b6e8aefd9a
--- /dev/null
+++ b/services/workflows/main.go
@@ -0,0 +1,191 @@
+package main
+
+import (
+	"flag"
+	"fmt"
+	"os"
+	"strconv"
+	"time"
+
+ "github.com/cheshir/go-mq" + "github.com/uselagoon/lagoon/services/actions-handler/internal/handler" +) + +var ( + httpListenPort = os.Getenv("HTTP_LISTEN_PORT") + mqUser string + mqPass string + mqHost string + mqPort string + mqWorkers int + rabbitReconnectRetryInterval int + startupConnectionAttempts int + startupConnectionInterval int + lagoonAPIHost string + lagoonAppID string + jwtTokenSigningKey string + jwtAudience string + workflowsQueueName string + workflowsExchange string + jwtSubject string + jwtIssuer string +) + +func main() { + flag.StringVar(&lagoonAppID, "lagoon-app-id", "actions-handler", + "The appID to use that will be sent with messages.") + flag.StringVar(&mqUser, "rabbitmq-username", "guest", + "The username of the rabbitmq user.") + flag.StringVar(&mqPass, "rabbitmq-password", "guest", + "The password for the rabbitmq user.") + flag.StringVar(&mqHost, "rabbitmq-hostname", "localhost", + "The hostname for the rabbitmq host.") + flag.StringVar(&mqPort, "rabbitmq-port", "5672", + "The port for the rabbitmq host.") + flag.IntVar(&mqWorkers, "rabbitmq-queue-workers", 1, + "The number of workers to start with.") + flag.IntVar(&rabbitReconnectRetryInterval, "rabbitmq-reconnect-retry-interval", 30, + "The retry interval for rabbitmq.") + flag.IntVar(&startupConnectionAttempts, "startup-connection-attempts", 10, + "The number of startup attempts before exiting.") + flag.IntVar(&startupConnectionInterval, "startup-connection-interval-seconds", 30, + "The duration between startup attempts.") + flag.StringVar(&lagoonAPIHost, "lagoon-api-host", "http://localhost:3000/graphql", + "The host for the lagoon api.") + flag.StringVar(&jwtTokenSigningKey, "jwt-token-signing-key", "super-secret-string", + "The jwt signing token key or secret.") + flag.StringVar(&jwtAudience, "jwt-audience", "api.dev", + "The jwt audience.") + flag.StringVar(&jwtSubject, "jwt-subject", "actions-handler", + "The jwt audience.") + flag.StringVar(&jwtIssuer, "jwt-issuer", 
"actions-handler", + "The jwt audience.") + flag.StringVar(&workflowsQueueName, "workflows-queue-name", "lagoon-logs:workflows", + "The name of the queue in rabbitmq to use.") + flag.StringVar(&workflowsExchange, "workflows-exchange", "lagoon-logs", + "The name of the exchange in rabbitmq to use.") + flag.Parse() + + // get overrides from environment variables + mqUser = getEnv("RABBITMQ_USERNAME", mqUser) + mqPass = getEnv("RABBITMQ_PASSWORD", mqPass) + mqHost = getEnv("RABBITMQ_HOST", mqHost) + mqPort = getEnv("RABBITMQ_PORT", mqPort) + lagoonAPIHost = getEnv("API_HOST", lagoonAPIHost) + jwtTokenSigningKey = getEnv("JWTSECRET", jwtTokenSigningKey) + jwtAudience = getEnv("JWT_AUDIENCE", jwtAudience) + jwtSubject = getEnv("JWT_SUBJECT", jwtSubject) + jwtIssuer = getEnv("JWT_ISSUER", jwtIssuer) + workflowsQueueName = getEnv("WORKFLOWS_QUEUE_NAME", workflowsQueueName) + workflowsExchange = getEnv("WORKFLOWS_EXCHANGE", workflowsExchange) + + enableDebug := true + + // configure the backup handler settings + broker := handler.RabbitBroker{ + Hostname: fmt.Sprintf("%s:%s", mqHost, mqPort), + Username: mqUser, + Password: mqPass, + QueueName: workflowsQueueName, + ExchangeName: workflowsExchange, + } + graphQLConfig := handler.LagoonAPI{ + Endpoint: lagoonAPIHost, + TokenSigningKey: jwtTokenSigningKey, + JWTAudience: jwtAudience, + JWTSubject: jwtSubject, + JWTIssuer: jwtIssuer, + } + + config := mq.Config{ + ReconnectDelay: time.Duration(rabbitReconnectRetryInterval) * time.Second, + Exchanges: mq.Exchanges{ + { + Name: "lagoon-logs", + Type: "direct", + Options: mq.Options{ + "durable": true, + "delivery_mode": "2", + "headers": "", + "content_type": "", + }, + }, + }, + Consumers: mq.Consumers{ + { + Name: "items-queue", + Queue: "lagoon-logs:workflows", + Workers: mqWorkers, + Options: mq.Options{ + "durable": true, + "delivery_mode": "2", + "headers": "", + "content_type": "", + }, + }, + }, + Queues: mq.Queues{ + { + Name: "lagoon-logs:workflows", + Exchange: 
"lagoon-logs", + Options: mq.Options{ + "durable": true, + "delivery_mode": "2", + "headers": "", + "content_type": "", + }, + }, + }, + //Producers: mq.Producers{ + // { + // Name: "lagoon-actions", + // Exchange: "lagoon-actions", + // Options: mq.Options{ + // "app_id": lagoonAppID, + // "delivery_mode": "2", + // "headers": "", + // "content_type": "", + // }, + // }, + // { + // Name: "lagoon-logs", + // Exchange: "lagoon-logs", + // Options: mq.Options{ + // "app_id": lagoonAppID, + // "delivery_mode": "2", + // "headers": "", + // "content_type": "", + // }, + // }, + //}, + DSN: fmt.Sprintf("amqp://%s:%s@%s/", broker.Username, broker.Password, broker.Hostname), + } + + messaging := handler.NewMessaging(config, + graphQLConfig, + startupConnectionAttempts, + startupConnectionInterval, + enableDebug, + ) + + // start the consumer + messaging.Consumer() + +} + +func getEnv(key, fallback string) string { + if value, ok := os.LookupEnv(key); ok { + return value + } + return fallback +} + +// accepts fallback values 1, t, T, TRUE, true, True, 0, f, F, FALSE, false, False +// anything else is false. 
+func getEnvBool(key string, fallback bool) bool { + if value, ok := os.LookupEnv(key); ok { + rVal, _ := strconv.ParseBool(value) + return rVal + } + return fallback +} diff --git a/tests/tests/workflows.yaml b/tests/tests/workflows.yaml new file mode 100644 index 0000000000..605ec6918d --- /dev/null +++ b/tests/tests/workflows.yaml @@ -0,0 +1,25 @@ +--- +- include: features/random-wait.yaml + +- include: features/api-token.yaml + vars: + testname: "API TOKEN" + +- include: api/add-project.yaml + vars: + project: ci-workflows-{{ cluster_type }} + git_repo_name: tasks.git + git_url: "{{ localgit_url }}/{{ git_repo_name }}" + +- include: workflows/workflows.yaml + vars: + testname: "WORKFLOWS {{ cluster_type|upper }}" + git_repo_name: tasks.git + project: ci-workflows-{{ cluster_type }} + branch: workflows + check_url: "http://node.{{ project | regex_replace('_', '-') }}.{{ branch | regex_replace('/', '-') }}.{{ route_suffix }}" + openshift_project_name: ci-workflows-{{ cluster_type }}-workflows + +- include: api/delete-project.yaml + vars: + project: ci-workflows-{{ cluster_type }} \ No newline at end of file diff --git a/tests/tests/workflows/create-and-register-task.yaml b/tests/tests/workflows/create-and-register-task.yaml new file mode 100644 index 0000000000..faacfbce5f --- /dev/null +++ b/tests/tests/workflows/create-and-register-task.yaml @@ -0,0 +1,46 @@ +- name: "{{ testname }} - POST api deployEnvironmentBranch with target git branch {{ branch }} and project {{ project }} (no sha) to {{ graphql_url }}" + block: + - include: ../../tasks/api/refresh-token.yaml + - name: "{{ testname }} - print openshift variable debug inside subtask" + debug: + msg: "openshift id {{ openshift_project_name }}" + - name: "DEBUGGING POST GET ENV" + debug: + msg: '{ "query": "query($openshiftProjectName: String!) 
{environmentByOpenshiftProjectName(openshiftProjectName:$openshiftProjectName){ id }}", "variables": {"openshiftProjectName":"{{ openshift_project_name }}"}}' + - name: "{{ testname }} - POST Get environment id for {{ openshift_project_name }} to {{ graphql_url }}" + uri: + url: "{{ graphql_url }}" + method: POST + headers: + Authorization: "Bearer {{ token }}" + body_format: json + body: '{ "query": "query($openshiftProjectName: String!) {environmentByOpenshiftProjectName(openshiftProjectName:$openshiftProjectName){ id }}", "variables": {"openshiftProjectName":"{{ openshift_project_name }}"}}' + register: environmentByOSProjectNameApiResponse + until: + - name: "{{ testname }} - environmentByOSProjectNameApiResponse" + debug: + msg: "api response: {{ environmentByOSProjectNameApiResponse }}" + - include: ./post-api-register-task-command.yaml + - name: "{{ testname }} - DEBUG taskCreateApiResponse" + debug: + msg: "api response: {{ taskCreateApiResponse }}" + - name: "{{ testname }} - POST api addAdvancedTaskDefinition to {{ graphql_url }}" + debug: + msg: "api response: {{ taskCreateApiResponse.json.data.addAdvancedTaskDefinition.id }}" + - name: "{{ testname }} - POST Get project id for {{ openshift_project_name }} to {{ graphql_url }}" + uri: + url: "{{ graphql_url }}" + method: POST + headers: + Authorization: "Bearer {{ token }}" + body_format: json + body: '{ "query": "query($projectName: String!) 
{projectByName(name:$projectName) {id}}", "variables": {"projectName":"{{ project }}"}}'
+    register: projectByNameResponse
+    until:
+  - name: "{{ testname }} - projectByNameResponse"
+    debug:
+      msg: "api response: {{ projectByNameResponse.json.data.projectByName.id }}"
+  - include: ./post-api-register-workflow.yaml
+  - name: "{{ testname }} - DEBUG taskCreateApiResponse"
+    debug:
+      msg: "api response: {{ taskCreateApiResponse }}"
diff --git a/tests/tests/workflows/post-api-delete-workflow.yaml b/tests/tests/workflows/post-api-delete-workflow.yaml
new file mode 100644
index 0000000000..f82d22b6fa
--- /dev/null
+++ b/tests/tests/workflows/post-api-delete-workflow.yaml
@@ -0,0 +1,34 @@
+- name: 'Wait until success'
+  block:
+  - include: ../../tasks/api/admin-token.yaml
+  - name: Set the retry count
+    set_fact:
+      retry_count: "{{ 0 if retry_count is undefined else retry_count|int + 1 }}"
+  - name: Set
+    set_fact:
+      create_workflow_graphql: '{ "query": "mutation deleteWorkflowById($workflowId: Int!) { deleteWorkflow(input: {id: $workflowId})}", "variables": {"workflowId": {{ deleteWorkflowId }}} }'
+  - debug:
+      msg: "{{ create_workflow_graphql }}"
+
+  - name: "{{ testname }} - POST api delete workflow definition to {{ graphql_url }}"
+    uri:
+      url: "{{ graphql_url }}"
+      method: POST
+      headers:
+        Authorization: "Bearer {{ admin_token }}"
+      body_format: json
+      body: "{{ create_workflow_graphql }}"
+    register: workflowDeleteApiResponse
+  rescue:
+  - debug:
+      msg: "{{ workflowDeleteApiResponse }}"
+  - fail:
+      msg: Ended after 3 retries
+    when: retry_count|int == 3
+  - name: Pause for retry
+    pause:
+      seconds: 10
+  - debug:
+      msg: "Failed to connect - Retrying..."
+ + - include_tasks: ./post-api-delete-workflow.yaml \ No newline at end of file diff --git a/tests/tests/workflows/post-api-register-task-command.yaml b/tests/tests/workflows/post-api-register-task-command.yaml new file mode 100644 index 0000000000..3affa88dd6 --- /dev/null +++ b/tests/tests/workflows/post-api-register-task-command.yaml @@ -0,0 +1,28 @@ +- name: 'Wait until success' + block: + - include: ../../tasks/api/admin-token.yaml + - name: Set the retry count + set_fact: + retry_count: "{{ 0 if retry_count is undefined else retry_count|int + 1 }}" + - name: "{{ testname }} - POST api register task definition to {{ graphql_url }}" + uri: + url: "{{ graphql_url }}" + method: POST + headers: + Authorization: "Bearer {{ admin_token }}" + body_format: json + body: '{ "query": "mutation($environmentId: Int!, $taskName: String!, $description: String!, $service: String!, $command: String!) {addAdvancedTaskDefinition(input: {environment:$environmentId, name:$taskName, type:COMMAND, description: $description, service: $service, command: $command}){... on AdvancedTaskDefinitionCommand {id}}}", "variables": {"environmentId": {{ environmentByOSProjectNameApiResponse.json.data.environmentByOpenshiftProjectName.id }}, "taskName":"testing-echo","description":"echos to file", "service":"node", "command":"echo ''REPLACED BY TASK'' > /app/files/testoutput.txt"}}' + register: taskCreateApiResponse + rescue: + - debug: + msg: "{{ taskCreateApiResponse }}" + - fail: + msg: Ended after 3 retries + when: retry_count|int == 3 + - name: Pause for retry + pause: + seconds: 10 + - debug: + msg: "Failed to connect - Retrying..." 
+ + - include_tasks: ./post-api-register-task-command.yaml \ No newline at end of file diff --git a/tests/tests/workflows/post-api-register-workflow.yaml b/tests/tests/workflows/post-api-register-workflow.yaml new file mode 100644 index 0000000000..798951169c --- /dev/null +++ b/tests/tests/workflows/post-api-register-workflow.yaml @@ -0,0 +1,34 @@ +- name: 'Wait until success' + block: + - include: ../../tasks/api/admin-token.yaml + - name: Set the retry count + set_fact: + retry_count: "{{ 0 if retry_count is undefined else retry_count|int + 1 }}" + - name: Set + set_fact: + create_workflow_graphql: '{ "query": "mutation createWorkflow ($adtaskdef: Int!, $projId: Int, $taskName: String!, $eventType: String!) {addWorkflow(input: {name: $taskName, event: $eventType, advancedTaskDefinition:$adtaskdef, project: $projId}) { id }}", "variables": {"adtaskdef": {{ taskCreateApiResponse.json.data.addAdvancedTaskDefinition.id }}, "projId": {{ projectByNameResponse.json.data.projectByName.id }}, "taskName": "testTask", "eventType": "deployFinished" }}' + - debug: + msg: "{{ create_workflow_graphql }}" + + - name: "{{ testname }} - POST api register workflow definition to {{ graphql_url }}" + uri: + url: "{{ graphql_url }}" + method: POST + headers: + Authorization: "Bearer {{ admin_token }}" + body_format: json + body: "{{ create_workflow_graphql }}" + register: workflowCreateApiResponse + rescue: + - debug: + msg: "{{ workflowCreateApiResponse }}" + - fail: + msg: Ended after 3 retries + when: retry_count|int == 3 + - name: Pause for retry + pause: + seconds: 10 + - debug: + msg: "Failed to connect - Retrying..." 
+ + - include_tasks: ./post-api-register-workflow.yaml \ No newline at end of file diff --git a/tests/tests/workflows/workflows.yaml b/tests/tests/workflows/workflows.yaml new file mode 100644 index 0000000000..ef57a3994b --- /dev/null +++ b/tests/tests/workflows/workflows.yaml @@ -0,0 +1,88 @@ + +- name: "{{ testname }} - init git, add files, commit, git push" + hosts: localhost + serial: 1 + vars: + git_files: "tasks/" + tasks: + - include: ../../tasks/git-init.yaml + - include: ../../tasks/git-add-commit-push.yaml + +- name: "{{ testname }} - api deployEnvironmentBranch on {{ project }}, which should deploy the first commit" + hosts: localhost + serial: 1 + vars: + branch: "{{ branch }}" + project: "{{ project }}" + namespace: "{{ project | regex_replace('_', '-') }}-{{ branch | regex_replace('/', '-') }}" + tasks: + - include: ../../tasks/api/deploy-no-sha.yaml + +- name: "{{ testname }} - check if {{ project }} is deployed with searching for 'TO BE REPLACED' which is added by hand" + hosts: localhost + serial: 1 + vars: + url: "{{ check_url }}" + expected_content: "TO BE REPLACED" + tasks: + - include: ../../checks/check-url-content.yaml + +- name: "{{ testname }} - POST api Add task to {{ project }} via {{ graphql_url }}" + hosts: localhost + serial: 1 + tasks: + - include: ../../tasks/api/refresh-token.yaml + - include: ./create-and-register-task.yaml + +- name: debugger + hosts: localhost + tasks: + - name: outputstuffs + debug: + msg: "{{ workflowCreateApiResponse }}" + +- name: "{{ testname }} - REDEPLOY api deployEnvironmentBranch on {{ project }}, which should deploy the first commit" + hosts: localhost + serial: 1 + vars: + branch: "{{ branch }}" + project: "{{ project }}" + namespace: "{{ project | regex_replace('_', '-') }}-{{ branch | regex_replace('/', '-') }}" + tasks: + - include: ../../tasks/api/deploy-no-sha.yaml + +- name: "{{ testname }} - check if {{ project }} is deployed with searching for 'REPLACED BY TASK' which is added by the task" + 
hosts: localhost + serial: 1 + vars: + url: "{{ check_url }}" + expected_content: "REPLACED BY TASK" + tasks: + - include: ../../checks/check-url-content.yaml + +- name: "{{ testname }} - Delete created workflow" + hosts: localhost + serial: 1 + vars: + deleteWorkflowId: "{{ workflowCreateApiResponse.json.data.addWorkflow.id }}" + tasks: + - include: ./post-api-delete-workflow.yaml + + +- name: "{{ testname }} - api deleteEnvironment on {{ project }}, which should remove all resources" + hosts: localhost + serial: 1 + vars: + project: "{{ project }}" + branch: "{{ branch }}" + tasks: + - include: ../../tasks/api/delete-environment.yaml + +- name: "{{ testname }} - check if site for {{ project }} does not exist anymore" + hosts: localhost + serial: 1 + vars: + url: "{{ check_url }}" + expected_returncode: "{{ del_status_code }}" + tasks: + - include: ../../checks/check-url-returncode.yaml