diff --git a/server/events/markdown_renderer_test.go b/server/events/markdown_renderer_test.go index 9cbc4e3df5..548ce03cdb 100644 --- a/server/events/markdown_renderer_test.go +++ b/server/events/markdown_renderer_test.go @@ -191,7 +191,7 @@ $$$ LockURL: "lock-url", RePlanCmd: "atlantis plan -d path -w workspace", ApplyCmd: "atlantis apply -d path -w workspace", - HasDiverged: true, + MergedAgain: true, }, Workspace: "workspace", RepoRelDir: "path", @@ -210,7 +210,7 @@ $$$ * :repeat: To **plan** this project again, comment: * $atlantis plan -d path -w workspace$ -:warning: The branch we're merging into is ahead, it is recommended to pull new commits first. +:twisted_rightwards_arrows: Upstream was modified, a new merge was performed. --- * :fast_forward: To **apply** all unapplied plans from this pull request, comment: @@ -1974,7 +1974,7 @@ $$$ LockURL: "lock-url", RePlanCmd: "atlantis plan -d path -w workspace", ApplyCmd: "atlantis apply -d path -w workspace", - HasDiverged: true, + MergedAgain: true, }, Workspace: "workspace", RepoRelDir: "path", @@ -1992,7 +1992,7 @@ $$$ * :repeat: To **plan** this project again, comment: * $atlantis plan -d path -w workspace$ -:warning: The branch we're merging into is ahead, it is recommended to pull new commits first. +:twisted_rightwards_arrows: Upstream was modified, a new merge was performed. --- * :fast_forward: To **apply** all unapplied plans from this pull request, comment: diff --git a/server/events/mock_workingdir_test.go b/server/events/mock_workingdir_test.go index 27a0695ea5..30b344ea3a 100644 --- a/server/events/mock_workingdir_test.go +++ b/server/events/mock_workingdir_test.go @@ -165,12 +165,12 @@ func (mock *MockWorkingDir) HasDiverged(cloneDir string) bool { return ret0 } -func (mock *MockWorkingDir) SetSafeToReClone() { +func (mock *MockWorkingDir) SetCheckForUpstreamChanges() { if mock == nil { panic("mock must not be nil. 
Use myMock := NewMockWorkingDir().") } params := []pegomock.Param{} - pegomock.GetGenericMockFrom(mock).Invoke("SetSafeToReClone", params, []reflect.Type{}) + pegomock.GetGenericMockFrom(mock).Invoke("SetCheckForUpstreamChanges", params, []reflect.Type{}) } func (mock *MockWorkingDir) VerifyWasCalledOnce() *VerifierMockWorkingDir { @@ -482,19 +482,19 @@ func (c *MockWorkingDir_HasDiverged_OngoingVerification) GetAllCapturedArguments return } -func (verifier *VerifierMockWorkingDir) SetSafeToReClone() *MockWorkingDir_SetSafeToReClone_OngoingVerification { +func (verifier *VerifierMockWorkingDir) SetCheckForUpstreamChanges() *MockWorkingDir_SetCheckForUpstreamChanges_OngoingVerification { params := []pegomock.Param{} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "SetSafeToReClone", params, verifier.timeout) - return &MockWorkingDir_SetSafeToReClone_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} + methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "SetCheckForUpstreamChanges", params, verifier.timeout) + return &MockWorkingDir_SetCheckForUpstreamChanges_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } -type MockWorkingDir_SetSafeToReClone_OngoingVerification struct { +type MockWorkingDir_SetCheckForUpstreamChanges_OngoingVerification struct { mock *MockWorkingDir methodInvocations []pegomock.MethodInvocation } -func (c *MockWorkingDir_SetSafeToReClone_OngoingVerification) GetCapturedArguments() { +func (c *MockWorkingDir_SetCheckForUpstreamChanges_OngoingVerification) GetCapturedArguments() { } -func (c *MockWorkingDir_SetSafeToReClone_OngoingVerification) GetAllCapturedArguments() { +func (c *MockWorkingDir_SetCheckForUpstreamChanges_OngoingVerification) GetAllCapturedArguments() { } diff --git a/server/events/mocks/mock_working_dir.go b/server/events/mocks/mock_working_dir.go index 8f051f1937..55ecc1ca4c 100644 --- a/server/events/mocks/mock_working_dir.go +++ b/server/events/mocks/mock_working_dir.go @@ -165,12 +165,12 @@ func (mock *MockWorkingDir) HasDiverged(cloneDir string) bool { return ret0 } -func (mock *MockWorkingDir) SetSafeToReClone() { +func (mock *MockWorkingDir) SetCheckForUpstreamChanges() { if mock == nil { panic("mock must not be nil. 
Use myMock := NewMockWorkingDir().") } params := []pegomock.Param{} - pegomock.GetGenericMockFrom(mock).Invoke("SetSafeToReClone", params, []reflect.Type{}) + pegomock.GetGenericMockFrom(mock).Invoke("SetCheckForUpstreamChanges", params, []reflect.Type{}) } func (mock *MockWorkingDir) VerifyWasCalledOnce() *VerifierMockWorkingDir { @@ -482,19 +482,19 @@ func (c *MockWorkingDir_HasDiverged_OngoingVerification) GetAllCapturedArguments return } -func (verifier *VerifierMockWorkingDir) SetSafeToReClone() *MockWorkingDir_SetSafeToReClone_OngoingVerification { +func (verifier *VerifierMockWorkingDir) SetCheckForUpstreamChanges() *MockWorkingDir_SetCheckForUpstreamChanges_OngoingVerification { params := []pegomock.Param{} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "SetSafeToReClone", params, verifier.timeout) - return &MockWorkingDir_SetSafeToReClone_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} + methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "SetCheckForUpstreamChanges", params, verifier.timeout) + return &MockWorkingDir_SetCheckForUpstreamChanges_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } -type MockWorkingDir_SetSafeToReClone_OngoingVerification struct { +type MockWorkingDir_SetCheckForUpstreamChanges_OngoingVerification struct { mock *MockWorkingDir methodInvocations []pegomock.MethodInvocation } -func (c *MockWorkingDir_SetSafeToReClone_OngoingVerification) GetCapturedArguments() { +func (c *MockWorkingDir_SetCheckForUpstreamChanges_OngoingVerification) GetCapturedArguments() { } -func (c *MockWorkingDir_SetSafeToReClone_OngoingVerification) GetAllCapturedArguments() { +func (c *MockWorkingDir_SetCheckForUpstreamChanges_OngoingVerification) GetAllCapturedArguments() { } diff --git a/server/events/models/models.go b/server/events/models/models.go index 492fc36fea..bdc821c285 100644 --- a/server/events/models/models.go +++ b/server/events/models/models.go @@ -361,10 +361,10 @@ type PlanSuccess struct { RePlanCmd string // ApplyCmd is the command that users should run to apply this plan. ApplyCmd string - // HasDiverged is true if we're using the checkout merge strategy and the - // branch we're merging into has been updated since we cloned and merged - // it. - HasDiverged bool + // MergedAgain is true if we're using the checkout merge strategy and the + // branch we're merging into had been updated, and we had to merge again + // before planning + MergedAgain bool } type PolicySetResult struct { diff --git a/server/events/project_command_runner.go b/server/events/project_command_runner.go index 276a9ef81d..6b938b2b95 100644 --- a/server/events/project_command_runner.go +++ b/server/events/project_command_runner.go @@ -543,9 +543,9 @@ func (p *DefaultProjectCommandRunner) doPlan(ctx command.ProjectContext) (*model } defer unlockFn() - p.WorkingDir.SetSafeToReClone() + p.WorkingDir.SetCheckForUpstreamChanges() // Clone is idempotent so okay to run even if the repo was already cloned. 
- repoDir, hasDiverged, cloneErr := p.WorkingDir.Clone(ctx.HeadRepo, ctx.Pull, ctx.Workspace) + repoDir, mergedAgain, cloneErr := p.WorkingDir.Clone(ctx.HeadRepo, ctx.Pull, ctx.Workspace) if cloneErr != nil { if unlockErr := lockAttempt.UnlockFn(); unlockErr != nil { ctx.Log.Err("error unlocking state after plan error: %v", unlockErr) @@ -576,7 +576,7 @@ func (p *DefaultProjectCommandRunner) doPlan(ctx command.ProjectContext) (*model TerraformOutput: strings.Join(outputs, "\n"), RePlanCmd: ctx.RePlanCmd, ApplyCmd: ctx.ApplyCmd, - HasDiverged: hasDiverged, + MergedAgain: mergedAgain, }, "", nil } diff --git a/server/events/templates/diverged.tmpl b/server/events/templates/diverged.tmpl deleted file mode 100644 index f0f4be0ed2..0000000000 --- a/server/events/templates/diverged.tmpl +++ /dev/null @@ -1,5 +0,0 @@ -{{ define "diverged" -}} -{{ if .HasDiverged }} -:warning: The branch we're merging into is ahead, it is recommended to pull new commits first. -{{ end -}} -{{ end -}} diff --git a/server/events/templates/merged_again.tmpl b/server/events/templates/merged_again.tmpl new file mode 100644 index 0000000000..796afe552a --- /dev/null +++ b/server/events/templates/merged_again.tmpl @@ -0,0 +1,5 @@ +{{ define "mergedAgain" -}} +{{ if .MergedAgain }} +:twisted_rightwards_arrows: Upstream was modified, a new merge was performed. +{{ end -}} +{{ end -}} diff --git a/server/events/templates/plan_success_unwrapped.tmpl b/server/events/templates/plan_success_unwrapped.tmpl index 1f34216a0d..6bd81de233 100644 --- a/server/events/templates/plan_success_unwrapped.tmpl +++ b/server/events/templates/plan_success_unwrapped.tmpl @@ -16,5 +16,5 @@ This plan was not saved because one or more projects failed and automerge requir * :repeat: To **plan** this project again, comment: * `{{ .RePlanCmd }}` {{ end -}} -{{ template "diverged" . }} +{{ template "mergedAgain" . }} {{ end -}} diff --git a/server/events/templates/plan_success_wrapped.tmpl b/server/events/templates/plan_success_wrapped.tmpl index 9630de1b2e..cef96d0609 100644 --- a/server/events/templates/plan_success_wrapped.tmpl +++ b/server/events/templates/plan_success_wrapped.tmpl @@ -20,5 +20,5 @@ This plan was not saved because one or more projects failed and automerge requir {{ end -}} {{ .PlanSummary -}} -{{ template "diverged" . -}} +{{ template "mergedAgain" . -}} {{ end -}} diff --git a/server/events/working_dir.go b/server/events/working_dir.go index 65fd28a304..a6b2c73656 100644 --- a/server/events/working_dir.go +++ b/server/events/working_dir.go @@ -53,7 +53,7 @@ type WorkingDir interface { // Set a flag in the workingdir so Clone() can know that it is safe to re-clone the workingdir if // the upstream branch has been modified. This is only safe after grabbing the project lock // and before running any plans - SetSafeToReClone() + SetCheckForUpstreamChanges() // DeletePlan deletes the plan for this repo, pull, workspace path and project name DeletePlan(r models.Repo, p models.PullRequest, workspace string, path string, projectName string) error // GetGitUntrackedFiles returns a list of Git untracked files in the working dir. 
@@ -84,9 +84,9 @@ type FileWorkspace struct { GithubAppEnabled bool // use the global setting without overriding GpgNoSigningEnabled bool - // flag indicating if a re-clone will be safe (project lock held, about to run plan) - SafeToReClone bool - Logger logging.SimpleLogging + // flag indicating if we have to merge with potential new changes upstream (directly after grabbing project lock) + CheckForUpstreamChanges bool + Logger logging.SimpleLogging } // Clone git clones headRepo, checks out the branch and then returns the absolute @@ -100,9 +100,9 @@ func (w *FileWorkspace) Clone( p models.PullRequest, workspace string) (string, bool, error) { cloneDir := w.cloneDir(p.BaseRepo, p, workspace) - hasDiverged := false - defer func() { w.SafeToReClone = false }() + defer func() { w.CheckForUpstreamChanges = false }() + c := wrappedGitContext{cloneDir, headRepo, p} // If the directory already exists, check if it's at the right commit. // If so, then we do nothing. if _, err := os.Stat(cloneDir); err == nil { @@ -122,16 +122,16 @@ func (w *FileWorkspace) Clone( outputRevParseCmd, err := revParseCmd.CombinedOutput() if err != nil { w.Logger.Warn("will re-clone repo, could not determine if was at correct commit: %s: %s: %s", strings.Join(revParseCmd.Args, " "), err, string(outputRevParseCmd)) - return cloneDir, false, w.forceClone(cloneDir, headRepo, p) + return cloneDir, false, w.forceClone(c) } currCommit := strings.Trim(string(outputRevParseCmd), "\n") // We're prefix matching here because BitBucket doesn't give us the full // commit, only a 12 character prefix. if strings.HasPrefix(currCommit, p.HeadCommit) { - if w.SafeToReClone && w.CheckoutMerge && w.recheckDiverged(p, headRepo, cloneDir) { + if w.CheckForUpstreamChanges && w.CheckoutMerge && w.recheckDiverged(p, headRepo, cloneDir) { w.Logger.Info("base branch has been updated, using merge strategy and will clone again") - hasDiverged = true + return cloneDir, true, w.mergeAgain(c) } else { w.Logger.Debug("repo is at correct commit %q so will not re-clone", p.HeadCommit) return cloneDir, false, nil @@ -143,7 +143,7 @@ func (w *FileWorkspace) Clone( } // Otherwise we clone the repo. - return cloneDir, hasDiverged, w.forceClone(cloneDir, headRepo, p) + return cloneDir, false, w.forceClone(c) } // recheckDiverged returns true if the branch we're merging into has diverged @@ -212,8 +212,8 @@ func (w *FileWorkspace) HasDiverged(cloneDir string) bool { return hasDiverged } -func (w *FileWorkspace) forceClone(cloneDir string, headRepo models.Repo, p models.PullRequest) error { - value, _ := cloneLocks.LoadOrStore(cloneDir, new(sync.Mutex)) +func (w *FileWorkspace) forceClone(c wrappedGitContext) error { + value, _ := cloneLocks.LoadOrStore(c.dir, new(sync.Mutex)) mutex := value.(*sync.Mutex) defer mutex.Unlock() @@ -222,97 +222,131 @@ func (w *FileWorkspace) forceClone(cloneDir string, headRepo models.Repo, p mode return nil } - err := os.RemoveAll(cloneDir) + err := os.RemoveAll(c.dir) if err != nil { - return errors.Wrapf(err, "deleting dir %q before cloning", cloneDir) + return errors.Wrapf(err, "deleting dir %q before cloning", c.dir) } // Create the directory and parents if necessary. - w.Logger.Info("creating dir %q", cloneDir) - if err := os.MkdirAll(cloneDir, 0700); err != nil { + w.Logger.Info("creating dir %q", c.dir) + if err := os.MkdirAll(c.dir, 0700); err != nil { return errors.Wrap(err, "creating new workspace") } // During testing, we mock some of this out. 
- headCloneURL := headRepo.CloneURL + headCloneURL := c.head.CloneURL if w.TestingOverrideHeadCloneURL != "" { headCloneURL = w.TestingOverrideHeadCloneURL } - baseCloneURL := p.BaseRepo.CloneURL + baseCloneURL := c.pr.BaseRepo.CloneURL if w.TestingOverrideBaseCloneURL != "" { baseCloneURL = w.TestingOverrideBaseCloneURL } - runGit := func(args ...string) error { - cmd := exec.Command("git", args...) // nolint: gosec - cmd.Dir = cloneDir - // The git merge command requires these env vars are set. - cmd.Env = append(os.Environ(), []string{ - "EMAIL=atlantis@runatlantis.io", - "GIT_AUTHOR_NAME=atlantis", - "GIT_COMMITTER_NAME=atlantis", - }...) - - cmdStr := w.sanitizeGitCredentials(strings.Join(cmd.Args, " "), p.BaseRepo, headRepo) - output, err := cmd.CombinedOutput() - sanitizedOutput := w.sanitizeGitCredentials(string(output), p.BaseRepo, headRepo) - if err != nil { - sanitizedErrMsg := w.sanitizeGitCredentials(err.Error(), p.BaseRepo, headRepo) - return fmt.Errorf("running %s: %s: %s", cmdStr, sanitizedOutput, sanitizedErrMsg) - } - w.Logger.Debug("ran: %s. Output: %s", cmdStr, strings.TrimSuffix(sanitizedOutput, "\n")) - return nil - } - // if branch strategy, use depth=1 if !w.CheckoutMerge { - return runGit("clone", "--depth=1", "--branch", p.HeadBranch, "--single-branch", headCloneURL, cloneDir) + return w.wrappedGit(c, "clone", "--depth=1", "--branch", c.pr.HeadBranch, "--single-branch", headCloneURL, c.dir) } // if merge strategy... // if no checkout depth, omit depth arg if w.CheckoutDepth == 0 { - if err := runGit("clone", "--branch", p.BaseBranch, "--single-branch", baseCloneURL, cloneDir); err != nil { + if err := w.wrappedGit(c, "clone", "--branch", c.pr.BaseBranch, "--single-branch", baseCloneURL, c.dir); err != nil { return err } } else { - if err := runGit("clone", "--depth", fmt.Sprint(w.CheckoutDepth), "--branch", p.BaseBranch, "--single-branch", baseCloneURL, cloneDir); err != nil { + if err := w.wrappedGit(c, "clone", "--depth", fmt.Sprint(w.CheckoutDepth), "--branch", c.pr.BaseBranch, "--single-branch", baseCloneURL, c.dir); err != nil { return err } } - if err := runGit("remote", "add", "head", headCloneURL); err != nil { + if err := w.wrappedGit(c, "remote", "add", "head", headCloneURL); err != nil { return err } + if w.GpgNoSigningEnabled { + if err := w.wrappedGit(c, "config", "--local", "commit.gpgsign", "false"); err != nil { + return err + } + } + + return w.mergeToBaseBranch(c) +} + +// There is a new upstream update that we need, and we want to update to it +// without deleting any existing plans +func (w *FileWorkspace) mergeAgain(c wrappedGitContext) error { + value, _ := cloneLocks.LoadOrStore(c.dir, new(sync.Mutex)) + mutex := value.(*sync.Mutex) - fetchRef := fmt.Sprintf("+refs/heads/%s:", p.HeadBranch) + defer mutex.Unlock() + if locked := mutex.TryLock(); !locked { + mutex.Lock() + return nil + } + + // Reset branch as if it was cloned again + if err := w.wrappedGit(c, "reset", "--hard", fmt.Sprintf("refs/remotes/head/%s", c.pr.BaseBranch)); err != nil { + return err + } + + return w.mergeToBaseBranch(c) +} + +// wrappedGitContext is the configuration for wrappedGit that is typically unchanged +// for a series of calls to wrappedGit +type wrappedGitContext struct { + dir string + head models.Repo + pr models.PullRequest +} + +// wrappedGit runs git with additional environment settings required for git merge, +// and with sanitized error logging to avoid leaking git credentials +func (w *FileWorkspace) wrappedGit(c wrappedGitContext, args ...string) 
error { + cmd := exec.Command("git", args...) // nolint: gosec + cmd.Dir = c.dir + // The git merge command requires these env vars are set. + cmd.Env = append(os.Environ(), []string{ + "EMAIL=atlantis@runatlantis.io", + "GIT_AUTHOR_NAME=atlantis", + "GIT_COMMITTER_NAME=atlantis", + }...) + cmdStr := w.sanitizeGitCredentials(strings.Join(cmd.Args, " "), c.pr.BaseRepo, c.head) + output, err := cmd.CombinedOutput() + sanitizedOutput := w.sanitizeGitCredentials(string(output), c.pr.BaseRepo, c.head) + if err != nil { + sanitizedErrMsg := w.sanitizeGitCredentials(err.Error(), c.pr.BaseRepo, c.head) + return fmt.Errorf("running %s: %s: %s", cmdStr, sanitizedOutput, sanitizedErrMsg) + } + w.Logger.Debug("ran: %s. Output: %s", cmdStr, strings.TrimSuffix(sanitizedOutput, "\n")) + return nil +} + +// Merge the PR into the base branch. +func (w *FileWorkspace) mergeToBaseBranch(c wrappedGitContext) error { + fetchRef := fmt.Sprintf("+refs/heads/%s:", c.pr.HeadBranch) fetchRemote := "head" if w.GithubAppEnabled { - fetchRef = fmt.Sprintf("pull/%d/head:", p.Num) + fetchRef = fmt.Sprintf("pull/%d/head:", c.pr.Num) fetchRemote = "origin" } // if no checkout depth, omit depth arg if w.CheckoutDepth == 0 { - if err := runGit("fetch", fetchRemote, fetchRef); err != nil { + if err := w.wrappedGit(c, "fetch", fetchRemote, fetchRef); err != nil { return err } } else { - if err := runGit("fetch", "--depth", fmt.Sprint(w.CheckoutDepth), fetchRemote, fetchRef); err != nil { + if err := w.wrappedGit(c, "fetch", "--depth", fmt.Sprint(w.CheckoutDepth), fetchRemote, fetchRef); err != nil { return err } } - if w.GpgNoSigningEnabled { - if err := runGit("config", "--local", "commit.gpgsign", "false"); err != nil { - return err - } - } - if err := runGit("merge-base", p.BaseBranch, "FETCH_HEAD"); err != nil { + if err := w.wrappedGit(c, "merge-base", c.pr.BaseBranch, "FETCH_HEAD"); err != nil { // git merge-base returning error means that we did not receive enough commits in shallow clone. // Fall back to retrieving full repo history. - if err := runGit("fetch", "--unshallow"); err != nil { + if err := w.wrappedGit(c, "fetch", "--unshallow"); err != nil { return err } } @@ -323,7 +357,7 @@ func (w *FileWorkspace) forceClone(cloneDir string, headRepo models.Repo, p mode // git rev-parse HEAD^2 to get the head commit because it will // always succeed whereas without --no-ff, if the merge was fast // forwarded then git rev-parse HEAD^2 would fail. - return runGit("merge", "-q", "--no-ff", "-m", "atlantis-merge", "FETCH_HEAD") + return w.wrappedGit(c, "merge", "-q", "--no-ff", "-m", "atlantis-merge", "FETCH_HEAD") } // GetWorkingDir returns the path to the workspace for this repo and pull. 
@@ -374,9 +408,9 @@ func (w *FileWorkspace) sanitizeGitCredentials(s string, base models.Repo, head return strings.Replace(baseReplaced, head.CloneURL, head.SanitizedCloneURL, -1) } -// Set the flag that indicates it is safe to re-clone if necessary -func (w *FileWorkspace) SetSafeToReClone() { - w.SafeToReClone = true +// Set the flag that indicates we need to check for upstream changes (if using merge checkout strategy) +func (w *FileWorkspace) SetCheckForUpstreamChanges() { + w.CheckForUpstreamChanges = true } func (w *FileWorkspace) DeletePlan(r models.Repo, p models.PullRequest, workspace string, projectPath string, projectName string) error { diff --git a/server/events/working_dir_test.go b/server/events/working_dir_test.go index 225f299291..8eae7c730d 100644 --- a/server/events/working_dir_test.go +++ b/server/events/working_dir_test.go @@ -3,6 +3,7 @@ package events_test import ( "crypto/tls" "fmt" + "github.com/stretchr/testify/assert" "net/http" "os" "path/filepath" @@ -97,13 +98,13 @@ func TestClone_CheckoutMergeNoneExisting(t *testing.T) { Logger: logger, } - cloneDir, hasDiverged, err := wd.Clone(models.Repo{}, models.PullRequest{ + cloneDir, mergedAgain, err := wd.Clone(models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", BaseBranch: "main", }, "default") Ok(t, err) - Equals(t, false, hasDiverged) + Equals(t, false, mergedAgain) // Check the commits. actBaseCommit := runCmd(t, cloneDir, "git", "rev-parse", "HEAD~1") @@ -149,25 +150,25 @@ func TestClone_CheckoutMergeNoReclone(t *testing.T) { Logger: logger, } - _, hasDiverged, err := wd.Clone(models.Repo{}, models.PullRequest{ + _, mergedAgain, err := wd.Clone(models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", BaseBranch: "main", }, "default") Ok(t, err) - Equals(t, false, hasDiverged) + Equals(t, false, mergedAgain) // Create a file that we can use to check if the repo was recloned. runCmd(t, dataDir, "touch", "repos/0/default/proof") // Now run the clone again. - cloneDir, hasDiverged, err := wd.Clone(models.Repo{}, models.PullRequest{ + cloneDir, mergedAgain, err := wd.Clone(models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", BaseBranch: "main", }, "default") Ok(t, err) - Equals(t, false, hasDiverged) + Equals(t, false, mergedAgain) // Check that our proof file is still there, proving that we didn't reclone. _, err = os.Stat(filepath.Join(cloneDir, "proof")) @@ -202,25 +203,25 @@ func TestClone_CheckoutMergeNoRecloneFastForward(t *testing.T) { Logger: logger, } - _, hasDiverged, err := wd.Clone(models.Repo{}, models.PullRequest{ + _, mergedAgain, err := wd.Clone(models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", BaseBranch: "main", }, "default") Ok(t, err) - Equals(t, false, hasDiverged) + Equals(t, false, mergedAgain) // Create a file that we can use to check if the repo was recloned. runCmd(t, dataDir, "touch", "repos/0/default/proof") // Now run the clone again. - cloneDir, hasDiverged, err := wd.Clone(models.Repo{}, models.PullRequest{ + cloneDir, mergedAgain, err := wd.Clone(models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", BaseBranch: "main", }, "default") Ok(t, err) - Equals(t, false, hasDiverged) + Equals(t, false, mergedAgain) // Check that our proof file is still there, proving that we didn't reclone. 
_, err = os.Stat(filepath.Join(cloneDir, "proof")) @@ -320,13 +321,13 @@ func TestClone_CheckoutMergeShallow(t *testing.T) { Logger: logger, } - cloneDir, hasDiverged, err := wd.Clone(models.Repo{}, models.PullRequest{ + cloneDir, mergedAgain, err := wd.Clone(models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", BaseBranch: "main", }, "default") Ok(t, err) - Equals(t, false, hasDiverged) + Equals(t, false, mergedAgain) gotBaseCommitType := runCmd(t, cloneDir, "git", "cat-file", "-t", baseCommit) Assert(t, gotBaseCommitType == "commit\n", "should have merge-base in shallow repo") @@ -351,13 +352,13 @@ func TestClone_CheckoutMergeShallow(t *testing.T) { Logger: logger, } - cloneDir, hasDiverged, err := wd.Clone(models.Repo{}, models.PullRequest{ + cloneDir, mergedAgain, err := wd.Clone(models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", BaseBranch: "main", }, "default") Ok(t, err) - Equals(t, false, hasDiverged) + Equals(t, false, mergedAgain) gotBaseCommitType := runCmd(t, cloneDir, "git", "cat-file", "-t", baseCommit) Assert(t, gotBaseCommitType == "commit\n", "should have merge-base in full repo") @@ -387,12 +388,12 @@ func TestClone_NoReclone(t *testing.T) { GpgNoSigningEnabled: true, Logger: logger, } - cloneDir, hasDiverged, err := wd.Clone(models.Repo{}, models.PullRequest{ + cloneDir, mergedAgain, err := wd.Clone(models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", }, "default") Ok(t, err) - Equals(t, false, hasDiverged) + Equals(t, false, mergedAgain) // Check that our proof file is still there. _, err = os.Stat(filepath.Join(cloneDir, "proof")) @@ -416,6 +417,13 @@ func TestClone_RecloneWrongCommit(t *testing.T) { runCmd(t, repoDir, "git", "commit", "-m", "newfile") expCommit := runCmd(t, repoDir, "git", "rev-parse", "HEAD") + // Pretend that terraform has created a plan file, we'll check for it later + planFile := filepath.Join(dataDir, "repos/0/default/default.tfplan") + assert.NoFileExists(t, planFile) + _, err := os.Create(planFile) + Assert(t, err == nil, "creating plan file: %v", err) + assert.FileExists(t, planFile) + logger := logging.NewNoopLogger(t) wd := &events.FileWorkspace{ @@ -425,13 +433,14 @@ func TestClone_RecloneWrongCommit(t *testing.T) { GpgNoSigningEnabled: true, Logger: logger, } - cloneDir, hasDiverged, err := wd.Clone(models.Repo{}, models.PullRequest{ + cloneDir, mergedAgain, err := wd.Clone(models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", HeadCommit: expCommit, }, "default") Ok(t, err) - Equals(t, false, hasDiverged) + Equals(t, false, mergedAgain) + assert.NoFileExists(t, planFile, "Plan file should have been wiped out by Clone") // Use rev-parse to verify at correct commit. actCommit := runCmd(t, cloneDir, "git", "rev-parse", "HEAD") @@ -499,37 +508,47 @@ func TestClone_MasterHasDiverged(t *testing.T) { Logger: logger, } + // Pretend terraform has created a plan file, we'll check for it later + planFile := filepath.Join(repoDir, "repos/0/default/default.tfplan") + assert.NoFileExists(t, planFile) + _, err := os.Create(planFile) + Assert(t, err == nil, "creating plan file: %v", err) + assert.FileExists(t, planFile) + // Run the clone without the checkout merge strategy. 
It should return - // false for hasDiverged - _, hasDiverged, err := wd.Clone(models.Repo{}, models.PullRequest{ + // false for mergedAgain + _, mergedAgain, err := wd.Clone(models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "second-pr", BaseBranch: "main", }, "default") Ok(t, err) - Assert(t, hasDiverged == false, "Clone with CheckoutMerge=false should not merge") + Assert(t, mergedAgain == false, "Clone with CheckoutMerge=false should not merge") + assert.FileExists(t, planFile, "Existing plan file should not be deleted by Clone with merge disabled") wd.CheckoutMerge = true - wd.SetSafeToReClone() + wd.SetCheckForUpstreamChanges() // Run the clone twice with the merge strategy, the first run should - // return true for hasDiverged, subsequent runs should + // return true for mergedAgain, subsequent runs should // return false since the first call is supposed to merge. - _, hasDiverged, err = wd.Clone(models.Repo{CloneURL: repoDir}, models.PullRequest{ + _, mergedAgain, err = wd.Clone(models.Repo{CloneURL: repoDir}, models.PullRequest{ BaseRepo: models.Repo{CloneURL: repoDir}, HeadBranch: "second-pr", BaseBranch: "main", }, "default") Ok(t, err) - Assert(t, hasDiverged == true, "First clone with CheckoutMerge=true with diverged base should have merged") + assert.FileExists(t, planFile, "Existing plan file should not be deleted by merging again") + Assert(t, mergedAgain == true, "First clone with CheckoutMerge=true with diverged base should have merged") - wd.SetSafeToReClone() - _, hasDiverged, err = wd.Clone(models.Repo{CloneURL: repoDir}, models.PullRequest{ + wd.SetCheckForUpstreamChanges() + _, mergedAgain, err = wd.Clone(models.Repo{CloneURL: repoDir}, models.PullRequest{ BaseRepo: models.Repo{CloneURL: repoDir}, HeadBranch: "second-pr", BaseBranch: "main", }, "default") Ok(t, err) - Assert(t, hasDiverged == false, "Second clone with CheckoutMerge=true and initially diverged base should not merge again") + Assert(t, mergedAgain == false, "Second clone with CheckoutMerge=true and initially diverged base should not merge again") + assert.FileExists(t, planFile, "Existing plan file should not have been deleted") } func TestHasDiverged_MasterHasDiverged(t *testing.T) {
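Below is a minimal Go sketch of the contract this patch establishes around the renamed methods: the caller arms the working directory with `SetCheckForUpstreamChanges()` while holding the project lock, `Clone` reports through its second return value whether an extra merge against the updated base branch was performed, and that flag flows into `PlanSuccess.MergedAgain`, which the new `merged_again.tmpl` template renders. `UpstreamAwareWorkingDir` and `planOnce` are illustrative stand-ins for `events.WorkingDir` and `DefaultProjectCommandRunner.doPlan`, not identifiers from the patch.

```go
package example

import (
	"fmt"

	"github.com/runatlantis/atlantis/server/events/models"
)

// UpstreamAwareWorkingDir is a hypothetical, trimmed-down view of the
// WorkingDir interface, showing only the methods this patch touches.
type UpstreamAwareWorkingDir interface {
	// Must be called after grabbing the project lock and before planning.
	SetCheckForUpstreamChanges()
	// Clone returns the checkout dir and whether the base branch had moved,
	// forcing a fresh merge before planning (the mergedAgain signal).
	Clone(headRepo models.Repo, p models.PullRequest, workspace string) (string, bool, error)
}

// planOnce mirrors the doPlan flow after this change: arm the upstream check,
// clone (idempotent), then propagate the merged-again flag into the plan
// result so the comment template can report it.
func planOnce(wd UpstreamAwareWorkingDir, headRepo models.Repo, pull models.PullRequest, workspace string) (*models.PlanSuccess, error) {
	wd.SetCheckForUpstreamChanges()

	dir, mergedAgain, err := wd.Clone(headRepo, pull, workspace)
	if err != nil {
		return nil, fmt.Errorf("cloning workspace %q: %w", workspace, err)
	}

	// ... run terraform plan in dir ...
	_ = dir

	return &models.PlanSuccess{
		// Rendered by templates/merged_again.tmpl as
		// ":twisted_rightwards_arrows: Upstream was modified, a new merge was performed."
		MergedAgain: mergedAgain,
	}, nil
}
```

As the updated tests assert, the design point of `mergeAgain` is that it resets the checkout to the remote base ref and re-merges under the same per-directory clone mutex instead of re-cloning, so existing plan files in the working directory survive the update.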