fix: backfill commit files for merge/rebase/direct commits #1110

Merged 2 commits on Nov 23, 2021
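In short: when a commit in the history has an associated squash-merged pull request, its file list already comes back via GraphQL; for plain merge commits, rebase merges, and direct pushes it does not, so this change backfills the file list from the REST API. A condensed sketch of the per-commit logic, simplified from the src/github.ts diff below (variable names mirror that diff; this is not the verbatim code):

const pullRequest = graphCommit.associatedPullRequests.nodes.find(
  pr => pr.mergeCommit && pr.mergeCommit.oid === graphCommit.sha
);
if (pullRequest) {
  // Squash merge: GraphQL already exposes the pull request's file list.
  commit.files = pullRequest.files.nodes.map(node => node.path);
} else {
  // Merge commit, rebase merge, or direct commit: backfill via REST.
  commit.files = await this.getCommitFiles(graphCommit.sha);
}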
18 changes: 18 additions & 0 deletions __snapshots__/github.js

Some generated files are not rendered by default.

86 changes: 53 additions & 33 deletions src/github.ts
@@ -365,44 +365,64 @@ export class GitHub {
}
const history = response.repository.ref.target.history;
const commits = (history.nodes || []) as GraphQLCommit[];
const commitData: Commit[] = [];
for (const graphCommit of commits) {
const commit: Commit = {
sha: graphCommit.sha,
message: graphCommit.message,
files: [],
};
const pullRequest = graphCommit.associatedPullRequests.nodes.find(pr => {
return pr.mergeCommit && pr.mergeCommit.oid === graphCommit.sha;
});
if (pullRequest) {
const files = pullRequest.files.nodes.map(node => node.path);
commit.pullRequest = {
sha: commit.sha,
number: pullRequest.number,
baseBranchName: pullRequest.baseRefName,
headBranchName: pullRequest.headRefName,
title: pullRequest.title,
body: pullRequest.body,
labels: pullRequest.labels.nodes.map(node => node.name),
files,
};
// We cannot directly fetch files on commits via GraphQL, so we only provide
// file information for commits with associated pull requests
commit.files = files;
} else {
// In this case, there is no squashed merge commit. This could be a simple
// merge commit, a rebase merge commit, or a direct commit to the branch.
// Fall back to fetching the list of files for the commit from the REST API.
// In the future we can perhaps lazy load these.
commit.files = await this.getCommitFiles(graphCommit.sha);
}
commitData.push(commit);
}
return {
pageInfo: history.pageInfo,
data: commits.map(graphCommit => {
const commit: Commit = {
sha: graphCommit.sha,
message: graphCommit.message,
files: [],
};
const pullRequest = graphCommit.associatedPullRequests.nodes.find(
pr => {
return pr.mergeCommit && pr.mergeCommit.oid === graphCommit.sha;
}
);
if (pullRequest) {
const files = pullRequest.files.nodes.map(node => node.path);
commit.pullRequest = {
sha: commit.sha,
number: pullRequest.number,
baseBranchName: pullRequest.baseRefName,
headBranchName: pullRequest.headRefName,
title: pullRequest.title,
body: pullRequest.body,
labels: pullRequest.labels.nodes.map(node => node.name),
files,
};
// We cannot directly fetch files on commits via graphql, only provide file
// information for commits with associated pull requests
commit.files = files;
} else {
logger.warn(
`No merged pull request for commit: ${graphCommit.sha} - files unavailable`
);
}
return commit;
}),
data: commitData,
};
}

/**
* Get the list of file paths modified in a given commit.
*
* @param {string} sha The commit SHA
* @returns {string[]} File paths
* @throws {GitHubAPIError} on an API error
*/
getCommitFiles = wrapAsync(async (sha: string): Promise<string[]> => {
logger.debug(`Backfilling file list for commit: ${sha}`);
const resp = await this.octokit.repos.getCommit({
owner: this.repository.owner,
repo: this.repository.repo,
ref: sha,
});
const files = resp.data.files || [];
return files.map(file => file.filename!).filter(filename => !!filename);
});

private graphqlRequest = wrapAsync(
async (
opts: {
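For reference, a minimal usage sketch of the new getCommitFiles helper added above. It assumes github is an already-configured GitHub client from this library; the SHA and the returned path are placeholders:

// Backfill the file list for a commit that has no associated pull request.
const files = await github.getCommitFiles('abc123');
// `files` is an array of changed paths fetched via the REST commits endpoint,
// e.g. ['src/github.ts'].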
1 change: 0 additions & 1 deletion src/manifest.ts
@@ -768,7 +768,6 @@ export class Manifest {
);
}
this._pathsByComponent[component] = path;
logger.info(this._pathsByComponent);
}
}
return this._pathsByComponent;
57 changes: 56 additions & 1 deletion test/github.ts
@@ -14,12 +14,13 @@

import * as nock from 'nock';
import {expect} from 'chai';
import {beforeEach, describe, it} from 'mocha';
import {afterEach, beforeEach, describe, it} from 'mocha';
nock.disableNetConnect();

import {readFileSync} from 'fs';
import {resolve} from 'path';
import * as snapshot from 'snap-shot-it';
import * as sinon from 'sinon';

import {GitHub, GitHubRelease} from '../src/github';
import {PullRequest} from '../src/pull-request';
@@ -30,6 +31,7 @@ import {DuplicateReleaseError, GitHubAPIError} from '../src/errors';
import {fail} from 'assert';

const fixturesPath = './test/fixtures';
const sandbox = sinon.createSandbox();

describe('GitHub', () => {
let github: GitHub;
@@ -56,6 +58,9 @@ describe('GitHub', () => {
// This shared nock will take care of some common requests.
req = getNock();
});
afterEach(() => {
sandbox.restore();
});

describe('create', () => {
it('allows configuring the default branch explicitly', async () => {
@@ -287,6 +292,7 @@ describe('GitHub', () => {

describe('commitsSince', () => {
it('finds commits up until a condition', async () => {
sandbox.stub(github, 'getCommitFiles').resolves([]);
const graphql = JSON.parse(
readFileSync(resolve(fixturesPath, 'commits-since.json'), 'utf8')
);
@@ -307,6 +313,7 @@ });
});

it('paginates through commits', async () => {
sandbox.stub(github, 'getCommitFiles').resolves([]);
const graphql1 = JSON.parse(
readFileSync(resolve(fixturesPath, 'commits-since-page-1.json'), 'utf8')
);
@@ -336,6 +343,7 @@ });
});

it('finds first commit of a multi-commit merge pull request', async () => {
sandbox.stub(github, 'getCommitFiles').resolves([]);
const graphql = JSON.parse(
readFileSync(resolve(fixturesPath, 'commits-since.json'), 'utf8')
);
@@ -356,6 +364,7 @@ });
});

it('limits pagination', async () => {
sandbox.stub(github, 'getCommitFiles').resolves([]);
const graphql1 = JSON.parse(
readFileSync(resolve(fixturesPath, 'commits-since-page-1.json'), 'utf8')
);
@@ -377,6 +386,7 @@ });
});

it('returns empty commits if branch does not exist', async () => {
sandbox.stub(github, 'getCommitFiles').resolves([]);
const graphql = JSON.parse(
readFileSync(
resolve(fixturesPath, 'commits-since-missing-branch.json'),
@@ -396,6 +406,51 @@ expect(commitsSinceSha.length).to.eql(0);
expect(commitsSinceSha.length).to.eql(0);
req.done();
});

it('backfills commit files without pull requests', async () => {
const graphql = JSON.parse(
readFileSync(resolve(fixturesPath, 'commits-since.json'), 'utf8')
);
req
.post('/graphql')
.reply(200, {
data: graphql,
})
.get(
'/repos/fake/fake/commits/0cda26c2e7776748072ba5a24302474947b3ebbd'
)
.reply(200, {files: [{filename: 'abc'}]})
.get(
'/repos/fake/fake/commits/c6d9dfb03aa2dbe1abc329592af60713fe28586d'
)
.reply(200, {files: [{filename: 'def'}]})
.get(
'/repos/fake/fake/commits/c8f1498c92c323bfa8f5ffe84e0ade1c37e4ea6e'
)
.reply(200, {files: [{filename: 'ghi'}]});
const targetBranch = 'main';
const commitsSinceSha = await github.commitsSince(
targetBranch,
commit => {
// this commit is the 2nd most recent
return commit.sha === 'b29149f890e6f76ee31ed128585744d4c598924c';
}
);
expect(commitsSinceSha.length).to.eql(1);
snapshot(commitsSinceSha);
req.done();
});
});

describe('getCommitFiles', () => {
it('fetches the list of files', async () => {
req
.get('/repos/fake/fake/commits/abc123')
.reply(200, {files: [{filename: 'abc'}]});
const files = await github.getCommitFiles('abc123');
expect(files).to.eql(['abc']);
req.done();
});
});

describe('releaseIterator', () => {