Skip to content
This repository has been archived by the owner on Sep 23, 2021. It is now read-only.

Issue 47 #48

Merged
merged 15 commits into from
Jan 14, 2019
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
15 commits
Select commit Hold shift + click to select a range
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
139 changes: 60 additions & 79 deletions lib/archiveHandler.js
Original file line number Diff line number Diff line change
Expand Up @@ -12,63 +12,71 @@

'use strict';

const path = require('path');
const { join: joinPaths, relative: relativePaths } = require('path');

const Archiver = require('archiver');
const async = require('async');
const fse = require('fs-extra');
const Git = require('nodegit');
const klaw = require('klaw');
const Ignore = require('ignore');

const { resolveRepositoryPath, createBlobReadStream, serveUncommittedContent } = require('./utils');
const {
isCheckedOut,
createBlobReadStream,
resolveCommit,
getObject,
} = require('./git');
const { resolveRepositoryPath } = require('./utils');

const CACHE_DIR = './tmp';

/**
* Recursively collects all tree entries (blobs and trees).
*
* @param {Git.Tree} tree tree instance to process
* @param {Array<Git.TreeEntry>} allEntries array where tree entries will be added
* @param {boolean} [blobsOnly = false] if false separate directory entries will be included
* @returns {Promise<Array<Git.TreeEntry>>} collected entries
* @param {string} repPath git repository path
* @param {object} tree git tree to process
* @param {Array<object>} result array where tree entries will be collected
* @param {string} treePath path of specified tree (will be prepended to child entries)
* @returns {Promise<Array<object>>} collected entries
*/
async function collectTreeEntries(tree, allEntries, blobsOnly = false) {
return new Promise((resolve, reject) => {
const walker = tree.walk(blobsOnly);
walker.on('entry', (entry) => {
allEntries.push(entry);
});
walker.on('error', (err) => {
reject(err);
});
walker.on('end', () => {
resolve(allEntries);
});
walker.start();
});
async function collectTreeEntries(repPath, tree, result, treePath) {
  // Map the tree's direct children, prefixing each entry's path with the
  // parent tree's path. This transform is fully synchronous, so a plain
  // map suffices — no async mapper / Promise.all needed.
  const entries = tree.entries.map(({
    oid, type, mode, path,
  }) => ({
    oid, type, mode, path: joinPaths(treePath, path),
  }));
  result.push(...entries);
  // recurse into subtrees sequentially so entries are collected in a
  // deterministic (depth-first) order
  const treeEntries = entries.filter(entry => entry.type === 'tree');
  for (let i = 0; i < treeEntries.length; i += 1) {
    const { oid, path } = treeEntries[i];
    /* eslint-disable no-await-in-loop */
    const { object: subTree } = await getObject(repPath, oid);
    await collectTreeEntries(repPath, subTree, result, path);
  }
  return result;
}

/**
* Serializes the specified git tree as an archive (zip/tgz).
*
* @param {Git.Repositor} repo
* @param {Git.Tree} tree
* @param {string} repPath git repository path
* @param {object} tree git tree to process
* @param {object} archiver Archiver instance
* @returns {Promise<stream.Readable>} readable stream of archive
*/
async function archiveGitTree(repo, tree, archive) {
async function archiveGitTree(repPath, tree, archive) {
// recursively collect all entries (blobs and trees)
const allEntries = await collectTreeEntries(tree, [], false);
const allEntries = await collectTreeEntries(repPath, tree, [], '');

const process = async (entry) => {
const p = entry.path();
if (entry.isTree() || entry.isSubmodule()) {
archive.append(null, { name: `${p}/` });
const process = async ({ type, oid, path }) => {
if (type === 'tree' || type === 'commit') {
// directory or submodule
archive.append(null, { name: `${path}/` });
} else {
const blob = await entry.getBlob();
const stream = await createBlobReadStream(repo, blob);
archive.append(stream, { name: p });
// blob
const stream = await createBlobReadStream(repPath, oid);
archive.append(stream, { name: path });
}
};

Expand Down Expand Up @@ -97,16 +105,14 @@ async function archiveGitTree(repo, tree, archive) {
async function collectFSEntries(dirPath, allEntries) {
// apply .gitignore rules
const ignore = Ignore();
const ignoreFilePath = path.join(dirPath, '.gitignore');
const ignoreFilePath = joinPaths(dirPath, '.gitignore');
if (await fse.pathExists(ignoreFilePath)) {
const data = await fse.readFile(ignoreFilePath);
ignore.add(data.toString());
}
ignore.add('.git/');
ignore.add('.git');

const filterIgnored = (item) => {
return !ignore.ignores(path.relative(dirPath, item));
};
const filterIgnored = item => !ignore.ignores(relativePaths(dirPath, item));

return new Promise((resolve, reject) => {
klaw(dirPath, { filter: filterIgnored })
Expand Down Expand Up @@ -134,7 +140,7 @@ async function archiveWorkingDir(dirPath, archive) {
const allEntries = await collectFSEntries(dirPath, []);

const process = (entry, cb) => {
const p = path.relative(dirPath, entry.path);
const p = relativePaths(dirPath, entry.path);
if (p.length) {
if (entry.stats.isDirectory()) {
archive.append(null, { name: `${p}/` });
Expand All @@ -161,7 +167,7 @@ async function archiveWorkingDir(dirPath, archive) {
}

/**
* Export the raw content handler (express middleware) through a parameterizable function
* Export the archive handler (express middleware) through a parameterizable function
*
* @param {object} options configuration hash
* @param {string} archiveFormat 'zip' or 'tar.gz'
Expand All @@ -177,7 +183,7 @@ function createMiddleware(options, archiveFormat) {
*
* @see https://developer.github.com/v3/repos/contents/#get-archive-link
*/
return (req, res, next) => {
return async (req, res, next) => {
// GET /:owner/:repo/:archive_format/:ref
const { owner } = req.params;
const repoName = req.params.repo;
Expand All @@ -187,43 +193,22 @@ function createMiddleware(options, archiveFormat) {

const repPath = resolveRepositoryPath(options, owner, repoName);

let repo;
let serveUncommitted = false;
let headRef;
let commitId;
// project-helix/#187: serve modified content only if the requested ref is currently checked out
const serveUncommitted = await isCheckedOut(repPath, refName);

let commitSha;
let archiveFileName;
let archiveFilePath;
Git.Repository.open(repPath)
.then((repository) => {
repo = repository;
return repo.head()
.then((ref) => {
headRef = ref;
return repo.getReference(refName);
})
.then((reqRef) => {
serveUncommitted = serveUncommittedContent(repo, headRef, reqRef);
return reqRef.peel(Git.Object.TYPE.COMMIT);
})
.then(obj => Git.Commit.lookup(repo, obj.id()))
.catch((err) => {
/* eslint arrow-body-style: "off" */
options.logger.debug(`[archiveHandler] errno: ${err.errno} errorFuntion: ${err.errorFunction} message: ${err.message}`);

// ref => commit id?
// return repo.getCommit(ref);
// support shorthand commit id's
return Git.AnnotatedCommit.fromRevspec(repo, refName)
.then(annCommit => repo.getCommit(annCommit.id()));
});
resolveCommit(repPath, refName)
.then((oid) => {
commitSha = oid;
return getObject(repPath, commitSha);
})
.then((commit) => {
commitId = commit.id().tostrS();
return commit.getTree();
})
.then(async (tree) => {
archiveFileName = `${owner}-${repoName}-${serveUncommitted ? 'SNAPSHOT' : commitId}${archiveFormat === 'zip' ? '.zip' : '.tgz'}`;
archiveFilePath = path.join(CACHE_DIR, archiveFileName);
.then(({ object: commit }) => getObject(repPath, commit.tree))
.then(async ({ object: tree }) => {
archiveFileName = `${owner}-${repoName}-${serveUncommitted ? 'SNAPSHOT' : commitSha}${archiveFormat === 'zip' ? '.zip' : '.tgz'}`;
archiveFilePath = joinPaths(CACHE_DIR, archiveFileName);
await fse.ensureDir(CACHE_DIR);

// check cache
Expand Down Expand Up @@ -252,7 +237,7 @@ function createMiddleware(options, archiveFormat) {
// don't cache
archive = await archiveWorkingDir(repPath, archive);
} else {
archive = await archiveGitTree(repo, tree, archive);
archive = await archiveGitTree(repPath, tree, archive);
}

return new Promise((resolve, reject) => {
Expand All @@ -278,12 +263,8 @@ function createMiddleware(options, archiveFormat) {
archiveStream.pipe(res);
})
.catch((err) => {
options.logger.debug(`[archiveHandler] errno: ${err.errno} errorFuntion: ${err.errorFunction} message: ${err.message} stack: ${err.stack}`);
options.logger.debug(`[archiveHandler] code: ${err.code} message: ${err.message} stack: ${err.stack}`);
next(err);
})
.finally(() => {
// cleanup
repo.free();
});
};
}
Expand Down
36 changes: 18 additions & 18 deletions lib/blobHandler.js
Original file line number Diff line number Diff line change
Expand Up @@ -12,8 +12,7 @@

'use strict';

const Git = require('nodegit');

const { getObject } = require('./git');
const { resolveRepositoryPath } = require('./utils');

/**
Expand Down Expand Up @@ -43,35 +42,36 @@ function createMiddleware(options) {

const repPath = resolveRepositoryPath(options, owner, repoName);

let repo;
Git.Repository.open(repPath)
.then((repository) => {
repo = repository;
return repo.getBlob(sha);
})
.then(((blob) => {
if (sha.match(/[0-9a-f]/g).length !== 40) {
// invalid sha format
res.status(422).json({
message: 'The sha parameter must be exactly 40 characters and contain only [0-9a-f].',
documentation_url: 'https://developer.github.com/v3/git/blobs/#get-a-blob',
});
return;
}
getObject(repPath, sha)
.then(({ object: content }) => {
res.json({
sha,
size: blob.rawsize(),
size: content.length,
url: `${req.protocol}://${host}${req.path}`,
content: `${blob.content().toString('base64')}\n`,
content: `${content.toString('base64')}\n`,
encoding: 'base64',
});
}))
})
.catch((err) => {
if (err.errno === -3) {
// TODO: use generic errors
if (err.code === 'ReadObjectFail') {
options.logger.debug(`[blobHandler] resource not found: ${err.message}`);
res.status(404).json({
message: 'Not Found',
documentation_url: 'https://developer.github.com/v3/git/blobs/#get-a-blob',
});
} else {
options.logger.debug(`[blobHandler] errno: ${err.errno} errorFuntion: ${err.errorFunction} message: ${err.message} stack: ${err.stack}`);
options.logger.debug(`[blobHandler] code: ${err.code} message: ${err.message} stack: ${err.stack}`);
next(err);
}
})
.finally(() => {
// TODO: cache Repository instances (key: absolute path)
repo.free();
});
};
}
Expand Down
Loading