Commit e445c64
bump @actions/artifact to v2.1.9

robherley committed Aug 1, 2024
1 parent 0b2256b
Showing 4 changed files with 67 additions and 71 deletions.
58 changes: 28 additions & 30 deletions dist/merge/index.js

@@ -2997,7 +2997,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
     return (mod && mod.__esModule) ? mod : { "default": mod };
 };
 Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.getConcurrency = exports.getGitHubWorkspaceDir = exports.isGhes = exports.getResultsServiceUrl = exports.getRuntimeToken = exports.getUploadChunkSize = void 0;
+exports.getUploadChunkTimeout = exports.getConcurrency = exports.getGitHubWorkspaceDir = exports.isGhes = exports.getResultsServiceUrl = exports.getRuntimeToken = exports.getUploadChunkSize = void 0;
 const os_1 = __importDefault(__nccwpck_require__(22037));
 // Used for controlling the highWaterMark value of the zip that is being streamed
 // The same value is used as the chunk size that is use during upload to blob storage
@@ -3050,6 +3050,10 @@ function getConcurrency() {
     return concurrency > 300 ? 300 : concurrency;
 }
 exports.getConcurrency = getConcurrency;
+function getUploadChunkTimeout() {
+    return 30000; // 30 seconds
+}
+exports.getUploadChunkTimeout = getUploadChunkTimeout;
 //# sourceMappingURL=config.js.map

 /***/ }),
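
Note: the new getUploadChunkTimeout() helper follows the same export pattern as the other tuning knobs in config.js (getUploadChunkSize, getConcurrency). It returns a fixed 30 seconds rather than reading an environment variable; the upload code below passes it to the new promise-based chunkTimer.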
@@ -3298,37 +3302,34 @@ function uploadZipToBlobStorage(authenticatedUploadURL, zipUploadStream) {
     return __awaiter(this, void 0, void 0, function* () {
         let uploadByteCount = 0;
         let lastProgressTime = Date.now();
-        let timeoutId;
-        const chunkTimer = (timeout) => {
-            // clear the previous timeout
-            if (timeoutId) {
-                clearTimeout(timeoutId);
-            }
-            timeoutId = setTimeout(() => {
-                const now = Date.now();
-                // if there's been more than 30 seconds since the
-                // last progress event, then we'll consider the upload stalled
-                if (now - lastProgressTime > timeout) {
-                    throw new Error('Upload progress stalled.');
-                }
-            }, timeout);
-            return timeoutId;
-        };
+        const abortController = new AbortController();
+        const chunkTimer = (interval) => __awaiter(this, void 0, void 0, function* () {
+            return new Promise((resolve, reject) => {
+                const timer = setInterval(() => {
+                    if (Date.now() - lastProgressTime > interval) {
+                        reject(new Error('Upload progress stalled.'));
+                    }
+                }, interval);
+                abortController.signal.addEventListener('abort', () => {
+                    clearInterval(timer);
+                    resolve();
+                });
+            });
+        });
         const maxConcurrency = (0, config_1.getConcurrency)();
         const bufferSize = (0, config_1.getUploadChunkSize)();
         const blobClient = new storage_blob_1.BlobClient(authenticatedUploadURL);
         const blockBlobClient = blobClient.getBlockBlobClient();
-        const timeoutDuration = 300000; // 30 seconds
         core.debug(`Uploading artifact zip to blob storage with maxConcurrency: ${maxConcurrency}, bufferSize: ${bufferSize}`);
         const uploadCallback = (progress) => {
             core.info(`Uploaded bytes ${progress.loadedBytes}`);
             uploadByteCount = progress.loadedBytes;
-            chunkTimer(timeoutDuration);
             lastProgressTime = Date.now();
         };
         const options = {
             blobHTTPHeaders: { blobContentType: 'zip' },
-            onProgress: uploadCallback
+            onProgress: uploadCallback,
+            abortSignal: abortController.signal
         };
         let sha256Hash = undefined;
         const uploadStream = new stream.PassThrough();
@@ -3337,9 +3338,10 @@ function uploadZipToBlobStorage(authenticatedUploadURL, zipUploadStream) {
         zipUploadStream.pipe(hashStream).setEncoding('hex'); // This stream is used to compute a hash of the zip content that gets used. Integrity check
         core.info('Beginning upload of artifact content to blob storage');
         try {
-            // Start the chunk timer
-            timeoutId = chunkTimer(timeoutDuration);
-            yield blockBlobClient.uploadStream(uploadStream, bufferSize, maxConcurrency, options);
+            yield Promise.race([
+                blockBlobClient.uploadStream(uploadStream, bufferSize, maxConcurrency, options),
+                chunkTimer((0, config_1.getUploadChunkTimeout)())
+            ]);
         }
         catch (error) {
             if (errors_1.NetworkError.isNetworkErrorCode(error === null || error === void 0 ? void 0 : error.code)) {
@@ -3348,10 +3350,7 @@ function uploadZipToBlobStorage(authenticatedUploadURL, zipUploadStream) {
             throw error;
         }
         finally {
-            // clear the timeout whether or not the upload completes
-            if (timeoutId) {
-                clearTimeout(timeoutId);
-            }
+            abortController.abort();
         }
         core.info('Finished uploading artifact content to blob storage!');
         hashStream.end();
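
The hunks above replace the old stall detector, which had two problems: the setTimeout callback threw 'Upload progress stalled.' from inside a timer, where no surrounding try/catch could observe it, and the hardcoded timeoutDuration of 300000 ms (five minutes) contradicted its own "// 30 seconds" comment. The new version reports a stall by rejecting a promise that is raced against the upload, centralizes the interval in getUploadChunkTimeout(), and uses an AbortController to stop the interval timer once the upload settles. A minimal standalone sketch of the pattern; the generic upload function and its options are assumptions for illustration, not the actual @actions/artifact API:

// Sketch of the stall-watchdog pattern introduced by this commit.
// `upload({ onProgress, abortSignal })` is a hypothetical async function.
async function uploadWithStallDetection(upload, timeoutMs) {
    let lastProgressTime = Date.now();
    const abortController = new AbortController();
    // Rejects if no progress event lands within `timeoutMs`; resolves once
    // aborted, so Promise.race can settle on whichever happens first.
    const watchdog = new Promise((resolve, reject) => {
        const timer = setInterval(() => {
            if (Date.now() - lastProgressTime > timeoutMs) {
                reject(new Error('Upload progress stalled.'));
            }
        }, timeoutMs);
        abortController.signal.addEventListener('abort', () => {
            clearInterval(timer);
            resolve();
        });
    });
    try {
        // Whichever settles first wins: a finished upload or a stall error.
        return await Promise.race([
            upload({
                onProgress: () => { lastProgressTime = Date.now(); },
                abortSignal: abortController.signal
            }),
            watchdog
        ]);
    }
    finally {
        // Stop the interval whether the upload finished or stalled, so a
        // dangling timer does not keep the event loop alive.
        abortController.abort();
    }
}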
@@ -3778,7 +3777,6 @@ exports.createZipUploadStream = exports.ZipUploadStream = exports.DEFAULT_COMPRE
 const stream = __importStar(__nccwpck_require__(12781));
 const archiver = __importStar(__nccwpck_require__(43084));
 const core = __importStar(__nccwpck_require__(42186));
-const fs_1 = __nccwpck_require__(57147);
 const config_1 = __nccwpck_require__(74610);
 exports.DEFAULT_COMPRESSION_LEVEL = 6;
 // Custom stream transformer so we can set the highWaterMark property
@@ -3810,7 +3808,7 @@ function createZipUploadStream(uploadSpecification, compressionLevel = exports.D
     for (const file of uploadSpecification) {
         if (file.sourcePath !== null) {
             // Add a normal file to the zip
-            zip.append((0, fs_1.createReadStream)(file.sourcePath), {
+            zip.file(file.sourcePath, {
                 name: file.destinationPath
             });
         }
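
The last two hunks switch archiver from zip.append(createReadStream(path), ...) to zip.file(path, ...). With append, a read stream is opened for every file as the upload specification is walked; zip.file records only the path and lets archiver open each file lazily when its entry is actually written, which keeps the number of simultaneously open file descriptors small for artifacts containing many files. That rationale is inferred from the API difference, not stated in the commit. A small usage sketch with the archiver API (file paths are illustrative):

const fs = require('fs');
const archiver = require('archiver');

const zip = archiver('zip', { zlib: { level: 6 } });
zip.pipe(fs.createWriteStream('artifact.zip'));

// Before: eagerly opens a read stream per file at append time.
// zip.append(fs.createReadStream('logs/run-1.txt'), { name: 'run-1.txt' });

// After: stores only the path; archiver opens the file when the entry
// is processed during finalize().
zip.file('logs/run-1.txt', { name: 'run-1.txt' });

zip.finalize();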
@@ -136152,7 +136150,7 @@ module.exports = index;
 /***/ ((module) => {

 "use strict";
-module.exports = JSON.parse('{"name":"@actions/artifact","version":"2.1.8","preview":true,"description":"Actions artifact lib","keywords":["github","actions","artifact"],"homepage":"https://github.com/actions/toolkit/tree/main/packages/artifact","license":"MIT","main":"lib/artifact.js","types":"lib/artifact.d.ts","directories":{"lib":"lib","test":"__tests__"},"files":["lib","!.DS_Store"],"publishConfig":{"access":"public"},"repository":{"type":"git","url":"git+https://github.com/actions/toolkit.git","directory":"packages/artifact"},"scripts":{"audit-moderate":"npm install && npm audit --json --audit-level=moderate > audit.json","test":"cd ../../ && npm run test ./packages/artifact","bootstrap":"cd ../../ && npm run bootstrap","tsc-run":"tsc","tsc":"npm run bootstrap && npm run tsc-run","gen:docs":"typedoc --plugin typedoc-plugin-markdown --out docs/generated src/artifact.ts --githubPages false --readme none"},"bugs":{"url":"https://github.com/actions/toolkit/issues"},"dependencies":{"@actions/core":"^1.10.0","@actions/github":"^5.1.1","@actions/http-client":"^2.1.0","@azure/storage-blob":"^12.15.0","@octokit/core":"^3.5.1","@octokit/plugin-request-log":"^1.0.4","@octokit/plugin-retry":"^3.0.9","@octokit/request-error":"^5.0.0","@protobuf-ts/plugin":"^2.2.3-alpha.1","archiver":"^7.0.1","crypto":"^1.0.1","jwt-decode":"^3.1.2","twirp-ts":"^2.5.0","unzip-stream":"^0.3.1"},"devDependencies":{"@types/archiver":"^5.3.2","@types/unzip-stream":"^0.3.4","typedoc":"^0.25.4","typedoc-plugin-markdown":"^3.17.1","typescript":"^5.2.2"}}');
+module.exports = JSON.parse('{"name":"@actions/artifact","version":"2.1.9","preview":true,"description":"Actions artifact lib","keywords":["github","actions","artifact"],"homepage":"https://github.com/actions/toolkit/tree/main/packages/artifact","license":"MIT","main":"lib/artifact.js","types":"lib/artifact.d.ts","directories":{"lib":"lib","test":"__tests__"},"files":["lib","!.DS_Store"],"publishConfig":{"access":"public"},"repository":{"type":"git","url":"git+https://github.com/actions/toolkit.git","directory":"packages/artifact"},"scripts":{"audit-moderate":"npm install && npm audit --json --audit-level=moderate > audit.json","test":"cd ../../ && npm run test ./packages/artifact","bootstrap":"cd ../../ && npm run bootstrap","tsc-run":"tsc","tsc":"npm run bootstrap && npm run tsc-run","gen:docs":"typedoc --plugin typedoc-plugin-markdown --out docs/generated src/artifact.ts --githubPages false --readme none"},"bugs":{"url":"https://github.com/actions/toolkit/issues"},"dependencies":{"@actions/core":"^1.10.0","@actions/github":"^5.1.1","@actions/http-client":"^2.1.0","@azure/storage-blob":"^12.15.0","@octokit/core":"^3.5.1","@octokit/plugin-request-log":"^1.0.4","@octokit/plugin-retry":"^3.0.9","@octokit/request-error":"^5.0.0","@protobuf-ts/plugin":"^2.2.3-alpha.1","archiver":"^7.0.1","crypto":"^1.0.1","jwt-decode":"^3.1.2","twirp-ts":"^2.5.0","unzip-stream":"^0.3.1"},"devDependencies":{"@types/archiver":"^5.3.2","@types/unzip-stream":"^0.3.4","typedoc":"^0.25.4","typedoc-plugin-markdown":"^3.17.1","typescript":"^5.2.2"}}');

 /***/ }),
58 changes: 28 additions & 30 deletions dist/upload/index.js
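
The diff below mirrors dist/merge/index.js hunk for hunk: both bundled entry points vendor the same @actions/artifact sources, so the same changes land in each dist build.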
@@ -2997,7 +2997,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
     return (mod && mod.__esModule) ? mod : { "default": mod };
 };
 Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.getConcurrency = exports.getGitHubWorkspaceDir = exports.isGhes = exports.getResultsServiceUrl = exports.getRuntimeToken = exports.getUploadChunkSize = void 0;
+exports.getUploadChunkTimeout = exports.getConcurrency = exports.getGitHubWorkspaceDir = exports.isGhes = exports.getResultsServiceUrl = exports.getRuntimeToken = exports.getUploadChunkSize = void 0;
 const os_1 = __importDefault(__nccwpck_require__(22037));
 // Used for controlling the highWaterMark value of the zip that is being streamed
 // The same value is used as the chunk size that is use during upload to blob storage
@@ -3050,6 +3050,10 @@ function getConcurrency() {
     return concurrency > 300 ? 300 : concurrency;
 }
 exports.getConcurrency = getConcurrency;
+function getUploadChunkTimeout() {
+    return 30000; // 30 seconds
+}
+exports.getUploadChunkTimeout = getUploadChunkTimeout;
 //# sourceMappingURL=config.js.map

 /***/ }),
@@ -3298,37 +3302,34 @@ function uploadZipToBlobStorage(authenticatedUploadURL, zipUploadStream) {
     return __awaiter(this, void 0, void 0, function* () {
         let uploadByteCount = 0;
         let lastProgressTime = Date.now();
-        let timeoutId;
-        const chunkTimer = (timeout) => {
-            // clear the previous timeout
-            if (timeoutId) {
-                clearTimeout(timeoutId);
-            }
-            timeoutId = setTimeout(() => {
-                const now = Date.now();
-                // if there's been more than 30 seconds since the
-                // last progress event, then we'll consider the upload stalled
-                if (now - lastProgressTime > timeout) {
-                    throw new Error('Upload progress stalled.');
-                }
-            }, timeout);
-            return timeoutId;
-        };
+        const abortController = new AbortController();
+        const chunkTimer = (interval) => __awaiter(this, void 0, void 0, function* () {
+            return new Promise((resolve, reject) => {
+                const timer = setInterval(() => {
+                    if (Date.now() - lastProgressTime > interval) {
+                        reject(new Error('Upload progress stalled.'));
+                    }
+                }, interval);
+                abortController.signal.addEventListener('abort', () => {
+                    clearInterval(timer);
+                    resolve();
+                });
+            });
+        });
         const maxConcurrency = (0, config_1.getConcurrency)();
         const bufferSize = (0, config_1.getUploadChunkSize)();
         const blobClient = new storage_blob_1.BlobClient(authenticatedUploadURL);
         const blockBlobClient = blobClient.getBlockBlobClient();
-        const timeoutDuration = 300000; // 30 seconds
         core.debug(`Uploading artifact zip to blob storage with maxConcurrency: ${maxConcurrency}, bufferSize: ${bufferSize}`);
         const uploadCallback = (progress) => {
             core.info(`Uploaded bytes ${progress.loadedBytes}`);
             uploadByteCount = progress.loadedBytes;
-            chunkTimer(timeoutDuration);
             lastProgressTime = Date.now();
         };
         const options = {
             blobHTTPHeaders: { blobContentType: 'zip' },
-            onProgress: uploadCallback
+            onProgress: uploadCallback,
+            abortSignal: abortController.signal
         };
         let sha256Hash = undefined;
         const uploadStream = new stream.PassThrough();
@@ -3337,9 +3338,10 @@ function uploadZipToBlobStorage(authenticatedUploadURL, zipUploadStream) {
         zipUploadStream.pipe(hashStream).setEncoding('hex'); // This stream is used to compute a hash of the zip content that gets used. Integrity check
         core.info('Beginning upload of artifact content to blob storage');
         try {
-            // Start the chunk timer
-            timeoutId = chunkTimer(timeoutDuration);
-            yield blockBlobClient.uploadStream(uploadStream, bufferSize, maxConcurrency, options);
+            yield Promise.race([
+                blockBlobClient.uploadStream(uploadStream, bufferSize, maxConcurrency, options),
+                chunkTimer((0, config_1.getUploadChunkTimeout)())
+            ]);
         }
         catch (error) {
             if (errors_1.NetworkError.isNetworkErrorCode(error === null || error === void 0 ? void 0 : error.code)) {
@@ -3348,10 +3350,7 @@ function uploadZipToBlobStorage(authenticatedUploadURL, zipUploadStream) {
             throw error;
         }
         finally {
-            // clear the timeout whether or not the upload completes
-            if (timeoutId) {
-                clearTimeout(timeoutId);
-            }
+            abortController.abort();
         }
         core.info('Finished uploading artifact content to blob storage!');
         hashStream.end();
@@ -3778,7 +3777,6 @@ exports.createZipUploadStream = exports.ZipUploadStream = exports.DEFAULT_COMPRE
 const stream = __importStar(__nccwpck_require__(12781));
 const archiver = __importStar(__nccwpck_require__(43084));
 const core = __importStar(__nccwpck_require__(42186));
-const fs_1 = __nccwpck_require__(57147);
 const config_1 = __nccwpck_require__(74610);
 exports.DEFAULT_COMPRESSION_LEVEL = 6;
 // Custom stream transformer so we can set the highWaterMark property
@@ -3810,7 +3808,7 @@ function createZipUploadStream(uploadSpecification, compressionLevel = exports.D
     for (const file of uploadSpecification) {
         if (file.sourcePath !== null) {
             // Add a normal file to the zip
-            zip.append((0, fs_1.createReadStream)(file.sourcePath), {
+            zip.file(file.sourcePath, {
                 name: file.destinationPath
             });
         }
@@ -136162,7 +136160,7 @@ module.exports = index;
 /***/ ((module) => {

 "use strict";
-module.exports = JSON.parse('{"name":"@actions/artifact","version":"2.1.8","preview":true,"description":"Actions artifact lib","keywords":["github","actions","artifact"],"homepage":"https://github.com/actions/toolkit/tree/main/packages/artifact","license":"MIT","main":"lib/artifact.js","types":"lib/artifact.d.ts","directories":{"lib":"lib","test":"__tests__"},"files":["lib","!.DS_Store"],"publishConfig":{"access":"public"},"repository":{"type":"git","url":"git+https://github.com/actions/toolkit.git","directory":"packages/artifact"},"scripts":{"audit-moderate":"npm install && npm audit --json --audit-level=moderate > audit.json","test":"cd ../../ && npm run test ./packages/artifact","bootstrap":"cd ../../ && npm run bootstrap","tsc-run":"tsc","tsc":"npm run bootstrap && npm run tsc-run","gen:docs":"typedoc --plugin typedoc-plugin-markdown --out docs/generated src/artifact.ts --githubPages false --readme none"},"bugs":{"url":"https://github.com/actions/toolkit/issues"},"dependencies":{"@actions/core":"^1.10.0","@actions/github":"^5.1.1","@actions/http-client":"^2.1.0","@azure/storage-blob":"^12.15.0","@octokit/core":"^3.5.1","@octokit/plugin-request-log":"^1.0.4","@octokit/plugin-retry":"^3.0.9","@octokit/request-error":"^5.0.0","@protobuf-ts/plugin":"^2.2.3-alpha.1","archiver":"^7.0.1","crypto":"^1.0.1","jwt-decode":"^3.1.2","twirp-ts":"^2.5.0","unzip-stream":"^0.3.1"},"devDependencies":{"@types/archiver":"^5.3.2","@types/unzip-stream":"^0.3.4","typedoc":"^0.25.4","typedoc-plugin-markdown":"^3.17.1","typescript":"^5.2.2"}}');
+module.exports = JSON.parse('{"name":"@actions/artifact","version":"2.1.9","preview":true,"description":"Actions artifact lib","keywords":["github","actions","artifact"],"homepage":"https://github.com/actions/toolkit/tree/main/packages/artifact","license":"MIT","main":"lib/artifact.js","types":"lib/artifact.d.ts","directories":{"lib":"lib","test":"__tests__"},"files":["lib","!.DS_Store"],"publishConfig":{"access":"public"},"repository":{"type":"git","url":"git+https://github.com/actions/toolkit.git","directory":"packages/artifact"},"scripts":{"audit-moderate":"npm install && npm audit --json --audit-level=moderate > audit.json","test":"cd ../../ && npm run test ./packages/artifact","bootstrap":"cd ../../ && npm run bootstrap","tsc-run":"tsc","tsc":"npm run bootstrap && npm run tsc-run","gen:docs":"typedoc --plugin typedoc-plugin-markdown --out docs/generated src/artifact.ts --githubPages false --readme none"},"bugs":{"url":"https://github.com/actions/toolkit/issues"},"dependencies":{"@actions/core":"^1.10.0","@actions/github":"^5.1.1","@actions/http-client":"^2.1.0","@azure/storage-blob":"^12.15.0","@octokit/core":"^3.5.1","@octokit/plugin-request-log":"^1.0.4","@octokit/plugin-retry":"^3.0.9","@octokit/request-error":"^5.0.0","@protobuf-ts/plugin":"^2.2.3-alpha.1","archiver":"^7.0.1","crypto":"^1.0.1","jwt-decode":"^3.1.2","twirp-ts":"^2.5.0","unzip-stream":"^0.3.1"},"devDependencies":{"@types/archiver":"^5.3.2","@types/unzip-stream":"^0.3.4","typedoc":"^0.25.4","typedoc-plugin-markdown":"^3.17.1","typescript":"^5.2.2"}}');

 /***/ }),
18 changes: 9 additions & 9 deletions package-lock.json

Some generated files are not rendered by default.