Commit
cleanup (#4)
mgrybyk authored Feb 5, 2024
1 parent 3a5a40c commit a13bcfd
Showing 10 changed files with 381 additions and 109 deletions.
4 changes: 4 additions & 0 deletions .github/workflows/report_test.yml
@@ -92,6 +92,8 @@ jobs:
gh_pages: 'gh-pages-dir'
report_dir: 'lighthouse-html'
list_dirs: ${{ github.ref == 'refs/heads/main' }}
branch_cleanup_enabled: ${{ github.ref == 'refs/heads/main' }}
max_reports: 9

- name: Local Chart Report (single)
if: ${{ always() && steps.allure.outputs.report_url }}
@@ -103,6 +105,7 @@
report_dir: 'test-results/Allure_Trend.csv'
list_dirs: ${{ github.ref == 'refs/heads/main' }}
report_type: csv
max_reports: 9

- name: Local Chart Report (multiple)
if: ${{ always() && steps.allure.outputs.report_url }}
@@ -114,6 +117,7 @@
report_dir: 'lighthouse-csv'
list_dirs: ${{ github.ref == 'refs/heads/main' }}
report_type: csv
max_reports: 9

- name: Git Commit and Push Action
uses: mgrybyk/git-commit-pull-push-action@v1
4 changes: 2 additions & 2 deletions README.md
@@ -122,6 +122,6 @@ concurrency:

- [thollander/actions-comment-pull-request](https://github.com/thollander/actions-comment-pull-request) for building a GitHub Action that comments on the linked PRs

## Upcoming features
## Planned features

- cleanup old reports
- cleanup `data.json` file per report. Raise an issue if you're interested!
16 changes: 16 additions & 0 deletions action.yml
@@ -24,12 +24,28 @@ inputs:
description: 'Write index.html to the Github Action folders. Might cause concurrency issues!'
required: false
default: false
list_dirs_branch:
description: 'Write index.html to the report branch folder. Might cause concurrency issues!'
required: false
default: true
branch_cleanup_enabled:
description: 'Cleanup reports from deleted branches. Might cause concurrency issues!'
required: false
default: false
max_reports:
description: 'Max reports to keep per branch/report. Set to 0 to disable cleanup.'
required: false
default: 100
outputs:
# report_url and report_history_url have the same url for CSV reports
report_url:
description: 'Published Report url'
report_history_url:
description: 'Published History list url'
run_unique_id:
description: 'Name of the folder containing the generated Allure Report'
report_path:
description: 'Path to the folder containing the generated Allure Report relative to the repo root'
runs:
using: 'node20'
main: 'dist/index.js'
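
Note: `max_reports` arrives as a string and is parsed with `parseInt` in index.ts below; cleanup only runs when the parsed value is greater than zero. A minimal TypeScript sketch of that gating, with a hypothetical raw input value:

// Sketch only; in the action the raw value comes from core.getInput('max_reports').
const rawMaxReports = '9'
const maxReports = parseInt(rawMaxReports, 10)

// '0' disables cleanup; a non-numeric value parses to NaN, and NaN > 0
// is false, so cleanup is skipped in that case as well.
if (maxReports > 0) {
  console.log(`keeping at most ${maxReports} runs per branch/report`)
}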
144 changes: 135 additions & 9 deletions dist/index.js
@@ -27751,6 +27751,9 @@ function httpRedirectFetch (fetchParams, response) {
// https://fetch.spec.whatwg.org/#cors-non-wildcard-request-header-name
request.headersList.delete('authorization')

// https://fetch.spec.whatwg.org/#authentication-entries
request.headersList.delete('proxy-authorization', true)

// "Cookie" and "Host" are forbidden request-headers, which undici doesn't implement.
request.headersList.delete('cookie')
request.headersList.delete('host')
@@ -38586,7 +38589,9 @@ __nccwpck_require__.a(module, async (__webpack_handle_async_dependencies__, __we
/* harmony import */ var _src_csvReport_js__WEBPACK_IMPORTED_MODULE_4__ = __nccwpck_require__(7925);
/* harmony import */ var _src_isFileExists_js__WEBPACK_IMPORTED_MODULE_5__ = __nccwpck_require__(2139);
/* harmony import */ var _src_writeFolderListing_js__WEBPACK_IMPORTED_MODULE_6__ = __nccwpck_require__(4362);
/* harmony import */ var _src_helpers_js__WEBPACK_IMPORTED_MODULE_7__ = __nccwpck_require__(3015);
/* harmony import */ var _src_helpers_js__WEBPACK_IMPORTED_MODULE_8__ = __nccwpck_require__(3015);
/* harmony import */ var _src_cleanup_js__WEBPACK_IMPORTED_MODULE_7__ = __nccwpck_require__(646);




@@ -38604,8 +38609,12 @@ try {
const reportId = _actions_core__WEBPACK_IMPORTED_MODULE_1__.getInput('report_id');
const reportType = _actions_core__WEBPACK_IMPORTED_MODULE_1__.getInput('report_type');
const listDirs = _actions_core__WEBPACK_IMPORTED_MODULE_1__.getInput('list_dirs') == 'true';
const branchName = (0,_src_helpers_js__WEBPACK_IMPORTED_MODULE_7__/* .getBranchName */ .L)(_actions_github__WEBPACK_IMPORTED_MODULE_2__.context.ref, _actions_github__WEBPACK_IMPORTED_MODULE_2__.context.payload.pull_request);
const reportBaseDir = path__WEBPACK_IMPORTED_MODULE_0__.join(ghPagesPath, baseDir, branchName, reportId);
const listDirsBranch = _actions_core__WEBPACK_IMPORTED_MODULE_1__.getInput('list_dirs_branch') == 'true';
const branchCleanupEnabled = _actions_core__WEBPACK_IMPORTED_MODULE_1__.getInput('branch_cleanup_enabled') == 'true';
const maxReports = parseInt(_actions_core__WEBPACK_IMPORTED_MODULE_1__.getInput('max_reports'), 10);
const branchName = (0,_src_helpers_js__WEBPACK_IMPORTED_MODULE_8__/* .getBranchName */ .L)(_actions_github__WEBPACK_IMPORTED_MODULE_2__.context.ref, _actions_github__WEBPACK_IMPORTED_MODULE_2__.context.payload.pull_request);
const ghPagesBaseDir = path__WEBPACK_IMPORTED_MODULE_0__.join(ghPagesPath, baseDir);
const reportBaseDir = path__WEBPACK_IMPORTED_MODULE_0__.join(ghPagesBaseDir, branchName, reportId);
/**
* `runId` is unique but won't change on job re-run
* `runNumber` is not unique and resets from time to time
@@ -38615,9 +38624,9 @@ try {
const reportDir = path__WEBPACK_IMPORTED_MODULE_0__.join(reportBaseDir, runUniqueId);
// urls
const ghPagesUrl = `https://${_actions_github__WEBPACK_IMPORTED_MODULE_2__.context.repo.owner}.github.io/${_actions_github__WEBPACK_IMPORTED_MODULE_2__.context.repo.repo}`;
const ghPagesBaseDir = `${ghPagesUrl}/${baseDir}/${branchName}/${reportId}`.replaceAll(' ', '%20');
const ghPagesReportDir = `${ghPagesBaseDir}/${runUniqueId}`.replaceAll(' ', '%20');
const reportUrl = reportType === 'csv' ? ghPagesBaseDir : ghPagesReportDir;
const ghPagesBaseUrl = `${ghPagesUrl}/${baseDir}/${branchName}/${reportId}`.replaceAll(' ', '%20');
const ghPagesReportUrl = `${ghPagesBaseUrl}/${runUniqueId}`.replaceAll(' ', '%20');
const reportUrl = reportType === 'csv' ? ghPagesBaseUrl : ghPagesReportUrl;
// log
console.log({
report_dir: sourceReportDir,
@@ -38631,6 +38640,9 @@ try {
reportDir,
report_url: reportUrl,
listDirs,
listDirsBranch,
branchCleanupEnabled,
maxReports,
});
if (!(await (0,_src_isFileExists_js__WEBPACK_IMPORTED_MODULE_5__/* .isFileExist */ .e)(ghPagesPath))) {
throw new Error("Folder with gh-pages branch doesn't exist: " + ghPagesPath);
@@ -38655,14 +38667,25 @@ try {
await (0,_src_writeFolderListing_js__WEBPACK_IMPORTED_MODULE_6__/* .writeFolderListing */ .l)(ghPagesPath, '.');
}
await (0,_src_writeFolderListing_js__WEBPACK_IMPORTED_MODULE_6__/* .writeFolderListing */ .l)(ghPagesPath, baseDir);
}
if (listDirsBranch) {
await (0,_src_writeFolderListing_js__WEBPACK_IMPORTED_MODULE_6__/* .writeFolderListing */ .l)(ghPagesPath, path__WEBPACK_IMPORTED_MODULE_0__.join(baseDir, branchName));
await (0,_src_writeFolderListing_js__WEBPACK_IMPORTED_MODULE_6__/* .writeFolderListing */ .l)(ghPagesPath, path__WEBPACK_IMPORTED_MODULE_0__.join(baseDir, branchName));
if (reportType === 'html') {
await (0,_src_writeFolderListing_js__WEBPACK_IMPORTED_MODULE_6__/* .writeFolderListing */ .l)(ghPagesPath, path__WEBPACK_IMPORTED_MODULE_0__.join(baseDir, branchName, reportId));
}
}
// outputs
_actions_core__WEBPACK_IMPORTED_MODULE_1__.setOutput('report_url', reportUrl);
_actions_core__WEBPACK_IMPORTED_MODULE_1__.setOutput('report_history_url', ghPagesBaseDir);
_actions_core__WEBPACK_IMPORTED_MODULE_1__.setOutput('report_history_url', ghPagesBaseUrl);
_actions_core__WEBPACK_IMPORTED_MODULE_1__.setOutput('run_unique_id', runUniqueId);
_actions_core__WEBPACK_IMPORTED_MODULE_1__.setOutput('report_path', reportDir);
if (branchCleanupEnabled) {
await (0,_src_cleanup_js__WEBPACK_IMPORTED_MODULE_7__/* .cleanupOutdatedBranches */ .B)(ghPagesBaseDir);
}
if (maxReports > 0) {
await (0,_src_cleanup_js__WEBPACK_IMPORTED_MODULE_7__/* .cleanupOutdatedReports */ .g)(ghPagesBaseDir, maxReports);
}
}
catch (error) {
_actions_core__WEBPACK_IMPORTED_MODULE_1__.setFailed(error.message);
@@ -38671,6 +38694,107 @@ catch (error) {
__webpack_async_result__();
} catch(e) { __webpack_async_result__(e); } }, 1);

/***/ }),

/***/ 646:
/***/ ((__unused_webpack_module, __webpack_exports__, __nccwpck_require__) => {


// EXPORTS
__nccwpck_require__.d(__webpack_exports__, {
"B": () => (/* binding */ cleanupOutdatedBranches),
"g": () => (/* binding */ cleanupOutdatedReports)
});

// EXTERNAL MODULE: external "path"
var external_path_ = __nccwpck_require__(1017);
// EXTERNAL MODULE: external "fs/promises"
var promises_ = __nccwpck_require__(3292);
;// CONCATENATED MODULE: external "child_process"
const external_child_process_namespaceObject = __WEBPACK_EXTERNAL_createRequire(import.meta.url)("child_process");
;// CONCATENATED MODULE: ./src/spawnProcess.ts

const logError = (err, output) => {
console.log(output.join(''));
return err;
};
const spawnProcess = async (command, args, cwd) => {
const childProcess = external_child_process_namespaceObject.spawn(command, args, { cwd });
return new Promise((resolve, reject) => {
const output = [];
const r1 = childProcess.stdout?.on('data', (d) => output.push(d.toString()));
const r2 = childProcess.stderr?.on('data', (d) => output.push(d.toString()));
const p1 = new Promise((resolve) => (r1 ? r1.once('close', resolve) : resolve()));
const p2 = new Promise((resolve) => (r2 ? r2.once('close', resolve) : resolve()));
childProcess.once('error', (err) => reject(logError(err, output)));
childProcess.once('exit', async (code) => {
r1?.removeAllListeners('data');
r2?.removeAllListeners('data');
await p1;
await p2;
return code === 0 ? resolve(output.join('')) : reject(logError(code, output));
});
});
};
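
The bundled spawnProcess helper above collects stdout and stderr into one buffer, resolving with the combined output on a zero exit code and rejecting (after logging the captured output) otherwise. A minimal usage sketch, assuming the un-bundled src/spawnProcess.ts module:

import { spawnProcess } from './src/spawnProcess.js'

// The same call cleanup.ts makes below: list remote branch heads.
// Resolves with the command's combined output; rejects on a non-zero exit.
const heads = await spawnProcess('git', ['ls-remote', '--heads'])
console.log(heads.trim().split('\n').length, 'remote branch refs')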

// EXTERNAL MODULE: ./src/helpers.ts
var helpers = __nccwpck_require__(3015);
;// CONCATENATED MODULE: ./src/cleanup.ts




const cleanupOutdatedBranches = async (ghPagesBaseDir) => {
try {
const prefix = 'refs/heads/';
const lsRemote = await spawnProcess('git', ['ls-remote', '--heads']);
const remoteBranches = lsRemote
.split('\n')
.filter((l) => l.includes(prefix))
.map((l) => (0,helpers/* normalizeBranchName */.i)(l.split(prefix)[1]));
const localBranches = (await promises_.readdir(ghPagesBaseDir, { withFileTypes: true })).filter((d) => d.isDirectory()).map((d) => d.name);
for (const localBranch of localBranches) {
if (!remoteBranches.includes(localBranch)) {
await promises_.rm(external_path_.join(ghPagesBaseDir, localBranch), { recursive: true, force: true });
}
}
}
catch (err) {
console.error('cleanup outdated branches failed.', err);
}
};
const cleanupOutdatedReports = async (ghPagesBaseDir, maxReports) => {
try {
const localBranches = (await promises_.readdir(ghPagesBaseDir, { withFileTypes: true })).filter((d) => d.isDirectory()).map((d) => d.name);
// branches
for (const localBranch of localBranches) {
const reports = (await promises_.readdir(external_path_.join(ghPagesBaseDir, localBranch), { withFileTypes: true }))
.filter((d) => d.isDirectory())
.map((d) => d.name);
// report per branch
for (const reportName of reports) {
const runs = (await promises_.readdir(external_path_.join(ghPagesBaseDir, localBranch, reportName), { withFileTypes: true }))
.filter((d) => d.isDirectory())
.map((d) => d.name);
// run per report
if (runs.length > maxReports) {
runs.sort();
while (runs.length > maxReports) {
await promises_.rm(external_path_.join(ghPagesBaseDir, localBranch, reportName, runs.shift()), {
recursive: true,
force: true,
});
}
}
}
}
}
catch (err) {
console.error('cleanup outdated reports failed.', err);
}
};
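
cleanupOutdatedReports sorts run folder names lexicographically and deletes from the front until maxReports remain, so the oldest runs are removed first. A toy illustration with hypothetical run-folder names (plain sort() is lexicographic, which matches chronological order only while the ids share the same number of digits):

// Hypothetical run folders under <baseDir>/<branch>/<reportId>/
const runs = ['7801234001_1', '7801234002_1', '7801234003_1']
const maxReports = 2

runs.sort() // lexicographic, mirroring cleanup.ts above
while (runs.length > maxReports) {
  console.log('would delete', runs.shift()) // would delete 7801234001_1
}
console.log('kept', runs) // [ '7801234002_1', '7801234003_1' ]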


/***/ }),

/***/ 7925:
@@ -38788,11 +38912,13 @@ const csvReport = async (sourceReportDir, reportBaseDir, reportId, meta) => {
/***/ ((__unused_webpack_module, __webpack_exports__, __nccwpck_require__) => {

/* harmony export */ __nccwpck_require__.d(__webpack_exports__, {
/* harmony export */ "L": () => (/* binding */ getBranchName)
/* harmony export */ "L": () => (/* binding */ getBranchName),
/* harmony export */ "i": () => (/* binding */ normalizeBranchName)
/* harmony export */ });
const normalizeBranchName = (branchName) => branchName.replaceAll('/', '_').replaceAll('.', '_');
const getBranchName = (gitRef, pull_request) => {
const branchName = pull_request ? pull_request.head.ref : gitRef.replace('refs/heads/', '');
return branchName.replaceAll('/', '_').replaceAll('.', '_');
return normalizeBranchName(branchName);
};
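
normalizeBranchName is now exported on its own so cleanupOutdatedBranches can map remote refs onto the sanitized folder names used on gh-pages. A quick sketch of the mapping, assuming the un-bundled src/helpers.ts:

import { getBranchName, normalizeBranchName } from './src/helpers.js'

console.log(normalizeBranchName('feature/v1.2-fix')) // feature_v1_2-fix

// Pushes strip the refs/heads/ prefix before normalizing; pull requests
// use the head ref passed as the second argument instead.
console.log(getBranchName('refs/heads/release/2.0', undefined)) // release_2_0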


30 changes: 25 additions & 5 deletions index.ts
@@ -6,6 +6,7 @@ import { csvReport } from './src/csvReport.js'
import { isFileExist } from './src/isFileExists.js'
import { shouldWriteRootHtml, writeFolderListing } from './src/writeFolderListing.js'
import { getBranchName } from './src/helpers.js'
import { cleanupOutdatedBranches, cleanupOutdatedReports } from './src/cleanup.js'

const baseDir = 'report-action'

@@ -18,8 +19,12 @@ try {
const reportId = core.getInput('report_id')
const reportType = core.getInput('report_type')
const listDirs = core.getInput('list_dirs') == 'true'
const listDirsBranch = core.getInput('list_dirs_branch') == 'true'
const branchCleanupEnabled = core.getInput('branch_cleanup_enabled') == 'true'
const maxReports = parseInt(core.getInput('max_reports'), 10)
const branchName = getBranchName(github.context.ref, github.context.payload.pull_request)
const reportBaseDir = path.join(ghPagesPath, baseDir, branchName, reportId)
const ghPagesBaseDir = path.join(ghPagesPath, baseDir)
const reportBaseDir = path.join(ghPagesBaseDir, branchName, reportId)

/**
* `runId` is unique but won't change on job re-run
@@ -31,10 +36,10 @@ try {

// urls
const ghPagesUrl = `https://${github.context.repo.owner}.github.io/${github.context.repo.repo}`
const ghPagesBaseDir = `${ghPagesUrl}/${baseDir}/${branchName}/${reportId}`.replaceAll(' ', '%20')
const ghPagesReportDir = `${ghPagesBaseDir}/${runUniqueId}`.replaceAll(' ', '%20')
const ghPagesBaseUrl = `${ghPagesUrl}/${baseDir}/${branchName}/${reportId}`.replaceAll(' ', '%20')
const ghPagesReportUrl = `${ghPagesBaseUrl}/${runUniqueId}`.replaceAll(' ', '%20')

const reportUrl = reportType === 'csv' ? ghPagesBaseDir : ghPagesReportDir
const reportUrl = reportType === 'csv' ? ghPagesBaseUrl : ghPagesReportUrl

// log
console.log({
@@ -49,6 +54,9 @@ try {
reportDir,
report_url: reportUrl,
listDirs,
listDirsBranch,
branchCleanupEnabled,
maxReports,
})

if (!(await isFileExist(ghPagesPath))) {
@@ -77,6 +85,9 @@ try {
await writeFolderListing(ghPagesPath, '.')
}
await writeFolderListing(ghPagesPath, baseDir)
}
if (listDirsBranch) {
await writeFolderListing(ghPagesPath, path.join(baseDir, branchName))
await writeFolderListing(ghPagesPath, path.join(baseDir, branchName))
if (reportType === 'html') {
await writeFolderListing(ghPagesPath, path.join(baseDir, branchName, reportId))
@@ -85,7 +96,16 @@ try {

// outputs
core.setOutput('report_url', reportUrl)
core.setOutput('report_history_url', ghPagesBaseDir)
core.setOutput('report_history_url', ghPagesBaseUrl)
core.setOutput('run_unique_id', runUniqueId)
core.setOutput('report_path', reportDir)

if (branchCleanupEnabled) {
await cleanupOutdatedBranches(ghPagesBaseDir)
}
if (maxReports > 0) {
await cleanupOutdatedReports(ghPagesBaseDir, maxReports)
}
} catch (error) {
core.setFailed(error.message)
}
