diff --git a/CHANGELOG.md b/CHANGELOG.md index 893bb7f..6397aaf 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,17 @@ # test-runner - Changelog +## 3.1.0 - 2023-11-24 + +* Format millisecond/second time values in reports as numbers. + * For JSON: `"time": "12030 ms"` is now `"time": 12030`. + * For JUnit XML: `value="12.03 s"` is now `value="12.03"`. +* Add elapsed time to test run status logging. +* Add `rerunExecutionTime` value to main test reports. + * This is the sum of all the re-run `RunTime`s. Similar to `testExecutionTime`. +* Remove spaces in class time CSV report. +* Add output of class time report to JSON. +* Optimise missing test re-run request payload when requesting all tests in a class. + ## 3.0.0 - 2023-10-23 * **BREAKING**: `AsyncTestRunner` now does not re-throw errors. Instead it returns a `TestRunnerResult` type which includes all test results retrieved and optional error. diff --git a/package.json b/package.json index 40e1724..64a253a 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@apexdevtools/test-runner", - "version": "3.0.0", + "version": "3.1.0", "description": "Apex parallel test runner with reliability goodness", "author": { "name": "Apex Dev Tools Team", @@ -37,7 +37,7 @@ "url": "https://github.com/apex-dev-tools/test-runner/issues" }, "homepage": "https://github.com/apex-dev-tools/test-runner#readme", - "packageManager": "pnpm@8.2.0", + "packageManager": "pnpm@8.9.2", "dependencies": { "@apexdevtools/sfdx-auth-helper": "^2.1.0", "@salesforce/apex-node": "^1.6.2", diff --git a/src/command/Testall.ts b/src/command/Testall.ts index 31d3707..c4d07dc 100644 --- a/src/command/Testall.ts +++ b/src/command/Testall.ts @@ -194,20 +194,18 @@ export class Testall { // Filter expected by actual results to find residual // Try again if something was missed - const missingTests = await this.resolveMissingTests( - expectedTestsPromise, - store.tests - ); + const expectedTests = await expectedTestsPromise; + const 
missingTests = this.resolveMissingTests(expectedTests, store.tests); if (missingTests.size > 0) { this._logger.logTestallRerun(missingTests); const testItems: TestItem[] = Array.from( missingTests, - ([className, methods]) => ({ - className, - testMethods: Array.from(methods), - }) + ([className, methods]) => + methods.size === expectedTests.get(className)?.size + ? { className } // run all methods + : { className, testMethods: Array.from(methods) } ); await this.asyncRun( @@ -219,11 +217,10 @@ export class Testall { } } - private async resolveMissingTests( - expectedTestsPromise: Promise>>, + private resolveMissingTests( + expectedTests: Map>, results: Map - ): Promise>> { - const expectedTests = await expectedTestsPromise; + ): Map> { const missingTests = new Map>(); expectedTests.forEach((methods, className) => { @@ -314,7 +311,7 @@ export class Testall { return this.convertToSyncResult(result, timestamp); } catch (err) { - this._logger.logMessage( + this._logger.logErrorMessage( `${getTestName(currentResult)} re-run failed. 
${this.getErrorMsg(err)}` ); } @@ -383,7 +380,7 @@ export class Testall { store.saveCoverage(coverage); } catch (err) { - this._logger.logMessage( + this._logger.logErrorMessage( `Failed to get coverage: ${this.getErrorMsg(err)}` ); } diff --git a/src/log/BaseLogger.ts b/src/log/BaseLogger.ts index 7de1fe1..8b78297 100644 --- a/src/log/BaseLogger.ts +++ b/src/log/BaseLogger.ts @@ -3,6 +3,7 @@ */ import path from 'path'; +import os from 'os'; import { TestallOptions, getMaxErrorsForReRun } from '../command/Testall'; import { ApexTestResult, BaseTestResult } from '../model/ApexTestResult'; import { ApexTestRunResult } from '../model/ApexTestRunResult'; @@ -26,20 +27,20 @@ export abstract class BaseLogger implements Logger { logError(error: MaybeError): void { if (error instanceof Error) { if (error.name == 'ALREADY_IN_PROCESS') { - this.logMessage( + this.logErrorMessage( "One or more of the tests is already queued to run, they can't be requeued" ); } else { - this.logMessage(error.message); + this.logErrorMessage(error.message); if (error.stack !== undefined) - this.logMessage('Error stack: ' + error.stack); + this.logErrorMessage('Error stack: ' + error.stack); } } else { - this.logMessage('Error: ' + JSON.stringify(error)); + this.logErrorMessage('Error: ' + JSON.stringify(error)); } if (error.data !== undefined) { - this.logMessage('Additional data: ' + JSON.stringify(error.data)); + this.logErrorMessage('Additional data: ' + JSON.stringify(error.data)); } } @@ -47,6 +48,10 @@ export abstract class BaseLogger implements Logger { this.logMessage('Warning: ' + message); } + logErrorMessage(message: string): void { + this.logMessage(message); + } + logOutputFile(filepath: string, contents: string): void { // if filepath is absolute it will be used instead // given resolve() right to left logic @@ -106,10 +111,12 @@ export abstract class BaseLogger implements Logger { // i.e its failed with a different message, show what happened if (rerunMsg && firstMsg) { if 
(rerunMsg !== firstMsg) { - this.logMessage(` [Before] ${firstMsg}`); - this.logMessage(` [After] ${rerunMsg}`); + this.logErrorMessage(`${os.EOL} [Before] ${firstMsg}`); + this.logErrorMessage(` [After] ${rerunMsg}${os.EOL}`); } else { - this.logMessage(` [Before and After] ${rerunMsg}`); + this.logErrorMessage( + `${os.EOL} [Before and After] ${rerunMsg}${os.EOL}` + ); } } } @@ -134,7 +141,11 @@ export abstract class BaseLogger implements Logger { ); } - logStatus(testRunResult: ApexTestRunResult, tests: ApexTestResult[]): void { + logStatus( + testRunResult: ApexTestRunResult, + tests: ApexTestResult[], + elapsedTime: string + ): void { const status = testRunResult.Status; const outcomes = groupByOutcome(tests); const completed = tests.length; @@ -144,7 +155,7 @@ export abstract class BaseLogger implements Logger { const complete = total > 0 ? Math.floor((completed * 100) / total) : 0; this.logMessage( - `[${status}] Passed: ${passed} | Failed: ${failed} | ${completed}/${total} Complete (${complete}%)` + `${elapsedTime} [${status}] Passed: ${passed} | Failed: ${failed} | ${completed}/${total} Complete (${complete}%)` ); } @@ -159,16 +170,22 @@ export abstract class BaseLogger implements Logger { Object.entries(failedResultsByClassId).forEach(([, results]) => { const tests = results.slice(0, 2); + const hasMore = results.length > 2; - this.logMessage(` Failing Tests: ${getClassName(tests[0])}`); + this.logErrorMessage( + `${os.EOL} Failing tests in '${getClassName(tests[0])}':` + ); - tests.forEach(t => { + tests.forEach((t, i) => { const msg = t.Message ? ` - ${t.Message}` : ''; - this.logMessage(` * ${t.MethodName}${msg}`); + const suffix = !hasMore && i == tests.length - 1 ? 
os.EOL : ''; + this.logErrorMessage(` * ${t.MethodName}${msg}${suffix}`); }); - results.length > 2 && - this.logMessage(` (and ${results.length - 2} more...)`); + hasMore && + this.logErrorMessage( + ` (and ${results.length - 2} more...)${os.EOL}` + ); }); } diff --git a/src/log/Logger.ts b/src/log/Logger.ts index fef6efb..1455ff3 100644 --- a/src/log/Logger.ts +++ b/src/log/Logger.ts @@ -13,6 +13,7 @@ export interface Logger { // For general use logError(error: any): void; + logErrorMessage(message: any): void; logWarning(message: any): void; logMessage(message: any): void; @@ -34,7 +35,11 @@ export interface Logger { // Test runner logRunStarted(testRunId: string): void; logNoProgress(testRunId: string): void; - logStatus(status: ApexTestRunResult, tests: ApexTestResult[]): void; + logStatus( + status: ApexTestRunResult, + tests: ApexTestResult[], + elapsedTime: string + ): void; logTestFailures(newResults: ApexTestResult[]): void; // Test job cancelling diff --git a/src/results/ClassTimeGenerator.ts b/src/results/ClassTimeGenerator.ts index 88c3b07..d381bcf 100644 --- a/src/results/ClassTimeGenerator.ts +++ b/src/results/ClassTimeGenerator.ts @@ -8,7 +8,7 @@ import { SfDate } from 'jsforce'; import path from 'path'; /* - * Create a report (CSV) of summary stats for each test class. The report can be useful in finding long running + * Create a report (CSV/JSON) of summary stats for each test class. The report can be useful in finding long running * test which are delaying the completion of a test run. 
*/ export class ClassTimeGenerator implements OutputGenerator { @@ -52,13 +52,33 @@ export class ClassTimeGenerator implements OutputGenerator { // Report results as CSV const lines: string[] = []; classRanges.forEach((v, k) => { - lines.push(`${k}, ${v[0]}, ${v[1]}, ${v[2]}`); + lines.push(`${k},${v[0]},${v[1]},${v[2]}`); }); logger.logOutputFile( path.join(outputDirBase, fileName + '-time.csv'), - 'ClassName, StartTime, EndTime, TotalTime\n' + + 'ClassName,StartTime,EndTime,TotalTime\n' + `# ${this.instanceUrl} ${this.orgId} ${this.username}\n` + lines.join('\n') ); + + // Report results as json + const json: { + className: string; + startTime: number; + endTime: number; + totalTime: number; + }[] = []; + classRanges.forEach((v, k) => { + json.push({ + className: k, + startTime: v[0], + endTime: v[1], + totalTime: v[2], + }); + }); + logger.logOutputFile( + path.join(outputDirBase, fileName + '-time.json'), + JSON.stringify(json, undefined, 2) + ); } } diff --git a/src/results/ReportGenerator.ts b/src/results/ReportGenerator.ts index 08b4341..7892b19 100644 --- a/src/results/ReportGenerator.ts +++ b/src/results/ReportGenerator.ts @@ -15,7 +15,7 @@ import path from 'path'; import { Logger } from '../log/Logger'; import { ApexTestResult } from '../model/ApexTestResult'; import { ApexTestRunResult } from '../model/ApexTestRunResult'; -import { OutputGenerator, TestRunSummary } from './OutputGenerator'; +import { OutputGenerator, TestRerun, TestRunSummary } from './OutputGenerator'; export class ReportGenerator implements OutputGenerator { private instanceUrl: string; @@ -41,9 +41,9 @@ export class ReportGenerator implements OutputGenerator { fileName: string, runSummary: TestRunSummary ): void { - const { startTime, testResults, runResult } = runSummary; + const { startTime, testResults, runResult, reruns } = runSummary; const results = testResults as ExtendedApexTestResult[]; - const summary = this.summary(startTime, results, runResult); + const summary = 
this.summary(startTime, results, runResult, reruns); logger.logOutputFile( path.join(outputDirBase, fileName + '.xml'), this.generateJunit(summary, results) @@ -57,7 +57,8 @@ export class ReportGenerator implements OutputGenerator { summary( startTime: Date, testResults: ExtendedApexTestResult[], - runResults: ApexTestRunResult + runResults: ApexTestRunResult, + reruns: TestRerun[] ): SummaryData { // combine test and method names for fullname testResults.forEach(test => { @@ -83,18 +84,14 @@ export class ReportGenerator implements OutputGenerator { const failures = testResults.filter( test => test.Outcome !== 'Pass' && test.Outcome !== 'Skip' ); - const totalFailed = failures.length; + const failing = failures.length; const skips = testResults.filter(test => test.Outcome === 'Skip'); - const totalSkipped = skips.length; - const total = testResults.length; - const outcome = totalFailed > 0 ? 'Failed' : 'Passed'; - const totalPassed = total - totalFailed - totalSkipped; - const passRate = `${((totalPassed / (total - totalSkipped)) * 100).toFixed( - 2 - )}%`; - const failRate = `${((totalFailed / (total - totalSkipped)) * 100).toFixed( - 2 - )}%`; + const skipped = skips.length; + const testsRan = testResults.length; + const outcome = failing > 0 ? 
'Failed' : 'Passed'; + const passing = testsRan - failing - skipped; + const passRate = `${((passing / (testsRan - skipped)) * 100).toFixed(2)}%`; + const failRate = `${((failing / (testsRan - skipped)) * 100).toFixed(2)}%`; // time of cmd invocation const commandTime = moment().diff(moment(startTime), 'millisecond', true); @@ -110,19 +107,24 @@ export class ReportGenerator implements OutputGenerator { (result, test) => result + test.RunTime, 0 ); + const rerunExecutionTime = reruns.reduce( + (result, rerun) => result + rerun.after.RunTime, + 0 + ); return { - outcome: outcome, - testsRan: total, - passing: totalPassed, - failing: totalFailed, - skipped: totalSkipped, - passRate: passRate, - failRate: failRate, - testStartTime: testStartTime, - testExecutionTime: testExecutionTime, - testTotalTime: testTotalTime, - commandTime: commandTime, + outcome, + testsRan, + passing, + failing, + skipped, + passRate, + failRate, + testStartTime, + testExecutionTime, + testTotalTime, + commandTime, + rerunExecutionTime, hostname: this.instanceUrl, orgId: this.orgId, username: this.username, @@ -163,13 +165,16 @@ export class ReportGenerator implements OutputGenerator { )}"/>\n`; junit += ` \n`; + )}"/>\n`; junit += ` \n`; + )}"/>\n`; junit += ` \n`; + )}"/>\n`; + junit += ` \n`; junit += ` \n`; junit += ` \n`; junit += ` \n`; @@ -209,16 +214,17 @@ export class ReportGenerator implements OutputGenerator { json += ' "summary": {\n'; json += ` "outcome": "${summary.outcome}",\n`; - json += ` "testsRan": "${summary.testsRan}",\n`; - json += ` "passing": "${summary.passing}",\n`; - json += ` "failing": "${summary.failing}",\n`; - json += ` "skipped": "${summary.skipped}",\n`; + json += ` "testsRan": ${summary.testsRan},\n`; + json += ` "passing": ${summary.passing},\n`; + json += ` "failing": ${summary.failing},\n`; + json += ` "skipped": ${summary.skipped},\n`; json += ` "passRate": "${summary.passRate}",\n`; json += ` "failRate": "${summary.failRate}",\n`; json += ` 
"testStartTime": "${summary.testStartTime.format('lll')}",\n`; - json += ` "testExecutionTime": "${summary.testExecutionTime} ms",\n`; - json += ` "testTotalTime": "${summary.testTotalTime} ms",\n`; - json += ` "commandTime": "${summary.commandTime} ms",\n`; + json += ` "testExecutionTime": ${summary.testExecutionTime},\n`; + json += ` "testTotalTime": ${summary.testTotalTime},\n`; + json += ` "commandTime": ${summary.commandTime},\n`; + json += ` "rerunExecutionTime": ${summary.rerunExecutionTime},\n`; json += ` "hostname": "${summary.hostname}",\n`; json += ` "orgId": "${summary.orgId}",\n`; json += ` "username": "${summary.username}",\n`; @@ -286,6 +292,7 @@ interface SummaryData { testExecutionTime: number; // ms testTotalTime: number; // ms commandTime: number; //ms + rerunExecutionTime: number; hostname: string; orgId: string; username: string; diff --git a/src/runner/TestRunner.ts b/src/runner/TestRunner.ts index ff06cff..47a1f7c 100644 --- a/src/runner/TestRunner.ts +++ b/src/runner/TestRunner.ts @@ -11,6 +11,7 @@ import { AsyncTestArrayConfiguration, TestItem, } from '@salesforce/apex-node'; +import moment, { Moment } from 'moment'; import { Logger } from '../log/Logger'; import { ApexTestRunResult, @@ -204,6 +205,7 @@ export class AsyncTestRunner implements TestRunner { ): Promise { let seenTests: Set = new Set(); let lastResult: TestRunnerResult | undefined; + const start = moment(); const testRunStatus: Pollable = { pollDelay: getStatusPollInterval(this._options).milliseconds, @@ -214,7 +216,7 @@ export class AsyncTestRunner implements TestRunner { const run = await this.testRunResult(testRunId); const tests = await this.testResults(testRunId); - await this.updateProgress(run, tests); + await this.updateProgress(run, tests, start); seenTests = this.notifyNewResults(tests, seenTests); return (lastResult = { @@ -271,9 +273,11 @@ export class AsyncTestRunner implements TestRunner { private async updateProgress( testRunResult: ApexTestRunResult, - results: 
ApexTestResult[] + results: ApexTestResult[], + startTime: Moment ): Promise { - this._logger.logStatus(testRunResult, results); + const time = moment.utc(moment().diff(startTime)).format('HH:mm:ss'); + this._logger.logStatus(testRunResult, results, time); this._stats = this._stats.update(results.length); if (this._logger.verbose) { diff --git a/test/Setup.ts b/test/Setup.ts index 66d84fc..3ae4730 100644 --- a/test/Setup.ts +++ b/test/Setup.ts @@ -40,6 +40,8 @@ export const defaultTestInfo = { export const isoDateFormat = '[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}.[0-9]{3}Z'; +export const timeFormat = '[0-9]{1,2}:[0-9]{2}:[0-9]{2}'; + export const timeoutMs = Duration.minutes(120).milliseconds; export function mockSetTimeout(sandbox: SinonSandbox, testTimeoutMs = 50) { @@ -82,7 +84,7 @@ export async function createMockConnection( } export function logRegex(entry: string): RegExp { - return new RegExp(`^${isoDateFormat} - ${entry}$`, 'gm'); + return new RegExp(`^${isoDateFormat} -\\s*${entry}\\s*$`, 'gm'); } export function createQueryHelper( diff --git a/test/log/BaseLogger.spec.ts b/test/log/BaseLogger.spec.ts index 404b2b2..723c978 100644 --- a/test/log/BaseLogger.spec.ts +++ b/test/log/BaseLogger.spec.ts @@ -29,7 +29,8 @@ describe('BaseLogger', () => { const logger = new CapturingLogger(); logger.logStatus( mockTestRunResult as ApexTestRunResult, - mockTestResults as ApexTestResult[] + mockTestResults as ApexTestResult[], + '' ); expect(logger.entries.length).to.equal(1); diff --git a/test/report/ClassTimeGenerator.spec.ts b/test/report/ClassTimeGenerator.spec.ts index 51b1b9e..95f7109 100644 --- a/test/report/ClassTimeGenerator.spec.ts +++ b/test/report/ClassTimeGenerator.spec.ts @@ -8,7 +8,7 @@ import { CapturingLogger } from '../../src/log/CapturingLogger'; import { ClassTimeGenerator } from '../../src/results/ClassTimeGenerator'; describe('ClassTimeGenerator', () => { - it('should create csv output', () => { + it('should create csv and json 
output', () => { const now = Date.now(); const generator = new ClassTimeGenerator( 'instanceUrl', @@ -102,16 +102,18 @@ describe('ClassTimeGenerator', () => { reruns: [], }); - expect(logger.files.length).to.equal(1); + expect(logger.files.length).to.equal(2); expect(logger.files[0][0]).to.equal('/test-output-time.csv'); expect(logger.files[0][1].length).not.to.equal(0); + expect(logger.files[1][0]).to.equal('/test-output-time.json'); + expect(logger.files[1][1].length).not.to.equal(0); const lines = logger.files[0][1].split('\n'); expect(lines.length).to.equal(5); - expect(lines[0]).to.equal('ClassName, StartTime, EndTime, TotalTime'); + expect(lines[0]).to.equal('ClassName,StartTime,EndTime,TotalTime'); expect(lines[1]).to.equal('# instanceUrl orgId username'); - expect(lines[2]).to.match(/Class1, [0-9]*, [0-9]*, 10/); - expect(lines[3]).to.match(/Class3, [0-9]*, [0-9]*, 40/); - expect(lines[4]).to.match(/Class2, [0-9]*, [0-9]*, 20/); + expect(lines[2]).to.match(/Class1,[0-9]*,[0-9]*,10/); + expect(lines[3]).to.match(/Class3,[0-9]*,[0-9]*,40/); + expect(lines[4]).to.match(/Class2,[0-9]*,[0-9]*,20/); }); }); diff --git a/test/runner/TestRunner.spec.ts b/test/runner/TestRunner.spec.ts index a83a88d..4dde624 100644 --- a/test/runner/TestRunner.spec.ts +++ b/test/runner/TestRunner.spec.ts @@ -32,6 +32,7 @@ import { setupMultipleQueryApexTestResults, setupQueryApexTestResults, testRunId, + timeFormat, } from '../Setup'; describe('TestRunner', () => { @@ -118,12 +119,12 @@ describe('TestRunner', () => { ); expect(logger.entries[1]).to.match( logRegex( - '\\[Completed\\] Passed: 1 \\| Failed: 1 \\| 2/2 Complete \\(100%\\)' + `${timeFormat} \\[Completed\\] Passed: 1 \\| Failed: 1 \\| 2/2 Complete \\(100%\\)` ) ); - expect(logger.entries[2]).to.match(logRegex('\\s*Failing Tests: Class3')); + expect(logger.entries[2]).to.match(logRegex("Failing tests in 'Class3':")); expect(logger.entries[3]).to.match( - logRegex('\\s*\\* Method2 - Exception: Test Failed') + logRegex('\\* 
Method2 - Exception: Test Failed') ); }); @@ -151,12 +152,12 @@ describe('TestRunner', () => { ); expect(logger.entries[1]).to.match( logRegex( - '\\[Completed\\] Passed: 1 \\| Failed: 1 \\| 2/2 Complete \\(100%\\)' + `${timeFormat} \\[Completed\\] Passed: 1 \\| Failed: 1 \\| 2/2 Complete \\(100%\\)` ) ); - expect(logger.entries[2]).to.match(logRegex('\\s*Failing Tests: Class3')); + expect(logger.entries[2]).to.match(logRegex("Failing tests in 'Class3':")); expect(logger.entries[3]).to.match( - logRegex('\\s*\\* Method2 - Exception: Test Failed') + logRegex('\\* Method2 - Exception: Test Failed') ); }); @@ -190,12 +191,12 @@ describe('TestRunner', () => { ); expect(logger.entries[1]).to.match( logRegex( - '\\[Failed\\] Passed: 0 \\| Failed: 1 \\| 1/2 Complete \\(50%\\)' + `${timeFormat} \\[Failed\\] Passed: 0 \\| Failed: 1 \\| 1/2 Complete \\(50%\\)` ) ); - expect(logger.entries[2]).to.match(logRegex('\\s*Failing Tests: Class3')); + expect(logger.entries[2]).to.match(logRegex("Failing tests in 'Class3':")); expect(logger.entries[3]).to.match( - logRegex('\\s*\\* Method2 - Exception: Test Failed') + logRegex('\\* Method2 - Exception: Test Failed') ); }); @@ -227,7 +228,7 @@ describe('TestRunner', () => { ); expect(logger.entries[1]).to.match( logRegex( - '\\[Aborted\\] Passed: 0 \\| Failed: 0 \\| 0/2 Complete \\(0%\\)' + `${timeFormat} \\[Aborted\\] Passed: 0 \\| Failed: 0 \\| 0/2 Complete \\(0%\\)` ) ); }); @@ -331,21 +332,21 @@ describe('TestRunner', () => { ); expect(logger.entries[1]).to.match( logRegex( - '\\[Processing\\] Passed: 1 \\| Failed: 0 \\| 1/2 Complete \\(50%\\)' + `${timeFormat} \\[Processing\\] Passed: 1 \\| Failed: 0 \\| 1/2 Complete \\(50%\\)` ) ); expect(logger.entries[2]).to.match( logRegex( - '\\[Processing\\] Passed: 1 \\| Failed: 1 \\| 2/2 Complete \\(100%\\)' + `${timeFormat} \\[Processing\\] Passed: 1 \\| Failed: 1 \\| 2/2 Complete \\(100%\\)` ) ); - expect(logger.entries[3]).to.match(logRegex('\\s*Failing Tests: Class3')); + 
expect(logger.entries[3]).to.match(logRegex("Failing tests in 'Class3':")); expect(logger.entries[4]).to.match( - logRegex('\\s*\\* Method2 - Exception: Test Failed') + logRegex('\\* Method2 - Exception: Test Failed') ); expect(logger.entries[5]).to.match( logRegex( - '\\[Completed\\] Passed: 1 \\| Failed: 1 \\| 2/2 Complete \\(100%\\)' + `${timeFormat} \\[Completed\\] Passed: 1 \\| Failed: 1 \\| 2/2 Complete \\(100%\\)` ) ); }); @@ -424,16 +425,16 @@ describe('TestRunner', () => { ); expect(logger.entries[1]).to.match( logRegex( - '\\[Processing\\] Passed: 1 \\| Failed: 1 \\| 2/2 Complete \\(100%\\)' + `${timeFormat} \\[Processing\\] Passed: 1 \\| Failed: 1 \\| 2/2 Complete \\(100%\\)` ) ); - expect(logger.entries[2]).to.match(logRegex('\\s*Failing Tests: Class3')); + expect(logger.entries[2]).to.match(logRegex("Failing tests in 'Class3':")); expect(logger.entries[3]).to.match( - logRegex('\\s*\\* Method2 - Exception: Test Failed') + logRegex('\\* Method2 - Exception: Test Failed') ); expect(logger.entries[4]).to.match( logRegex( - '\\[Processing\\] Passed: 1 \\| Failed: 1 \\| 2/2 Complete \\(100%\\)' + `${timeFormat} \\[Processing\\] Passed: 1 \\| Failed: 1 \\| 2/2 Complete \\(100%\\)` ) ); expect(logger.entries[5]).to.match( @@ -446,7 +447,7 @@ describe('TestRunner', () => { ); expect(logger.entries[7]).to.match( logRegex( - '\\[Completed\\] Passed: 1 \\| Failed: 1 \\| 2/2 Complete \\(100%\\)' + `${timeFormat} \\[Completed\\] Passed: 1 \\| Failed: 1 \\| 2/2 Complete \\(100%\\)` ) ); }); @@ -522,26 +523,26 @@ describe('TestRunner', () => { ); expect(logger.entries[1]).to.match( logRegex( - '\\[Processing\\] Passed: 1 \\| Failed: 0 \\| 1/3 Complete \\(33%\\)' + `${timeFormat} \\[Processing\\] Passed: 1 \\| Failed: 0 \\| 1/3 Complete \\(33%\\)` ) ); expect(logger.entries[2]).to.match( logRegex( - '\\[Processing\\] Passed: 1 \\| Failed: 1 \\| 2/3 Complete \\(66%\\)' + `${timeFormat} \\[Processing\\] Passed: 1 \\| Failed: 1 \\| 2/3 Complete \\(66%\\)` ) ); - 
expect(logger.entries[3]).to.match(logRegex('\\s*Failing Tests: Class3')); + expect(logger.entries[3]).to.match(logRegex("Failing tests in 'Class3':")); expect(logger.entries[4]).to.match( - logRegex('\\s*\\* Method2 - Exception: Test Failed') + logRegex('\\* Method2 - Exception: Test Failed') ); expect(logger.entries[5]).to.match( logRegex( - '\\[Processing\\] Passed: 2 \\| Failed: 1 \\| 3/3 Complete \\(100%\\)' + `${timeFormat} \\[Processing\\] Passed: 2 \\| Failed: 1 \\| 3/3 Complete \\(100%\\)` ) ); expect(logger.entries[6]).to.match( logRegex( - '\\[Completed\\] Passed: 2 \\| Failed: 1 \\| 3/3 Complete \\(100%\\)' + `${timeFormat} \\[Completed\\] Passed: 2 \\| Failed: 1 \\| 3/3 Complete \\(100%\\)` ) ); });