Skip to content

Commit

Permalink
[Reporting] Upgrade Puppeteer dependency to 10.2.0 (#115682) (#115864)
Browse files Browse the repository at this point in the history
* [Reporting] Upgrade Puppeteer dependency to 10.2.0

* Update x-pack/plugins/reporting/server/browsers/chromium/paths.ts

* self-edit

* Apply suggestions from code review

Co-authored-by: Michael Dokolin <dokmic@gmail.com>

* fix lint

Co-authored-by: Michael Dokolin <dokmic@gmail.com>

Co-authored-by: Michael Dokolin <dokmic@gmail.com>
  • Loading branch information
tsullivan and dokmic authored Oct 21, 2021
1 parent 7d6dc80 commit 50d97ca
Show file tree
Hide file tree
Showing 11 changed files with 165 additions and 96 deletions.
2 changes: 1 addition & 1 deletion package.json
Original file line number Diff line number Diff line change
Expand Up @@ -315,7 +315,7 @@
"prop-types": "^15.7.2",
"proxy-from-env": "1.0.0",
"puid": "1.0.7",
"puppeteer": "^8.0.0",
"puppeteer": "^10.2.0",
"query-string": "^6.13.2",
"random-word-slugs": "^0.0.5",
"raw-loader": "^3.1.0",
Expand Down
10 changes: 9 additions & 1 deletion x-pack/build_chromium/README.md
Original file line number Diff line number Diff line change
Expand Up @@ -99,7 +99,15 @@ are created in x64 using cross-compiling. CentOS is not supported for building C
## Artifacts

After the build completes, there will be a .zip file and a .md5 file in `~/chromium/chromium/src/out/headless`. These are named like so: `chromium-{first_7_of_SHA}-{platform}-{arch}`, for example: `chromium-4747cc2-linux-x64`.
The zip files and md5 files are copied to a staging bucket in GCP storage.
The zip files and md5 files are copied to a **staging** bucket in GCP storage.

To publish the built artifacts for bundling in Kibana, copy the files from the `headless_shell_staging` bucket to the `headless_shell` bucket.
```
gsutil cp gs://headless_shell_staging/chromium-d163fd7-linux_arm64.md5 gs://headless_shell/
gsutil cp gs://headless_shell_staging/chromium-d163fd7-linux_arm64.zip gs://headless_shell/
```

IMPORTANT: Do not replace builds in the `headless_shell` bucket that are referenced in an active Kibana branch. CI tests on that branch will fail since the archive checksum no longer matches the original version.

## Testing
Search the Puppeteer Github repo for known issues that could affect our use case, and make sure to test anywhere that is affected.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -265,7 +265,7 @@ export class HeadlessChromiumDriver {
}

// @ts-ignore
// FIXME: use `await page.target().createCDPSession();`
// FIXME: retrieve the client in open() and pass in the client
const client = this.page._client;

// We have to reach into the Chrome Devtools Protocol to apply headers as using
Expand Down Expand Up @@ -372,7 +372,6 @@ export class HeadlessChromiumDriver {

await client.send('Debugger.enable');
await client.send('Debugger.pause');
// @ts-ignore
const targetId = target._targetId;
const wsEndpoint = this.page.browser().wsEndpoint();
const { port } = parseUrl(wsEndpoint);
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,18 +25,6 @@ import { HeadlessChromiumDriver } from '../driver';
import { args } from './args';
import { getMetrics, Metrics } from './metrics';

// Puppeteer type definitions do not match the documentation.
// See https://pptr.dev/#?product=Puppeteer&version=v8.0.0&show=api-puppeteerlaunchoptions
interface ReportingLaunchOptions extends puppeteer.LaunchOptions {
userDataDir?: string;
ignoreHTTPSErrors?: boolean;
args?: string[];
}

declare module 'puppeteer' {
function launch(options: ReportingLaunchOptions): Promise<puppeteer.Browser>;
}

type BrowserConfig = CaptureConfig['browser']['chromium'];

export class HeadlessChromiumDriverFactory {
Expand Down
30 changes: 17 additions & 13 deletions x-pack/plugins/reporting/server/browsers/chromium/paths.ts
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,7 @@ interface PackageInfo {
archiveChecksum: string;
binaryChecksum: string;
binaryRelativePath: string;
revision: number;
}

enum BaseUrl {
Expand All @@ -32,8 +33,6 @@ interface CommonPackageInfo extends PackageInfo {
}

export class ChromiumArchivePaths {
public readonly revision = '856583';

public readonly packages: Array<CustomPackageInfo | CommonPackageInfo> = [
{
platform: 'darwin',
Expand All @@ -43,34 +42,38 @@ export class ChromiumArchivePaths {
binaryChecksum: 'dfcd6e007214175997663c50c8d871ea',
binaryRelativePath: 'headless_shell-darwin_x64/headless_shell',
location: 'custom',
revision: 856583,
},
{
platform: 'linux',
architecture: 'x64',
archiveFilename: 'chromium-d163fd7-linux_x64.zip',
archiveChecksum: 'fba0a240d409228a3494aef415c300fc',
binaryChecksum: '99cfab472d516038b94ef86649e52871',
archiveFilename: 'chromium-70f5d88-linux_x64.zip',
archiveChecksum: '7b1c9c2fb613444fbdf004a3b75a58df',
binaryChecksum: '82e80f9727a88ba3836ce230134bd126',
binaryRelativePath: 'headless_shell-linux_x64/headless_shell',
location: 'custom',
revision: 901912,
},
{
platform: 'linux',
architecture: 'arm64',
archiveFilename: 'chromium-d163fd7-linux_arm64.zip',
archiveChecksum: '29834735bc2f0e0d9134c33bc0580fb6',
binaryChecksum: '13baccf2e5c8385cb9d9588db6a9e2c2',
archiveFilename: 'chromium-70f5d88-linux_arm64.zip',
archiveChecksum: '4a0217cfe7da86ad1e3d0e9e5895ddb5',
binaryChecksum: '29e943fbee6d87a217abd6cb6747058e',
binaryRelativePath: 'headless_shell-linux_arm64/headless_shell',
location: 'custom',
revision: 901912,
},
{
platform: 'win32',
architecture: 'x64',
archiveFilename: 'chrome-win.zip',
archiveChecksum: '64999a384bfb6c96c50c4cb6810dbc05',
binaryChecksum: '13b8bbb4a12f9036b8cc3b57b3a71fec',
archiveChecksum: '861bb8b7b8406a6934a87d3cbbce61d9',
binaryChecksum: 'ffa0949471e1b9a57bc8f8633fca9c7b',
binaryRelativePath: 'chrome-win\\chrome.exe',
location: 'common',
archivePath: 'Win',
revision: 901912,
},
];

Expand All @@ -82,7 +85,8 @@ export class ChromiumArchivePaths {
}

public resolvePath(p: PackageInfo) {
return path.resolve(this.archivesPath, p.archiveFilename);
// adding architecture to the path allows it to download two binaries that have the same name, but are different architecture
return path.resolve(this.archivesPath, p.architecture, p.archiveFilename);
}

public getAllArchiveFilenames(): string[] {
Expand All @@ -91,9 +95,9 @@ export class ChromiumArchivePaths {

public getDownloadUrl(p: CustomPackageInfo | CommonPackageInfo) {
if (p.location === 'common') {
return `${BaseUrl.common}/${p.archivePath}/${this.revision}/${p.archiveFilename}`;
return `${BaseUrl.common}/${p.archivePath}/${p.revision}/${p.archiveFilename}`;
}
return BaseUrl.custom + '/' + p.archiveFilename;
return BaseUrl.custom + '/' + p.archiveFilename; // revision is not used for URL if package is a custom build
}

public getBinaryPath(p: PackageInfo) {
Expand Down
2 changes: 2 additions & 0 deletions x-pack/plugins/reporting/server/browsers/download/download.ts
Original file line number Diff line number Diff line change
Expand Up @@ -49,6 +49,8 @@ export async function download(
resolve();
});
});
} catch (err) {
throw new Error(`Unable to download ${url}: ${err}`);
} finally {
closeSync(handle);
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@
* 2.0.
*/

import path from 'path';
import mockFs from 'mock-fs';
import { existsSync, readdirSync } from 'fs';
import { chromium } from '../chromium';
Expand All @@ -27,16 +28,16 @@ describe('ensureBrowserDownloaded', () => {
} as unknown as typeof logger;

(md5 as jest.MockedFunction<typeof md5>).mockImplementation(
async (path) =>
async (packagePath) =>
chromium.paths.packages.find(
(packageInfo) => chromium.paths.resolvePath(packageInfo) === path
(packageInfo) => chromium.paths.resolvePath(packageInfo) === packagePath
)?.archiveChecksum ?? 'some-md5'
);

(download as jest.MockedFunction<typeof download>).mockImplementation(
async (_url, path) =>
async (_url, packagePath) =>
chromium.paths.packages.find(
(packageInfo) => chromium.paths.resolvePath(packageInfo) === path
(packageInfo) => chromium.paths.resolvePath(packageInfo) === packagePath
)?.archiveChecksum ?? 'some-md5'
);

Expand Down Expand Up @@ -93,11 +94,19 @@ describe('ensureBrowserDownloaded', () => {
await ensureBrowserDownloaded(logger);

expect(download).not.toHaveBeenCalled();
expect(readdirSync(chromium.paths.archivesPath)).toEqual(
expect.arrayContaining(
chromium.paths.packages.map(({ archiveFilename }) => archiveFilename)
)
);
const paths = [
readdirSync(path.resolve(chromium.paths.archivesPath + '/x64')),
readdirSync(path.resolve(chromium.paths.archivesPath + '/arm64')),
];

expect(paths).toEqual([
expect.arrayContaining([
'chrome-win.zip',
'chromium-70f5d88-linux_x64.zip',
'chromium-d163fd7-darwin_x64.zip',
]),
expect.arrayContaining(['chromium-70f5d88-linux_arm64.zip']),
]);
});

it('should download again if md5 hash different', async () => {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -15,7 +15,6 @@ import { download } from './download';
/**
* Check for the downloaded archive of each requested browser type and
* download them if they are missing or their checksum is invalid
* @return {Promise<undefined>}
*/
export async function ensureBrowserDownloaded(logger: GenericLevelLogger) {
await ensureDownloaded([chromium], logger);
Expand All @@ -25,41 +24,64 @@ export async function ensureBrowserDownloaded(logger: GenericLevelLogger) {
* Clears the unexpected files in the browsers archivesPath
* and ensures that all packages/archives are downloaded and
* that their checksums match the declared value
* @param {BrowserSpec} browsers
* @return {Promise<undefined>}
*/
async function ensureDownloaded(browsers: BrowserDownload[], logger: GenericLevelLogger) {
await Promise.all(
browsers.map(async ({ paths: pSet }) => {
(
await del(`${pSet.archivesPath}/**/*`, {
force: true,
ignore: pSet.getAllArchiveFilenames(),
})
).forEach((path) => logger.warning(`Deleting unexpected file ${path}`));
const removedFiles = await del(`${pSet.archivesPath}/**/*`, {
force: true,
onlyFiles: true,
ignore: pSet.getAllArchiveFilenames(),
});

removedFiles.forEach((path) => {
logger.warning(`Deleting unexpected file ${path}`);
});

const invalidChecksums: string[] = [];
await Promise.all(
pSet.packages.map(async (p) => {
const { archiveFilename, archiveChecksum } = p;
if (archiveFilename && archiveChecksum) {
const path = pSet.resolvePath(p);
const pathExists = existsSync(path);

let foundChecksum: string;
try {
foundChecksum = await md5(path).catch();
} catch {
foundChecksum = 'MISSING';
}

if (existsSync(path) && (await md5(path)) === archiveChecksum) {
logger.debug(`Browser archive exists in ${path}`);
if (pathExists && foundChecksum === archiveChecksum) {
logger.debug(`Browser archive for ${p.platform}/${p.architecture} found in ${path} `);
return;
}

if (!pathExists) {
logger.warning(
`Browser archive for ${p.platform}/${p.architecture} not found in ${path}.`
);
}
if (foundChecksum !== archiveChecksum) {
logger.warning(
`Browser archive checksum for ${p.platform}/${p.architecture} ` +
`is ${foundChecksum} but ${archiveChecksum} was expected.`
);
}

const url = pSet.getDownloadUrl(p);
try {
const downloadedChecksum = await download(url, path, logger);
if (downloadedChecksum !== archiveChecksum) {
logger.warning(
`Invalid checksum for ${p.platform}/${p.architecture}: ` +
`expected ${archiveChecksum} got ${downloadedChecksum}`
);
invalidChecksums.push(`${url} => ${path}`);
}
} catch (err) {
const message = new Error(`Failed to download ${url}`);
logger.error(err);
throw message;
throw new Error(`Failed to download ${url}: ${err}`);
}
}
})
Expand Down
5 changes: 3 additions & 2 deletions x-pack/plugins/reporting/server/browsers/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -28,7 +28,8 @@ export interface BrowserDownload {
}

export const initializeBrowserDriverFactory = async (core: ReportingCore, logger: LevelLogger) => {
const { binaryPath$ } = installBrowser(logger);
const chromiumLogger = logger.clone(['chromium']);
const { binaryPath$ } = installBrowser(chromiumLogger);
const binaryPath = await binaryPath$.pipe(first()).toPromise();
return chromium.createDriverFactory(core, binaryPath, logger);
return chromium.createDriverFactory(core, binaryPath, chromiumLogger);
};
23 changes: 18 additions & 5 deletions x-pack/plugins/reporting/server/browsers/install.ts
Original file line number Diff line number Diff line change
Expand Up @@ -39,15 +39,28 @@ export function installBrowser(
const binaryChecksum = await md5(binaryPath).catch(() => '');

if (binaryChecksum !== pkg.binaryChecksum) {
await ensureBrowserDownloaded(logger);
await del(chromiumPath);
logger.warning(
`Found browser binary checksum for ${pkg.platform}/${pkg.architecture} ` +
`is ${binaryChecksum} but ${pkg.binaryChecksum} was expected. Re-installing...`
);
try {
await del(chromiumPath);
} catch (err) {
logger.error(err);
}

const archive = path.join(paths.archivesPath, pkg.archiveFilename);
logger.info(`Extracting [${archive}] to [${chromiumPath}]`);
await extract(archive, chromiumPath);
try {
await ensureBrowserDownloaded(logger);
const archive = path.join(paths.archivesPath, pkg.architecture, pkg.archiveFilename);
logger.info(`Extracting [${archive}] to [${chromiumPath}]`);
await extract(archive, chromiumPath);
} catch (err) {
logger.error(err);
}
}

logger.info(`Browser executable: ${binaryPath}`);

binaryPath$.next(binaryPath); // subscribers wait for download and extract to complete
};

Expand Down
Loading

0 comments on commit 50d97ca

Please sign in to comment.