Skip to content

Commit

Permalink
Add wrangler pages project validate [directory] command (#3762)
Browse files Browse the repository at this point in the history
* Add `wrangler pages project validate [directory]` command

* Update packages/wrangler/src/pages/validate.tsx

Co-authored-by: John Fawcett <jfawcett@cloudflare.com>

* Add Pages team as codeowners of their own tests

---------

Co-authored-by: John Fawcett <jfawcett@cloudflare.com>
  • Loading branch information
GregBrimble and jrf0110 authored Aug 21, 2023
1 parent 3bba1eb commit 18dc7b5
Show file tree
Hide file tree
Showing 7 changed files with 213 additions and 106 deletions.
5 changes: 5 additions & 0 deletions .changeset/dull-pets-search.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,5 @@
---
"wrangler": patch
---

feat: Add internal `wrangler pages project validate [directory]` command which validates an asset directory
1 change: 1 addition & 0 deletions CODEOWNERS
Validating CODEOWNERS rules …
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
/packages/wrangler/pages/ @cloudflare/pages @cloudflare/wrangler
/packages/wrangler/src/api/pages/ @cloudflare/pages @cloudflare/wrangler
/packages/wrangler/src/pages/ @cloudflare/pages @cloudflare/wrangler
/packages/wrangler/src/__tests__/pages/ @cloudflare/pages @cloudflare/wrangler

/packages/wrangler/src/api/d1/ @cloudflare/d1 @cloudflare/wrangler
/packages/wrangler/src/d1/ @cloudflare/d1 @cloudflare/wrangler
Expand Down
54 changes: 54 additions & 0 deletions packages/wrangler/src/__tests__/pages/project-validate.test.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,54 @@
// /* eslint-disable no-shadow */
import { writeFileSync } from "node:fs";
import { endEventLoop } from "../helpers/end-event-loop";
import { mockConsoleMethods } from "../helpers/mock-console";
import { runInTempDir } from "../helpers/run-in-tmp";
import { runWrangler } from "../helpers/run-wrangler";

// Override the Pages asset limits with small values so the failure paths
// below can be exercised with tiny fixtures. jest.mock() is hoisted above
// the imports by the jest transform, so these overrides are in effect
// before the code under test reads the constants.
jest.mock("../../pages/constants", () => ({
// Keep every other export from the real module unchanged.
...jest.requireActual("../../pages/constants"),
// 1 MiB per-file cap (mocked; the real limit is larger).
MAX_ASSET_SIZE: 1 * 1024 * 1024,
// 10 files per deployment (mocked; the real limit is larger).
MAX_ASSET_COUNT: 10,
}));

// Tests for the hidden `wrangler pages project validate [directory]` command.
// The asset limits are mocked above (1 MiB per file, 10 files total) so the
// error branches can be hit without large fixtures.
describe("project validate", () => {
// Intercept console methods so assertions can inspect stdout/stderr.
const std = mockConsoleMethods();

// Run each test in a fresh temporary working directory so the fixture
// files written below don't leak between tests.
runInTempDir();

afterEach(async () => {
// Force a tick to ensure that all promises resolve
await endEventLoop();
});

// A directory within the mocked limits validates silently: no output,
// no error.
it("should exit cleanly for a good directory", async () => {
writeFileSync("logo.png", "foobar");

await runWrangler("pages project validate .");

expect(std.out).toMatchInlineSnapshot(`""`);
expect(std.err).toMatchInlineSnapshot(`""`);
});

// One byte over the mocked MAX_ASSET_SIZE must be rejected. "1.05 MB"
// is how pretty-bytes renders 1,048,576 bytes in decimal units.
it("should error for a large file", async () => {
writeFileSync("logo.png", Buffer.alloc(1 * 1024 * 1024 + 1));

await expect(() => runWrangler("pages project validate .")).rejects
.toThrowErrorMatchingInlineSnapshot(`
"Error: Pages only supports files up to 1.05 MB in size
logo.png is 1.05 MB in size"
`);
});

// One file over the mocked MAX_ASSET_COUNT (10 + 1 files) must be
// rejected with the deployment-size error.
it("should error for a large directory", async () => {
for (let i = 0; i < 10 + 1; i++) {
writeFileSync(`logo${i}.png`, Buffer.alloc(1));
}

await expect(() =>
runWrangler("pages project validate .")
).rejects.toThrowErrorMatchingInlineSnapshot(
`"Error: Pages only supports up to 10 files in a deployment. Ensure you have specified your build output directory correctly."`
);
});
});
5 changes: 4 additions & 1 deletion packages/wrangler/src/api/pages/deploy.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@ import {
} from "../../pages/functions/buildWorker";
import { validateRoutes } from "../../pages/functions/routes-validation";
import { upload } from "../../pages/upload";
import { validate } from "../../pages/validate";
import { createUploadWorkerBundleContents } from "./create-worker-bundle-contents";
import type { BundleResult } from "../../deployment-bundle/bundle";
import type { Project, Deployment } from "@cloudflare/types";
Expand Down Expand Up @@ -196,8 +197,10 @@ export async function deploy({
}
}

const fileMap = await validate({ directory });

const manifest = await upload({
directory,
fileMap,
accountId,
projectName,
skipCaching: skipCaching ?? false,
Expand Down
7 changes: 7 additions & 0 deletions packages/wrangler/src/pages/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ import * as Functions from "./functions";
import * as Projects from "./projects";
import * as Upload from "./upload";
import { CLEANUP } from "./utils";
import * as Validate from "./validate";
import type { CommonYargsArgv } from "../yargs-types";

process.on("SIGINT", () => {
Expand Down Expand Up @@ -69,6 +70,12 @@ export function pages(yargs: CommonYargsArgv) {
Projects.DeleteHandler
)
.command("upload [directory]", false, Upload.Options, Upload.Handler)
.command(
"validate [directory]",
false,
Validate.Options,
Validate.Handler
)
)
.command(
"deployment",
Expand Down
120 changes: 15 additions & 105 deletions packages/wrangler/src/pages/upload.tsx
Original file line number Diff line number Diff line change
@@ -1,32 +1,28 @@
import { mkdir, readdir, readFile, stat, writeFile } from "node:fs/promises";
import { dirname, join, relative, resolve, sep } from "node:path";
import { mkdir, readFile, writeFile } from "node:fs/promises";
import { dirname } from "node:path";
import { render, Text } from "ink";
import Spinner from "ink-spinner";
import { getType } from "mime";
import { Minimatch } from "minimatch";
import PQueue from "p-queue";
import prettyBytes from "pretty-bytes";
import React from "react";
import { fetchResult } from "../cfetch";
import { FatalError } from "../errors";
import isInteractive from "../is-interactive";
import { logger } from "../logger";
import {
MAX_ASSET_COUNT,
MAX_ASSET_SIZE,
BULK_UPLOAD_CONCURRENCY,
MAX_BUCKET_FILE_COUNT,
MAX_BUCKET_SIZE,
MAX_CHECK_MISSING_ATTEMPTS,
MAX_UPLOAD_ATTEMPTS,
} from "./constants";
import { hashFile } from "./hash";

import { validate } from "./validate";
import type {
CommonYargsArgv,
StrictYargsOptionsToInterface,
} from "../yargs-types";
import type { UploadPayloadFile } from "./types";
import type { FileContainer } from "./validate";

type UploadArgs = StrictYargsOptionsToInterface<typeof Options>;

Expand Down Expand Up @@ -62,8 +58,10 @@ export const Handler = async ({
throw new FatalError("No JWT given.", 1);
}

const fileMap = await validate({ directory });

const manifest = await upload({
directory,
fileMap,
jwt: process.env.CF_PAGES_UPLOAD_JWT,
skipCaching: skipCaching ?? false,
});
Expand All @@ -79,12 +77,12 @@ export const Handler = async ({
export const upload = async (
args:
| {
directory: string;
fileMap: Map<string, FileContainer>;
jwt: string;
skipCaching: boolean;
}
| {
directory: string;
fileMap: Map<string, FileContainer>;
accountId: string;
projectName: string;
skipCaching: boolean;
Expand All @@ -102,95 +100,7 @@ export const upload = async (
}
}

type FileContainer = {
path: string;
contentType: string;
sizeInBytes: number;
hash: string;
};

const IGNORE_LIST = [
"_worker.js",
"_redirects",
"_headers",
"_routes.json",
"functions",
"**/.DS_Store",
"**/node_modules",
"**/.git",
].map((pattern) => new Minimatch(pattern));

const directory = resolve(args.directory);

// TODO(future): Use this to more efficiently load files in and speed up uploading
// Limit memory to 1 GB unless more is specified
// let maxMemory = 1_000_000_000;
// if (process.env.NODE_OPTIONS && (process.env.NODE_OPTIONS.includes('--max-old-space-size=') || process.env.NODE_OPTIONS.includes('--max_old_space_size='))) {
// const parsed = parser(process.env.NODE_OPTIONS);
// maxMemory = (parsed['max-old-space-size'] ? parsed['max-old-space-size'] : parsed['max_old_space_size']) * 1000 * 1000; // Turn MB into bytes
// }

const walk = async (
dir: string,
fileMap: Map<string, FileContainer> = new Map(),
startingDir: string = dir
) => {
const files = await readdir(dir);

await Promise.all(
files.map(async (file) => {
const filepath = join(dir, file);
const relativeFilepath = relative(startingDir, filepath);
const filestat = await stat(filepath);

for (const minimatch of IGNORE_LIST) {
if (minimatch.match(relativeFilepath)) {
return;
}
}

if (filestat.isSymbolicLink()) {
return;
}

if (filestat.isDirectory()) {
fileMap = await walk(filepath, fileMap, startingDir);
} else {
const name = relativeFilepath.split(sep).join("/");

if (filestat.size > MAX_ASSET_SIZE) {
throw new FatalError(
`Error: Pages only supports files up to ${prettyBytes(
MAX_ASSET_SIZE
)} in size\n${name} is ${prettyBytes(filestat.size)} in size`,
1
);
}

// We don't want to hold the content in memory. We instead only want to read it when it's needed
fileMap.set(name, {
path: filepath,
contentType: getType(name) || "application/octet-stream",
sizeInBytes: filestat.size,
hash: hashFile(filepath),
});
}
})
);

return fileMap;
};

const fileMap = await walk(directory);

if (fileMap.size > MAX_ASSET_COUNT) {
throw new FatalError(
`Error: Pages only supports up to ${MAX_ASSET_COUNT.toLocaleString()} files in a deployment. Ensure you have specified your build output directory correctly.`,
1
);
}

const files = [...fileMap.values()];
const files = [...args.fileMap.values()];

let jwt = await fetchJwt();

Expand Down Expand Up @@ -274,8 +184,8 @@ export const upload = async (
bucketOffset++;
}

let counter = fileMap.size - sortedFiles.length;
const { rerender, unmount } = renderProgress(counter, fileMap.size);
let counter = args.fileMap.size - sortedFiles.length;
const { rerender, unmount } = renderProgress(counter, args.fileMap.size);

const queue = new PQueue({ concurrency: BULK_UPLOAD_CONCURRENCY });

Expand Down Expand Up @@ -333,7 +243,7 @@ export const upload = async (
doUpload().then(
() => {
counter += bucket.files.length;
rerender(counter, fileMap.size);
rerender(counter, args.fileMap.size);
},
(error) => {
return Promise.reject(
Expand All @@ -355,7 +265,7 @@ export const upload = async (

const uploadMs = Date.now() - start;

const skipped = fileMap.size - missingHashes.length;
const skipped = args.fileMap.size - missingHashes.length;
const skippedMessage = skipped > 0 ? `(${skipped} already uploaded) ` : "";

logger.log(
Expand Down Expand Up @@ -406,7 +316,7 @@ export const upload = async (
}

return Object.fromEntries(
[...fileMap.entries()].map(([fileName, file]) => [
[...args.fileMap.entries()].map(([fileName, file]) => [
`/${fileName}`,
file.hash,
])
Expand Down
Loading

0 comments on commit 18dc7b5

Please sign in to comment.