diff --git a/.changeset/eleven-planes-promise.md b/.changeset/eleven-planes-promise.md
new file mode 100644
index 000000000000..4d80d454dcc7
--- /dev/null
+++ b/.changeset/eleven-planes-promise.md
@@ -0,0 +1,7 @@
+---
+"wrangler": minor
+---
+
+feat: update the `--experimental-dev-env` (shorthand: `--x-dev-env`) flag to be on by default
+
+If you experience any issues, you can disable the flag with `--x-dev-env=false`. Please also let us know by opening an issue at https://github.com/cloudflare/workers-sdk/issues/new/choose.
diff --git a/packages/wrangler/e2e/__snapshots__/dev.test.ts.snap b/packages/wrangler/e2e/__snapshots__/dev.test.ts.snap
index e669f5743c19..12f6b973fa7f 100644
--- a/packages/wrangler/e2e/__snapshots__/dev.test.ts.snap
+++ b/packages/wrangler/e2e/__snapshots__/dev.test.ts.snap
@@ -1,17 +1,17 @@
// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html
-exports[`basic js dev: 'wrangler dev --remote --x-dev-env' > can modify worker during wrangler dev --remote --x-dev-env 1`] = `"Hello World!"`;
+exports[`basic js dev: 'wrangler dev --no-x-dev-env' > can modify worker during wrangler dev --no-x-dev-env 1`] = `"Hello World!"`;
-exports[`basic js dev: 'wrangler dev --remote --x-dev-env' > can modify worker during wrangler dev --remote --x-dev-env 2`] = `"Updated Worker! value"`;
+exports[`basic js dev: 'wrangler dev --no-x-dev-env' > can modify worker during wrangler dev --no-x-dev-env 2`] = `"Updated Worker! value"`;
-exports[`basic js dev: 'wrangler dev --remote' > can modify worker during wrangler dev --remote 1`] = `"Hello World!"`;
+exports[`basic js dev: 'wrangler dev --remote --no-x-dev-env' > can modify worker during wrangler dev --remote --no-x-dev-env 1`] = `"Hello World!"`;
-exports[`basic js dev: 'wrangler dev --remote' > can modify worker during wrangler dev --remote 2`] = `"Updated Worker! value"`;
+exports[`basic js dev: 'wrangler dev --remote --no-x-dev-env' > can modify worker during wrangler dev --remote --no-x-dev-env 2`] = `"Updated Worker! value"`;
-exports[`basic js dev: 'wrangler dev --x-dev-env' > can modify worker during wrangler dev --x-dev-env 1`] = `"Hello World!"`;
+exports[`basic js dev: 'wrangler dev --remote --x-dev-env' > can modify worker during wrangler dev --remote --x-dev-env 1`] = `"Hello World!"`;
-exports[`basic js dev: 'wrangler dev --x-dev-env' > can modify worker during wrangler dev --x-dev-env 2`] = `"Updated Worker! value"`;
+exports[`basic js dev: 'wrangler dev --remote --x-dev-env' > can modify worker during wrangler dev --remote --x-dev-env 2`] = `"Updated Worker! value"`;
-exports[`basic js dev: 'wrangler dev' > can modify worker during wrangler dev 1`] = `"Hello World!"`;
+exports[`basic js dev: 'wrangler dev --x-dev-env' > can modify worker during wrangler dev --x-dev-env 1`] = `"Hello World!"`;
-exports[`basic js dev: 'wrangler dev' > can modify worker during wrangler dev 2`] = `"Updated Worker! value"`;
+exports[`basic js dev: 'wrangler dev --x-dev-env' > can modify worker during wrangler dev --x-dev-env 2`] = `"Updated Worker! value"`;
diff --git a/packages/wrangler/e2e/__snapshots__/pages-dev.test.ts.snap b/packages/wrangler/e2e/__snapshots__/pages-dev.test.ts.snap
index 0d5afdc7a27d..5316b41c55fb 100644
--- a/packages/wrangler/e2e/__snapshots__/pages-dev.test.ts.snap
+++ b/packages/wrangler/e2e/__snapshots__/pages-dev.test.ts.snap
@@ -1,12 +1,13 @@
// Vitest Snapshot v1, https://vitest.dev/guide/snapshot.html
-exports[`Pages 'wrangler pages dev --x-dev-env' > should merge (with override) \`wrangler.toml\` configuration with configuration provided via the command line, with command line args taking precedence 1`] = `
+exports[`Pages 'wrangler pages dev --no-x-dev-env' > should merge (with override) \`wrangler.toml\` configuration with configuration provided via the command line, with command line args taking precedence 1`] = `
"✨ Compiled Worker successfully
▲ [WARNING] WARNING: You have Durable Object bindings that are not defined locally in the worker being developed.
Be aware that changes to the data stored in these Durable Objects will be permanent and affect the live instances.
Remote Durable Objects that are affected:
- {"name":"DO_BINDING_1_TOML","class_name":"DO_1_TOML","script_name":"DO_SCRIPT_1_TOML"}
- {"name":"DO_BINDING_2_TOML","class_name":"DO_2_TOML","script_name":"DO_SCRIPT_2_TOML"}
+▲ [WARNING] This worker is bound to live services: SERVICE_BINDING_1_TOML (SERVICE_NAME_1_TOML), SERVICE_BINDING_2_TOML (SERVICE_NAME_2_TOML)
Your worker has access to the following bindings:
- Durable Objects:
- DO_BINDING_1_TOML: NEW_DO_1 (defined in NEW_DO_SCRIPT_1)
@@ -34,19 +35,18 @@ Your worker has access to the following bindings:
- VAR1: "(hidden)"
- VAR2: "VAR_2_TOML"
- VAR3: "(hidden)"
-▲ [WARNING] This worker is bound to live services: SERVICE_BINDING_1_TOML (NEW_SERVICE_NAME_1), SERVICE_BINDING_3_TOML (SERVICE_NAME_3_ARGS), SERVICE_BINDING_2_TOML (SERVICE_NAME_2_TOML)
-▲ [WARNING] Using Workers AI always accesses your Cloudflare account in order to run AI models, and so will incur usage charges even in local development.
+▲ [WARNING] ⎔ Support for service bindings in local mode is experimental and may change.
+▲ [WARNING] ⎔ Support for external Durable Objects in local mode is experimental and may change.
"
`;
-exports[`Pages 'wrangler pages dev' > should merge (with override) \`wrangler.toml\` configuration with configuration provided via the command line, with command line args taking precedence 1`] = `
+exports[`Pages 'wrangler pages dev --x-dev-env' > should merge (with override) \`wrangler.toml\` configuration with configuration provided via the command line, with command line args taking precedence 1`] = `
"✨ Compiled Worker successfully
▲ [WARNING] WARNING: You have Durable Object bindings that are not defined locally in the worker being developed.
Be aware that changes to the data stored in these Durable Objects will be permanent and affect the live instances.
Remote Durable Objects that are affected:
- {"name":"DO_BINDING_1_TOML","class_name":"DO_1_TOML","script_name":"DO_SCRIPT_1_TOML"}
- {"name":"DO_BINDING_2_TOML","class_name":"DO_2_TOML","script_name":"DO_SCRIPT_2_TOML"}
-▲ [WARNING] This worker is bound to live services: SERVICE_BINDING_1_TOML (SERVICE_NAME_1_TOML), SERVICE_BINDING_2_TOML (SERVICE_NAME_2_TOML)
Your worker has access to the following bindings:
- Durable Objects:
- DO_BINDING_1_TOML: NEW_DO_1 (defined in NEW_DO_SCRIPT_1)
@@ -74,7 +74,7 @@ Your worker has access to the following bindings:
- VAR1: "(hidden)"
- VAR2: "VAR_2_TOML"
- VAR3: "(hidden)"
-▲ [WARNING] ⎔ Support for service bindings in local mode is experimental and may change.
-▲ [WARNING] ⎔ Support for external Durable Objects in local mode is experimental and may change.
+▲ [WARNING] This worker is bound to live services: SERVICE_BINDING_1_TOML (NEW_SERVICE_NAME_1), SERVICE_BINDING_3_TOML (SERVICE_NAME_3_ARGS), SERVICE_BINDING_2_TOML (SERVICE_NAME_2_TOML)
+▲ [WARNING] Using Workers AI always accesses your Cloudflare account in order to run AI models, and so will incur usage charges even in local development.
"
`;
diff --git a/packages/wrangler/e2e/dev-with-resources.test.ts b/packages/wrangler/e2e/dev-with-resources.test.ts
index 63dc60c79383..78fcfab31b88 100644
--- a/packages/wrangler/e2e/dev-with-resources.test.ts
+++ b/packages/wrangler/e2e/dev-with-resources.test.ts
@@ -9,8 +9,8 @@ import { WranglerE2ETestHelper } from "./helpers/e2e-wrangler-test";
import { generateResourceName } from "./helpers/generate-resource-name";
const RUNTIMES = [
- { flags: "", runtime: "local" },
- { flags: "--remote", runtime: "remote" },
+ { flags: "--no-x-dev-env", runtime: "local" },
+ { flags: "--remote --no-x-dev-env", runtime: "remote" },
{ flags: "--x-dev-env", runtime: "local" },
{ flags: "--remote --x-dev-env", runtime: "remote" },
] as const;
diff --git a/packages/wrangler/e2e/dev.test.ts b/packages/wrangler/e2e/dev.test.ts
index 1fac8f645612..d086b032ed5c 100644
--- a/packages/wrangler/e2e/dev.test.ts
+++ b/packages/wrangler/e2e/dev.test.ts
@@ -57,8 +57,8 @@ it("can import URL from 'url' in node_compat mode", async () => {
});
describe.each([
- { cmd: "wrangler dev" },
- { cmd: "wrangler dev --remote" },
+ { cmd: "wrangler dev --no-x-dev-env" },
+ { cmd: "wrangler dev --remote --no-x-dev-env" },
{ cmd: "wrangler dev --x-dev-env" },
{ cmd: "wrangler dev --remote --x-dev-env" },
])("basic js dev: $cmd", ({ cmd }) => {
@@ -112,10 +112,8 @@ describe.each([
// Skipping remote python tests because they consistently flake with timeouts
// Unskip once remote dev with python workers is more stable
describe.each([
- { cmd: "wrangler dev" },
- // { cmd: "wrangler dev --remote" },
+ { cmd: "wrangler dev --no-x-dev-env" },
{ cmd: "wrangler dev --x-dev-env" },
- // { cmd: "wrangler dev --remote --x-dev-env" },
])("basic python dev: $cmd", { timeout: 90_000 }, ({ cmd }) => {
it(`can modify entrypoint during ${cmd}`, async () => {
const helper = new WranglerE2ETestHelper();
@@ -224,10 +222,10 @@ describe.each([
});
describe.each([
- { cmd: "wrangler dev" },
- { cmd: "wrangler dev --x-dev-env" },
- { cmd: "wrangler dev --x-registry" },
+ { cmd: "wrangler dev --x-dev-env --no-x-registry" },
+ { cmd: "wrangler dev --no-x-dev-env --no-x-registry" },
{ cmd: "wrangler dev --x-dev-env --x-registry" },
+ { cmd: "wrangler dev --no-x-dev-env --x-registry" },
])("dev registry $cmd", ({ cmd }) => {
let a: string;
let b: string;
@@ -882,105 +880,106 @@ describe("custom builds", () => {
});
describe("watch mode", () => {
- describe.each([{ cmd: "wrangler dev" }, { cmd: "wrangler dev --x-dev-env" }])(
- "Workers watch mode: $cmd",
- ({ cmd }) => {
- it(`supports modifying the Worker script during dev session`, async () => {
- const helper = new WranglerE2ETestHelper();
- await helper.seed({
- "wrangler.toml": dedent`
+ describe.each([
+ { cmd: "wrangler dev" },
+ { cmd: "wrangler dev --no-x-dev-env" },
+ ])("Workers watch mode: $cmd", ({ cmd }) => {
+ it(`supports modifying the Worker script during dev session`, async () => {
+ const helper = new WranglerE2ETestHelper();
+ await helper.seed({
+ "wrangler.toml": dedent`
name = "${workerName}"
main = "src/workerA.ts"
compatibility_date = "2023-01-01"
`,
- "src/workerA.ts": dedent`
+ "src/workerA.ts": dedent`
export default {
fetch(request) {
return new Response("Hello from user Worker A!")
}
}`,
- });
+ });
- const worker = helper.runLongLived(cmd);
- const { url } = await worker.waitForReady();
+ const worker = helper.runLongLived(cmd);
+ const { url } = await worker.waitForReady();
- let text = await fetchText(url);
- expect(text).toBe("Hello from user Worker A!");
+ let text = await fetchText(url);
+ expect(text).toBe("Hello from user Worker A!");
- await helper.seed({
- "wrangler.toml": dedent`
+ await helper.seed({
+ "wrangler.toml": dedent`
name = "${workerName}"
main = "src/workerB.ts"
compatibility_date = "2023-01-01"
`,
- "src/workerB.ts": dedent`
+ "src/workerB.ts": dedent`
export default {
fetch(request) {
return new Response("Hello from user Worker B!")
}
}`,
- });
-
- await worker.waitForReload();
- text = await retry(
- (s) => s != "Hello from user Worker B!",
- async () => {
- return await fetchText(url);
- }
- );
- expect(text).toBe("Hello from user Worker B!");
});
- }
- );
- describe.each([{ cmd: "wrangler dev" }, { cmd: "wrangler dev --x-dev-env" }])(
- "Workers + Assets watch mode: $cmd",
- ({ cmd }) => {
- it(`supports modifying existing assets during dev session and errors when invalid routes are added`, async () => {
- const helper = new WranglerE2ETestHelper();
- await helper.seed({
- "wrangler.toml": dedent`
+ await worker.waitForReload();
+ text = await retry(
+ (s) => s != "Hello from user Worker B!",
+ async () => {
+ return await fetchText(url);
+ }
+ );
+ expect(text).toBe("Hello from user Worker B!");
+ });
+ });
+
+ describe.each([
+ { cmd: "wrangler dev" },
+ { cmd: "wrangler dev --no-x-dev-env" },
+ ])("Workers + Assets watch mode: $cmd", ({ cmd }) => {
+ it(`supports modifying existing assets during dev session and errors when invalid routes are added`, async () => {
+ const helper = new WranglerE2ETestHelper();
+ await helper.seed({
+ "wrangler.toml": dedent`
name = "${workerName}"
compatibility_date = "2023-01-01"
[assets]
directory = "./public"
`,
- "public/index.html": dedent`
+ "public/index.html": dedent`
Hello Workers + Assets
`,
- });
+ });
- const worker = helper.runLongLived(cmd);
- const { url } = await worker.waitForReady();
+ const worker = helper.runLongLived(cmd);
+ const { url } = await worker.waitForReady();
- let { response, cachedETags } = await fetchWithETag(
- `${url}/index.html`,
- {}
- );
- const originalETag = response.headers.get("etag");
-				expect(await response.text()).toBe("Hello Workers + Assets");
+ let { response, cachedETags } = await fetchWithETag(
+ `${url}/index.html`,
+ {}
+ );
+ const originalETag = response.headers.get("etag");
+			expect(await response.text()).toBe("Hello Workers + Assets");
- await helper.seed({
- "public/index.html": dedent`
+ await helper.seed({
+ "public/index.html": dedent`
Hello Updated Workers + Assets
`,
- });
+ });
- await worker.waitForReload();
- ({ response, cachedETags } = await retry(
- (s) => s.response.status !== 200,
- async () => {
- return await fetchWithETag(`${url}/index.html`, cachedETags);
- }
- ));
- expect(await response.text()).toBe(
-					"Hello Updated Workers + Assets"
- );
- // expect a new eTag back because the content for this path has changed
- expect(response.headers.get("etag")).not.toBe(originalETag);
+ await worker.waitForReload();
+ ({ response, cachedETags } = await retry(
+ (s) => s.response.status !== 200,
+ async () => {
+ return await fetchWithETag(`${url}/index.html`, cachedETags);
+ }
+ ));
+ expect(await response.text()).toBe(
+				"Hello Updated Workers + Assets"
+ );
+ // expect a new eTag back because the content for this path has changed
+ expect(response.headers.get("etag")).not.toBe(originalETag);
- // changes to routes should error while in watch mode
- await helper.seed({
- "wrangler.toml": dedent`
+ // changes to routes should error while in watch mode
+ await helper.seed({
+ "wrangler.toml": dedent`
name = "${workerName}"
compatibility_date = "2023-01-01"
route = "example.com/path/*"
@@ -988,263 +987,263 @@ describe("watch mode", () => {
[assets]
directory = "./public"
`,
- });
- await worker.readUntil(/UserError: Invalid Routes:/);
});
+ await worker.readUntil(/Invalid Routes:/);
+ });
- it(`supports adding new assets during dev session`, async () => {
- const helper = new WranglerE2ETestHelper();
- await helper.seed({
- "wrangler.toml": dedent`
+ it(`supports adding new assets during dev session`, async () => {
+ const helper = new WranglerE2ETestHelper();
+ await helper.seed({
+ "wrangler.toml": dedent`
name = "${workerName}"
compatibility_date = "2023-01-01"
[assets]
directory = "./public"
`,
- "public/index.html": dedent`
+ "public/index.html": dedent`
Hello Workers + Assets
`,
- });
+ });
- const worker = helper.runLongLived(cmd);
- const { url } = await worker.waitForReady();
- let { response, cachedETags } = await fetchWithETag(
- `${url}/index.html`,
- {}
- );
+ const worker = helper.runLongLived(cmd);
+ const { url } = await worker.waitForReady();
+ let { response, cachedETags } = await fetchWithETag(
+ `${url}/index.html`,
+ {}
+ );
-				expect(await response.text()).toBe("Hello Workers + Assets");
+			expect(await response.text()).toBe("Hello Workers + Assets");
- await helper.seed({
- "public/about.html": dedent`About Workers + Assets`,
- "public/workers/index.html": dedent`Cloudflare Workers!`,
- });
+ await helper.seed({
+ "public/about.html": dedent`About Workers + Assets`,
+ "public/workers/index.html": dedent`Cloudflare Workers!`,
+ });
- await worker.waitForReload();
+ await worker.waitForReload();
- // re-calculating the asset manifest / reverse assets map might not be
- // done at this point, so retry until they are available
- ({ response, cachedETags } = await retry(
- (s) => s.response.status !== 200,
- async () => {
- return await fetchWithETag(`${url}/about.html`, cachedETags);
- }
- ));
- expect(await response.text()).toBe("About Workers + Assets");
-
- ({ response, cachedETags } = await fetchWithETag(
- `${url}/workers/index.html`,
- cachedETags
- ));
- expect(await response.text()).toBe("Cloudflare Workers!");
-
- // expect 304 for the original asset as the content has not changed
- ({ response, cachedETags } = await fetchWithETag(
- `${url}/index.html`,
- cachedETags
- ));
- expect(response.status).toBe(304);
- });
+ // re-calculating the asset manifest / reverse assets map might not be
+ // done at this point, so retry until they are available
+ ({ response, cachedETags } = await retry(
+ (s) => s.response.status !== 200,
+ async () => {
+ return await fetchWithETag(`${url}/about.html`, cachedETags);
+ }
+ ));
+ expect(await response.text()).toBe("About Workers + Assets");
- it(`supports removing existing assets during dev session`, async () => {
- const helper = new WranglerE2ETestHelper();
- await helper.seed({
- "wrangler.toml": dedent`
+ ({ response, cachedETags } = await fetchWithETag(
+ `${url}/workers/index.html`,
+ cachedETags
+ ));
+ expect(await response.text()).toBe("Cloudflare Workers!");
+
+ // expect 304 for the original asset as the content has not changed
+ ({ response, cachedETags } = await fetchWithETag(
+ `${url}/index.html`,
+ cachedETags
+ ));
+ expect(response.status).toBe(304);
+ });
+
+ it(`supports removing existing assets during dev session`, async () => {
+ const helper = new WranglerE2ETestHelper();
+ await helper.seed({
+ "wrangler.toml": dedent`
name = "${workerName}"
compatibility_date = "2023-01-01"
[assets]
directory = "./public"
`,
- "public/index.html": dedent`
+ "public/index.html": dedent`
Hello Workers + Assets
`,
- "public/about.html": dedent`About Workers + Assets`,
- "public/workers/index.html": dedent`Cloudflare Workers!`,
- });
-
- const worker = helper.runLongLived(cmd);
- const { url } = await worker.waitForReady();
- let { response, cachedETags } = await fetchWithETag(
- `${url}/index.html`,
- {}
- );
-			expect(await response.text()).toBe("Hello Workers + Assets");
-
- ({ response, cachedETags } = await fetchWithETag(
- `${url}/about.html`,
- cachedETags
- ));
- expect(await response.text()).toBe("About Workers + Assets");
- ({ response, cachedETags } = await fetchWithETag(
- `${url}/workers/index.html`,
- cachedETags
- ));
- expect(await response.text()).toBe("Cloudflare Workers!");
-
- await helper.removeFiles(["public/index.html"]);
-
- await worker.waitForReload();
-
- // re-calculating the asset manifest / reverse assets map might not be
- // done at this point, so retry until they are available
- ({ response, cachedETags } = await retry(
- (s) => s.response.status !== 404,
- async () => {
- return await fetchWithETag(`${url}/index.html`, cachedETags);
- }
- ));
- expect(response.status).toBe(404);
+ "public/about.html": dedent`About Workers + Assets`,
+ "public/workers/index.html": dedent`Cloudflare Workers!`,
});
- it(`supports modifying the assets directory in wrangler.toml during dev session`, async () => {
- const helper = new WranglerE2ETestHelper();
- await helper.seed({
- "wrangler.toml": dedent`
+ const worker = helper.runLongLived(cmd);
+ const { url } = await worker.waitForReady();
+ let { response, cachedETags } = await fetchWithETag(
+ `${url}/index.html`,
+ {}
+ );
+			expect(await response.text()).toBe("Hello Workers + Assets");
+
+ ({ response, cachedETags } = await fetchWithETag(
+ `${url}/about.html`,
+ cachedETags
+ ));
+ expect(await response.text()).toBe("About Workers + Assets");
+ ({ response, cachedETags } = await fetchWithETag(
+ `${url}/workers/index.html`,
+ cachedETags
+ ));
+ expect(await response.text()).toBe("Cloudflare Workers!");
+
+ await helper.removeFiles(["public/index.html"]);
+
+ await worker.waitForReload();
+
+ // re-calculating the asset manifest / reverse assets map might not be
+ // done at this point, so retry until they are available
+ ({ response, cachedETags } = await retry(
+ (s) => s.response.status !== 404,
+ async () => {
+ return await fetchWithETag(`${url}/index.html`, cachedETags);
+ }
+ ));
+ expect(response.status).toBe(404);
+ });
+
+ it(`supports modifying the assets directory in wrangler.toml during dev session`, async () => {
+ const helper = new WranglerE2ETestHelper();
+ await helper.seed({
+ "wrangler.toml": dedent`
name = "${workerName}"
compatibility_date = "2023-01-01"
[assets]
directory = "./public"
`,
- "public/index.html": dedent`
+ "public/index.html": dedent`
Hello Workers + Assets
`,
- });
- await helper.seed({
- "public2/index.html": dedent`
+ });
+ await helper.seed({
+ "public2/index.html": dedent`
Hola Workers + Assets
`,
- "public2/about/index.html": dedent`
+ "public2/about/index.html": dedent`
Read more about Workers + Assets
`,
- });
- const worker = helper.runLongLived(cmd);
- const { url } = await worker.waitForReady();
+ });
+ const worker = helper.runLongLived(cmd);
+ const { url } = await worker.waitForReady();
- let { response, cachedETags } = await fetchWithETag(
- `${url}/index.html`,
- {}
- );
-				expect(await response.text()).toBe("Hello Workers + Assets");
+ let { response, cachedETags } = await fetchWithETag(
+ `${url}/index.html`,
+ {}
+ );
+			expect(await response.text()).toBe("Hello Workers + Assets");
- await helper.seed({
- "wrangler.toml": dedent`
+ await helper.seed({
+ "wrangler.toml": dedent`
name = "${workerName}"
compatibility_date = "2023-01-01"
[assets]
directory = "./public2"
`,
- });
+ });
- await worker.waitForReload();
+ await worker.waitForReload();
- ({ response, cachedETags } = await retry(
- (s) => s.response.status !== 200,
- async () => {
- return await fetchWithETag(`${url}/index.html`, cachedETags);
- }
- ));
-				expect(await response.text()).toBe("Hola Workers + Assets");
- ({ response, cachedETags } = await fetchWithETag(
- `${url}/about/index.html`,
- {}
- ));
- expect(await response.text()).toBe(
-					"Read more about Workers + Assets"
- );
- });
+ ({ response, cachedETags } = await retry(
+ (s) => s.response.status !== 200,
+ async () => {
+ return await fetchWithETag(`${url}/index.html`, cachedETags);
+ }
+ ));
+			expect(await response.text()).toBe("Hola Workers + Assets");
+ ({ response, cachedETags } = await fetchWithETag(
+ `${url}/about/index.html`,
+ {}
+ ));
+ expect(await response.text()).toBe(
+				"Read more about Workers + Assets"
+ );
+ });
- it(`supports switching from Workers without assets to assets-only Workers during the current dev session`, async () => {
- const helper = new WranglerE2ETestHelper();
- await helper.seed({
- "wrangler.toml": dedent`
+ it(`supports switching from Workers without assets to assets-only Workers during the current dev session`, async () => {
+ const helper = new WranglerE2ETestHelper();
+ await helper.seed({
+ "wrangler.toml": dedent`
name = "${workerName}"
main = "src/index.ts"
compatibility_date = "2023-01-01"
`,
- "src/index.ts": dedent`
+ "src/index.ts": dedent`
export default {
fetch(request) {
return new Response("Hello from user Worker!")
}
}`,
- });
+ });
- const worker = helper.runLongLived(cmd);
- const { url } = await worker.waitForReady();
+ const worker = helper.runLongLived(cmd);
+ const { url } = await worker.waitForReady();
- let response = await fetch(`${url}/hey`);
- expect(response.status).toBe(200);
- expect(await response.text()).toBe("Hello from user Worker!");
+ let response = await fetch(`${url}/hey`);
+ expect(response.status).toBe(200);
+ expect(await response.text()).toBe("Hello from user Worker!");
- response = await fetch(`${url}/index.html`);
- expect(response.status).toBe(200);
- expect(await response.text()).toBe("Hello from user Worker!");
+ response = await fetch(`${url}/index.html`);
+ expect(response.status).toBe(200);
+ expect(await response.text()).toBe("Hello from user Worker!");
- await helper.seed({
- "wrangler.toml": dedent`
+ await helper.seed({
+ "wrangler.toml": dedent`
name = "${workerName}"
compatibility_date = "2023-01-01"
[assets]
directory = "./public"
`,
- "public/index.html": dedent`
+ "public/index.html": dedent`
Hello Workers + Assets
`,
- });
+ });
- await worker.waitForReload();
-
- // verify response from Asset Worker
- const { status, text } = await retry(
-				(s) => s.text !== "Hello Workers + Assets",
- async () => {
- const fetchResponse = await fetch(url);
- return {
- status: fetchResponse.status,
- text: await fetchResponse.text(),
- };
- }
- );
- expect(status).toBe(200);
-			expect(text).toBe("Hello Workers + Assets");
+ await worker.waitForReload();
- response = await fetch(`${url}/index.html`);
- expect(response.status).toBe(200);
-			expect(await response.text()).toBe("Hello Workers + Assets");
+ // verify response from Asset Worker
+ const { status, text } = await retry(
+				(s) => s.text !== "Hello Workers + Assets",
+ async () => {
+ const fetchResponse = await fetch(url);
+ return {
+ status: fetchResponse.status,
+ text: await fetchResponse.text(),
+ };
+ }
+ );
+ expect(status).toBe(200);
+			expect(text).toBe("Hello Workers + Assets");
- // verify we no longer get a response from the User Worker
- response = await fetch(`${url}/hey`);
- expect(response.status).toBe(404);
- });
+ response = await fetch(`${url}/index.html`);
+ expect(response.status).toBe(200);
+			expect(await response.text()).toBe("Hello Workers + Assets");
- it(`supports switching from Workers without assets to Workers with assets during the current dev session`, async () => {
- const helper = new WranglerE2ETestHelper();
- await helper.seed({
- "wrangler.toml": dedent`
+ // verify we no longer get a response from the User Worker
+ response = await fetch(`${url}/hey`);
+ expect(response.status).toBe(404);
+ });
+
+ it(`supports switching from Workers without assets to Workers with assets during the current dev session`, async () => {
+ const helper = new WranglerE2ETestHelper();
+ await helper.seed({
+ "wrangler.toml": dedent`
name = "${workerName}"
main = "src/index.ts"
compatibility_date = "2023-01-01"
`,
- "src/index.ts": dedent`
+ "src/index.ts": dedent`
export default {
fetch(request) {
return new Response("Hello from user Worker!")
}
}`,
- });
+ });
- const worker = helper.runLongLived(cmd);
- const { url } = await worker.waitForReady();
+ const worker = helper.runLongLived(cmd);
+ const { url } = await worker.waitForReady();
- let response = await fetch(`${url}/hey`);
- expect(response.status).toBe(200);
- expect(await response.text()).toBe("Hello from user Worker!");
+ let response = await fetch(`${url}/hey`);
+ expect(response.status).toBe(200);
+ expect(await response.text()).toBe("Hello from user Worker!");
- response = await fetch(`${url}/index.html`);
- expect(response.status).toBe(200);
- expect(await response.text()).toBe("Hello from user Worker!");
+ response = await fetch(`${url}/index.html`);
+ expect(response.status).toBe(200);
+ expect(await response.text()).toBe("Hello from user Worker!");
- await helper.seed({
- "wrangler.toml": dedent`
+ await helper.seed({
+ "wrangler.toml": dedent`
name = "${workerName}"
main = "src/index.ts"
compatibility_date = "2023-01-01"
@@ -1252,64 +1251,64 @@ describe("watch mode", () => {
[assets]
directory = "./public"
`,
- "public/index.html": dedent`
+ "public/index.html": dedent`
Hello Workers + Assets
`,
- });
+ });
- await worker.waitForReload();
-
- // verify response from Asset Worker
- const { status, text } = await retry(
-				(s) => s.text !== "Hello Workers + Assets",
- async () => {
- const fetchResponse = await fetch(url);
- return {
- status: fetchResponse.status,
- text: await fetchResponse.text(),
- };
- }
- );
- expect(status).toBe(200);
-			expect(text).toBe("Hello Workers + Assets");
+ await worker.waitForReload();
+
+ // verify response from Asset Worker
+ const { status, text } = await retry(
+				(s) => s.text !== "Hello Workers + Assets",
+ async () => {
+ const fetchResponse = await fetch(url);
+ return {
+ status: fetchResponse.status,
+ text: await fetchResponse.text(),
+ };
+ }
+ );
+ expect(status).toBe(200);
+			expect(text).toBe("Hello Workers + Assets");
- response = await fetch(`${url}/index.html`);
- expect(response.status).toBe(200);
-			expect(await response.text()).toBe("Hello Workers + Assets");
+ response = await fetch(`${url}/index.html`);
+ expect(response.status).toBe(200);
+			expect(await response.text()).toBe("Hello Workers + Assets");
- // verify response from the User Worker
- response = await fetch(`${url}/hey`);
- expect(response.status).toBe(200);
- expect(await response.text()).toBe("Hello from user Worker!");
- });
+ // verify response from the User Worker
+ response = await fetch(`${url}/hey`);
+ expect(response.status).toBe(200);
+ expect(await response.text()).toBe("Hello from user Worker!");
+ });
- it(`supports switching from assets-only Workers to Workers with assets during the current dev session`, async () => {
- const helper = new WranglerE2ETestHelper();
- await helper.seed({
- "wrangler.toml": dedent`
+ it(`supports switching from assets-only Workers to Workers with assets during the current dev session`, async () => {
+ const helper = new WranglerE2ETestHelper();
+ await helper.seed({
+ "wrangler.toml": dedent`
name = "${workerName}"
compatibility_date = "2023-01-01"
[assets]
directory = "./public"
`,
- "public/index.html": dedent`
+ "public/index.html": dedent`
Hello Workers + Assets
`,
- });
- const worker = helper.runLongLived(cmd);
- const { url } = await worker.waitForReady();
+ });
+ const worker = helper.runLongLived(cmd);
+ const { url } = await worker.waitForReady();
- // verify response from Asset Worker
- let response = await fetch(`${url}/index.html`);
- expect(response.status).toBe(200);
-			expect(await response.text()).toBe("Hello Workers + Assets");
+ // verify response from Asset Worker
+ let response = await fetch(`${url}/index.html`);
+ expect(response.status).toBe(200);
+			expect(await response.text()).toBe("Hello Workers + Assets");
- // verify no response from route that will be handled by the
- // User Worker in the future
- response = await fetch(`${url}/hey`);
- expect(response.status).toBe(404);
+ // verify no response from route that will be handled by the
+ // User Worker in the future
+ response = await fetch(`${url}/hey`);
+ expect(response.status).toBe(404);
- await helper.seed({
- "wrangler.toml": dedent`
+ await helper.seed({
+ "wrangler.toml": dedent`
name = "${workerName}"
main = "src/index.ts"
compatibility_date = "2023-01-01"
@@ -1317,31 +1316,31 @@ describe("watch mode", () => {
[assets]
directory = "./public"
`,
- "src/index.ts": dedent`
+ "src/index.ts": dedent`
export default {
fetch(request) {
return new Response("Hello from user Worker!")
}
}`,
- });
+ });
- await worker.waitForReload();
+ await worker.waitForReload();
- // verify we still get the correct response for the Asset Worker
- response = await fetch(`${url}/index.html`);
- expect(response.status).toBe(200);
-			expect(await response.text()).toBe("Hello Workers + Assets");
+ // verify we still get the correct response for the Asset Worker
+ response = await fetch(`${url}/index.html`);
+ expect(response.status).toBe(200);
+			expect(await response.text()).toBe("Hello Workers + Assets");
- // verify response from User Worker
- response = await fetch(`${url}/hey`);
- expect(response.status).toBe(200);
- expect(await response.text()).toBe("Hello from user Worker!");
- });
+ // verify response from User Worker
+ response = await fetch(`${url}/hey`);
+ expect(response.status).toBe(200);
+ expect(await response.text()).toBe("Hello from user Worker!");
+ });
- it(`supports switching from Workers with assets to assets-only Workers during the current dev session`, async () => {
- const helper = new WranglerE2ETestHelper();
- await helper.seed({
- "wrangler.toml": dedent`
+ it(`supports switching from Workers with assets to assets-only Workers during the current dev session`, async () => {
+ const helper = new WranglerE2ETestHelper();
+ await helper.seed({
+ "wrangler.toml": dedent`
name = "${workerName}"
main = "src/index.ts"
compatibility_date = "2023-01-01"
@@ -1349,52 +1348,51 @@ describe("watch mode", () => {
[assets]
directory = "./public"
`,
- "public/index.html": dedent`
+ "public/index.html": dedent`
Hello Workers + Assets
`,
- "src/index.ts": dedent`
+ "src/index.ts": dedent`
export default {
fetch(request) {
return new Response("Hello from user Worker!")
}
}`,
- });
+ });
- const worker = helper.runLongLived(cmd);
- const { url } = await worker.waitForReady();
+ const worker = helper.runLongLived(cmd);
+ const { url } = await worker.waitForReady();
- // verify response from Asset Worker
- let response = await fetch(`${url}/index.html`);
- expect(response.status).toBe(200);
-			expect(await response.text()).toBe("Hello Workers + Assets");
+ // verify response from Asset Worker
+ let response = await fetch(`${url}/index.html`);
+ expect(response.status).toBe(200);
+			expect(await response.text()).toBe("Hello Workers + Assets");
- // verify response from User Worker
- response = await fetch(`${url}/hey`);
- expect(response.status).toBe(200);
- expect(await response.text()).toBe("Hello from user Worker!");
+ // verify response from User Worker
+ response = await fetch(`${url}/hey`);
+ expect(response.status).toBe(200);
+ expect(await response.text()).toBe("Hello from user Worker!");
- await helper.seed({
- "wrangler.toml": dedent`
+ await helper.seed({
+ "wrangler.toml": dedent`
name = "${workerName}"
compatibility_date = "2023-01-01"
[assets]
directory = "./public"
`,
- });
+ });
- await worker.waitForReload();
+ await worker.waitForReload();
- // verify we still get the correct response from Asset Worker
- response = await fetch(`${url}/index.html`);
- expect(response.status).toBe(200);
-			expect(await response.text()).toBe("Hello Workers + Assets");
+ // verify we still get the correct response from Asset Worker
+ response = await fetch(`${url}/index.html`);
+ expect(response.status).toBe(200);
+			expect(await response.text()).toBe("Hello Workers + Assets");
- // verify we no longer get a response from the User Worker
- response = await fetch(`${url}/hey`);
- expect(response.status).toBe(404);
- });
- }
- );
+ // verify we no longer get a response from the User Worker
+ response = await fetch(`${url}/hey`);
+ expect(response.status).toBe(404);
+ });
+ });
describe.each([
{ cmd: "wrangler dev --assets=dist" },
@@ -1490,7 +1488,7 @@ describe("watch mode", () => {
route = "example.com/path/*"
`,
});
- await worker.readUntil(/UserError: Invalid Routes:/);
+ await worker.readUntil(/Invalid Routes:/);
});
it(`supports switching from assets-only Workers to Workers with assets during the current dev session`, async () => {
diff --git a/packages/wrangler/e2e/pages-dev.test.ts b/packages/wrangler/e2e/pages-dev.test.ts
index e4bc859263d5..e0d0da25316e 100644
--- a/packages/wrangler/e2e/pages-dev.test.ts
+++ b/packages/wrangler/e2e/pages-dev.test.ts
@@ -8,7 +8,7 @@ import { fetchText } from "./helpers/fetch-text";
import { normalizeOutput } from "./helpers/normalize";
describe.each([
- { cmd: "wrangler pages dev" },
+ { cmd: "wrangler pages dev --no-x-dev-env" },
{ cmd: "wrangler pages dev --x-dev-env" },
])("Pages $cmd", ({ cmd }) => {
it("should warn if no [--compatibility_date] command line arg was specified", async () => {
diff --git a/packages/wrangler/src/__tests__/api/startDevWorker/ConfigController.test.ts b/packages/wrangler/src/__tests__/api/startDevWorker/ConfigController.test.ts
index 47fd9a8c7cc7..77617274e7a3 100644
--- a/packages/wrangler/src/__tests__/api/startDevWorker/ConfigController.test.ts
+++ b/packages/wrangler/src/__tests__/api/startDevWorker/ConfigController.test.ts
@@ -3,6 +3,7 @@ import path from "node:path";
import dedent from "ts-dedent";
import { describe, it } from "vitest";
import { ConfigController } from "../../../api/startDevWorker/ConfigController";
+import { mockAccountId, mockApiToken } from "../../helpers/mock-account-id";
import { mockConsoleMethods } from "../../helpers/mock-console";
import { runInTempDir } from "../../helpers/run-in-tmp";
import { seed } from "../../helpers/seed";
@@ -18,6 +19,8 @@ async function waitForConfigUpdate(
describe("ConfigController", () => {
runInTempDir();
mockConsoleMethods();
+ mockAccountId();
+ mockApiToken();
it("should emit configUpdate events with defaults applied", async () => {
const controller = new ConfigController();
diff --git a/packages/wrangler/src/__tests__/deploy.test.ts b/packages/wrangler/src/__tests__/deploy.test.ts
index c2abede00275..ccf3e2a8d5a5 100644
--- a/packages/wrangler/src/__tests__/deploy.test.ts
+++ b/packages/wrangler/src/__tests__/deploy.test.ts
@@ -4303,6 +4303,31 @@ addEventListener('fetch', event => {});`
);
});
+ it("should error if --assets and config.tail_consumers are used together", async () => {
+ writeWranglerToml({
+ tail_consumers: [{ service: "" }],
+ });
+ fs.mkdirSync("public");
+ await expect(
+ runWrangler("deploy --assets public")
+ ).rejects.toThrowErrorMatchingInlineSnapshot(
+ `[Error: Cannot use assets and tail consumers in the same Worker. Tail Workers are not yet supported for Workers with assets.]`
+ );
+ });
+
+ it("should error if config.assets and config.tail_consumers are used together", async () => {
+ writeWranglerToml({
+ assets: { directory: "./public" },
+ tail_consumers: [{ service: "" }],
+ });
+ fs.mkdirSync("public");
+ await expect(
+ runWrangler("deploy")
+ ).rejects.toThrowErrorMatchingInlineSnapshot(
+ `[Error: Cannot use assets and tail consumers in the same Worker. Tail Workers are not yet supported for Workers with assets.]`
+ );
+ });
+
it("should error if directory specified by flag --assets does not exist", async () => {
await expect(runWrangler("deploy --assets abc")).rejects.toThrow(
new RegExp(
diff --git a/packages/wrangler/src/__tests__/dev.test.tsx b/packages/wrangler/src/__tests__/dev.test.ts
similarity index 69%
rename from packages/wrangler/src/__tests__/dev.test.tsx
rename to packages/wrangler/src/__tests__/dev.test.ts
index 7ca8d43c4d97..8400b08a70e2 100644
--- a/packages/wrangler/src/__tests__/dev.test.tsx
+++ b/packages/wrangler/src/__tests__/dev.test.ts
@@ -5,11 +5,14 @@ import { http, HttpResponse } from "msw";
import patchConsole from "patch-console";
import dedent from "ts-dedent";
import { vi } from "vitest";
-import Dev from "../dev/dev";
+import { ConfigController } from "../api/startDevWorker/ConfigController";
+import registerDevHotKeys from "../dev/hotkeys";
import { getWorkerAccountAndContext } from "../dev/remote";
+import { FatalError } from "../errors";
import { CI } from "../is-ci";
import { mockAccountId, mockApiToken } from "./helpers/mock-account-id";
import { mockConsoleMethods } from "./helpers/mock-console";
+import { useMockIsTTY } from "./helpers/mock-istty";
import {
msw,
mswSuccessOauthHandlers,
@@ -22,40 +25,106 @@ import {
writeWranglerJson,
writeWranglerToml,
} from "./helpers/write-wrangler-toml";
-import type { Mock } from "vitest";
+import type {
+ Binding,
+ StartDevWorkerInput,
+ StartDevWorkerOptions,
+ Trigger,
+} from "../api";
+import type { Mock, MockInstance } from "vitest";
+
+vi.mock("../api/startDevWorker/ConfigController", (importOriginal) =>
+ importOriginal()
+);
+
+vi.mock("../dev/hotkeys");
+
+// Don't memoize in tests. If we did, it would memoize across test runs, which causes problems
+vi.mock("../utils/memoizeGetPort", () => {
+ return {
+ memoizeGetPort: (port: number, host: string) => async () => {
+ return await getPort({ port: port, host: host });
+ },
+ };
+});
async function expectedHostAndZone(
+ config: StartDevWorkerOptions & { input: StartDevWorkerInput },
host: string,
zone: string
): Promise {
- const config = (Dev as Mock).mock.calls[0][0];
- expect(config).toEqual(
- expect.objectContaining({
- localUpstream: host,
- })
- );
- await expect(
- getWorkerAccountAndContext({
- accountId: "",
- host: config.host,
- routes: config.routes,
- })
- ).resolves.toEqual(
+ expect(config).toMatchObject({
+ dev: { origin: { hostname: host } },
+ });
+
+ const ctx = await getWorkerAccountAndContext({
+ accountId: "",
+ host: config.input.dev?.origin?.hostname,
+ routes: config.triggers
+ ?.filter(
+			(trigger): trigger is Extract<Trigger, { type: "route" }> =>
+ trigger.type === "route"
+ )
+ .map((trigger) => {
+ const { type: _, ...route } = trigger;
+ if (
+ "custom_domain" in route ||
+ "zone_id" in route ||
+ "zone_name" in route
+ ) {
+ return route;
+ } else {
+ return route.pattern;
+ }
+ }),
+ env: undefined,
+ legacyEnv: undefined,
+ sendMetrics: undefined,
+ });
+
+ expect(ctx).toEqual(
expect.objectContaining({
workerContext: {
host,
zone,
- routes: config.routes,
+ routes: config.triggers
+ ?.filter(
+						(trigger): trigger is Extract<Trigger, { type: "route" }> =>
+ trigger.type === "route"
+ )
+ .map((trigger) => {
+ const { type: _, ...route } = trigger;
+ if (
+ "custom_domain" in route ||
+ "zone_id" in route ||
+ "zone_name" in route
+ ) {
+ return route;
+ } else {
+ return route.pattern;
+ }
+ }),
},
})
);
- (Dev as Mock).mockClear();
return config;
}
-describe("wrangler dev", () => {
+describe.sequential("wrangler dev", () => {
+ let spy: MockInstance;
+ let setSpy: MockInstance;
+ const { setIsTTY } = useMockIsTTY();
+
beforeEach(() => {
+ setIsTTY(true);
+ setSpy = vi.spyOn(ConfigController.prototype, "set");
+ spy = vi
+ .spyOn(ConfigController.prototype, "emitConfigUpdateEvent")
+ .mockImplementation(() => {
+ // In unit tests of `wrangler dev` we only care about the first config parse event, so exit early
+ throw new FatalError("Bailing early in tests");
+ });
msw.use(
...mswZoneHandlers,
...mswSuccessOauthHandlers,
@@ -68,11 +137,24 @@ describe("wrangler dev", () => {
mockApiToken();
const std = mockConsoleMethods();
afterEach(() => {
- (Dev as Mock).mockClear();
patchConsole(() => {});
msw.resetHandlers();
+ spy.mockClear();
+ setSpy.mockClear();
});
+ async function runWranglerUntilConfig(
+ cmd?: string,
+		env?: Record<string, string>
+	): Promise<StartDevWorkerOptions & { input: StartDevWorkerInput }> {
+ try {
+ await runWrangler(cmd, env);
+ } catch (e) {
+ console.error(e);
+ }
+ return { ...spy.mock.calls[0][0], input: setSpy.mock.calls[0][0] };
+ }
+
describe("config file support", () => {
it("should support wrangler.toml", async () => {
writeWranglerToml({
@@ -82,10 +164,8 @@ describe("wrangler dev", () => {
});
fs.writeFileSync("index.js", `export default {};`);
- await runWrangler("dev");
- expect(std.out).toMatchInlineSnapshot(`""`);
- expect(std.err).toMatchInlineSnapshot(`""`);
- expect(std.warn).toMatchInlineSnapshot(`""`);
+ const options = await runWranglerUntilConfig("dev");
+ expect(options.name).toMatchInlineSnapshot(`"test-worker-toml"`);
});
it("should support wrangler.json", async () => {
@@ -96,10 +176,10 @@ describe("wrangler dev", () => {
});
fs.writeFileSync("index.js", `export default {};`);
- await runWrangler("dev --experimental-json-config");
- expect(std.out).toMatchInlineSnapshot(`""`);
- expect(std.err).toMatchInlineSnapshot(`""`);
- expect(std.warn).toMatchInlineSnapshot(`""`);
+ const options = await runWranglerUntilConfig(
+ "dev --experimental-json-config"
+ );
+ expect(options.name).toMatchInlineSnapshot(`"test-worker-json"`);
});
it("should support wrangler.jsonc", async () => {
@@ -113,10 +193,10 @@ describe("wrangler dev", () => {
);
fs.writeFileSync("index.js", `export default {};`);
- await runWrangler("dev --experimental-json-config");
- expect(std.out).toMatchInlineSnapshot(`""`);
- expect(std.err).toMatchInlineSnapshot(`""`);
- expect(std.warn).toMatchInlineSnapshot(`""`);
+ const options = await runWranglerUntilConfig(
+ "dev --experimental-json-config"
+ );
+ expect(options.name).toMatchInlineSnapshot(`"test-worker-jsonc"`);
});
});
@@ -139,10 +219,8 @@ describe("wrangler dev", () => {
describe("compatibility-date", () => {
it("should not warn if there is no wrangler.toml and no compatibility-date specified", async () => {
fs.writeFileSync("index.js", `export default {};`);
- await runWrangler("dev index.js");
- expect(std.out).toMatchInlineSnapshot(`""`);
+ await runWranglerUntilConfig("dev index.js");
expect(std.warn).toMatchInlineSnapshot(`""`);
- expect(std.err).toMatchInlineSnapshot(`""`);
});
it("should warn if there is a wrangler.toml but no compatibility-date", async () => {
@@ -151,7 +229,7 @@ describe("wrangler dev", () => {
compatibility_date: undefined,
});
fs.writeFileSync("index.js", `export default {};`);
- await runWrangler("dev");
+ await runWranglerUntilConfig("dev");
const miniflareEntry = require.resolve("miniflare");
const miniflareRequire = module.createRequire(miniflareEntry);
@@ -160,7 +238,6 @@ describe("wrangler dev", () => {
};
const currentDate = miniflareWorkerd.compatibilityDate;
- expect(std.out).toMatchInlineSnapshot(`""`);
expect(std.warn.replaceAll(currentDate, ""))
.toMatchInlineSnapshot(`
"[33m▲ [43;33m[[43;30mWARNING[43;33m][0m [1mNo compatibility_date was specified. Using the installed Workers runtime's latest supported date: .[0m
@@ -172,7 +249,6 @@ describe("wrangler dev", () => {
"
`);
- expect(std.err).toMatchInlineSnapshot(`""`);
});
it("should not warn if there is a wrangler.toml but compatibility-date is specified at the command line", async () => {
@@ -181,32 +257,8 @@ describe("wrangler dev", () => {
compatibility_date: undefined,
});
fs.writeFileSync("index.js", `export default {};`);
- await runWrangler("dev --compatibility-date=2020-01-01");
- expect(std.out).toMatchInlineSnapshot(`""`);
+ await runWranglerUntilConfig("dev --compatibility-date=2020-01-01");
expect(std.warn).toMatchInlineSnapshot(`""`);
- expect(std.err).toMatchInlineSnapshot(`""`);
- });
- });
-
- describe("usage-model", () => {
- it("should read wrangler.toml's usage_model", async () => {
- writeWranglerToml({
- main: "index.js",
- usage_model: "unbound",
- });
- fs.writeFileSync("index.js", `export default {};`);
- await runWrangler("dev");
- expect((Dev as Mock).mock.calls[0][0].usageModel).toEqual("unbound");
- });
-
- it("should read wrangler.toml's usage_model in local mode", async () => {
- writeWranglerToml({
- main: "index.js",
- usage_model: "unbound",
- });
- fs.writeFileSync("index.js", `export default {};`);
- await runWrangler("dev");
- expect((Dev as Mock).mock.calls[0][0].usageModel).toEqual("unbound");
});
});
@@ -233,11 +285,8 @@ describe("wrangler dev", () => {
main: "index.js",
});
fs.writeFileSync("index.js", `export default {};`);
- await runWrangler("dev");
- expect((Dev as Mock).mock.calls[0][0].entry.file).toMatch(/index\.js$/);
- expect(std.out).toMatchInlineSnapshot(`""`);
- expect(std.warn).toMatchInlineSnapshot(`""`);
- expect(std.err).toMatchInlineSnapshot(`""`);
+ const config = await runWranglerUntilConfig("dev");
+ expect(config.entrypoint).toMatch(/index\.js$/);
});
it("should use `main` from a named environment", async () => {
@@ -249,11 +298,8 @@ describe("wrangler dev", () => {
},
});
fs.writeFileSync("index.js", `export default {};`);
- await runWrangler("dev --env=ENV1");
- expect((Dev as Mock).mock.calls[0][0].entry.file).toMatch(/index\.js$/);
- expect(std.out).toMatchInlineSnapshot(`""`);
- expect(std.warn).toMatchInlineSnapshot(`""`);
- expect(std.err).toMatchInlineSnapshot(`""`);
+ const config = await runWranglerUntilConfig("dev --env=ENV1");
+ expect(config.entrypoint).toMatch(/index\.js$/);
});
it("should use `main` from a named environment, rather than the top-level", async () => {
@@ -266,16 +312,13 @@ describe("wrangler dev", () => {
},
});
fs.writeFileSync("index.js", `export default {};`);
- await runWrangler("dev --env=ENV1");
- expect((Dev as Mock).mock.calls[0][0].entry.file).toMatch(/index\.js$/);
- expect(std.out).toMatchInlineSnapshot(`""`);
- expect(std.warn).toMatchInlineSnapshot(`""`);
- expect(std.err).toMatchInlineSnapshot(`""`);
+ const config = await runWranglerUntilConfig("dev --env=ENV1");
+ expect(config.entrypoint).toMatch(/index\.js$/);
});
});
describe("routes", () => {
- it("should pass routes to ", async () => {
+ it("should pass routes to emitConfigUpdate", async () => {
fs.writeFileSync("index.js", `export default {};`);
// config.routes
@@ -284,18 +327,21 @@ describe("wrangler dev", () => {
main: "index.js",
routes: ["http://5.some-host.com/some/path/*"],
});
- await runWrangler("dev --remote");
+ const config = await runWranglerUntilConfig("dev --remote");
const devConfig = await expectedHostAndZone(
+ config,
"5.some-host.com",
"some-zone-id-5"
);
- expect(devConfig).toEqual(
- expect.objectContaining({
- routes: ["http://5.some-host.com/some/path/*"],
- })
- );
+ expect(devConfig).toMatchObject({
+ triggers: [
+ {
+ pattern: "http://5.some-host.com/some/path/*",
+ },
+ ],
+ });
});
it("should error if custom domains with paths are passed in but allow paths on normal routes", async () => {
fs.writeFileSync("index.js", `export default {};`);
@@ -390,9 +436,11 @@ describe("wrangler dev", () => {
});
fs.writeFileSync("index.js", `export default {};`);
mockGetZones("some-host.com", [{ id: "some-zone-id" }]);
- await runWrangler("dev --remote --host some-host.com");
+ const config = await runWranglerUntilConfig(
+ "dev --remote --host some-host.com"
+ );
- await expectedHostAndZone("some-host.com", "some-zone-id");
+ await expectedHostAndZone(config, "some-host.com", "some-zone-id");
});
it("should read wrangler.toml's dev.host", async () => {
@@ -404,8 +452,8 @@ describe("wrangler dev", () => {
});
fs.writeFileSync("index.js", `export default {};`);
mockGetZones("some-host.com", [{ id: "some-zone-id" }]);
- await runWrangler("dev");
- expect((Dev as Mock).mock.calls[0][0].host).toEqual("some-host.com");
+ const config = await runWranglerUntilConfig("dev");
+ expect(config.dev.origin?.hostname).toEqual("some-host.com");
});
it("should read --route", async () => {
@@ -414,8 +462,10 @@ describe("wrangler dev", () => {
});
fs.writeFileSync("index.js", `export default {};`);
mockGetZones("some-host.com", [{ id: "some-zone-id" }]);
- await runWrangler("dev --route http://some-host.com/some/path/*");
- await expectedHostAndZone("some-host.com", "some-zone-id");
+ const config = await runWranglerUntilConfig(
+ "dev --route http://some-host.com/some/path/*"
+ );
+ await expectedHostAndZone(config, "some-host.com", "some-zone-id");
});
it("should read wrangler.toml's routes", async () => {
@@ -428,8 +478,8 @@ describe("wrangler dev", () => {
});
fs.writeFileSync("index.js", `export default {};`);
mockGetZones("some-host.com", [{ id: "some-zone-id" }]);
- await runWrangler("dev");
- await expectedHostAndZone("some-host.com", "some-zone-id");
+ const config = await runWranglerUntilConfig("dev");
+ await expectedHostAndZone(config, "some-host.com", "some-zone-id");
});
it("should read wrangler.toml's environment specific routes", async () => {
@@ -450,8 +500,8 @@ describe("wrangler dev", () => {
});
fs.writeFileSync("index.js", `export default {};`);
mockGetZones("some-host.com", [{ id: "some-zone-id" }]);
- await runWrangler("dev --env staging");
- await expectedHostAndZone("some-host.com", "some-zone-id");
+ const config = await runWranglerUntilConfig("dev --env staging");
+ await expectedHostAndZone(config, "some-host.com", "some-zone-id");
});
it("should strip leading `*` from given host when deducing a zone id", async () => {
@@ -461,8 +511,8 @@ describe("wrangler dev", () => {
});
fs.writeFileSync("index.js", `export default {};`);
mockGetZones("some-host.com", [{ id: "some-zone-id" }]);
- await runWrangler("dev");
- await expectedHostAndZone("some-host.com", "some-zone-id");
+ const config = await runWranglerUntilConfig("dev");
+ await expectedHostAndZone(config, "some-host.com", "some-zone-id");
});
it("should strip leading `*.` from given host when deducing a zone id", async () => {
@@ -472,8 +522,8 @@ describe("wrangler dev", () => {
});
fs.writeFileSync("index.js", `export default {};`);
mockGetZones("some-host.com", [{ id: "some-zone-id" }]);
- await runWrangler("dev");
- await expectedHostAndZone("some-host.com", "some-zone-id");
+ const config = await runWranglerUntilConfig("dev");
+ await expectedHostAndZone(config, "some-host.com", "some-zone-id");
});
it("should, when provided, use a configured zone_id", async () => {
@@ -484,9 +534,9 @@ describe("wrangler dev", () => {
],
});
fs.writeFileSync("index.js", `export default {};`);
- await runWrangler("dev --remote");
+ const config = await runWranglerUntilConfig("dev --remote");
- await expectedHostAndZone("some-domain.com", "some-zone-id");
+ await expectedHostAndZone(config, "some-domain.com", "some-zone-id");
});
it("should, when provided, use a zone_name to get a zone_id", async () => {
@@ -501,9 +551,9 @@ describe("wrangler dev", () => {
});
fs.writeFileSync("index.js", `export default {};`);
mockGetZones("some-zone.com", [{ id: "a-zone-id" }]);
- await runWrangler("dev --remote");
+ const config = await runWranglerUntilConfig("dev --remote");
- await expectedHostAndZone("some-zone.com", "a-zone-id");
+ await expectedHostAndZone(config, "some-zone.com", "a-zone-id");
});
it("should find the host from the given pattern, not zone_name", async () => {
@@ -517,9 +567,9 @@ describe("wrangler dev", () => {
],
});
await fs.promises.writeFile("index.js", `export default {};`);
- await runWrangler("dev");
- expect(std.out).toMatchInlineSnapshot(`""`);
- expect(std.err).toMatchInlineSnapshot(`""`);
+ const config = await runWranglerUntilConfig("dev");
+
+ expect(config.dev.origin?.hostname).toBe("subdomain.exists.com");
});
it("should fail for non-existing zones, when falling back from */*", async () => {
@@ -551,9 +601,13 @@ describe("wrangler dev", () => {
],
});
await fs.promises.writeFile("index.js", `export default {};`);
- await runWrangler("dev");
- expect(std.out).toMatchInlineSnapshot(`""`);
- expect(std.err).toMatchInlineSnapshot(`""`);
+ const config = await runWranglerUntilConfig("dev");
+
+ expect(config.triggers).toMatchObject([
+ {
+ zone_name: "exists.com",
+ },
+ ]);
});
it("fails when given the pattern */* and no zone_name", async () => {
writeWranglerToml({
@@ -609,77 +663,89 @@ describe("wrangler dev", () => {
})
);
- await runWrangler("dev --remote --host 111.222.333.some-host.com");
+ const config = await runWranglerUntilConfig(
+ "dev --remote --host 111.222.333.some-host.com"
+ );
- await expectedHostAndZone("111.222.333.some-host.com", "some-zone-id");
+ await expectedHostAndZone(
+ config,
+ "111.222.333.some-host.com",
+ "some-zone-id"
+ );
});
- it("should, in order, use args.host/config.dev.host/args.routes/(config.route|config.routes)", async () => {
- // This test might seem like it's testing implementation details, but let's be specific and consider it a spec
+ describe("should, in order, use args.host/config.dev.host/args.routes/(config.route|config.routes)", () => {
+ it("config.routes", async () => {
+ fs.writeFileSync("index.js", `export default {};`);
- fs.writeFileSync("index.js", `export default {};`);
+ mockGetZones("5.some-host.com", [{ id: "some-zone-id-5" }]);
+ writeWranglerToml({
+ main: "index.js",
+ routes: ["http://5.some-host.com/some/path/*"],
+ });
+ const config = await runWranglerUntilConfig("dev --remote");
- // config.routes
- mockGetZones("5.some-host.com", [{ id: "some-zone-id-5" }]);
- writeWranglerToml({
- main: "index.js",
- routes: ["http://5.some-host.com/some/path/*"],
+ await expectedHostAndZone(config, "5.some-host.com", "some-zone-id-5");
});
- await runWrangler("dev --remote");
+ it("config.route", async () => {
+ fs.writeFileSync("index.js", `export default {};`);
- await expectedHostAndZone("5.some-host.com", "some-zone-id-5");
+ mockGetZones("4.some-host.com", [{ id: "some-zone-id-4" }]);
+ writeWranglerToml({
+ main: "index.js",
+ route: "https://4.some-host.com/some/path/*",
+ });
+ const config2 = await runWranglerUntilConfig("dev --remote");
- // config.route
- mockGetZones("4.some-host.com", [{ id: "some-zone-id-4" }]);
- writeWranglerToml({
- main: "index.js",
- route: "https://4.some-host.com/some/path/*",
+ await expectedHostAndZone(config2, "4.some-host.com", "some-zone-id-4");
});
- await runWrangler("dev --remote");
+ it("--routes", async () => {
+ fs.writeFileSync("index.js", `export default {};`);
- await expectedHostAndZone("4.some-host.com", "some-zone-id-4");
+ mockGetZones("3.some-host.com", [{ id: "some-zone-id-3" }]);
+ writeWranglerToml({
+ main: "index.js",
+ route: "https://4.some-host.com/some/path/*",
+ });
+ const config3 = await runWranglerUntilConfig(
+ "dev --remote --routes http://3.some-host.com/some/path/*"
+ );
- // --routes
- mockGetZones("3.some-host.com", [{ id: "some-zone-id-3" }]);
- writeWranglerToml({
- main: "index.js",
- route: "https://4.some-host.com/some/path/*",
+ await expectedHostAndZone(config3, "3.some-host.com", "some-zone-id-3");
});
- await runWrangler(
- "dev --remote --routes http://3.some-host.com/some/path/*"
- );
-
- await expectedHostAndZone("3.some-host.com", "some-zone-id-3");
+ it("config.dev.host", async () => {
+ fs.writeFileSync("index.js", `export default {};`);
- // config.dev.host
- mockGetZones("2.some-host.com", [{ id: "some-zone-id-2" }]);
- writeWranglerToml({
- main: "index.js",
- dev: {
- host: `2.some-host.com`,
- },
- route: "4.some-host.com/some/path/*",
+ mockGetZones("2.some-host.com", [{ id: "some-zone-id-2" }]);
+ writeWranglerToml({
+ main: "index.js",
+ dev: {
+ host: `2.some-host.com`,
+ },
+ route: "4.some-host.com/some/path/*",
+ });
+ const config4 = await runWranglerUntilConfig(
+ "dev --remote --routes http://3.some-host.com/some/path/*"
+ );
+ expect(config4.dev.origin?.hostname).toBe("2.some-host.com");
});
- await runWrangler(
- "dev --remote --routes http://3.some-host.com/some/path/*"
- );
- await expectedHostAndZone("2.some-host.com", "some-zone-id-2");
+ it("host", async () => {
+ fs.writeFileSync("index.js", `export default {};`);
- // --host
- mockGetZones("1.some-host.com", [{ id: "some-zone-id-1" }]);
- writeWranglerToml({
- main: "index.js",
- dev: {
- host: `2.some-host.com`,
- },
- route: "4.some-host.com/some/path/*",
+ mockGetZones("1.some-host.com", [{ id: "some-zone-id-1" }]);
+ writeWranglerToml({
+ main: "index.js",
+ dev: {
+ host: `2.some-host.com`,
+ },
+ route: "4.some-host.com/some/path/*",
+ });
+ const config5 = await runWranglerUntilConfig(
+ "dev --remote --routes http://3.some-host.com/some/path/* --host 1.some-host.com"
+ );
+ await expectedHostAndZone(config5, "1.some-host.com", "some-zone-id-1");
});
- await runWrangler(
- "dev --remote --routes http://3.some-host.com/some/path/* --host 1.some-host.com"
- );
- await expectedHostAndZone("1.some-host.com", "some-zone-id-1");
});
-
it("should error if a host can't resolve to a zone", async () => {
writeWranglerToml({
main: "index.js",
@@ -698,8 +764,9 @@ describe("wrangler dev", () => {
main: "index.js",
});
fs.writeFileSync("index.js", `export default {};`);
- await runWrangler("dev --host some-host.com");
- expect((Dev as Mock).mock.calls[0][0].zone).toEqual(undefined);
+ const config = await runWranglerUntilConfig("dev --host some-host.com");
+ // This is testing the _lack_ of the error in the test above: https://github.com/cloudflare/workers-sdk/tree/main/packages/wrangler/src/__tests__/dev.test.tsx#L725-L726
+ expect(config.dev.origin?.hostname).toBe("some-host.com");
});
});
@@ -712,10 +779,8 @@ describe("wrangler dev", () => {
},
});
fs.writeFileSync("index.js", `export default {};`);
- await runWrangler("dev");
- expect((Dev as Mock).mock.calls[0][0].localUpstream).toEqual(
- "2.some-host.com"
- );
+ const config = await runWranglerUntilConfig("dev");
+ expect(config.dev.origin?.hostname).toEqual("2.some-host.com");
});
it("should use route from toml by default", async () => {
@@ -724,10 +789,8 @@ describe("wrangler dev", () => {
route: "https://4.some-host.com/some/path/*",
});
fs.writeFileSync("index.js", `export default {};`);
- await runWrangler("dev");
- expect((Dev as Mock).mock.calls[0][0].localUpstream).toEqual(
- "4.some-host.com"
- );
+ const config = await runWranglerUntilConfig("dev");
+ expect(config.dev.origin?.hostname).toEqual("4.some-host.com");
});
it("should respect the option when provided", async () => {
@@ -736,10 +799,11 @@ describe("wrangler dev", () => {
route: `2.some-host.com`,
});
fs.writeFileSync("index.js", `export default {};`);
- await runWrangler("dev --local-upstream some-host.com");
- expect((Dev as Mock).mock.calls[0][0].localUpstream).toEqual(
- "some-host.com"
+
+ const config = await runWranglerUntilConfig(
+ "dev --local-upstream some-host.com"
);
+ expect(config.dev.origin?.hostname).toEqual("some-host.com");
});
});
@@ -751,35 +815,37 @@ describe("wrangler dev", () => {
},
});
- await runWrangler("dev index.js");
+ const config = await runWranglerUntilConfig("dev index.js");
expect(fs.readFileSync("index.js", "utf-8")).toMatchInlineSnapshot(
`"export default { fetch(){ return new Response(123) } }"`
);
// and the command would pass through
- expect((Dev as Mock).mock.calls[0][0].build).toEqual({
+ expect(config.build.custom).toEqual({
command:
"node -e \"4+4; require('fs').writeFileSync('index.js', 'export default { fetch(){ return new Response(123) } }')\"",
- cwd: undefined,
- watch_dir: "src",
+ workingDirectory: undefined,
+ watch: "src",
});
expect(std.out).toMatchInlineSnapshot(
- `"Running custom build: node -e \\"4+4; require('fs').writeFileSync('index.js', 'export default { fetch(){ return new Response(123) } }')\\""`
+ `
+ "Running custom build: node -e \\"4+4; require('fs').writeFileSync('index.js', 'export default { fetch(){ return new Response(123) } }')\\"
+ "
+ `
);
- expect(std.err).toMatchInlineSnapshot(`""`);
- expect(std.warn).toMatchInlineSnapshot(`""`);
});
- if (process.platform !== "win32") {
- it("should run a custom build of multiple steps combined by && before starting `dev`", async () => {
+ it.skipIf(process.platform === "win32")(
+ "should run a custom build of multiple steps combined by && before starting `dev`",
+ async () => {
writeWranglerToml({
build: {
command: `echo "export default { fetch(){ return new Response(123) } }" > index.js`,
},
});
- await runWrangler("dev index.js");
+ await runWranglerUntilConfig("dev index.js");
expect(fs.readFileSync("index.js", "utf-8")).toMatchInlineSnapshot(`
"export default { fetch(){ return new Response(123) } }
@@ -787,12 +853,13 @@ describe("wrangler dev", () => {
`);
expect(std.out).toMatchInlineSnapshot(
- `"Running custom build: echo \\"export default { fetch(){ return new Response(123) } }\\" > index.js"`
+ `
+ "Running custom build: echo \\"export default { fetch(){ return new Response(123) } }\\" > index.js
+ "
+ `
);
- expect(std.err).toMatchInlineSnapshot(`""`);
- expect(std.warn).toMatchInlineSnapshot(`""`);
- });
- }
+ }
+ );
it("should throw an error if the entry doesn't exist after the build finishes", async () => {
writeWranglerToml({
@@ -845,12 +912,12 @@ describe("wrangler dev", () => {
});
it("should load environment variables from `.env`", async () => {
- await runWrangler("dev");
+ await runWranglerUntilConfig("dev");
const output = fs.readFileSync("var.txt", "utf8");
expect(output).toMatch("default");
});
it("should prefer to load environment variables from `.env.` if `--env ` is set", async () => {
- await runWrangler("dev --env custom");
+ await runWranglerUntilConfig("dev --env custom");
const output = fs.readFileSync("var.txt", "utf8");
expect(output).toMatch("custom");
});
@@ -863,11 +930,8 @@ describe("wrangler dev", () => {
main: "index.js",
});
fs.writeFileSync("index.js", `export default {};`);
- await runWrangler("dev --remote");
- expect((Dev as Mock).mock.calls[0][0].upstreamProtocol).toEqual("https");
- expect(std.out).toMatchInlineSnapshot(`""`);
- expect(std.warn).toMatchInlineSnapshot(`""`);
- expect(std.err).toMatchInlineSnapshot(`""`);
+ const config = await runWranglerUntilConfig("dev --remote");
+ expect(config.dev.origin?.secure).toEqual(true);
});
it("should warn if `--upstream-protocol=http` is used in remote mode", async () => {
@@ -875,9 +939,10 @@ describe("wrangler dev", () => {
main: "index.js",
});
fs.writeFileSync("index.js", `export default {};`);
- await runWrangler("dev --upstream-protocol=http --remote");
- expect((Dev as Mock).mock.calls[0][0].upstreamProtocol).toEqual("http");
- expect(std.out).toMatchInlineSnapshot(`""`);
+ const config = await runWranglerUntilConfig(
+ "dev --upstream-protocol=http --remote"
+ );
+ expect(config.dev.origin?.secure).toEqual(false);
expect(std.warn).toMatchInlineSnapshot(`
"[33m▲ [43;33m[[43;30mWARNING[43;33m][0m [1mSetting upstream-protocol to http is not currently supported for remote mode.[0m
@@ -886,7 +951,6 @@ describe("wrangler dev", () => {
"
`);
- expect(std.err).toMatchInlineSnapshot(`""`);
});
it("should default upstream-protocol to local-protocol if local mode", async () => {
@@ -894,11 +958,8 @@ describe("wrangler dev", () => {
main: "index.js",
});
fs.writeFileSync("index.js", `export default {};`);
- await runWrangler("dev --local-protocol=https");
- expect((Dev as Mock).mock.calls[0][0].upstreamProtocol).toEqual("https");
- expect(std.out).toMatchInlineSnapshot(`""`);
- expect(std.warn).toMatchInlineSnapshot(`""`);
- expect(std.err).toMatchInlineSnapshot(`""`);
+ const config = await runWranglerUntilConfig("dev --local-protocol=https");
+ expect(config.dev.origin?.secure).toEqual(true);
});
it("should default upstream-protocol to http if no local-protocol in local mode", async () => {
@@ -906,11 +967,8 @@ describe("wrangler dev", () => {
main: "index.js",
});
fs.writeFileSync("index.js", `export default {};`);
- await runWrangler("dev");
- expect((Dev as Mock).mock.calls[0][0].upstreamProtocol).toEqual("http");
- expect(std.out).toMatchInlineSnapshot(`""`);
- expect(std.warn).toMatchInlineSnapshot(`""`);
- expect(std.err).toMatchInlineSnapshot(`""`);
+ const config = await runWranglerUntilConfig("dev");
+ expect(config.dev.origin?.secure).toEqual(false);
});
});
@@ -920,11 +978,8 @@ describe("wrangler dev", () => {
main: "index.js",
});
fs.writeFileSync("index.js", `export default {};`);
- await runWrangler("dev");
- expect((Dev as Mock).mock.calls[0][0].localProtocol).toEqual("http");
- expect(std.out).toMatchInlineSnapshot(`""`);
- expect(std.warn).toMatchInlineSnapshot(`""`);
- expect(std.err).toMatchInlineSnapshot(`""`);
+ const config = await runWranglerUntilConfig("dev");
+ expect(config.dev.server?.secure).toEqual(false);
});
it("should use `local_protocol` from `wrangler.toml`, if available", async () => {
@@ -935,11 +990,8 @@ describe("wrangler dev", () => {
},
});
fs.writeFileSync("index.js", `export default {};`);
- await runWrangler("dev");
- expect((Dev as Mock).mock.calls[0][0].localProtocol).toEqual("https");
- expect(std.out).toMatchInlineSnapshot(`""`);
- expect(std.warn).toMatchInlineSnapshot(`""`);
- expect(std.err).toMatchInlineSnapshot(`""`);
+ const config = await runWranglerUntilConfig("dev");
+ expect(config.dev.server?.secure).toEqual(true);
});
it("should use --local-protocol command line arg, if provided", async () => {
@@ -952,11 +1004,8 @@ describe("wrangler dev", () => {
},
});
fs.writeFileSync("index.js", `export default {};`);
- await runWrangler("dev --local-protocol=http");
- expect((Dev as Mock).mock.calls[0][0].localProtocol).toEqual("http");
- expect(std.out).toMatchInlineSnapshot(`""`);
- expect(std.warn).toMatchInlineSnapshot(`""`);
- expect(std.err).toMatchInlineSnapshot(`""`);
+ const config = await runWranglerUntilConfig("dev --local-protocol=http");
+ expect(config.dev.server?.secure).toEqual(false);
});
});
@@ -966,13 +1015,10 @@ describe("wrangler dev", () => {
main: "index.js",
});
fs.writeFileSync("index.js", `export default {};`);
- await runWrangler("dev");
- expect((Dev as Mock).mock.calls[0][0].initialIp).toEqual(
+ const config = await runWranglerUntilConfig("dev");
+ expect(config.dev.server?.hostname).toEqual(
process.platform === "win32" ? "127.0.0.1" : "localhost"
);
- expect(std.out).toMatchInlineSnapshot(`""`);
- expect(std.warn).toMatchInlineSnapshot(`""`);
- expect(std.err).toMatchInlineSnapshot(`""`);
});
it("should use to `ip` from `wrangler.toml`, if available", async () => {
@@ -983,11 +1029,8 @@ describe("wrangler dev", () => {
},
});
fs.writeFileSync("index.js", `export default {};`);
- await runWrangler("dev");
- expect((Dev as Mock).mock.calls[0][0].initialIp).toEqual("::1");
- expect(std.out).toMatchInlineSnapshot(`""`);
- expect(std.warn).toMatchInlineSnapshot(`""`);
- expect(std.err).toMatchInlineSnapshot(`""`);
+ const config = await runWranglerUntilConfig("dev");
+ expect(config.dev.server?.hostname).toEqual("::1");
});
it("should use --ip command line arg, if provided", async () => {
@@ -998,11 +1041,8 @@ describe("wrangler dev", () => {
},
});
fs.writeFileSync("index.js", `export default {};`);
- await runWrangler("dev --ip=127.0.0.1");
- expect((Dev as Mock).mock.calls[0][0].initialIp).toEqual("127.0.0.1");
- expect(std.out).toMatchInlineSnapshot(`""`);
- expect(std.warn).toMatchInlineSnapshot(`""`);
- expect(std.err).toMatchInlineSnapshot(`""`);
+ const config = await runWranglerUntilConfig("dev --ip=127.0.0.1");
+ expect(config.dev.server?.hostname).toEqual("127.0.0.1");
});
});
@@ -1013,17 +1053,8 @@ describe("wrangler dev", () => {
main: "index.js",
});
fs.writeFileSync("index.js", `export default {};`);
- await runWrangler("dev");
- expect((Dev as Mock).mock.calls[0][0].inspectorPort).toEqual(9229);
- expect(std).toMatchInlineSnapshot(`
- Object {
- "debug": "",
- "err": "",
- "info": "",
- "out": "",
- "warn": "",
- }
- `);
+ const config = await runWranglerUntilConfig("dev");
+ expect(config.dev.inspector?.port).toEqual(9229);
});
it("should read --inspector-port", async () => {
@@ -1032,17 +1063,8 @@ describe("wrangler dev", () => {
main: "index.js",
});
fs.writeFileSync("index.js", `export default {};`);
- await runWrangler("dev --inspector-port=9999");
- expect((Dev as Mock).mock.calls[0][0].inspectorPort).toEqual(9999);
- expect(std).toMatchInlineSnapshot(`
- Object {
- "debug": "",
- "err": "",
- "info": "",
- "out": "",
- "warn": "",
- }
- `);
+ const config = await runWranglerUntilConfig("dev --inspector-port=9999");
+ expect(config.dev.inspector?.port).toEqual(9999);
});
it("should read dev.inspector_port from wrangler.toml", async () => {
@@ -1053,17 +1075,8 @@ describe("wrangler dev", () => {
},
});
fs.writeFileSync("index.js", `export default {};`);
- await runWrangler("dev");
- expect((Dev as Mock).mock.calls[0][0].inspectorPort).toEqual(9999);
- expect(std).toMatchInlineSnapshot(`
- Object {
- "debug": "",
- "err": "",
- "info": "",
- "out": "",
- "warn": "",
- }
- `);
+ const config = await runWranglerUntilConfig("dev");
+ expect(config.dev.inspector?.port).toEqual(9999);
});
it("should error if a bad dev.inspector_port config is provided", async () => {
@@ -1089,11 +1102,8 @@ describe("wrangler dev", () => {
main: "index.js",
});
fs.writeFileSync("index.js", `export default {};`);
- await runWrangler("dev");
- expect((Dev as Mock).mock.calls[0][0].initialPort).toEqual(8787);
- expect(std.out).toMatchInlineSnapshot(`""`);
- expect(std.warn).toMatchInlineSnapshot(`""`);
- expect(std.err).toMatchInlineSnapshot(`""`);
+ const config = await runWranglerUntilConfig("dev");
+ expect(config.dev.server?.port).toEqual(8787);
});
it("should use `port` from `wrangler.toml`, if available", async () => {
@@ -1105,13 +1115,10 @@ describe("wrangler dev", () => {
});
fs.writeFileSync("index.js", `export default {};`);
// Mock `getPort()` to resolve to a completely different port.
- (getPort as Mock).mockResolvedValue(98765);
+ (getPort as Mock).mockResolvedValueOnce(98765);
- await runWrangler("dev");
- expect((Dev as Mock).mock.calls[0][0].initialPort).toEqual(8888);
- expect(std.out).toMatchInlineSnapshot(`""`);
- expect(std.warn).toMatchInlineSnapshot(`""`);
- expect(std.err).toMatchInlineSnapshot(`""`);
+ const config = await runWranglerUntilConfig("dev");
+ expect(config.dev.server?.port).toEqual(8888);
});
it("should error if a bad dev.port config is provided", async () => {
@@ -1139,13 +1146,10 @@ describe("wrangler dev", () => {
});
fs.writeFileSync("index.js", `export default {};`);
// Mock `getPort()` to resolve to a completely different port.
- (getPort as Mock).mockResolvedValue(98765);
+ (getPort as Mock).mockResolvedValueOnce(98766);
- await runWrangler("dev --port=9999");
- expect((Dev as Mock).mock.calls[0][0].initialPort).toEqual(9999);
- expect(std.out).toMatchInlineSnapshot(`""`);
- expect(std.warn).toMatchInlineSnapshot(`""`);
- expect(std.err).toMatchInlineSnapshot(`""`);
+ const config = await runWranglerUntilConfig("dev --port=9999");
+ expect(config.dev.server?.port).toEqual(9999);
});
it("should use a different port to the default if it is in use", async () => {
@@ -1154,13 +1158,10 @@ describe("wrangler dev", () => {
});
fs.writeFileSync("index.js", `export default {};`);
// Mock `getPort()` to resolve to a completely different port.
- (getPort as Mock).mockResolvedValue(98765);
+ (getPort as Mock).mockResolvedValueOnce(98767);
- await runWrangler("dev");
- expect((Dev as Mock).mock.calls[0][0].initialPort).toEqual(98765);
- expect(std.out).toMatchInlineSnapshot(`""`);
- expect(std.warn).toMatchInlineSnapshot(`""`);
- expect(std.err).toMatchInlineSnapshot(`""`);
+ const config = await runWranglerUntilConfig("dev");
+ expect(config.dev.server?.port).toEqual(98767);
});
});
@@ -1186,47 +1187,47 @@ describe("wrangler dev", () => {
},
});
fs.writeFileSync("index.js", `export default {};`);
- await runWrangler("dev");
- expect((Dev as Mock).mock.calls[0][0].initialIp).toEqual(
+ const config = await runWranglerUntilConfig("dev");
+ expect(config.dev.server?.hostname).toEqual(
process.platform === "win32" ? "127.0.0.1" : "localhost"
);
expect(std.out).toMatchInlineSnapshot(`
- "Your worker has access to the following bindings:
- - Durable Objects:
- - NAME_1: CLASS_1
- - NAME_2: CLASS_2 (defined in SCRIPT_A)
- - NAME_3: CLASS_3
- - NAME_4: CLASS_4 (defined in SCRIPT_B)"
- `);
+ "Your worker has access to the following bindings:
+ - Durable Objects:
+ - NAME_1: CLASS_1
+ - NAME_2: CLASS_2 (defined in SCRIPT_A)
+ - NAME_3: CLASS_3
+ - NAME_4: CLASS_4 (defined in SCRIPT_B)
+ "
+ `);
expect(std.warn).toMatchInlineSnapshot(`
- "[33m▲ [43;33m[[43;30mWARNING[43;33m][0m [1mProcessing wrangler.toml configuration:[0m
+ "[33m▲ [43;33m[[43;30mWARNING[43;33m][0m [1mProcessing wrangler.toml configuration:[0m
- - In wrangler.toml, you have configured [durable_objects] exported by this Worker (CLASS_1,
- CLASS_3), but no [migrations] for them. This may not work as expected until you add a [migrations]
- section to your wrangler.toml. Add this configuration to your wrangler.toml:
+ - In wrangler.toml, you have configured [durable_objects] exported by this Worker (CLASS_1,
+ CLASS_3), but no [migrations] for them. This may not work as expected until you add a [migrations]
+ section to your wrangler.toml. Add this configuration to your wrangler.toml:
- \`\`\`
- [[migrations]]
- tag = \\"v1\\" # Should be unique for each entry
- new_classes = [\\"CLASS_1\\", \\"CLASS_3\\"]
- \`\`\`
+ \`\`\`
+ [[migrations]]
+ tag = \\"v1\\" # Should be unique for each entry
+ new_classes = [\\"CLASS_1\\", \\"CLASS_3\\"]
+ \`\`\`
- Refer to
- [4mhttps://developers.cloudflare.com/durable-objects/reference/durable-objects-migrations/[0m for more
- details.
+ Refer to
+ [4mhttps://developers.cloudflare.com/durable-objects/reference/durable-objects-migrations/[0m for more
+ details.
- [33m▲ [43;33m[[43;30mWARNING[43;33m][0m [1mWARNING: You have Durable Object bindings that are not defined locally in the worker being developed.[0m
+ [33m▲ [43;33m[[43;30mWARNING[43;33m][0m [1mWARNING: You have Durable Object bindings that are not defined locally in the worker being developed.[0m
- Be aware that changes to the data stored in these Durable Objects will be permanent and affect the
- live instances.
- Remote Durable Objects that are affected:
- - {\\"name\\":\\"NAME_2\\",\\"class_name\\":\\"CLASS_2\\",\\"script_name\\":\\"SCRIPT_A\\"}
- - {\\"name\\":\\"NAME_4\\",\\"class_name\\":\\"CLASS_4\\",\\"script_name\\":\\"SCRIPT_B\\"}
+ Be aware that changes to the data stored in these Durable Objects will be permanent and affect the
+ live instances.
+ Remote Durable Objects that are affected:
+ - {\\"name\\":\\"NAME_2\\",\\"class_name\\":\\"CLASS_2\\",\\"script_name\\":\\"SCRIPT_A\\"}
+ - {\\"name\\":\\"NAME_4\\",\\"class_name\\":\\"CLASS_4\\",\\"script_name\\":\\"SCRIPT_B\\"}
- "
- `);
- expect(std.err).toMatchInlineSnapshot(`""`);
+ "
+ `);
});
});
@@ -1258,9 +1259,17 @@ describe("wrangler dev", () => {
UNQUOTED: "original unquoted",
},
});
- await runWrangler("dev");
- const varBindings: Record = (Dev as Mock).mock
- .calls[0][0].bindings.vars;
+ const config = await runWranglerUntilConfig("dev");
+ const varBindings: Record<string, string> = Object.fromEntries(
+ Object.entries(config.bindings ?? {})
+ .filter(
+ (
+ binding
+ ): binding is [string, Extract<Binding, { type: "plain_text" }>] =>
+ binding[1].type === "plain_text"
+ )
+ .map(([b, v]) => [b, v.value])
+ );
expect(varBindings).toEqual({
VAR_1: "var #1 value",
@@ -1272,19 +1281,18 @@ describe("wrangler dev", () => {
UNQUOTED: "unquoted value", // Note that whitespace is trimmed
});
expect(std.out).toMatchInlineSnapshot(`
- "Using vars defined in .dev.vars
- Your worker has access to the following bindings:
- - Vars:
- - VAR_1: \\"(hidden)\\"
- - VAR_2: \\"original value 2\\"
- - VAR_3: \\"(hidden)\\"
- - VAR_MULTI_LINE_1: \\"(hidden)\\"
- - VAR_MULTI_LINE_2: \\"(hidden)\\"
- - EMPTY: \\"(hidden)\\"
- - UNQUOTED: \\"(hidden)\\""
- `);
- expect(std.warn).toMatchInlineSnapshot(`""`);
- expect(std.err).toMatchInlineSnapshot(`""`);
+ "Using vars defined in .dev.vars
+ Your worker has access to the following bindings:
+ - Vars:
+ - VAR_1: \\"(hidden)\\"
+ - VAR_2: \\"original value 2\\"
+ - VAR_3: \\"(hidden)\\"
+ - VAR_MULTI_LINE_1: \\"(hidden)\\"
+ - VAR_MULTI_LINE_2: \\"(hidden)\\"
+ - EMPTY: \\"(hidden)\\"
+ - UNQUOTED: \\"(hidden)\\"
+ "
+ `);
});
it("should prefer `.dev.vars.` if `--env set`", async () => {
@@ -1293,19 +1301,26 @@ describe("wrangler dev", () => {
fs.writeFileSync(".dev.vars.custom", "CUSTOM_VAR=custom");
writeWranglerToml({ main: "index.js", env: { custom: {} } });
- await runWrangler("dev --env custom");
- const varBindings: Record = (Dev as Mock).mock
- .calls[0][0].bindings.vars;
+ const config = await runWranglerUntilConfig("dev --env custom");
+ const varBindings: Record<string, string> = Object.fromEntries(
+ Object.entries(config.bindings ?? {})
+ .filter(
+ (
+ binding
+ ): binding is [string, Extract<Binding, { type: "plain_text" }>] =>
+ binding[1].type === "plain_text"
+ )
+ .map(([b, v]) => [b, v.value])
+ );
expect(varBindings).toEqual({ CUSTOM_VAR: "custom" });
expect(std.out).toMatchInlineSnapshot(`
- "Using vars defined in .dev.vars.custom
- Your worker has access to the following bindings:
- - Vars:
- - CUSTOM_VAR: \\"(hidden)\\""
- `);
- expect(std.warn).toMatchInlineSnapshot(`""`);
- expect(std.err).toMatchInlineSnapshot(`""`);
+ "Using vars defined in .dev.vars.custom
+ Your worker has access to the following bindings:
+ - Vars:
+ - CUSTOM_VAR: \\"(hidden)\\"
+ "
+ `);
});
});
@@ -1370,7 +1385,7 @@ describe("wrangler dev", () => {
--test-scheduled Test scheduled events by visiting /__scheduled in browser [boolean] [default: false]
--log-level Specify logging level [choices: \\"debug\\", \\"info\\", \\"log\\", \\"warn\\", \\"error\\", \\"none\\"] [default: \\"log\\"]
--show-interactive-dev-session Show interactive dev session (defaults to true if the terminal supports interactivity) [boolean]
- --experimental-dev-env, --x-dev-env Use the experimental DevEnv instantiation (unified across wrangler dev and unstable_dev) [boolean] [default: false]
+ --experimental-dev-env, --x-dev-env Use the experimental DevEnv instantiation (unified across wrangler dev and unstable_dev) [boolean] [default: true]
--experimental-registry, --x-registry Use the experimental file based dev registry for multi-worker development [boolean] [default: false]",
"warn": "",
}
@@ -1433,20 +1448,34 @@ describe("wrangler dev", () => {
);
});
- it("should indicate whether Sites is being used", async () => {
- writeWranglerToml({
- main: "index.js",
- });
- fs.writeFileSync("index.js", `export default {};`);
+ describe("should indicate whether Sites is being used", () => {
+ it("no use", async () => {
+ writeWranglerToml({
+ main: "index.js",
+ });
+ fs.writeFileSync("index.js", `export default {};`);
- await runWrangler("dev");
- expect((Dev as Mock).mock.calls[0][0].isWorkersSite).toEqual(false);
+ const config = await runWranglerUntilConfig("dev");
+ expect(config.legacy.site).toBeFalsy();
+ });
+ it("--site arg", async () => {
+ writeWranglerToml({
+ main: "index.js",
+ });
+ fs.writeFileSync("index.js", `export default {};`);
- await runWrangler("dev --site abc");
- expect((Dev as Mock).mock.calls[1][0].isWorkersSite).toEqual(true);
+ const config = await runWranglerUntilConfig("dev --site abc");
+ expect(config.legacy.site).toBeTruthy();
+ });
+ it("--legacy-assets arg", async () => {
+ writeWranglerToml({
+ main: "index.js",
+ });
+ fs.writeFileSync("index.js", `export default {};`);
- await runWrangler("dev --legacy-assets abc");
- expect((Dev as Mock).mock.calls[2][0].isWorkersSite).toEqual(false);
+ const config = await runWranglerUntilConfig("dev --legacy-assets abc");
+ expect(config.legacy.site).toBeFalsy();
+ });
});
it("should warn if --legacy-assets is used", async () => {
@@ -1455,7 +1484,7 @@ describe("wrangler dev", () => {
});
fs.writeFileSync("index.js", `export default {};`);
- await runWrangler('dev --legacy-assets "./assets"');
+ await runWranglerUntilConfig('dev --legacy-assets "./assets"');
expect(std.warn).toMatchInlineSnapshot(`
"[33m▲ [43;33m[[43;30mWARNING[43;33m][0m [1mThe --legacy-assets argument has been deprecated. Please use --assets instead.[0m
@@ -1474,7 +1503,7 @@ describe("wrangler dev", () => {
fs.writeFileSync("index.js", `export default {};`);
- await runWrangler("dev");
+ await runWranglerUntilConfig("dev");
expect(std.warn).toMatchInlineSnapshot(`
"[33m▲ [43;33m[[43;30mWARNING[43;33m][0m [1mProcessing wrangler.toml configuration:[0m
@@ -1493,7 +1522,7 @@ describe("wrangler dev", () => {
assets: { directory: "assets" },
});
- await runWrangler("dev");
+ await runWranglerUntilConfig("dev");
});
it("should error if config.site and config.assets are used together", async () => {
@@ -1506,6 +1535,8 @@ describe("wrangler dev", () => {
});
fs.writeFileSync("index.js", `export default {};`);
fs.mkdirSync("assets");
+ fs.mkdirSync("xyz");
+
await expect(
runWrangler("dev")
).rejects.toThrowErrorMatchingInlineSnapshot(
@@ -1525,6 +1556,8 @@ describe("wrangler dev", () => {
});
fs.writeFileSync("index.js", `export default {};`);
fs.mkdirSync("assets");
+ fs.mkdirSync("xyz");
+
await expect(
runWrangler("dev --assets assets")
).rejects.toThrowErrorMatchingInlineSnapshot(
@@ -1549,6 +1582,8 @@ describe("wrangler dev", () => {
});
fs.writeFileSync("index.js", `export default {};`);
fs.mkdirSync("assets");
+ fs.mkdirSync("xyz");
+
await expect(
runWrangler("dev")
).rejects.toThrowErrorMatchingInlineSnapshot(
@@ -1585,6 +1620,7 @@ describe("wrangler dev", () => {
});
fs.writeFileSync("index.js", `export default {};`);
fs.mkdirSync("assets");
+ fs.mkdirSync("xyz");
await expect(
runWrangler("dev --assets assets")
).rejects.toThrowErrorMatchingInlineSnapshot(
@@ -1618,6 +1654,7 @@ describe("wrangler dev", () => {
writeWranglerToml({
assets: { directory: "assets", binding: "ASSETS" },
});
+ fs.mkdirSync("assets");
await expect(
runWrangler("dev")
).rejects.toThrowErrorMatchingInlineSnapshot(
@@ -1655,31 +1692,6 @@ describe("wrangler dev", () => {
);
});
- it("should error if --assets and config.tail_consumers are used together", async () => {
- writeWranglerToml({
- tail_consumers: [{ service: "" }],
- });
- fs.mkdirSync("public");
- await expect(
- runWrangler("dev --assets public")
- ).rejects.toThrowErrorMatchingInlineSnapshot(
- `[Error: Cannot use assets and tail consumers in the same Worker. Tail Workers are not yet supported for Workers with assets.]`
- );
- });
-
- it("should error if config.assets and config.tail_consumers are used together", async () => {
- writeWranglerToml({
- assets: { directory: "./public" },
- tail_consumers: [{ service: "" }],
- });
- fs.mkdirSync("public");
- await expect(
- runWrangler("dev")
- ).rejects.toThrowErrorMatchingInlineSnapshot(
- `[Error: Cannot use assets and tail consumers in the same Worker. Tail Workers are not yet supported for Workers with assets.]`
- );
- });
-
it("should error if --assets and --remote are used together", async () => {
fs.mkdirSync("public");
await expect(
@@ -1705,114 +1717,47 @@ describe("wrangler dev", () => {
describe("--inspect", () => {
it("should warn if --inspect is used", async () => {
fs.writeFileSync("index.js", `export default {};`);
- await runWrangler("dev index.js --inspect");
- expect(std).toMatchInlineSnapshot(`
- Object {
- "debug": "",
- "err": "",
- "info": "",
- "out": "",
- "warn": "[33m▲ [43;33m[[43;30mWARNING[43;33m][0m [1mPassing --inspect is unnecessary, now you can always connect to devtools.[0m
-
- ",
- }
- `);
- });
-
- it("should default to true, without a warning", async () => {
- fs.writeFileSync("index.js", `export default {};`);
- await runWrangler("dev index.js");
- expect((Dev as Mock).mock.calls[0][0].inspect).toEqual(true);
- expect(std).toMatchInlineSnapshot(`
- Object {
- "debug": "",
- "err": "",
- "info": "",
- "out": "",
- "warn": "",
- }
- `);
- });
-
- it("should pass true, with a warning", async () => {
- fs.writeFileSync("index.js", `export default {};`);
- await runWrangler("dev index.js --inspect");
- expect((Dev as Mock).mock.calls[0][0].inspect).toEqual(true);
- expect(std).toMatchInlineSnapshot(`
- Object {
- "debug": "",
- "err": "",
- "info": "",
- "out": "",
- "warn": "[33m▲ [43;33m[[43;30mWARNING[43;33m][0m [1mPassing --inspect is unnecessary, now you can always connect to devtools.[0m
-
- ",
- }
- `);
- });
+ await runWranglerUntilConfig("dev index.js --inspect");
+ expect(std.warn).toMatchInlineSnapshot(`
+ "[33m▲ [43;33m[[43;30mWARNING[43;33m][0m [1mPassing --inspect is unnecessary, now you can always connect to devtools.[0m
- it("should pass false, without a warning", async () => {
- fs.writeFileSync("index.js", `export default {};`);
- await runWrangler("dev index.js --inspect false");
- expect((Dev as Mock).mock.calls[0][0].inspect).toEqual(false);
- expect(std).toMatchInlineSnapshot(`
- Object {
- "debug": "",
- "err": "",
- "info": "",
- "out": "",
- "warn": "",
- }
- `);
+ "
+ `);
});
});
describe("--log-level", () => {
it("should not output warnings with log-level 'none'", async () => {
fs.writeFileSync("index.js", `export default {};`);
- await runWrangler("dev index.js --inspect --log-level none");
- expect(std).toMatchInlineSnapshot(`
- Object {
- "debug": "",
- "err": "",
- "info": "",
- "out": "",
- "warn": "",
- }
- `);
+ await runWranglerUntilConfig("dev index.js --inspect --log-level none");
+ expect(std.warn).toMatchInlineSnapshot(`""`);
});
it("should output warnings with log-level 'warn'", async () => {
fs.writeFileSync("index.js", `export default {};`);
- await runWrangler("dev index.js --inspect --log-level warn");
- expect(std).toMatchInlineSnapshot(`
- Object {
- "debug": "",
- "err": "",
- "info": "",
- "out": "",
- "warn": "[33m▲ [43;33m[[43;30mWARNING[43;33m][0m [1mPassing --inspect is unnecessary, now you can always connect to devtools.[0m
-
- ",
- }
- `);
+ await runWranglerUntilConfig("dev index.js --inspect --log-level warn");
+ expect(std.warn).toMatchInlineSnapshot(`
+ "[33m▲ [43;33m[[43;30mWARNING[43;33m][0m [1mPassing --inspect is unnecessary, now you can always connect to devtools.[0m
+
+ "
+ `);
});
});
describe("--show-interactive-dev-session", () => {
it("should show interactive dev session with --show-interactive-dev-session", async () => {
fs.writeFileSync("index.js", `export default { }`);
- await runWrangler("dev index.js --show-interactive-dev-session");
- expect(
- (Dev as Mock).mock.calls[0][0].showInteractiveDevSession
- ).toBeTruthy();
+ await runWranglerUntilConfig(
+ "dev index.js --show-interactive-dev-session"
+ );
+ expect(vi.mocked(registerDevHotKeys).mock.calls.length).toBe(1);
});
it("should not show interactive dev session with --show-interactive-dev-session=false", async () => {
fs.writeFileSync("index.js", `export default { }`);
- await runWrangler("dev index.js --show-interactive-dev-session=false");
- expect(
- (Dev as Mock).mock.calls[0][0].showInteractiveDevSession
- ).toBeFalsy();
+ await runWranglerUntilConfig(
+ "dev index.js --show-interactive-dev-session=false"
+ );
+ expect(vi.mocked(registerDevHotKeys).mock.calls.length).toBe(0);
});
});
@@ -1825,19 +1770,19 @@ describe("wrangler dev", () => {
],
});
fs.writeFileSync("index.js", `export default {};`);
- await runWrangler("dev index.js");
+ await runWranglerUntilConfig("dev index.js");
expect(std.out).toMatchInlineSnapshot(`
- "Your worker has access to the following bindings:
- - Services:
- - WorkerA: A
- - WorkerB: B - staging"
- `);
+ "Your worker has access to the following bindings:
+ - Services:
+ - WorkerA: A
+ - WorkerB: B - staging
+ "
+ `);
expect(std.warn).toMatchInlineSnapshot(`
"[33m▲ [43;33m[[43;30mWARNING[43;33m][0m [1mThis worker is bound to live services: WorkerA (A), WorkerB (B@staging)[0m
"
`);
- expect(std.err).toMatchInlineSnapshot(`""`);
});
});
@@ -1850,21 +1795,19 @@ describe("wrangler dev", () => {
],
});
fs.writeFileSync("index.js", `export default {};`);
- await runWrangler("dev index.js");
- expect(std).toMatchInlineSnapshot(`
- Object {
- "debug": "",
- "err": "",
- "info": "",
- "out": "Your worker has access to the following bindings:
- - Services:
- - WorkerA: A
- - WorkerB: B - staging",
- "warn": "[33m▲ [43;33m[[43;30mWARNING[43;33m][0m [1mThis worker is bound to live services: WorkerA (A), WorkerB (B@staging)[0m
-
- ",
- }
- `);
+ await runWranglerUntilConfig("dev index.js");
+ expect(std.out).toMatchInlineSnapshot(`
+ "Your worker has access to the following bindings:
+ - Services:
+ - WorkerA: A
+ - WorkerB: B - staging
+ "
+ `);
+ expect(std.warn).toMatchInlineSnapshot(`
+ "[33m▲ [43;33m[[43;30mWARNING[43;33m][0m [1mThis worker is bound to live services: WorkerA (A), WorkerB (B@staging)[0m
+
+ "
+ `);
});
it("should mask vars that were overriden in .dev.vars", async () => {
@@ -1882,21 +1825,16 @@ describe("wrangler dev", () => {
`
);
fs.writeFileSync("index.js", `export default {};`);
- await runWrangler("dev index.js");
- expect(std).toMatchInlineSnapshot(`
- Object {
- "debug": "",
- "err": "",
- "info": "",
- "out": "Using vars defined in .dev.vars
- Your worker has access to the following bindings:
- - Vars:
- - variable: 123
- - overriden: \\"(hidden)\\"
- - SECRET: \\"(hidden)\\"",
- "warn": "",
- }
- `);
+ await runWranglerUntilConfig("dev index.js");
+ expect(std.out).toMatchInlineSnapshot(`
+ "Using vars defined in .dev.vars
+ Your worker has access to the following bindings:
+ - Vars:
+ - variable: 123
+ - overriden: \\"(hidden)\\"
+ - SECRET: \\"(hidden)\\"
+ "
+ `);
});
});
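The rewritten tests above assert on the resolved config emitted by the dev flow rather than on props passed to the React `Dev` component. A minimal sketch of what a `runWranglerUntilConfig`-style helper could look like — assuming a DevEnv-like emitter whose ConfigController publishes a `configUpdate` event carrying the resolved options (the helper and event shape here are illustrative, not the repository's actual implementation):

import { once } from "node:events";
import type { EventEmitter } from "node:events";

// Assumed event shape, mirroring the emitConfigUpdateEvent call later in this diff.
interface ConfigUpdateEvent {
	type: "configUpdate";
	config: unknown;
}

// Run a wrangler command and resolve with the first resolved config,
// rather than waiting for the long-lived dev server to exit.
async function runUntilConfig(
	devEnv: EventEmitter,
	run: () => Promise<unknown>
): Promise<ConfigUpdateEvent["config"]> {
	// Kick off the command but do not await it; `dev` never "finishes".
	void run().catch(() => {
		// Failures are surfaced elsewhere by the test harness.
	});
	// Resolve as soon as the config controller publishes the resolved options.
	const [event] = (await once(devEnv, "configUpdate")) as [ConfigUpdateEvent];
	return event.config;
}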
diff --git a/packages/wrangler/src/api/startDevWorker/ConfigController.ts b/packages/wrangler/src/api/startDevWorker/ConfigController.ts
index 2ade248ec5dc..4b321a593365 100644
--- a/packages/wrangler/src/api/startDevWorker/ConfigController.ts
+++ b/packages/wrangler/src/api/startDevWorker/ConfigController.ts
@@ -9,7 +9,7 @@ import {
getScriptName,
isLegacyEnv,
} from "../..";
-import { processAssetsArg } from "../../assets";
+import { processAssetsArg, validateAssetsArgsAndConfig } from "../../assets";
import { printBindings, readConfig } from "../../config";
import { getEntry } from "../../deployment-bundle/entry";
import {
@@ -25,7 +25,9 @@ import { UserError } from "../../errors";
import { logger } from "../../logger";
import { getAccountId, requireApiToken } from "../../user";
import { memoizeGetPort } from "../../utils/memoizeGetPort";
+import { getZoneIdForPreview } from "../../zones";
import { Controller } from "./BaseController";
+import { castErrorCause } from "./events";
import {
convertCfWorkerInitBindingstoBindings,
extractBindingsOfType,
@@ -52,6 +54,15 @@ async function resolveDevConfig(
config: Config,
input: StartDevWorkerInput
): Promise<StartDevWorkerOptions["dev"]> {
+ const auth =
+ input.dev?.auth ??
+ (async () => {
+ return {
+ accountId: await getAccountId(),
+ apiToken: requireApiToken(),
+ };
+ });
+
const localPersistencePath = getLocalPersistencePath(
input.dev?.persist,
config.configPath
@@ -63,24 +74,29 @@ async function resolveDevConfig(
routes: input.triggers?.filter(
(t): t is Extract<Trigger, { type: "route" }> => t.type === "route"
),
- assets: input?.assets?.directory,
+ assets: input?.assets,
},
config
);
+ // TODO: Remove this hack once the React flow is removed
+ // This function throws if the zone ID can't be found given the provided host and routes
+ // However, it's called as part of initialising a preview session, which is nested deep within
+ // React/Ink and useEffect()s in `--no-x-dev-env` mode which swallow the error and turn it into a logged warning.
+ // Because it's a non-recoverable user error, we want it to exit the Wrangler process early to allow the user to fix it.
+ // Calling it here forces the error to be thrown where it will correctly exit the Wrangler process.
+ if (input.dev?.remote) {
+ const { accountId } = await unwrapHook(auth);
+ assert(accountId, "Account ID must be provided for remote dev");
+ await getZoneIdForPreview({ host, routes, accountId });
+ }
+
const initialIp = input.dev?.server?.hostname ?? config.dev.ip;
const initialIpListenCheck = initialIp === "*" ? "0.0.0.0" : initialIp;
return {
- auth:
- input.dev?.auth ??
- (async () => {
- return {
- accountId: await getAccountId(),
- apiToken: requireApiToken(),
- };
- }),
+ auth,
remote: input.dev?.remote,
server: {
hostname: input.dev?.server?.hostname || config.dev.ip,
@@ -89,7 +105,7 @@ async function resolveDevConfig(
config.dev.port ??
(await getLocalPort(initialIpListenCheck)),
secure:
- input.dev?.server?.secure || config.dev.local_protocol === "https",
+ input.dev?.server?.secure ?? config.dev.local_protocol === "https",
httpsKeyPath: input.dev?.server?.httpsKeyPath,
httpsCertPath: input.dev?.server?.httpsCertPath,
},
@@ -101,7 +117,7 @@ async function resolveDevConfig(
},
origin: {
secure:
- input.dev?.origin?.secure || config.dev.upstream_protocol === "https",
+ input.dev?.origin?.secure ?? config.dev.upstream_protocol === "https",
hostname: host ?? getInferredHost(routes),
},
liveReload: input.dev?.liveReload || false,
@@ -165,7 +181,7 @@ async function resolveTriggers(
routes: input.triggers?.filter(
(t): t is Extract<Trigger, { type: "route" }> => t.type === "route"
),
- assets: input?.assets?.directory,
+ assets: input?.assets,
},
config
);
@@ -212,7 +228,7 @@ async function resolveConfig(
// getEntry only needs to know if assets was specified.
// The actual value is not relevant here, which is why not passing
// the entire Assets object is fine.
- assets: input?.assets?.directory,
+ assets: input?.assets,
},
config,
"dev"
@@ -224,7 +240,7 @@ async function resolveConfig(
const assetsOptions = processAssetsArg(
{
- assets: input?.assets?.directory,
+ assets: input?.assets,
script: input.entrypoint,
},
config
@@ -280,10 +296,18 @@ async function resolveConfig(
if (resolved.legacy.legacyAssets && resolved.legacy.site) {
throw new UserError(
- "Cannot use Assets and Workers Sites in the same Worker."
+ "Cannot use legacy assets and Workers Sites in the same Worker."
+ );
+ }
+
+ if (resolved.assets && resolved.dev.remote) {
+ throw new UserError(
+ "Cannot use assets in remote mode. Workers with assets are only supported in local mode. Please use `wrangler dev`."
);
}
+ validateAssetsArgsAndConfig(resolved);
+
const services = extractBindingsOfType("service", resolved.bindings);
if (services && services.length > 0) {
logger.warn(
@@ -372,8 +396,8 @@ export class ConfigController extends Controller {
});
}
}
- public set(input: StartDevWorkerInput) {
- return this.#updateConfig(input);
+ public set(input: StartDevWorkerInput, throwErrors = false) {
+ return this.#updateConfig(input, throwErrors);
}
public patch(input: Partial) {
assert(
@@ -389,12 +413,11 @@ export class ConfigController extends Controller {
return this.#updateConfig(config);
}
- async #updateConfig(input: StartDevWorkerInput) {
+ async #updateConfig(input: StartDevWorkerInput, throwErrors = false) {
this.#abortController?.abort();
this.#abortController = new AbortController();
const signal = this.#abortController.signal;
this.latestInput = input;
-
try {
const fileConfig = readConfig(input.config, {
env: input.env,
@@ -415,13 +438,12 @@ export class ConfigController extends Controller {
: "http",
});
- void this.#ensureWatchingConfig(fileConfig.configPath);
+ if (typeof vitest === "undefined") {
+ void this.#ensureWatchingConfig(fileConfig.configPath);
+ }
- const assets = processAssetsArg(
- { assets: input?.assets?.directory },
- fileConfig
- );
- if (assets) {
+ const assets = processAssetsArg({ assets: input?.assets }, fileConfig);
+ if (assets && typeof vitest === "undefined") {
void this.#ensureWatchingAssets(assets.directory);
}
@@ -431,9 +453,20 @@ export class ConfigController extends Controller {
}
this.latestConfig = resolvedConfig;
this.emitConfigUpdateEvent(resolvedConfig);
+
return this.latestConfig;
} catch (err) {
- logger.error(err);
+ if (throwErrors) {
+ throw err;
+ } else {
+ this.emitErrorEvent({
+ type: "error",
+ reason: "Error resolving config",
+ cause: castErrorCause(err),
+ source: "ConfigController",
+ data: undefined,
+ });
+ }
}
}
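A note on the `||` → `??` change for `server.secure` and `origin.secure` above: with `||`, an explicit `--local-protocol=http` (i.e. `secure: false`) from the CLI is discarded whenever the config sets `local_protocol = "https"`, because `false || true` evaluates to `true`. Nullish coalescing only falls back when the flag was not provided at all, which is what the `--local-protocol=http` test earlier in this diff expects. A small self-contained illustration (names are illustrative):

function resolveSecure(
	argSecure: boolean | undefined,
	configProtocol: "http" | "https"
): boolean {
	// `??` falls back only when the CLI flag is absent (undefined),
	// so an explicit `false` from the user always wins.
	return argSecure ?? configProtocol === "https";
}

// CLI said http explicitly, config says https: the CLI wins.
console.assert(resolveSecure(false, "https") === false);
// No CLI flag at all: fall back to the config value.
console.assert(resolveSecure(undefined, "https") === true);
// The old `||` form ignores the explicit false and falls back to the config:
console.assert((false || "https" === "https") === true);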
diff --git a/packages/wrangler/src/api/startDevWorker/DevEnv.ts b/packages/wrangler/src/api/startDevWorker/DevEnv.ts
index 66463bd1471b..500b3117f112 100644
--- a/packages/wrangler/src/api/startDevWorker/DevEnv.ts
+++ b/packages/wrangler/src/api/startDevWorker/DevEnv.ts
@@ -1,6 +1,7 @@
import assert from "node:assert";
import { EventEmitter } from "node:events";
import { logger } from "../../logger";
+import { formatMessage, ParseError } from "../../parse";
import { BundlerController } from "./BundlerController";
import { ConfigController } from "./ConfigController";
import { LocalRuntimeController } from "./LocalRuntimeController";
@@ -46,12 +47,7 @@ export class DevEnv extends EventEmitter {
});
this.on("error", (event: ErrorEvent) => {
- // TODO: when we're are comfortable with StartDevWorker/DevEnv stability,
- // we can remove this handler and let the user handle the unknowable errors
- // or let the process crash. For now, log them to stderr
- // so we can identify knowable vs unknowable error candidates
-
- logger.error(`Error in ${event.source}: ${event.reason}\n`, event.cause);
+ logger.debug(`Error in ${event.source}: ${event.reason}\n`, event.cause);
logger.debug("=> Error contextual data:", event.data);
});
@@ -145,6 +141,14 @@ export class DevEnv extends EventEmitter {
logger.debug(`Error in ${ev.source}: ${ev.reason}\n`, ev.cause);
logger.debug("=> Error contextual data:", ev.data);
}
+ // Parse errors are recoverable by changing your `wrangler.toml` and saving
+ // All other errors from the ConfigController are non-recoverable
+ else if (
+ ev.source === "ConfigController" &&
+ ev.cause instanceof ParseError
+ ) {
+ logger.log(formatMessage(ev.cause));
+ }
// if other knowable + recoverable errors occur, handle them here
else {
// otherwise, re-emit the unknowable errors to the top-level
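The error handling above splits ConfigController failures into two classes: a `ParseError` (a mistake in `wrangler.toml` the user can fix and re-save) is pretty-printed while the session keeps running, and anything else is re-emitted as an unknowable, top-level error. A rough sketch of that routing, using a plain emitter and a stand-in ParseError rather than wrangler's own classes (the `source`/`cause` fields follow the event shape in the diff):

import { EventEmitter } from "node:events";

class ParseError extends Error {} // stand-in for wrangler's ParseError

interface ErrorEvent {
	type: "error";
	source: string;
	reason: string;
	cause: Error;
}

const bus = new EventEmitter();

bus.on("error", (ev: ErrorEvent) => {
	if (ev.source === "ConfigController" && ev.cause instanceof ParseError) {
		// Recoverable: print the formatted message and keep watching;
		// the config watcher re-resolves once the file is fixed.
		console.log(`[config] ${ev.cause.message}`);
		return;
	}
	// Anything else is non-recoverable: surface it to the top level.
	throw ev.cause;
});

// Example: a bad wrangler.toml value produces a recoverable event.
bus.emit("error", {
	type: "error",
	source: "ConfigController",
	reason: "Error resolving config",
	cause: new ParseError("Expected a string for `main`"),
} satisfies ErrorEvent);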
diff --git a/packages/wrangler/src/api/startDevWorker/RemoteRuntimeController.ts b/packages/wrangler/src/api/startDevWorker/RemoteRuntimeController.ts
index a4509136bcff..7ab4794ad80e 100644
--- a/packages/wrangler/src/api/startDevWorker/RemoteRuntimeController.ts
+++ b/packages/wrangler/src/api/startDevWorker/RemoteRuntimeController.ts
@@ -86,6 +86,7 @@ export class RemoteRuntimeController extends RuntimeController {
legacyEnv: props.legacyEnv,
host: props.host,
routes: props.routes,
+ sendMetrics: props.sendMetrics,
}
);
if (!this.#session) {
@@ -155,6 +156,7 @@ export class RemoteRuntimeController extends RuntimeController {
legacyEnv: !config.legacy?.enableServiceEnvironments, // wrangler environment -- just pass it through for now
host: config.dev.origin?.hostname,
routes,
+ sendMetrics: config.sendMetrics,
});
const bindings = (
@@ -183,6 +185,8 @@ export class RemoteRuntimeController extends RuntimeController {
compatibilityDate: config.compatibilityDate,
compatibilityFlags: config.compatibilityFlags,
routes,
+ host: config.dev.origin?.hostname,
+ sendMetrics: config.sendMetrics,
});
// If we received a new `bundleComplete` event before we were able to
diff --git a/packages/wrangler/src/api/startDevWorker/types.ts b/packages/wrangler/src/api/startDevWorker/types.ts
index db760a95627f..71ec5526e8f3 100644
--- a/packages/wrangler/src/api/startDevWorker/types.ts
+++ b/packages/wrangler/src/api/startDevWorker/types.ts
@@ -165,10 +165,10 @@ export interface StartDevWorkerInput {
enableServiceEnvironments?: boolean;
};
unsafe?: Omit;
- assets?: Omit;
+ assets?: string;
}
-export type StartDevWorkerOptions = StartDevWorkerInput & {
+export type StartDevWorkerOptions = Omit<StartDevWorkerInput, "assets"> & {
/** A worker's directory. Usually where the wrangler.toml file is located */
directory: string;
build: StartDevWorkerInput["build"] & {
@@ -189,6 +189,7 @@ export type StartDevWorkerOptions = StartDevWorkerInput & {
persist: string;
};
entrypoint: string;
+ assets?: AssetsOptions;
};
export type HookValues = string | number | boolean | object | undefined | null;
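The type change above narrows the public input so callers pass a plain assets directory string, while the fully resolved options carry the processed `AssetsOptions` shape. A reduced sketch of that input/options split (the `AssetsOptions` fields shown are assumptions for illustration, not the full real type):

// What a caller provides: just where the static files live.
interface DevWorkerInput {
	assets?: string;
}

// What the rest of the dev pipeline consumes after processing.
interface AssetsOptions {
	directory: string;
	binding?: string;
}

interface DevWorkerOptions extends Omit<DevWorkerInput, "assets"> {
	assets?: AssetsOptions;
}

// A toy version of what a processAssetsArg-style step does with the raw input.
function resolveAssets(input: DevWorkerInput): DevWorkerOptions {
	return {
		assets: input.assets ? { directory: input.assets } : undefined,
	};
}

const resolved = resolveAssets({ assets: "./public" });
console.assert(resolved.assets?.directory === "./public");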
diff --git a/packages/wrangler/src/assets.ts b/packages/wrangler/src/assets.ts
index 09f3b9a58e15..370b2b524936 100644
--- a/packages/wrangler/src/assets.ts
+++ b/packages/wrangler/src/assets.ts
@@ -19,8 +19,10 @@ import { logger, LOGGER_LEVELS } from "./logger";
import { hashFile } from "./pages/hash";
import { isJwtExpired } from "./pages/upload";
import { APIError } from "./parse";
+import { getBasePath } from "./paths";
import { dedent } from "./utils/dedent";
import { createPatternMatcher } from "./utils/filesystem";
+import type { StartDevWorkerOptions } from "./api";
import type { Config } from "./config";
import type { Assets } from "./config/environment";
import type { DeployArgs } from "./deploy";
@@ -361,19 +363,31 @@ export function processAssetsArg(
* - an asset binding cannot be used in a Worker that only has assets
* and throw an appropriate error if invalid.
*/
+export function validateAssetsArgsAndConfig(
+ args: Pick<StartDevWorkerOptions, "legacy" | "assets" | "entrypoint">
+): void;
export function validateAssetsArgsAndConfig(
args:
| Pick
| Pick,
config: Config
-) {
+): void;
+export function validateAssetsArgsAndConfig(
+ args:
+ | Pick
+ | Pick
+ | Pick,
+ config?: Config
+): void {
/*
* - `config.legacy_assets` conflates `legacy_assets` and `assets`
* - `args.legacyAssets` conflates `legacy-assets` and `assets`
*/
if (
- (args.assets || config.assets) &&
- (args.legacyAssets || config.legacy_assets)
+ "legacy" in args
+ ? args.assets && args.legacy.legacyAssets
+ : (args.assets || config?.assets) &&
+ (args?.legacyAssets || config?.legacy_assets)
) {
throw new UserError(
"Cannot use assets and legacy assets in the same Worker.\n" +
@@ -381,20 +395,34 @@ export function validateAssetsArgsAndConfig(
);
}
- if ((args.assets || config.assets) && (args.site || config.site)) {
+ if (
+ "legacy" in args
+ ? args.assets && args.legacy.site
+ : (args.assets || config?.assets) && (args.site || config?.site)
+ ) {
throw new UserError(
"Cannot use assets and Workers Sites in the same Worker.\n" +
"Please remove either the `site` or `assets` field from your configuration file."
);
}
- if ((args.assets || config.assets) && config.tail_consumers?.length) {
+ // tail_consumers don't exist in dev, so ignore SDW here
+ if ((args.assets || config?.assets) && config?.tail_consumers?.length) {
throw new UserError(
"Cannot use assets and tail consumers in the same Worker. Tail Workers are not yet supported for Workers with assets."
);
}
- if (!(args.script || config.main) && config.assets?.binding) {
+ const noOpEntrypoint = path.resolve(
+ getBasePath(),
+ "templates/no-op-worker.js"
+ );
+
+ if (
+ "legacy" in args
+ ? args.entrypoint === noOpEntrypoint && args.assets?.binding
+ : !(args.script || config?.main) && config?.assets?.binding
+ ) {
throw new UserError(
"Cannot use assets with a binding in an assets-only Worker.\n" +
"Please remove the asset binding from your configuration file, or provide a Worker script in your configuration file (`main`)."
diff --git a/packages/wrangler/src/dev-registry.ts b/packages/wrangler/src/dev-registry.ts
index 58a3b0f4f023..895a05baabed 100644
--- a/packages/wrangler/src/dev-registry.ts
+++ b/packages/wrangler/src/dev-registry.ts
@@ -11,6 +11,7 @@ import {
import { createServer } from "node:http";
import net from "node:net";
import path from "node:path";
+import * as util from "node:util";
import bodyParser from "body-parser";
import { watch } from "chokidar";
import express from "express";
@@ -365,3 +366,68 @@ export async function getBoundRegisteredWorkers(
);
return filteredWorkers;
}
+
+/**
+ * A react-free version of the above hook
+ */
+export async function devRegistry(
+ cb: (workers: WorkerRegistry | undefined) => void
+): Promise<(name?: string) => Promise<void>> {
+ let previousRegistry: WorkerRegistry | undefined;
+
+ let interval: ReturnType<typeof setInterval>;
+
+ let hasFailedToFetch = false;
+
+ // The new file based registry supports a much more performant listener callback
+ if (getFlag("FILE_BASED_REGISTRY")) {
+ await startWorkerRegistry(async (registry) => {
+ if (!util.isDeepStrictEqual(registry, previousRegistry)) {
+ previousRegistry = registry;
+ cb(registry);
+ }
+ });
+ } else {
+ try {
+ await startWorkerRegistry();
+ } catch (err) {
+ logger.error("failed to start worker registry", err);
+ }
+ // Else we need to fall back to a polling based approach
+ interval = setInterval(async () => {
+ try {
+ const registry = await getRegisteredWorkers();
+ if (!util.isDeepStrictEqual(registry, previousRegistry)) {
+ previousRegistry = registry;
+ cb(registry);
+ }
+ } catch (err) {
+ if (!hasFailedToFetch) {
+ hasFailedToFetch = true;
+ logger.warn("Failed to get worker definitions", err);
+ }
+ }
+ }, 300);
+ }
+
+ return async (name) => {
+ interval && clearInterval(interval);
+ try {
+ const [unregisterResult, stopRegistryResult] = await Promise.allSettled([
+ name ? unregisterWorker(name) : Promise.resolve(),
+ stopWorkerRegistry(),
+ ]);
+ if (unregisterResult.status === "rejected") {
+ logger.error("Failed to unregister worker", unregisterResult.reason);
+ }
+ if (stopRegistryResult.status === "rejected") {
+ logger.error(
+ "Failed to stop worker registry",
+ stopRegistryResult.reason
+ );
+ }
+ } catch (err) {
+ logger.error("Failed to cleanup dev registry", err);
+ }
+ };
+}
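A hedged usage sketch for the new `devRegistry` helper: subscribe once, react to registry changes, and call the returned teardown function (optionally passing the worker's name to unregister) when the dev session ends. The logging and the worker name below are placeholders:

import { devRegistry } from "./dev-registry"; // path as in this diff, inside the wrangler package

async function main() {
	const stopRegistry = await devRegistry((workers) => {
		// Fires whenever the set of locally registered workers changes.
		console.log("dev registry update:", Object.keys(workers ?? {}));
	});

	// ...later, on shutdown (e.g. SIGINT): unregister this worker and stop the registry.
	await stopRegistry("my-worker"); // "my-worker" is a placeholder service name
}

void main();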
diff --git a/packages/wrangler/src/dev.tsx b/packages/wrangler/src/dev.tsx
index 130b516dd271..c420f61e4544 100644
--- a/packages/wrangler/src/dev.tsx
+++ b/packages/wrangler/src/dev.tsx
@@ -15,8 +15,8 @@ import { findWranglerToml, printBindings, readConfig } from "./config";
import { validateRoutes } from "./deploy/deploy";
import { getEntry } from "./deployment-bundle/entry";
import { validateNodeCompatMode } from "./deployment-bundle/node-compat";
-import { getBoundRegisteredWorkers } from "./dev-registry";
-import Dev, { devRegistry } from "./dev/dev";
+import { devRegistry, getBoundRegisteredWorkers } from "./dev-registry";
+import Dev from "./dev/dev";
import { getVarsForDev } from "./dev/dev-vars";
import { getLocalPersistencePath } from "./dev/get-local-persistence-path";
import registerDevHotKeys from "./dev/hotkeys";
@@ -343,7 +343,7 @@ export function devOptions(yargs: CommonYargsArgv) {
type: "boolean",
describe:
"Use the experimental DevEnv instantiation (unified across wrangler dev and unstable_dev)",
- default: false,
+ default: true,
})
.option("experimental-registry", {
alias: ["x-registry"],
@@ -547,11 +547,6 @@ export async function startDev(args: StartDevOptions) {
let assetsWatcher: ReturnType | undefined;
let rerender: (node: React.ReactNode) => void | undefined;
try {
- const configPath =
- args.config ||
- (args.script && findWranglerToml(path.dirname(args.script)));
- let config = readConfig(configPath, args);
-
if (args.logLevel) {
logger.loggerLevel = args.logLevel;
}
@@ -589,36 +584,16 @@ export async function startDev(args: StartDevOptions) {
);
}
- if (
- (args.legacyAssets || config.legacy_assets) &&
- (args.site || config.site)
- ) {
- throw new UserError(
- "Cannot use legacy assets and Workers Sites in the same Worker."
- );
- }
-
- if ((args.assets || config.assets) && args.remote) {
- throw new UserError(
- "Cannot use assets in remote mode. Workers with assets are only supported in local mode. Please use `wrangler dev`."
- );
- }
-
- validateAssetsArgsAndConfig(args, config);
-
- let assetsOptions = processAssetsArg(args, config);
- if (assetsOptions) {
- args.forceLocal = true;
- }
-
- const projectRoot = configPath && path.dirname(configPath);
+ const configPath =
+ args.config ||
+ (args.script && findWranglerToml(path.dirname(args.script)));
const devEnv = new DevEnv();
if (args.experimentalDevEnv) {
// The ProxyWorker will have a stable host and port, so only listen for the first update
void devEnv.proxy.ready.promise.then(({ url }) => {
- if (process.send) {
+ if (process.send && typeof vitest === "undefined") {
process.send(
JSON.stringify({
event: "DEV_SERVER_READY",
@@ -661,138 +636,139 @@ export async function startDev(args: StartDevOptions) {
unregisterHotKeys = registerDevHotKeys(devEnv, args);
}
- await devEnv.config.set({
- name: args.name,
- config: configPath,
- entrypoint: args.script,
- compatibilityDate: args.compatibilityDate,
- compatibilityFlags: args.compatibilityFlags,
- triggers: args.routes?.map<Extract<Trigger, { type: "route" }>>(
- (r) => ({
- type: "route",
- pattern: r,
- })
- ),
-
- build: {
- bundle: args.bundle !== undefined ? args.bundle : undefined,
- define: collectKeyValues(args.define),
- jsxFactory: args.jsxFactory,
- jsxFragment: args.jsxFragment,
- tsconfig: args.tsconfig,
- minify: args.minify,
- processEntrypoint: args.processEntrypoint,
- additionalModules: args.additionalModules,
- moduleRoot: args.moduleRoot,
- moduleRules: args.rules,
- nodejsCompatMode: (parsedConfig: Config) =>
- validateNodeCompatMode(
- args.compatibilityDate ?? parsedConfig.compatibility_date,
- args.compatibilityFlags ?? parsedConfig.compatibility_flags ?? [],
- {
- nodeCompat: args.nodeCompat ?? parsedConfig.node_compat,
- noBundle: args.noBundle ?? parsedConfig.no_bundle,
- }
- ),
- },
- bindings: {
- ...(await getPagesAssetsFetcher(
- args.enablePagesAssetsServiceBinding
- )),
- ...collectPlainTextVars(args.var),
- ...convertCfWorkerInitBindingstoBindings({
- kv_namespaces: args.kv,
- vars: args.vars,
- send_email: undefined,
- wasm_modules: undefined,
- text_blobs: undefined,
- browser: undefined,
- ai: args.ai,
- version_metadata: args.version_metadata,
- data_blobs: undefined,
- durable_objects: { bindings: args.durableObjects ?? [] },
- queues: undefined,
- r2_buckets: args.r2,
- d1_databases: args.d1Databases,
- vectorize: undefined,
- hyperdrive: undefined,
- services: args.services,
- analytics_engine_datasets: undefined,
- dispatch_namespaces: undefined,
- mtls_certificates: undefined,
- pipelines: undefined,
- logfwdr: undefined,
- unsafe: undefined,
- assets: undefined,
- }),
- },
- dev: {
- auth: async () => {
- let accountId = args.accountId;
- if (!accountId) {
- unregisterHotKeys?.();
- accountId = await requireAuth({});
- unregisterHotKeys = registerDevHotKeys(devEnv, args);
- }
-
- return {
- accountId,
- apiToken: requireApiToken(),
- };
- },
- remote: !args.forceLocal && args.remote,
- server: {
- hostname: args.ip,
- port: args.port,
- secure:
- args.localProtocol === undefined
- ? undefined
- : args.localProtocol === "https",
- httpsCertPath: args.httpsCertPath,
- httpsKeyPath: args.httpsKeyPath,
+ await devEnv.config.set(
+ {
+ name: args.name,
+ config: configPath,
+ entrypoint: args.script,
+ compatibilityDate: args.compatibilityDate,
+ compatibilityFlags: args.compatibilityFlags,
+ triggers: args.routes?.map<Extract<Trigger, { type: "route" }>>(
+ (r) => ({
+ type: "route",
+ pattern: r,
+ })
+ ),
+ env: args.env,
+ build: {
+ bundle: args.bundle !== undefined ? args.bundle : undefined,
+ define: collectKeyValues(args.define),
+ jsxFactory: args.jsxFactory,
+ jsxFragment: args.jsxFragment,
+ tsconfig: args.tsconfig,
+ minify: args.minify,
+ processEntrypoint: args.processEntrypoint,
+ additionalModules: args.additionalModules,
+ moduleRoot: args.moduleRoot,
+ moduleRules: args.rules,
+ nodejsCompatMode: (parsedConfig: Config) =>
+ validateNodeCompatMode(
+ args.compatibilityDate ?? parsedConfig.compatibility_date,
+ args.compatibilityFlags ??
+ parsedConfig.compatibility_flags ??
+ [],
+ {
+ nodeCompat: args.nodeCompat ?? parsedConfig.node_compat,
+ noBundle: args.noBundle ?? parsedConfig.no_bundle,
+ }
+ ),
},
- inspector: {
- port: args.inspectorPort,
+ bindings: {
+ ...(await getPagesAssetsFetcher(
+ args.enablePagesAssetsServiceBinding
+ )),
+ ...collectPlainTextVars(args.var),
+ ...convertCfWorkerInitBindingstoBindings({
+ kv_namespaces: args.kv,
+ vars: args.vars,
+ send_email: undefined,
+ wasm_modules: undefined,
+ text_blobs: undefined,
+ browser: undefined,
+ ai: args.ai,
+ version_metadata: args.version_metadata,
+ data_blobs: undefined,
+ durable_objects: { bindings: args.durableObjects ?? [] },
+ queues: undefined,
+ r2_buckets: args.r2,
+ d1_databases: args.d1Databases,
+ vectorize: undefined,
+ hyperdrive: undefined,
+ services: args.services,
+ analytics_engine_datasets: undefined,
+ dispatch_namespaces: undefined,
+ mtls_certificates: undefined,
+ pipelines: undefined,
+ logfwdr: undefined,
+ unsafe: undefined,
+ assets: undefined,
+ }),
},
- origin: {
- hostname: args.host ?? args.localUpstream,
- secure:
- args.upstreamProtocol === undefined
- ? undefined
- : args.upstreamProtocol === "https",
+ dev: {
+ auth: async () => {
+ let accountId = args.accountId;
+ if (!accountId) {
+ unregisterHotKeys?.();
+ accountId = await requireAuth({});
+ unregisterHotKeys = registerDevHotKeys(devEnv, args);
+ }
+ return {
+ accountId,
+ apiToken: requireApiToken(),
+ };
+ },
+ remote: !args.forceLocal && args.remote,
+ server: {
+ hostname: args.ip,
+ port: args.port,
+ secure:
+ args.localProtocol === undefined
+ ? undefined
+ : args.localProtocol === "https",
+ httpsCertPath: args.httpsCertPath,
+ httpsKeyPath: args.httpsKeyPath,
+ },
+ inspector: {
+ port: args.inspectorPort,
+ },
+ origin: {
+ hostname: args.host ?? args.localUpstream,
+ secure:
+ args.upstreamProtocol === undefined
+ ? undefined
+ : args.upstreamProtocol === "https",
+ },
+ persist: args.persistTo,
+ liveReload: args.liveReload,
+ testScheduled: args.testScheduled,
+ logLevel: args.logLevel,
+ registry: devEnv.config.latestConfig?.dev.registry,
},
- persist: args.persistTo,
- liveReload: args.liveReload,
- testScheduled: args.testScheduled,
- logLevel: args.logLevel,
- registry: devEnv.config.latestConfig?.dev.registry,
- },
- legacy: {
- site: (configParam) => {
- const legacyAssetPaths = getResolvedLegacyAssetPaths(
- args,
- configParam
- );
-
- return Boolean(args.site || configParam.site) && legacyAssetPaths
- ? {
- bucket: path.join(
- legacyAssetPaths.baseDirectory,
- legacyAssetPaths?.assetDirectory
- ),
- include: legacyAssetPaths.includePatterns,
- exclude: legacyAssetPaths.excludePatterns,
- }
- : undefined;
+ legacy: {
+ site: (configParam) => {
+ const legacyAssetPaths = getResolvedLegacyAssetPaths(
+ args,
+ configParam
+ );
+ return Boolean(args.site || configParam.site) && legacyAssetPaths
+ ? {
+ bucket: path.join(
+ legacyAssetPaths.baseDirectory,
+ legacyAssetPaths?.assetDirectory
+ ),
+ include: legacyAssetPaths.includePatterns,
+ exclude: legacyAssetPaths.excludePatterns,
+ }
+ : undefined;
+ },
+ legacyAssets: (configParam) =>
+ args.legacyAssets ?? configParam.legacy_assets,
+ enableServiceEnvironments: !(args.legacyEnv ?? true),
},
- legacyAssets: (configParam) => configParam.legacy_assets,
- enableServiceEnvironments: !(args.legacyEnv ?? true),
- },
- // only pass `assetsOptions` if it came from args not from config
- // otherwise config at startup ends up overriding future config changes in the
- // ConfigController
- assets: args.assets ? assetsOptions : undefined,
- } satisfies StartDevWorkerInput);
+ assets: args.assets,
+ } satisfies StartDevWorkerInput,
+ true
+ );
void metrics.sendMetricsEvent(
"run dev",
@@ -809,227 +785,259 @@ export async function startDev(args: StartDevOptions) {
);
return devEnv;
- }
+ } else {
+ const projectRoot = configPath && path.dirname(configPath);
+ let config = readConfig(configPath, args);
- if (config.configPath && !args.experimentalDevEnv) {
- configFileWatcher = watch(config.configPath, {
- persistent: true,
- }).on("change", async (_event) => {
- try {
- // TODO: Do we need to handle different `_event` types differently?
- // e.g. what if the file is deleted, or added?
- config = readConfig(configPath, args);
- if (!config.configPath) {
- return;
- }
+ if (
+ (args.legacyAssets || config.legacy_assets) &&
+ (args.site || config.site)
+ ) {
+ throw new UserError(
+ "Cannot use legacy assets and Workers Sites in the same Worker."
+ );
+ }
- logger.log(`${path.basename(config.configPath)} changed...`);
+ if ((args.assets || config.assets) && args.remote) {
+ throw new UserError(
+ "Cannot use assets in remote mode. Workers with assets are only supported in local mode. Please use `wrangler dev`."
+ );
+ }
- // ensure we reflect config changes in the `main` entry point
- entry = await getEntry(
- {
- legacyAssets: args.legacyAssets,
- script: args.script,
- moduleRoot: args.moduleRoot,
- assets: args.assets,
- },
- config,
- "dev"
- );
+ validateAssetsArgsAndConfig(args, config);
+
+ let assetsOptions = processAssetsArg(args, config);
+ if (assetsOptions) {
+ args.forceLocal = true;
+ }
- // ensure we re-validate routes
- await getHostAndRoutes(args, config);
-
- assetsOptions = processAssetsArg(args, config);
-
- /*
- * Handle static assets watching on config file changes
- *
- * 1. if assets was specified via CLI args, only config file
- * changes related to `main` will matter. In this case, re-running
- * `processAssetsArg` is enough (see above)
- * 2. if assets was not specififed via the configuration
- * file, but it is now, we should start watching the assets
- * directory
- * 3. if assets was specified via the configuration
- * file, we should ensure we're still watching the correct
- * directory
- */
- if (assetsOptions && !args.assets) {
- await assetsWatcher?.close();
-
- if (assetsOptions) {
- const debouncedRerender = debounce(async () => {
- rerender(await getDevReactElement(config));
- }, 100);
-
- assetsWatcher = watch(assetsOptions.directory, {
- persistent: true,
- ignoreInitial: true,
- }).on("all", async (eventName, changedPath) => {
- const message = getAssetChangeMessage(eventName, changedPath);
-
- logger.log(`🌀 ${message}...`);
- debouncedRerender();
- });
+ if (config.configPath && !args.experimentalDevEnv) {
+ configFileWatcher = watch(config.configPath, {
+ persistent: true,
+ }).on("change", async (_event) => {
+ try {
+ // TODO: Do we need to handle different `_event` types differently?
+ // e.g. what if the file is deleted, or added?
+ config = readConfig(configPath, args);
+ if (!config.configPath) {
+ return;
}
- }
- rerender(await getDevReactElement(config));
- } catch (err) {
- logger.error(err);
- }
- });
- }
+ logger.log(`${path.basename(config.configPath)} changed...`);
- const devServerSettings = await validateDevServerSettings(args, config);
- let { entry } = devServerSettings;
- const {
- upstreamProtocol,
- host,
- routes,
- getLocalPort,
- getInspectorPort,
- getRuntimeInspectorPort,
- cliDefines,
- cliAlias,
- localPersistencePath,
- processEntrypoint,
- additionalModules,
- } = devServerSettings;
+ // ensure we reflect config changes in the `main` entry point
+ entry = await getEntry(
+ {
+ legacyAssets: args.legacyAssets,
+ script: args.script,
+ moduleRoot: args.moduleRoot,
+ assets: args.assets,
+ },
+ config,
+ "dev"
+ );
- const nodejsCompatMode = validateNodeCompatMode(
- args.compatibilityDate ?? config.compatibility_date,
- args.compatibilityFlags ?? config.compatibility_flags ?? [],
- {
- nodeCompat: args.nodeCompat ?? config.node_compat,
- noBundle: args.noBundle ?? config.no_bundle,
- }
- );
+ // ensure we re-validate routes
+ await getHostAndRoutes(args, config);
+
+ assetsOptions = processAssetsArg(args, config);
+
+ /*
+ * Handle static assets watching on config file changes
+ *
+ * 1. if assets was specified via CLI args, only config file
+ * changes related to `main` will matter. In this case, re-running
+ * `processAssetsArg` is enough (see above)
+				 * 2. if assets was not specified via the configuration
+ * file, but it is now, we should start watching the assets
+ * directory
+ * 3. if assets was specified via the configuration
+ * file, we should ensure we're still watching the correct
+ * directory
+ */
+ if (assetsOptions && !args.assets) {
+ await assetsWatcher?.close();
+
+ if (assetsOptions) {
+ const debouncedRerender = debounce(async () => {
+ rerender(await getDevReactElement(config));
+ }, 100);
+
+ assetsWatcher = watch(assetsOptions.directory, {
+ persistent: true,
+ ignoreInitial: true,
+ }).on("all", async (eventName, changedPath) => {
+ const message = getAssetChangeMessage(eventName, changedPath);
+
+ logger.log(`🌀 ${message}...`);
+ debouncedRerender();
+ });
+ }
+ }
- void metrics.sendMetricsEvent(
- "run dev",
- {
- local: !args.remote,
- usesTypeScript: /\.tsx?$/.test(entry.file),
- },
- { sendMetrics: config.send_metrics, offline: !args.remote }
- );
+ rerender(await getDevReactElement(config));
+ } catch (err) {
+ logger.error(err);
+ }
+ });
+ }
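
As the comment block above describes, asset-directory changes are funnelled through a short debounce so a burst of file events causes a single rerender. A stand-alone sketch of that watch-and-debounce pattern (`rerenderDev` and the "./public" path are placeholders, not wrangler values):

import { watch } from "chokidar";

function debounce(fn: () => void, ms: number): () => void {
	let timer: ReturnType<typeof setTimeout> | undefined;
	return () => {
		if (timer) {
			clearTimeout(timer);
		}
		timer = setTimeout(fn, ms);
	};
}

const rerenderDev = () => console.log("rerendering dev session...");
const debouncedRerender = debounce(rerenderDev, 100); // 100 ms, as in the code above

const assetsWatcher = watch("./public", {
	persistent: true,
	ignoreInitial: true,
}).on("all", (eventName, changedPath) => {
	console.log(`🌀 ${eventName}: ${changedPath}...`);
	debouncedRerender();
});

// call `await assetsWatcher.close()` when the dev session shuts down
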
- // eslint-disable-next-line no-inner-declarations
- async function getDevReactElement(configParam: Config) {
- const { legacyAssetPaths, bindings } = getBindingsAndLegacyAssetPaths(
- args,
- configParam
+ const devServerSettings = await validateDevServerSettings(args, config);
+ let { entry } = devServerSettings;
+ const {
+ upstreamProtocol,
+ host,
+ routes,
+ getLocalPort,
+ getInspectorPort,
+ getRuntimeInspectorPort,
+ cliDefines,
+ cliAlias,
+ localPersistencePath,
+ processEntrypoint,
+ additionalModules,
+ } = devServerSettings;
+
+ const nodejsCompatMode = validateNodeCompatMode(
+ args.compatibilityDate ?? config.compatibility_date,
+ args.compatibilityFlags ?? config.compatibility_flags ?? [],
+ {
+ nodeCompat: args.nodeCompat ?? config.node_compat,
+ noBundle: args.noBundle ?? config.no_bundle,
+ }
);
- return (
-
+ void metrics.sendMetricsEvent(
+ "run dev",
+ {
+ local: !args.remote,
+ usesTypeScript: /\.tsx?$/.test(entry.file),
+ },
+ { sendMetrics: config.send_metrics, offline: !args.remote }
);
- }
- const devReactElement = render(await getDevReactElement(config));
- rerender = devReactElement.rerender;
+ // eslint-disable-next-line no-inner-declarations
+ async function getDevReactElement(configParam: Config) {
+ const { legacyAssetPaths, bindings } = getBindingsAndLegacyAssetPaths(
+ args,
+ configParam
+ );
- if (assetsOptions && !args.experimentalDevEnv) {
- const debouncedRerender = debounce(async () => {
- rerender(await getDevReactElement(config));
- }, 100);
+ return (
+
+ );
+ }
- assetsWatcher = watch(assetsOptions.directory, {
- persistent: true,
- ignoreInitial: true,
- }).on("all", async (eventName, filePath) => {
- const message = getAssetChangeMessage(eventName, filePath);
+ const devReactElement = render(await getDevReactElement(config));
+ rerender = devReactElement.rerender;
- logger.log(`🌀 ${message}...`);
- debouncedRerender();
- });
- }
+ if (assetsOptions && !args.experimentalDevEnv) {
+ const debouncedRerender = debounce(async () => {
+ rerender(await getDevReactElement(config));
+ }, 100);
- return {
- devReactElement,
- configFileWatcher,
- assetsWatcher,
- stop: async () => {
- devReactElement.unmount();
- await Promise.allSettled([
- configFileWatcher?.close(),
- assetsWatcher?.close(),
- ]);
- },
- };
+ assetsWatcher = watch(assetsOptions.directory, {
+ persistent: true,
+ ignoreInitial: true,
+ }).on("all", async (eventName, filePath) => {
+ const message = getAssetChangeMessage(eventName, filePath);
+
+ logger.log(`🌀 ${message}...`);
+ debouncedRerender();
+ });
+ }
+
+ return {
+ devReactElement,
+ configFileWatcher,
+ assetsWatcher,
+ stop: async () => {
+ devReactElement.unmount();
+ await Promise.allSettled([
+ configFileWatcher?.close(),
+ assetsWatcher?.close(),
+ ]);
+ },
+ };
+ }
} catch (e) {
await Promise.allSettled([
configFileWatcher?.close(),
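
For reference, the restructured startDev above funnels everything through a single devEnv.config.set(...) call (note the trailing `true` and `registry: devEnv.config.latestConfig?.dev.registry`), with the legacy code path moved into the `else` branch. The following is a self-contained, simplified sketch of that "one config setter" pattern, not wrangler code; `DevInput`, `ConfigController` and the port validation are illustrative assumptions.

type DevInput = {
	name?: string;
	entrypoint?: string;
	dev?: { port?: number; remote?: boolean };
};

class ConfigController {
	latestConfig?: DevInput;

	// the second argument mirrors the extra `true` passed to devEnv.config.set(...) above:
	// surface validation errors to the caller instead of reporting them asynchronously
	async set(input: DevInput, throwErrors = false): Promise<void> {
		try {
			if (input.dev?.port !== undefined && input.dev.port <= 0) {
				throw new Error(`invalid port: ${input.dev.port}`);
			}
			// merge over the previous config so later partial updates (e.g. a config file
			// change) go through the same entry point as startup
			this.latestConfig = { ...this.latestConfig, ...input };
		} catch (err) {
			if (throwErrors) {
				throw err;
			}
			console.error(err);
		}
	}
}

// usage: startup passes the CLI-derived input once; file watchers call set() again later
const controller = new ConfigController();
void controller.set({ name: "my-worker", dev: { port: 8787, remote: false } }, true);
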
diff --git a/packages/wrangler/src/dev/create-worker-preview.ts b/packages/wrangler/src/dev/create-worker-preview.ts
index 436cfbd2586f..813dbe72a4a2 100644
--- a/packages/wrangler/src/dev/create-worker-preview.ts
+++ b/packages/wrangler/src/dev/create-worker-preview.ts
@@ -242,19 +242,20 @@ async function createPreviewToken(
const mode: CfPreviewMode = ctx.zone
? {
- routes: ctx.routes
- ? // extract all the route patterns
- ctx.routes.map((route) => {
- if (typeof route === "string") {
- return route;
- }
- if (route.custom_domain) {
- return `${route.pattern}/*`;
- }
- return route.pattern;
- })
- : // if there aren't any patterns, then just match on all routes
- ["*/*"],
+ routes:
+ ctx.routes && ctx.routes.length > 0
+ ? // extract all the route patterns
+ ctx.routes.map((route) => {
+ if (typeof route === "string") {
+ return route;
+ }
+ if (route.custom_domain) {
+ return `${route.pattern}/*`;
+ }
+ return route.pattern;
+ })
+ : // if there aren't any patterns, then just match on all routes
+ ["*/*"],
}
: { workers_dev: true };
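
The hunk above makes an empty `ctx.routes` array behave like `undefined` when building the preview token, falling back to the catch-all `"*/*"` pattern. A small stand-alone sketch of that mapping (the `Route` shape is simplified and `toPreviewPatterns` is a hypothetical helper name):

type Route = string | { pattern: string; custom_domain?: boolean };

function toPreviewPatterns(routes: Route[] | undefined): string[] {
	// no patterns at all: match every route on the zone
	if (!routes || routes.length === 0) {
		return ["*/*"];
	}
	return routes.map((route) => {
		if (typeof route === "string") {
			return route;
		}
		// custom domains should match sub-paths too, so register them as `<pattern>/*`
		return route.custom_domain ? `${route.pattern}/*` : route.pattern;
	});
}

// toPreviewPatterns([])                    -> ["*/*"]  (previously an empty array was passed through)
// toPreviewPatterns(["example.com/api/*"]) -> ["example.com/api/*"]
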
diff --git a/packages/wrangler/src/dev/dev.tsx b/packages/wrangler/src/dev/dev.tsx
index c4aa15cb53bb..251ead69beed 100644
--- a/packages/wrangler/src/dev/dev.tsx
+++ b/packages/wrangler/src/dev/dev.tsx
@@ -17,12 +17,10 @@ import {
import { runCustomBuild } from "../deployment-bundle/run-custom-build";
import {
getBoundRegisteredWorkers,
- getRegisteredWorkers,
startWorkerRegistry,
stopWorkerRegistry,
unregisterWorker,
} from "../dev-registry";
-import { getFlag } from "../experimental-flags";
import { logger } from "../logger";
import { isNavigatorDefined } from "../navigator-user-agent";
import openInBrowser from "../open-in-browser";
@@ -140,71 +138,6 @@ function useDevRegistry(
return workers;
}
-/**
- * A react-free version of the above hook
- */
-export async function devRegistry(
- cb: (workers: WorkerRegistry | undefined) => void
- ): Promise<(name?: string) => Promise<void>> {
- let previousRegistry: WorkerRegistry | undefined;
-
- let interval: ReturnType<typeof setInterval>;
-
- let hasFailedToFetch = false;
-
- // The new file based registry supports a much more performant listener callback
- if (getFlag("FILE_BASED_REGISTRY")) {
- await startWorkerRegistry(async (registry) => {
- if (!util.isDeepStrictEqual(registry, previousRegistry)) {
- previousRegistry = registry;
- cb(registry);
- }
- });
- } else {
- try {
- await startWorkerRegistry();
- } catch (err) {
- logger.error("failed to start worker registry", err);
- }
- // Else we need to fall back to a polling based approach
- interval = setInterval(async () => {
- try {
- const registry = await getRegisteredWorkers();
- if (!util.isDeepStrictEqual(registry, previousRegistry)) {
- previousRegistry = registry;
- cb(registry);
- }
- } catch (err) {
- if (!hasFailedToFetch) {
- hasFailedToFetch = true;
- logger.warn("Failed to get worker definitions", err);
- }
- }
- }, 300);
- }
-
- return async (name) => {
- interval && clearInterval(interval);
- try {
- const [unregisterResult, stopRegistryResult] = await Promise.allSettled([
- name ? unregisterWorker(name) : Promise.resolve(),
- stopWorkerRegistry(),
- ]);
- if (unregisterResult.status === "rejected") {
- logger.error("Failed to unregister worker", unregisterResult.reason);
- }
- if (stopRegistryResult.status === "rejected") {
- logger.error(
- "Failed to stop worker registry",
- stopRegistryResult.reason
- );
- }
- } catch (err) {
- logger.error("Failed to cleanup dev registry", err);
- }
- };
-}
-
export type DevProps = {
name: string | undefined;
noBundle: boolean;
@@ -485,7 +418,6 @@ function DevSession(props: DevSessionProps) {
capnp: props.bindings.unsafe?.capnp,
metadata: props.bindings.unsafe?.metadata,
},
- assets: props.assets,
} satisfies StartDevWorkerOptions;
}, [
props.routes,
@@ -501,7 +433,6 @@ function DevSession(props: DevSessionProps) {
props.isWorkersSite,
props.local,
props.legacyAssetsConfig,
- props.assets,
props.processEntrypoint,
props.additionalModules,
props.env,
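
The `devRegistry()` helper removed above switched between two strategies: a push-style listener when the file-based registry flag was set, and a 300 ms polling loop otherwise, invoking the callback only when the registry contents actually changed. A simplified, stand-alone sketch of that choice (not the wrangler implementation; `watchRegistry` and its parameters are illustrative):

type Registry = Record<string, { port: number }>;

async function watchRegistry(
	fileBased: boolean,
	subscribe: (cb: (r: Registry) => void) => Promise<void>, // push API (file-based registry)
	fetchAll: () => Promise<Registry>, // pull API (polling fallback)
	onChange: (r: Registry) => void
): Promise<() => void> {
	let previous: Registry | undefined;
	const emitIfChanged = (registry: Registry) => {
		// only notify when the registry actually changed, as the removed code did with
		// util.isDeepStrictEqual
		if (JSON.stringify(registry) !== JSON.stringify(previous)) {
			previous = registry;
			onChange(registry);
		}
	};

	if (fileBased) {
		await subscribe(emitIfChanged);
		return () => {};
	}

	const interval = setInterval(async () => emitIfChanged(await fetchAll()), 300);
	return () => clearInterval(interval);
}
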
diff --git a/packages/wrangler/src/dev/remote.tsx b/packages/wrangler/src/dev/remote.tsx
index a410a86be0ab..78885bc1a4ce 100644
--- a/packages/wrangler/src/dev/remote.tsx
+++ b/packages/wrangler/src/dev/remote.tsx
@@ -685,11 +685,11 @@ export async function createRemoteWorkerInit(props: {
export async function getWorkerAccountAndContext(props: {
accountId: string;
- env?: string;
- legacyEnv?: boolean;
- host?: string;
+ env: string | undefined;
+ legacyEnv: boolean | undefined;
+ host: string | undefined;
routes: Route[] | undefined;
- sendMetrics?: boolean;
+ sendMetrics: boolean | undefined;
}): Promise<{ workerAccount: CfAccount; workerContext: CfWorkerContext }> {
const workerAccount: CfAccount = {
accountId: props.accountId,
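
The prop types above change from optional (`env?: string`) to required-but-possibly-undefined (`env: string | undefined`): the values stay nullable, but call sites can no longer omit them silently. A minimal sketch of the difference:

type OptionalProps = { env?: string };
type ExplicitProps = { env: string | undefined };

const a: OptionalProps = {}; // compiles: easy to forget to plumb env through
const b: ExplicitProps = { env: undefined }; // caller must spell out "no env"
// const c: ExplicitProps = {}; // type error: property 'env' is missing

This keeps runtime behaviour identical while catching forgotten call sites at compile time.
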
diff --git a/packages/wrangler/src/index.ts b/packages/wrangler/src/index.ts
index 124af7ea536e..e3fc2b4c4af2 100644
--- a/packages/wrangler/src/index.ts
+++ b/packages/wrangler/src/index.ts
@@ -906,8 +906,26 @@ export async function main(argv: string[]): Promise<void> {
logBuildFailure(e.errors, e.warnings);
logger.error(e.message);
} else {
- logger.error(e instanceof Error ? e.message : e);
- if (!(e instanceof UserError)) {
+ let loggableException = e;
+ if (
+ // Is this a StartDevEnv error event? If so, unwrap the cause, which is usually the user-recognisable error
+ e &&
+ typeof e === "object" &&
+ "type" in e &&
+ e.type === "error" &&
+ "cause" in e &&
+ e.cause instanceof Error
+ ) {
+ loggableException = e.cause;
+ }
+
+ logger.error(
+ loggableException instanceof Error
+ ? loggableException.message
+ : loggableException
+ );
+
+ if (!(loggableException instanceof UserError)) {
await logPossibleBugMessage();
}
}
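
The change above unwraps errors that are really dev-session error events before logging, so the user sees the underlying failure and the UserError check runs against the real cause. A stand-alone sketch of that unwrapping (the function name is illustrative):

function unwrapDevEnvError(e: unknown): unknown {
	// the shape checked here matches the guard in the diff: an object with
	// `type: "error"` and an Error instance in `cause`
	if (
		e !== null &&
		typeof e === "object" &&
		"type" in e &&
		(e as { type: unknown }).type === "error" &&
		"cause" in e &&
		(e as { cause: unknown }).cause instanceof Error
	) {
		return (e as { cause: Error }).cause;
	}
	return e;
}

// unwrapDevEnvError({ type: "error", cause: new Error("boom") }) -> Error("boom")
// unwrapDevEnvError(new Error("boom"))                           -> Error("boom"), unchanged
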