diff --git a/bench/snippets/readdir.mjs b/bench/snippets/readdir.mjs index 7146decb8fa94b..37aefe6ac82b51 100644 --- a/bench/snippets/readdir.mjs +++ b/bench/snippets/readdir.mjs @@ -1,13 +1,50 @@ -import { readdirSync } from "fs"; +import { readdirSync, readdir as readdirCb } from "fs"; +import { readdir } from "fs/promises"; import { bench, run } from "./runner.mjs"; import { argv } from "process"; +import { fileURLToPath } from "url"; +import { relative, resolve } from "path"; +import { createHash } from "crypto"; -const dir = argv.length > 2 ? argv[2] : "/tmp"; +let dir = resolve(argv.length > 2 ? argv[2] : fileURLToPath(new URL("../../node_modules", import.meta.url))); +if (dir.includes(process.cwd())) { + dir = relative(process.cwd(), dir); +} -const count = readdirSync(dir).length; -bench(`readdir("${dir}")`, () => { - readdirSync(dir, { withFileTypes: true }); +const result = await readdir(dir, { recursive: true }); +const count = result.length; +const syncCount = readdirSync(dir, { recursive: true }).length; + +const hash = createHash("sha256").update(result.sort().join("\n")).digest("hex"); + +bench(`await readdir("${dir}", {recursive: true})`, async () => { + await readdir(dir, { recursive: true }); }); + +bench(`await readdir("${dir}", {recursive: true}) x 11`, async () => { + const promises = [ + readdir(dir, { recursive: true }), + readdir(dir, { recursive: true }), + readdir(dir, { recursive: true }), + readdir(dir, { recursive: true }), + readdir(dir, { recursive: true }), + readdir(dir, { recursive: true }), + readdir(dir, { recursive: true }), + readdir(dir, { recursive: true }), + readdir(dir, { recursive: true }), + readdir(dir, { recursive: true }), + readdir(dir, { recursive: true }), + ]; + await Promise.all(promises); +}); + +bench(`await readdir("${dir}", {recursive: false})`, async () => { + await readdir(dir, { recursive: false }); }); await run(); -console.log("\n\nFor", count, "files/dirs in", dir); +console.log("\n", count,
"files/dirs in", dir, "\n", "SHA256:", hash, "\n"); + +if (count !== syncCount) { + throw new Error(`Mismatched file counts: ${count} async !== ${syncCount} sync`); +} diff --git a/docs/api/import-meta.md b/docs/api/import-meta.md index 37f35884d77e59..2c821f751015df 100644 --- a/docs/api/import-meta.md +++ b/docs/api/import-meta.md @@ -38,6 +38,11 @@ import.meta.resolveSync("zod") --- +- `import.meta.env` +- An alias to `process.env`. + +--- + - `import.meta.resolve{Sync}` - Resolve a module specifier (e.g. `"zod"` or `"./file.tsx"`) to an absolute path. While file would be imported if the specifier were imported from this file? diff --git a/docs/cli/update.md b/docs/cli/update.md index dfda37f014cfbe..209f9c0f85bff3 100644 --- a/docs/cli/update.md +++ b/docs/cli/update.md @@ -4,4 +4,14 @@ To update all dependencies to the latest version _that's compatible with the ver $ bun update ``` -This will not edit your `package.json`. There's currently no command to force-update all dependencies to the latest version regardless version ranges. +## `--force` + +{% callout %} +**Alias** — `-f` +{% /callout %} + +Bun by default respect the version rages defined in your package.json, to ignore this and update to the latest version you can pass in the `force` flag. + +```sh +$ bun update --force +``` \ No newline at end of file diff --git a/docs/guides/ecosystem/edgedb.md b/docs/guides/ecosystem/edgedb.md index d9b89a624c38ec..15e35ec6051c1b 100644 --- a/docs/guides/ecosystem/edgedb.md +++ b/docs/guides/ecosystem/edgedb.md @@ -109,7 +109,7 @@ Applied m1uwekrn4ni4qs7ul7hfar4xemm5kkxlpswolcoyqj3xdhweomwjrq (00001.edgeql) --- -With out schema applied, let's execute some queries using EdgeDB's JavaScript client library. We'll install the client library and EdgeDB's codegen CLI, and create a `seed.ts`.file. +With our schema applied, let's execute some queries using EdgeDB's JavaScript client library. 
We'll install the client library and EdgeDB's codegen CLI, and create a `seed.ts` file. ```sh $ bun add edgedb diff --git a/docs/guides/test/migrate-from-jest.md b/docs/guides/test/migrate-from-jest.md index 4adbddc34e42e1..5938ec00ac1b75 100644 --- a/docs/guides/test/migrate-from-jest.md +++ b/docs/guides/test/migrate-from-jest.md @@ -32,7 +32,6 @@ Some notable missing features: - `expect.extend()` - `expect().toMatchInlineSnapshot()` -- `expect().toHaveBeenCalledWith()` - `expect().toHaveReturned()` --- diff --git a/docs/runtime/env.md b/docs/runtime/env.md index e38eabffd0adb8..887c5ab7ab0a71 100644 --- a/docs/runtime/env.md +++ b/docs/runtime/env.md @@ -25,6 +25,16 @@ Or programmatically by assigning a property to `process.env`. process.env.FOO = "hello"; ``` +### Manually specifying `.env` files + +Bun supports `--env-file` to override which specific `.env` file to load. You can use `--env-file` when running scripts in bun's runtime, or when running package.json scripts. + +```sh +bun --env-file=.env.1 src/index.ts + +bun --env-file=.env.abc --env-file=.env.def run build +``` + ### Quotation marks Bun supports double quotes, single quotes, and @@ -75,10 +85,11 @@ The current environment variables can be accessed via `process.env`. process.env.API_TOKEN; // => "secret" ``` -Bun also exposes these variables via `Bun.env`, which is a simple alias of `process.env`. +Bun also exposes these variables via `Bun.env` and `import.meta.env`, which are simple aliases of `process.env`. ```ts Bun.env.API_TOKEN; // => "secret" +import.meta.env.API_TOKEN; // => "secret" ``` To print all currently-set environment variables to the command line, run `bun run env`. This is useful for debugging. diff --git a/docs/test/writing.md b/docs/test/writing.md index b8b0118f7fd0fa..32eb463c084fbe 100644 --- a/docs/test/writing.md +++ b/docs/test/writing.md @@ -313,17 +313,17 @@ Bun implements the following matchers.
Full Jest compatibility is on the roadmap --- -- ❌ +- ✅ - [`.toHaveBeenCalledWith()`](https://jestjs.io/docs/expect#tohavebeencalledwitharg1-arg2-) --- -- ❌ +- ✅ - [`.toHaveBeenLastCalledWith()`](https://jestjs.io/docs/expect#tohavebeenlastcalledwitharg1-arg2-) --- -- ❌ +- ✅ - [`.toHaveBeenNthCalledWith()`](https://jestjs.io/docs/expect#tohavebeennthcalledwithnthcall-arg1-arg2-) --- diff --git a/misctools/fetch.zig b/misctools/fetch.zig index 347ce8cde841dd..5e8701eb9c82d6 100644 --- a/misctools/fetch.zig +++ b/misctools/fetch.zig @@ -14,13 +14,13 @@ pub usingnamespace @import("root").bun; const clap = bun.clap; const URL = @import("../src/url.zig").URL; -const Headers = @import("root").bun.HTTP.Headers; +const Headers = @import("root").bun.http.Headers; const Method = @import("../src/http/method.zig").Method; const ColonListType = @import("../src/cli/colon_list_type.zig").ColonListType; const HeadersTuple = ColonListType(string, noop_resolver); const path_handler = @import("../src/resolver/resolve_path.zig"); -const HTTPThread = @import("root").bun.HTTP.HTTPThread; -const HTTP = @import("root").bun.HTTP; +const HTTPThread = @import("root").bun.http.HTTPThread; +const HTTP = @import("root").bun.http; fn noop_resolver(in: string) !string { return in; } diff --git a/misctools/http_bench.zig b/misctools/http_bench.zig index 1759e630dddf06..182024828dcf90 100644 --- a/misctools/http_bench.zig +++ b/misctools/http_bench.zig @@ -177,7 +177,7 @@ pub const Arguments = struct { } }; -const HTTP = @import("root").bun.HTTP; +const HTTP = @import("root").bun.http; const NetworkThread = HTTP.NetworkThread; var stdout_: std.fs.File = undefined; diff --git a/misctools/machbench.zig b/misctools/machbench.zig index 5da20b5bad7da3..d340b0b0c6abcb 100644 --- a/misctools/machbench.zig +++ b/misctools/machbench.zig @@ -14,13 +14,13 @@ const clap = @import("../src/deps/zig-clap/clap.zig"); const AsyncIO = @import("root").bun.AsyncIO; const URL = @import("../src/url.zig").URL; -const 
Headers = @import("root").bun.HTTP.Headers; +const Headers = @import("root").bun.http.Headers; const Method = @import("../src/http/method.zig").Method; const ColonListType = @import("../src/cli/colon_list_type.zig").ColonListType; const HeadersTuple = ColonListType(string, noop_resolver); const path_handler = @import("../src/resolver/resolve_path.zig"); -const NetworkThread = @import("root").bun.HTTP.NetworkThread; -const HTTP = @import("root").bun.HTTP; +const NetworkThread = @import("root").bun.http.NetworkThread; +const HTTP = @import("root").bun.http; fn noop_resolver(in: string) !string { return in; } diff --git a/packages/bun-types/bun-test.d.ts b/packages/bun-types/bun-test.d.ts index c2fabfe81ecde1..f92049f0b5183a 100644 --- a/packages/bun-types/bun-test.d.ts +++ b/packages/bun-types/bun-test.d.ts @@ -1144,7 +1144,15 @@ declare module "bun:test" { /** * Ensure that a mock function is called with specific arguments. */ - // toHaveBeenCalledWith(...expected: Array): void; + toHaveBeenCalledWith(...expected: Array): void; + /** + * Ensure that a mock function is called with specific arguments for the last call. + */ + toHaveBeenLastCalledWith(...expected: Array): void; + /** + * Ensure that a mock function is called with specific arguments for the nth call. 
+ */ + toHaveBeenNthCalledWith(n: number, ...expected: Array): void; }; } diff --git a/packages/bun-types/globals.d.ts b/packages/bun-types/globals.d.ts index bec0a107043683..7a1b77e85b9131 100644 --- a/packages/bun-types/globals.d.ts +++ b/packages/bun-types/globals.d.ts @@ -305,6 +305,14 @@ interface ImportMeta { * Filename of the source file */ readonly file: string; + /** + * The environment variables of the process + * + * ```ts + * import.meta.env === process.env + * ``` + */ + readonly env: import("bun").Env; /** * Resolve a module ID the same as if you imported it * diff --git a/packages/bun-usockets/src/eventing/epoll_kqueue.c b/packages/bun-usockets/src/eventing/epoll_kqueue.c index 997fdf7e01757d..f000342a64d0a0 100644 --- a/packages/bun-usockets/src/eventing/epoll_kqueue.c +++ b/packages/bun-usockets/src/eventing/epoll_kqueue.c @@ -28,6 +28,7 @@ void Bun__internal_dispatch_ready_poll(void* loop, void* poll); /* Cannot include this one on Windows */ #include #include +#include #endif void us_loop_run_bun_tick(struct us_loop_t *loop, int64_t timeoutMs, void*); @@ -602,4 +603,13 @@ void us_internal_async_wakeup(struct us_internal_async *a) { } #endif +int us_socket_get_error(int ssl, struct us_socket_t *s) { + int error = 0; + socklen_t len = sizeof(error); + if (getsockopt(us_poll_fd((struct us_poll_t *) s), SOL_SOCKET, SO_ERROR, (char *) &error, &len) == -1) { + return errno; + } + return error; +} + #endif diff --git a/packages/bun-usockets/src/libusockets.h b/packages/bun-usockets/src/libusockets.h index b343bc376eb5a0..e6d58b8d6e6cc9 100644 --- a/packages/bun-usockets/src/libusockets.h +++ b/packages/bun-usockets/src/libusockets.h @@ -397,6 +397,7 @@ int us_socket_raw_write(int ssl, struct us_socket_t *s, const char *data, int le struct us_socket_t* us_socket_open(int ssl, struct us_socket_t * s, int is_client, char* ip, int ip_length); int us_raw_root_certs(struct us_cert_string_t**out); unsigned int us_get_remote_address_info(char *buf, struct 
us_socket_t *s, const char **dest, int *port, int *is_ipv6); +int us_socket_get_error(int ssl, struct us_socket_t *s); #ifdef __cplusplus } diff --git a/src/analytics/analytics_thread.zig b/src/analytics/analytics_thread.zig index 1b39f97f02fdad..3fe05940b5ca0e 100644 --- a/src/analytics/analytics_thread.zig +++ b/src/analytics/analytics_thread.zig @@ -12,13 +12,13 @@ const C = bun.C; const sync = @import("../sync.zig"); const std = @import("std"); -const HTTP = @import("root").bun.HTTP; +const HTTP = @import("root").bun.http; const NetworkThread = HTTP.NetworkThread; const URL = @import("../url.zig").URL; const Fs = @import("../fs.zig"); const Analytics = @import("./analytics_schema.zig").analytics; const Writer = @import("./analytics_schema.zig").Writer; -const Headers = @import("root").bun.HTTP.Headers; +const Headers = @import("root").bun.http.Headers; const Futex = @import("../futex.zig"); const Semver = @import("../install/semver.zig"); diff --git a/src/bun.js/api/JSBundler.zig b/src/bun.js/api/JSBundler.zig index 4a4d10f0f2a83a..bac2b3f858a50d 100644 --- a/src/bun.js/api/JSBundler.zig +++ b/src/bun.js/api/JSBundler.zig @@ -1,6 +1,5 @@ const std = @import("std"); const Api = @import("../../api/schema.zig").Api; -const http = @import("../../bun_dev_http_server.zig"); const JavaScript = @import("../javascript.zig"); const QueryStringMap = @import("../../url.zig").QueryStringMap; const CombinedScanner = @import("../../url.zig").CombinedScanner; diff --git a/src/bun.js/api/JSTranspiler.zig b/src/bun.js/api/JSTranspiler.zig index d41458acbee050..b6341aa39694b8 100644 --- a/src/bun.js/api/JSTranspiler.zig +++ b/src/bun.js/api/JSTranspiler.zig @@ -1,6 +1,5 @@ const std = @import("std"); const Api = @import("../../api/schema.zig").Api; -const http = @import("../../bun_dev_http_server.zig"); const JavaScript = @import("../javascript.zig"); const QueryStringMap = @import("../../url.zig").QueryStringMap; const CombinedScanner = @import("../../url.zig").CombinedScanner; 
diff --git a/src/bun.js/api/bun.zig b/src/bun.js/api/bun.zig index 0f22049fdc3be2..54cf5c5cab9892 100644 --- a/src/bun.js/api/bun.zig +++ b/src/bun.js/api/bun.zig @@ -161,7 +161,7 @@ const Bun = @This(); const default_allocator = @import("root").bun.default_allocator; const bun = @import("root").bun; const Environment = bun.Environment; -const NetworkThread = @import("root").bun.HTTP.NetworkThread; +const NetworkThread = @import("root").bun.http.NetworkThread; const Global = bun.Global; const strings = bun.strings; const string = bun.string; @@ -183,7 +183,6 @@ const ServerEntryPoint = bun.bundler.ServerEntryPoint; const js_printer = bun.js_printer; const js_parser = bun.js_parser; const js_ast = bun.JSAst; -const http = @import("../../bun_dev_http_server.zig"); const NodeFallbackModules = @import("../../node_fallbacks.zig"); const ImportKind = ast.ImportKind; const Analytics = @import("../../analytics/analytics_thread.zig"); diff --git a/src/bun.js/api/bun/dns_resolver.zig b/src/bun.js/api/bun/dns_resolver.zig index f796060f9d964b..c5eac79afbaa37 100644 --- a/src/bun.js/api/bun/dns_resolver.zig +++ b/src/bun.js/api/bun/dns_resolver.zig @@ -2,7 +2,7 @@ const Bun = @This(); const default_allocator = @import("root").bun.default_allocator; const bun = @import("root").bun; const Environment = bun.Environment; -const NetworkThread = @import("root").bun.HTTP.NetworkThread; +const NetworkThread = @import("root").bun.http.NetworkThread; const Global = bun.Global; const strings = bun.strings; const string = bun.string; diff --git a/src/bun.js/api/bun/socket.zig b/src/bun.js/api/bun/socket.zig index 652aaee51c12ce..c1b9267f6f3dd6 100644 --- a/src/bun.js/api/bun/socket.zig +++ b/src/bun.js/api/bun/socket.zig @@ -1,7 +1,7 @@ const default_allocator = @import("root").bun.default_allocator; const bun = @import("root").bun; const Environment = bun.Environment; -const NetworkThread = @import("root").bun.HTTP.NetworkThread; +const NetworkThread = 
@import("root").bun.http.NetworkThread; const Global = bun.Global; const strings = bun.strings; const string = bun.string; diff --git a/src/bun.js/api/bun/subprocess.zig b/src/bun.js/api/bun/subprocess.zig index bbcb154fc22bdb..9c5a2da22a5609 100644 --- a/src/bun.js/api/bun/subprocess.zig +++ b/src/bun.js/api/bun/subprocess.zig @@ -1,7 +1,7 @@ const default_allocator = @import("root").bun.default_allocator; const bun = @import("root").bun; const Environment = bun.Environment; -const NetworkThread = @import("root").bun.HTTP.NetworkThread; +const NetworkThread = @import("root").bun.http.NetworkThread; const Global = bun.Global; const strings = bun.strings; const string = bun.string; diff --git a/src/bun.js/api/ffi.zig b/src/bun.js/api/ffi.zig index db3ba1666873ed..8371e9999e2137 100644 --- a/src/bun.js/api/ffi.zig +++ b/src/bun.js/api/ffi.zig @@ -3,7 +3,7 @@ const root = @import("root"); const default_allocator = bun.default_allocator; const bun = @import("root").bun; const Environment = bun.Environment; -const NetworkThread = @import("root").bun.HTTP.NetworkThread; +const NetworkThread = @import("root").bun.http.NetworkThread; const Global = bun.Global; const strings = bun.strings; const string = bun.string; @@ -25,7 +25,6 @@ const ServerEntryPoint = bun.bundler.ServerEntryPoint; const js_printer = bun.js_printer; const js_parser = bun.js_parser; const js_ast = bun.JSAst; -const http = @import("../../bun_dev_http_server.zig"); const NodeFallbackModules = @import("../../node_fallbacks.zig"); const ImportKind = ast.ImportKind; const Analytics = @import("../../analytics/analytics_thread.zig"); diff --git a/src/bun.js/api/filesystem_router.zig b/src/bun.js/api/filesystem_router.zig index f3306629842ebc..ee91d53cc5fdaa 100644 --- a/src/bun.js/api/filesystem_router.zig +++ b/src/bun.js/api/filesystem_router.zig @@ -1,6 +1,5 @@ const std = @import("std"); const Api = @import("../../api/schema.zig").Api; -const http = @import("../../bun_dev_http_server.zig"); const 
JavaScript = @import("../javascript.zig"); const QueryStringMap = @import("../../url.zig").QueryStringMap; const CombinedScanner = @import("../../url.zig").CombinedScanner; diff --git a/src/bun.js/api/server.zig b/src/bun.js/api/server.zig index 73a6a512a209fa..1282aa381dd20f 100644 --- a/src/bun.js/api/server.zig +++ b/src/bun.js/api/server.zig @@ -2,7 +2,7 @@ const Bun = @This(); const default_allocator = @import("root").bun.default_allocator; const bun = @import("root").bun; const Environment = bun.Environment; -const NetworkThread = @import("root").bun.HTTP.NetworkThread; +const NetworkThread = @import("root").bun.http.NetworkThread; const Global = bun.Global; const strings = bun.strings; const string = bun.string; @@ -25,7 +25,6 @@ const ServerEntryPoint = bun.bundler.ServerEntryPoint; const js_printer = bun.js_printer; const js_parser = bun.js_parser; const js_ast = bun.JSAst; -const http = @import("../../bun_dev_http_server.zig"); const NodeFallbackModules = @import("../../node_fallbacks.zig"); const ImportKind = ast.ImportKind; const Analytics = @import("../../analytics/analytics_thread.zig"); @@ -41,7 +40,7 @@ const Request = WebCore.Request; const Response = WebCore.Response; const Headers = WebCore.Headers; const Fetch = WebCore.Fetch; -const HTTP = @import("root").bun.HTTP; +const HTTP = @import("root").bun.http; const FetchEvent = WebCore.FetchEvent; const js = @import("root").bun.JSC.C; const JSC = @import("root").bun.JSC; @@ -3009,9 +3008,11 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp }; var has_content_disposition = false; + var has_content_range = false; if (response.init.headers) |headers_| { has_content_disposition = headers_.fastHas(.ContentDisposition); - needs_content_range = needs_content_range and headers_.fastHas(.ContentRange); + has_content_range = headers_.fastHas(.ContentRange); + needs_content_range = needs_content_range and has_content_range; if (needs_content_range) { status = 206; } @@ -3058,7 
+3059,7 @@ fn NewRequestContext(comptime ssl_enabled: bool, comptime debug_mode: bool, comp this.flags.needs_content_length = false; } - if (needs_content_range) { + if (needs_content_range and !has_content_range) { var content_range_buf: [1024]u8 = undefined; resp.writeHeader( diff --git a/src/bun.js/bindings/KeyObject.cpp b/src/bun.js/bindings/KeyObject.cpp index e097c2dfd2ebf5..7b0e57d9771685 100644 --- a/src/bun.js/bindings/KeyObject.cpp +++ b/src/bun.js/bindings/KeyObject.cpp @@ -42,7 +42,14 @@ #include #include #include "JSBuffer.h" - +#include "CryptoAlgorithmHMAC.h" +#include "CryptoAlgorithmEd25519.h" +#include "CryptoAlgorithmRSA_PSS.h" +#include "CryptoAlgorithmRSASSA_PKCS1_v1_5.h" +#include "CryptoAlgorithmECDSA.h" +#include "CryptoAlgorithmEcdsaParams.h" +#include "CryptoAlgorithmRsaPssParams.h" +#include "CryptoAlgorithmRegistry.h" using namespace JSC; using namespace Bun; using JSGlobalObject @@ -1239,6 +1246,492 @@ JSC::EncodedJSValue KeyObject__createSecretKey(JSC::JSGlobalObject* lexicalGloba return JSValue::encode(JSC::jsUndefined()); } +static ExceptionOr> KeyObject__GetBuffer(JSValue bufferArg) +{ + if (!bufferArg.isCell()) { + return Exception { OperationError }; + } + + auto bufferArgCell = bufferArg.asCell(); + auto type = bufferArgCell->type(); + + switch (type) { + case DataViewType: + case Uint8ArrayType: + case Uint8ClampedArrayType: + case Uint16ArrayType: + case Uint32ArrayType: + case Int8ArrayType: + case Int16ArrayType: + case Int32ArrayType: + case Float32ArrayType: + case Float64ArrayType: + case BigInt64ArrayType: + case BigUint64ArrayType: { + JSC::JSArrayBufferView* view = jsCast(bufferArgCell); + + void* data = view->vector(); + size_t byteLength = view->length(); + if (UNLIKELY(!data)) { + break; + } + return Vector((uint8_t*)data, byteLength); + } + case ArrayBufferType: { + auto* jsBuffer = jsDynamicCast(bufferArgCell); + if (UNLIKELY(!jsBuffer)) { + break; + } + auto* buffer = jsBuffer->impl(); + void* data = 
buffer->data(); + size_t byteLength = buffer->byteLength(); + if (UNLIKELY(!data)) { + break; + } + return Vector((uint8_t*)data, byteLength); + } + default: { + break; + } + } + return Exception { OperationError }; +} +JSC::EncodedJSValue KeyObject__Sign(JSC::JSGlobalObject* globalObject, JSC::CallFrame* callFrame) +{ + auto count = callFrame->argumentCount(); + auto& vm = globalObject->vm(); + auto scope = DECLARE_THROW_SCOPE(vm); + + if (count < 3) { + JSC::throwTypeError(globalObject, scope, "sign requires 3 arguments"_s); + return JSC::JSValue::encode(JSC::JSValue {}); + } + + auto* key = jsDynamicCast(callFrame->argument(0)); + if (!key) { + // No JSCryptoKey instance + JSC::throwTypeError(globalObject, scope, "expected CryptoKey as first argument"_s); + return JSC::JSValue::encode(JSC::JSValue {}); + } + JSValue bufferArg = callFrame->uncheckedArgument(1); + + auto buffer = KeyObject__GetBuffer(bufferArg); + if (buffer.hasException()) { + JSC::throwTypeError(globalObject, scope, "expected Buffer or array-like object as second argument"_s); + return JSC::JSValue::encode(JSC::JSValue {}); + } + auto vectorData = buffer.releaseReturnValue(); + auto& wrapped = key->wrapped(); + auto key_type = wrapped.type(); + auto id = wrapped.keyClass(); + + auto hash = WebCore::CryptoAlgorithmIdentifier::SHA_256; + auto algorithm = callFrame->argument(2); + auto customHash = false; + if (!algorithm.isUndefinedOrNull() && !algorithm.isEmpty()) { + customHash = true; + if (!algorithm.isString()) { + JSC::throwTypeError(globalObject, scope, "algorithm is expected to be a string"_s); + return JSC::JSValue::encode(JSC::JSValue {}); + } + auto algorithm_str = algorithm.toWTFString(globalObject); + RETURN_IF_EXCEPTION(scope, encodedJSValue()); + + auto identifier = CryptoAlgorithmRegistry::singleton().identifier(algorithm_str); + if (UNLIKELY(!identifier)) { + JSC::throwTypeError(globalObject, scope, "digest not allowed"_s); + return JSC::JSValue::encode(JSC::JSValue {}); + } + + 
switch (*identifier) { + case WebCore::CryptoAlgorithmIdentifier::SHA_1: + case WebCore::CryptoAlgorithmIdentifier::SHA_224: + case WebCore::CryptoAlgorithmIdentifier::SHA_256: + case WebCore::CryptoAlgorithmIdentifier::SHA_384: + case WebCore::CryptoAlgorithmIdentifier::SHA_512: { + + hash = *identifier; + break; + } + default: { + JSC::throwTypeError(globalObject, scope, "digest not allowed"_s); + return JSC::JSValue::encode(JSC::JSValue {}); + } + } + } + + switch (id) { + case CryptoKeyClass::HMAC: { + const auto& hmac = downcast(wrapped); + auto result = (customHash) ? WebCore::CryptoAlgorithmHMAC::platformSignWithAlgorithm(hmac, hash, vectorData) : WebCore::CryptoAlgorithmHMAC::platformSign(hmac, vectorData); + if (result.hasException()) { + WebCore::propagateException(*globalObject, scope, result.releaseException()); + return JSC::JSValue::encode(JSC::JSValue {}); + } + auto resultData = result.releaseReturnValue(); + auto size = resultData.size(); + auto* buffer = jsCast(JSValue::decode(JSBuffer__bufferFromLength(globalObject, size))); + if (size > 0) + memcpy(buffer->vector(), resultData.data(), size); + + return JSC::JSValue::encode(buffer); + } + case CryptoKeyClass::OKP: { + const auto& okpKey = downcast(wrapped); + auto result = WebCore::CryptoAlgorithmEd25519::platformSign(okpKey, vectorData); + if (result.hasException()) { + WebCore::propagateException(*globalObject, scope, result.releaseException()); + return JSC::JSValue::encode(JSC::JSValue {}); + } + auto resultData = result.releaseReturnValue(); + auto size = resultData.size(); + auto* buffer = jsCast(JSValue::decode(JSBuffer__bufferFromLength(globalObject, size))); + if (size > 0) + memcpy(buffer->vector(), resultData.data(), size); + + return JSC::JSValue::encode(buffer); + } + case CryptoKeyClass::EC: { + const auto& ec = downcast(wrapped); + CryptoAlgorithmEcdsaParams params; + params.identifier = CryptoAlgorithmIdentifier::ECDSA; + params.hashIdentifier = hash; + params.encoding = 
CryptoAlgorithmECDSAEncoding::DER; + + if (count > 3) { + auto encoding = callFrame->argument(3); + if (!encoding.isUndefinedOrNull() && !encoding.isEmpty()) { + if (!encoding.isString()) { + JSC::throwTypeError(globalObject, scope, "dsaEncoding is expected to be a string"_s); + return JSC::JSValue::encode(JSC::JSValue {}); + } + auto encoding_str = encoding.toWTFString(globalObject); + RETURN_IF_EXCEPTION(scope, encodedJSValue()); + + if (encoding_str == "ieee-p1363"_s) { + params.encoding = CryptoAlgorithmECDSAEncoding::IeeeP1363; + } else if (encoding_str == "der"_s) { + params.encoding = CryptoAlgorithmECDSAEncoding::DER; + } else { + JSC::throwTypeError(globalObject, scope, "invalid dsaEncoding"_s); + return JSC::JSValue::encode(JSC::JSValue {}); + } + } + } + auto result = WebCore::CryptoAlgorithmECDSA::platformSign(params, ec, vectorData); + if (result.hasException()) { + WebCore::propagateException(*globalObject, scope, result.releaseException()); + return JSC::JSValue::encode(JSC::JSValue {}); + } + auto resultData = result.releaseReturnValue(); + auto size = resultData.size(); + auto* buffer = jsCast(JSValue::decode(JSBuffer__bufferFromLength(globalObject, size))); + if (size > 0) + memcpy(buffer->vector(), resultData.data(), size); + + return JSC::JSValue::encode(buffer); + } + case CryptoKeyClass::RSA: { + const auto& rsa = downcast(wrapped); + CryptoAlgorithmIdentifier restrict_hash; + bool isRestrictedToHash = rsa.isRestrictedToHash(restrict_hash); + if (isRestrictedToHash && hash != restrict_hash) { + JSC::throwTypeError(globalObject, scope, "digest not allowed"_s); + return JSC::JSValue::encode(JSC::JSValue {}); + } + switch (rsa.algorithmIdentifier()) { + case CryptoAlgorithmIdentifier::RSASSA_PKCS1_v1_5: { + auto result = (customHash) ? 
WebCore::CryptoAlgorithmRSASSA_PKCS1_v1_5::platformSignWithAlgorithm(rsa, hash, vectorData) : CryptoAlgorithmRSASSA_PKCS1_v1_5::platformSign(rsa, vectorData); + if (result.hasException()) { + WebCore::propagateException(*globalObject, scope, result.releaseException()); + return JSC::JSValue::encode(JSC::JSValue {}); + } + auto resultData = result.releaseReturnValue(); + auto size = resultData.size(); + auto* buffer = jsCast(JSValue::decode(JSBuffer__bufferFromLength(globalObject, size))); + if (size > 0) + memcpy(buffer->vector(), resultData.data(), size); + + return JSC::JSValue::encode(buffer); + } + case CryptoAlgorithmIdentifier::RSA_PSS: { + CryptoAlgorithmRsaPssParams params; + params.padding = RSA_PKCS1_PADDING; + if (count > 4) { + auto padding = callFrame->argument(4); + if (!padding.isUndefinedOrNull() && !padding.isEmpty()) { + if (!padding.isNumber()) { + JSC::throwTypeError(globalObject, scope, "padding is expected to be a number"_s); + return JSC::JSValue::encode(JSC::JSValue {}); + } + params.padding = padding.toUInt32(globalObject); + } + // requires saltLength + if (params.padding == RSA_PKCS1_PSS_PADDING) { + if (count <= 5) { + JSC::throwTypeError(globalObject, scope, "saltLength is expected to be a number"_s); + return JSC::JSValue::encode(JSC::JSValue {}); + } + + auto saltLength = callFrame->argument(5); + if (saltLength.isUndefinedOrNull() || saltLength.isEmpty() || !saltLength.isNumber()) { + JSC::throwTypeError(globalObject, scope, "saltLength is expected to be a number"_s); + return JSC::JSValue::encode(JSC::JSValue {}); + } + params.saltLength = saltLength.toUInt32(globalObject); + } else if (count > 5) { + auto saltLength = callFrame->argument(5); + if (!saltLength.isUndefinedOrNull() && !saltLength.isEmpty() && !saltLength.isNumber()) { + JSC::throwTypeError(globalObject, scope, "saltLength is expected to be a number"_s); + return JSC::JSValue::encode(JSC::JSValue {}); + } + params.saltLength = saltLength.toUInt32(globalObject); + 
params.padding = RSA_PKCS1_PSS_PADDING; // if saltLength is provided, padding must be RSA_PKCS1_PSS_PADDING + } + } + params.identifier = CryptoAlgorithmIdentifier::RSA_PSS; + auto result = (customHash) ? WebCore::CryptoAlgorithmRSA_PSS::platformSignWithAlgorithm(params, hash, rsa, vectorData) : CryptoAlgorithmRSA_PSS::platformSign(params, rsa, vectorData); + if (result.hasException()) { + WebCore::propagateException(*globalObject, scope, result.releaseException()); + return JSC::JSValue::encode(JSC::JSValue {}); + } + auto resultData = result.releaseReturnValue(); + auto size = resultData.size(); + auto* buffer = jsCast(JSValue::decode(JSBuffer__bufferFromLength(globalObject, size))); + if (size > 0) + memcpy(buffer->vector(), resultData.data(), size); + + return JSC::JSValue::encode(buffer); + } + default: { + JSC::throwTypeError(globalObject, scope, "ERR_CRYPTO_INVALID_KEY_OBJECT_TYPE: Sign not supported for this key type"_s); + return JSC::JSValue::encode(JSC::JSValue {}); + } + } + } + case CryptoKeyClass::AES: { + JSC::throwTypeError(globalObject, scope, "ERR_CRYPTO_INVALID_KEY_OBJECT_TYPE: Sign not supported for AES key type"_s); + return JSC::JSValue::encode(JSC::JSValue {}); + } + case CryptoKeyClass::Raw: { + JSC::throwTypeError(globalObject, scope, "ERR_CRYPTO_INVALID_KEY_OBJECT_TYPE: Sign not supported for Raw key type"_s); + return JSC::JSValue::encode(JSC::JSValue {}); + } + default: { + JSC::throwTypeError(globalObject, scope, "ERR_CRYPTO_INVALID_KEY_OBJECT_TYPE: Sign not supported for this key type"_s); + return JSC::JSValue::encode(JSC::JSValue {}); + } + } +} + +JSC::EncodedJSValue KeyObject__Verify(JSC::JSGlobalObject* globalObject, JSC::CallFrame* callFrame) +{ + auto count = callFrame->argumentCount(); + auto& vm = globalObject->vm(); + auto scope = DECLARE_THROW_SCOPE(vm); + + if (count < 4) { + JSC::throwTypeError(globalObject, scope, "verify requires 4 arguments"_s); + return JSC::JSValue::encode(JSC::JSValue {}); + } + + auto* key = 
jsDynamicCast(callFrame->argument(0)); + if (!key) { + // No JSCryptoKey instance + JSC::throwTypeError(globalObject, scope, "expected CryptoKey as first argument"_s); + return JSC::JSValue::encode(JSC::JSValue {}); + } + JSValue bufferArg = callFrame->uncheckedArgument(1); + auto buffer = KeyObject__GetBuffer(bufferArg); + if (buffer.hasException()) { + JSC::throwTypeError(globalObject, scope, "expected data to be Buffer or array-like object as second argument"_s); + return JSC::JSValue::encode(JSC::JSValue {}); + } + auto vectorData = buffer.releaseReturnValue(); + + JSValue signatureBufferArg = callFrame->uncheckedArgument(2); + auto signatureBuffer = KeyObject__GetBuffer(signatureBufferArg); + if (signatureBuffer.hasException()) { + JSC::throwTypeError(globalObject, scope, "expected signature to be Buffer or array-like object as second argument"_s); + return JSC::JSValue::encode(JSC::JSValue {}); + } + auto signatureData = signatureBuffer.releaseReturnValue(); + + auto& wrapped = key->wrapped(); + auto key_type = wrapped.type(); + auto id = wrapped.keyClass(); + + auto hash = WebCore::CryptoAlgorithmIdentifier::SHA_256; + auto customHash = false; + + auto algorithm = callFrame->argument(3); + if (!algorithm.isUndefinedOrNull() && !algorithm.isEmpty()) { + customHash = true; + if (!algorithm.isString()) { + JSC::throwTypeError(globalObject, scope, "algorithm is expected to be a string"_s); + return JSC::JSValue::encode(JSC::JSValue {}); + } + auto algorithm_str = algorithm.toWTFString(globalObject); + RETURN_IF_EXCEPTION(scope, encodedJSValue()); + + auto identifier = CryptoAlgorithmRegistry::singleton().identifier(algorithm_str); + if (UNLIKELY(!identifier)) { + JSC::throwTypeError(globalObject, scope, "digest not allowed"_s); + return JSC::JSValue::encode(JSC::JSValue {}); + } + + switch (*identifier) { + case WebCore::CryptoAlgorithmIdentifier::SHA_1: + case WebCore::CryptoAlgorithmIdentifier::SHA_224: + case WebCore::CryptoAlgorithmIdentifier::SHA_256: + 
case WebCore::CryptoAlgorithmIdentifier::SHA_384: + case WebCore::CryptoAlgorithmIdentifier::SHA_512: { + + hash = *identifier; + break; + } + default: { + JSC::throwTypeError(globalObject, scope, "digest not allowed"_s); + return JSC::JSValue::encode(JSC::JSValue {}); + } + } + } + + switch (id) { + case CryptoKeyClass::HMAC: { + const auto& hmac = downcast(wrapped); + auto result = (customHash) ? WebCore::CryptoAlgorithmHMAC::platformVerifyWithAlgorithm(hmac, hash, signatureData, vectorData) : WebCore::CryptoAlgorithmHMAC::platformVerify(hmac, signatureData, vectorData); + if (result.hasException()) { + WebCore::propagateException(*globalObject, scope, result.releaseException()); + return JSC::JSValue::encode(JSC::JSValue {}); + } + return JSC::JSValue::encode(jsBoolean(result.releaseReturnValue())); + } + case CryptoKeyClass::OKP: { + const auto& okpKey = downcast(wrapped); + auto result = WebCore::CryptoAlgorithmEd25519::platformVerify(okpKey, signatureData, vectorData); + if (result.hasException()) { + WebCore::propagateException(*globalObject, scope, result.releaseException()); + return JSC::JSValue::encode(JSC::JSValue {}); + } + return JSC::JSValue::encode(jsBoolean(result.releaseReturnValue())); + } + case CryptoKeyClass::EC: { + const auto& ec = downcast(wrapped); + CryptoAlgorithmEcdsaParams params; + params.identifier = CryptoAlgorithmIdentifier::ECDSA; + params.hashIdentifier = hash; + params.encoding = CryptoAlgorithmECDSAEncoding::DER; + + if (count > 4) { + auto encoding = callFrame->argument(4); + if (!encoding.isUndefinedOrNull() && !encoding.isEmpty()) { + if (!encoding.isString()) { + JSC::throwTypeError(globalObject, scope, "dsaEncoding is expected to be a string"_s); + return JSC::JSValue::encode(JSC::JSValue {}); + } + auto encoding_str = encoding.toWTFString(globalObject); + RETURN_IF_EXCEPTION(scope, encodedJSValue()); + + if (encoding_str == "ieee-p1363"_s) { + params.encoding = CryptoAlgorithmECDSAEncoding::IeeeP1363; + } else if 
(encoding_str == "der"_s) { + params.encoding = CryptoAlgorithmECDSAEncoding::DER; + } else { + JSC::throwTypeError(globalObject, scope, "invalid dsaEncoding"_s); + return JSC::JSValue::encode(JSC::JSValue {}); + } + } + } + auto result = WebCore::CryptoAlgorithmECDSA::platformVerify(params, ec, signatureData, vectorData); + if (result.hasException()) { + WebCore::propagateException(*globalObject, scope, result.releaseException()); + return JSC::JSValue::encode(JSC::JSValue {}); + } + return JSC::JSValue::encode(jsBoolean(result.releaseReturnValue())); + } + case CryptoKeyClass::RSA: { + const auto& rsa = downcast(wrapped); + CryptoAlgorithmIdentifier restrict_hash; + bool isRestrictedToHash = rsa.isRestrictedToHash(restrict_hash); + if (isRestrictedToHash && hash != restrict_hash) { + JSC::throwTypeError(globalObject, scope, "digest not allowed"_s); + return JSC::JSValue::encode(JSC::JSValue {}); + } + switch (rsa.algorithmIdentifier()) { + case CryptoAlgorithmIdentifier::RSASSA_PKCS1_v1_5: { + auto result = (customHash) ? 
WebCore::CryptoAlgorithmRSASSA_PKCS1_v1_5::platformVerifyWithAlgorithm(rsa, hash, signatureData, vectorData) : CryptoAlgorithmRSASSA_PKCS1_v1_5::platformVerify(rsa, signatureData, vectorData); + if (result.hasException()) { + WebCore::propagateException(*globalObject, scope, result.releaseException()); + return JSC::JSValue::encode(JSC::JSValue {}); + } + return JSC::JSValue::encode(jsBoolean(result.releaseReturnValue())); + } + case CryptoAlgorithmIdentifier::RSA_PSS: { + CryptoAlgorithmRsaPssParams params; + params.padding = RSA_PKCS1_PADDING; + if (count > 5) { + + auto padding = callFrame->argument(5); + if (!padding.isUndefinedOrNull() && !padding.isEmpty()) { + if (!padding.isNumber()) { + JSC::throwTypeError(globalObject, scope, "padding is expected to be a number"_s); + return JSC::JSValue::encode(JSC::JSValue {}); + } + params.padding = padding.toUInt32(globalObject); + } + // requires saltLength + if (params.padding == RSA_PKCS1_PSS_PADDING) { + if (count <= 6) { + JSC::throwTypeError(globalObject, scope, "saltLength is expected to be a number"_s); + return JSC::JSValue::encode(JSC::JSValue {}); + } + + auto saltLength = callFrame->argument(6); + if (saltLength.isUndefinedOrNull() || saltLength.isEmpty() || !saltLength.isNumber()) { + JSC::throwTypeError(globalObject, scope, "saltLength is expected to be a number"_s); + return JSC::JSValue::encode(JSC::JSValue {}); + } + params.saltLength = saltLength.toUInt32(globalObject); + } else if (count > 6) { + auto saltLength = callFrame->argument(6); + if (!saltLength.isUndefinedOrNull() && !saltLength.isEmpty() && !saltLength.isNumber()) { + JSC::throwTypeError(globalObject, scope, "saltLength is expected to be a number"_s); + return JSC::JSValue::encode(JSC::JSValue {}); + } + params.saltLength = saltLength.toUInt32(globalObject); + params.padding = RSA_PKCS1_PSS_PADDING; // if saltLength is provided, padding must be RSA_PKCS1_PSS_PADDING + } + } + params.identifier = CryptoAlgorithmIdentifier::RSA_PSS; + auto 
result = (customHash) ? WebCore::CryptoAlgorithmRSA_PSS::platformVerifyWithAlgorithm(params, hash, rsa, signatureData, vectorData) : CryptoAlgorithmRSA_PSS::platformVerify(params, rsa, signatureData, vectorData); + if (result.hasException()) { + WebCore::propagateException(*globalObject, scope, result.releaseException()); + return JSC::JSValue::encode(JSC::JSValue {}); + } + return JSC::JSValue::encode(jsBoolean(result.releaseReturnValue())); + } + default: { + JSC::throwTypeError(globalObject, scope, "ERR_CRYPTO_INVALID_KEY_OBJECT_TYPE: Verify not supported for RSA key type"_s); + return JSC::JSValue::encode(JSC::JSValue {}); + } + } + } + case CryptoKeyClass::AES: { + JSC::throwTypeError(globalObject, scope, "ERR_CRYPTO_INVALID_KEY_OBJECT_TYPE: Verify not supported for AES key type"_s); + return JSC::JSValue::encode(JSC::JSValue {}); + } + case CryptoKeyClass::Raw: { + JSC::throwTypeError(globalObject, scope, "ERR_CRYPTO_INVALID_KEY_OBJECT_TYPE: Verify not supported for Raw key type"_s); + return JSC::JSValue::encode(JSC::JSValue {}); + } + default: { + JSC::throwTypeError(globalObject, scope, "ERR_CRYPTO_INVALID_KEY_OBJECT_TYPE: Verify not supported for this key type"_s); + return JSC::JSValue::encode(JSC::JSValue {}); + } + } +} + JSC::EncodedJSValue KeyObject__Exports(JSC::JSGlobalObject* globalObject, JSC::CallFrame* callFrame) { @@ -1316,11 +1809,16 @@ JSC::EncodedJSValue KeyObject__Exports(JSC::JSGlobalObject* globalObject, JSC::C case CryptoKeyClass::RSA: { const auto& rsa = downcast(wrapped); if (string == "jwk"_s) { + if (rsa.algorithmIdentifier() == CryptoAlgorithmIdentifier::RSA_PSS) { + JSC::throwTypeError(globalObject, scope, "ERR_CRYPTO_JWK_UNSUPPORTED_KEY_TYPE: encryption is not supported for jwk format"_s); + return JSC::JSValue::encode(JSC::JSValue {}); + } const JsonWebKey& jwkValue = rsa.exportJwk(); Zig::GlobalObject* domGlobalObject = reinterpret_cast(globalObject); return JSC::JSValue::encode(WebCore::convertDictionaryToJS(*globalObject, 
*domGlobalObject, jwkValue, true)); } else { WTF::String type = "pkcs1"_s; + if (!typeJSValue.isUndefinedOrNull() && !typeJSValue.isEmpty()) { if (!typeJSValue.isString()) { JSC::throwTypeError(globalObject, scope, "type must be a string"_s); @@ -1329,6 +1827,12 @@ JSC::EncodedJSValue KeyObject__Exports(JSC::JSGlobalObject* globalObject, JSC::C type = typeJSValue.toWTFString(globalObject); RETURN_IF_EXCEPTION(scope, encodedJSValue()); } + if (type == "pkcs1"_s) { + if (rsa.algorithmIdentifier() == CryptoAlgorithmIdentifier::RSA_PSS) { + JSC::throwTypeError(globalObject, scope, "ERR_CRYPTO_JWK_UNSUPPORTED_KEY_TYPE: encryption is not supported for jwk format"_s); + return JSC::JSValue::encode(JSC::JSValue {}); + } + } auto* bio = BIO_new(BIO_s_mem()); auto* rsaKey = rsa.platformKey(); @@ -2044,7 +2548,6 @@ JSC::EncodedJSValue KeyObject__generateKeyPairSync(JSC::JSGlobalObject* lexicalG Zig::GlobalObject* zigGlobalObject = reinterpret_cast(lexicalGlobalObject); auto* structure = zigGlobalObject->JSCryptoKeyStructure(); - // TODO: rsa-pss if (type_str == "rsa"_s) { if (count == 1) { JSC::throwTypeError(lexicalGlobalObject, scope, "options.modulusLength are required for rsa"_s); @@ -2068,6 +2571,7 @@ JSC::EncodedJSValue KeyObject__generateKeyPairSync(JSC::JSGlobalObject* lexicalG JSC::throwTypeError(lexicalGlobalObject, scope, "options.publicExponent is expected to be a number"_s); return JSC::JSValue::encode(JSC::JSValue {}); } + uint8_t publicExponentArray[4]; publicExponentArray[0] = (uint8_t)(publicExponent >> 24); publicExponentArray[1] = (uint8_t)(publicExponent >> 16); @@ -2091,6 +2595,91 @@ JSC::EncodedJSValue KeyObject__generateKeyPairSync(JSC::JSGlobalObject* lexicalG // this is actually sync CryptoKeyRSA::generatePair(CryptoAlgorithmIdentifier::RSA_OAEP, CryptoAlgorithmIdentifier::SHA_1, false, modulusLength, Vector((uint8_t*)&publicExponentArray, 4), true, CryptoKeyUsageEncrypt | CryptoKeyUsageDecrypt, WTFMove(keyPairCallback), WTFMove(failureCallback), 
zigGlobalObject->scriptExecutionContext()); return JSValue::encode(returnValue); + } + if (type_str == "rsa-pss"_s) { + if (count == 1) { + JSC::throwTypeError(lexicalGlobalObject, scope, "options.modulusLength are required for rsa"_s); + return JSC::JSValue::encode(JSC::JSValue {}); + } + auto* options = jsDynamicCast(callFrame->argument(1)); + if (options == nullptr) { + JSC::throwTypeError(lexicalGlobalObject, scope, "options is expected to be a object"_s); + return JSC::JSValue::encode(JSC::JSValue {}); + } + auto modulusLengthJS = options->getIfPropertyExists(lexicalGlobalObject, PropertyName(Identifier::fromString(vm, "modulusLength"_s))); + if (!modulusLengthJS.isNumber()) { + JSC::throwTypeError(lexicalGlobalObject, scope, "options.modulusLength is expected to be a number"_s); + return JSC::JSValue::encode(JSC::JSValue {}); + } + auto publicExponentJS = options->getIfPropertyExists(lexicalGlobalObject, PropertyName(Identifier::fromString(vm, "publicExponent"_s))); + uint32_t publicExponent = 0x10001; + if (publicExponentJS.isNumber()) { + publicExponent = publicExponentJS.toUInt32(lexicalGlobalObject); + } else if (!publicExponentJS.isUndefinedOrNull() && !publicExponentJS.isEmpty()) { + JSC::throwTypeError(lexicalGlobalObject, scope, "options.publicExponent is expected to be a number"_s); + return JSC::JSValue::encode(JSC::JSValue {}); + } + uint8_t publicExponentArray[4]; + publicExponentArray[0] = (uint8_t)(publicExponent >> 24); + publicExponentArray[1] = (uint8_t)(publicExponent >> 16); + publicExponentArray[2] = (uint8_t)(publicExponent >> 8); + publicExponentArray[3] = (uint8_t)publicExponent; + + int modulusLength = modulusLengthJS.toUInt32(lexicalGlobalObject); + auto returnValue = JSC::JSValue {}; + auto keyPairCallback = [&](CryptoKeyPair&& pair) { + pair.publicKey->setUsagesBitmap(pair.publicKey->usagesBitmap() & CryptoKeyUsageVerify); + pair.privateKey->setUsagesBitmap(pair.privateKey->usagesBitmap() & CryptoKeyUsageSign); + + auto obj = 
JSC::constructEmptyObject(lexicalGlobalObject, lexicalGlobalObject->objectPrototype(), 2); + obj->putDirect(vm, JSC::PropertyName(JSC::Identifier::fromString(vm, "publicKey"_s)), JSCryptoKey::create(structure, zigGlobalObject, pair.publicKey.releaseNonNull()), 0); + obj->putDirect(vm, JSC::PropertyName(JSC::Identifier::fromString(vm, "privateKey"_s)), JSCryptoKey::create(structure, zigGlobalObject, pair.privateKey.releaseNonNull()), 0); + returnValue = obj; + }; + + auto hashAlgoJS = options->getIfPropertyExists(lexicalGlobalObject, PropertyName(Identifier::fromString(vm, "hashAlgorithm"_s))); + auto hasHash = false; + auto hash = CryptoAlgorithmIdentifier::SHA_1; + if (!hashAlgoJS.isUndefinedOrNull() && !hashAlgoJS.isEmpty()) { + if (!hashAlgoJS.isString()) { + JSC::throwTypeError(lexicalGlobalObject, scope, "options.hashAlgorithm is expected to be a string"_s); + return JSC::JSValue::encode(JSC::JSValue {}); + } + hasHash = true; + auto hashAlgo = hashAlgoJS.toWTFString(lexicalGlobalObject); + RETURN_IF_EXCEPTION(scope, encodedJSValue()); + + auto identifier = CryptoAlgorithmRegistry::singleton().identifier(hashAlgo); + if (UNLIKELY(!identifier)) { + JSC::throwTypeError(lexicalGlobalObject, scope, "options.hashAlgorithm is invalid"_s); + return JSC::JSValue::encode(JSC::JSValue {}); + } + + switch (*identifier) { + case WebCore::CryptoAlgorithmIdentifier::SHA_1: + case WebCore::CryptoAlgorithmIdentifier::SHA_224: + case WebCore::CryptoAlgorithmIdentifier::SHA_256: + case WebCore::CryptoAlgorithmIdentifier::SHA_384: + case WebCore::CryptoAlgorithmIdentifier::SHA_512: { + + hash = *identifier; + break; + } + default: { + JSC::throwTypeError(lexicalGlobalObject, scope, "options.hashAlgorithm is invalid"_s); + return JSC::JSValue::encode(JSC::JSValue {}); + } + } + } + + auto saltLengthJS = options->getIfPropertyExists(lexicalGlobalObject, PropertyName(Identifier::fromString(vm, "saltLength"_s))); + + auto failureCallback = [&]() { + 
throwException(lexicalGlobalObject, scope, createTypeError(lexicalGlobalObject, "Failed to generate key pair"_s)); + }; + // this is actually sync + CryptoKeyRSA::generatePair(CryptoAlgorithmIdentifier::RSA_PSS, hash, hasHash, modulusLength, Vector((uint8_t*)&publicExponentArray, 4), true, CryptoKeyUsageSign | CryptoKeyUsageVerify, WTFMove(keyPairCallback), WTFMove(failureCallback), zigGlobalObject->scriptExecutionContext()); + return JSValue::encode(returnValue); } else if (type_str == "ec"_s) { if (count == 1) { JSC::throwTypeError(lexicalGlobalObject, scope, "options.namedCurve is required for ec"_s); @@ -2152,7 +2741,7 @@ JSC::EncodedJSValue KeyObject__generateKeyPairSync(JSC::JSGlobalObject* lexicalG obj->putDirect(vm, JSC::PropertyName(JSC::Identifier::fromString(vm, "privateKey"_s)), JSCryptoKey::create(structure, zigGlobalObject, pair.privateKey.releaseNonNull()), 0); return JSValue::encode(obj); } else { - throwException(lexicalGlobalObject, scope, createTypeError(lexicalGlobalObject, "algorithm should be 'rsa', 'ec', 'x25519' or 'ed25519'"_s)); + throwException(lexicalGlobalObject, scope, createTypeError(lexicalGlobalObject, "algorithm should be 'rsa', 'rsa-pss', 'ec', 'x25519' or 'ed25519'"_s)); return JSValue::encode(JSC::jsUndefined()); } return JSValue::encode(JSC::jsUndefined()); diff --git a/src/bun.js/bindings/KeyObject.h b/src/bun.js/bindings/KeyObject.h index c9b172e3b7b01e..bb28847ae91432 100644 --- a/src/bun.js/bindings/KeyObject.h +++ b/src/bun.js/bindings/KeyObject.h @@ -15,4 +15,7 @@ JSC::EncodedJSValue KeyObject__createPublicKey(JSC::JSGlobalObject* lexicalGloba JSC::EncodedJSValue KeyObject__createPrivateKey(JSC::JSGlobalObject* lexicalGlobalObject, JSC::CallFrame* callFrame); JSC::EncodedJSValue KeyObject__generateKeySync(JSC::JSGlobalObject* lexicalGlobalObject, JSC::CallFrame* callFrame); JSC::EncodedJSValue KeyObject__generateKeyPairSync(JSC::JSGlobalObject* lexicalGlobalObject, JSC::CallFrame* callFrame); +JSC::EncodedJSValue
KeyObject__Sign(JSC::JSGlobalObject* globalObject, JSC::CallFrame* callFrame); +JSC::EncodedJSValue KeyObject__Verify(JSC::JSGlobalObject* globalObject, JSC::CallFrame* callFrame); + } \ No newline at end of file diff --git a/src/bun.js/bindings/ZigGlobalObject.cpp b/src/bun.js/bindings/ZigGlobalObject.cpp index 5516cb716e8400..9a138e95352b93 100644 --- a/src/bun.js/bindings/ZigGlobalObject.cpp +++ b/src/bun.js/bindings/ZigGlobalObject.cpp @@ -874,8 +874,9 @@ GlobalObject::~GlobalObject() finalizer(toNapi(this), napiInstanceData, napiInstanceDataFinalizerHint); } - delete m_subtleCrypto; - scriptExecutionContext()->removeFromContextsMap(); + if (auto *ctx = scriptExecutionContext()) { + ctx->removeFromContextsMap(); + } } void GlobalObject::destroy(JSCell* cell) @@ -1754,6 +1755,9 @@ JSC_DEFINE_HOST_FUNCTION(functionLazyLoad, obj->putDirect(vm, JSC::PropertyName(JSC::Identifier::fromString(vm, "generateKeyPairSync"_s)), JSC::JSFunction::create(vm, globalObject, 2, "generateKeyPairSync"_s, KeyObject__generateKeyPairSync, ImplementationVisibility::Public, NoIntrinsic), 0); + obj->putDirect(vm, JSC::PropertyName(JSC::Identifier::fromString(vm, "sign"_s)), JSC::JSFunction::create(vm, globalObject, 3, "sign"_s, KeyObject__Sign, ImplementationVisibility::Public, NoIntrinsic), 0); + obj->putDirect(vm, JSC::PropertyName(JSC::Identifier::fromString(vm, "verify"_s)), JSC::JSFunction::create(vm, globalObject, 4, "verify"_s, KeyObject__Verify, ImplementationVisibility::Public, NoIntrinsic), 0); + return JSValue::encode(obj); } diff --git a/src/bun.js/bindings/napi.cpp b/src/bun.js/bindings/napi.cpp index 146b1f9cc08028..47353f331eb777 100644 --- a/src/bun.js/bindings/napi.cpp +++ b/src/bun.js/bindings/napi.cpp @@ -53,6 +53,7 @@ #include #include "napi_external.h" +#include // #include using namespace JSC; @@ -446,6 +447,38 @@ extern "C" napi_status napi_set_named_property(napi_env env, napi_value object, return napi_ok; } +extern "C" napi_status 
napi_create_arraybuffer(napi_env env, + size_t byte_length, void** data, + napi_value* result) + +{ + JSC::JSGlobalObject* globalObject = toJS(env); + if (UNLIKELY(!globalObject || !result)) { + return napi_invalid_arg; + } + + auto& vm = globalObject->vm(); + + auto scope = DECLARE_CATCH_SCOPE(vm); + + // Node probably doesn't create uninitialized array buffers + // but the node-api docs don't specify whether memory is initialized or not. + RefPtr arrayBuffer = ArrayBuffer::tryCreateUninitialized(byte_length, 1); + + if (!arrayBuffer) { + return napi_invalid_arg; + } + + auto* jsArrayBuffer = JSC::JSArrayBuffer::create(vm, globalObject->arrayBufferStructure(), WTFMove(arrayBuffer)); + RETURN_IF_EXCEPTION(scope, napi_generic_failure); + + if (LIKELY(data && jsArrayBuffer->impl())) { + *data = jsArrayBuffer->impl()->data(); + } + *result = toNapi(jsArrayBuffer); + return napi_ok; +} + // This is more efficient than using WTF::String::FromUTF8 // it doesn't copy the string // but it's only safe to use if we are not setting a property diff --git a/src/bun.js/bindings/webcore/JSEventEmitter.cpp b/src/bun.js/bindings/webcore/JSEventEmitter.cpp index e10c1fd65f69f0..43920bb0ef3a11 100644 --- a/src/bun.js/bindings/webcore/JSEventEmitter.cpp +++ b/src/bun.js/bindings/webcore/JSEventEmitter.cpp @@ -234,8 +234,6 @@ inline JSC::EncodedJSValue JSEventEmitter::addListener(JSC::JSGlobalObject* lexi EnsureStillAliveScope argument1 = callFrame->uncheckedArgument(1); auto listener = convert>>(*lexicalGlobalObject, argument1.value(), *castedThis, [](JSC::JSGlobalObject& lexicalGlobalObject, JSC::ThrowScope& scope) { throwArgumentMustBeObjectError(lexicalGlobalObject, scope, 1, "listener", "EventEmitter", "addListener"); }); RETURN_IF_EXCEPTION(throwScope, encodedJSValue()); - auto result = JSValue::encode(toJS(*lexicalGlobalObject, throwScope, [&]() -> decltype(auto) { return impl.addListenerForBindings(WTFMove(eventType), WTFMove(listener), once, prepend); })); - 
RETURN_IF_EXCEPTION(throwScope, encodedJSValue()); JSC::Identifier newListenerEventType = JSC::Identifier::fromString(vm, "newListener"_s); JSC::MarkedArgumentBuffer args; @@ -245,6 +243,9 @@ inline JSC::EncodedJSValue JSEventEmitter::addListener(JSC::JSGlobalObject* lexi auto result2 = JSValue::encode(toJS(*lexicalGlobalObject, throwScope, [&]() -> decltype(auto) { return impl.emitForBindings(WTFMove(newListenerEventType), WTFMove(args)); })); RETURN_IF_EXCEPTION(throwScope, encodedJSValue()); + auto result = JSValue::encode(toJS(*lexicalGlobalObject, throwScope, [&]() -> decltype(auto) { return impl.addListenerForBindings(WTFMove(eventType), WTFMove(listener), once, prepend); })); + RETURN_IF_EXCEPTION(throwScope, encodedJSValue()); + vm.writeBarrier(&static_cast(*castedThis), argument1.value()); impl.setThisObject(actualThis); RELEASE_AND_RETURN(throwScope, JSValue::encode(actualThis)); diff --git a/src/bun.js/bindings/webcrypto/CryptoAlgorithmECDSA.h b/src/bun.js/bindings/webcrypto/CryptoAlgorithmECDSA.h index b7c7889f8e0321..00687cd9a9d886 100644 --- a/src/bun.js/bindings/webcrypto/CryptoAlgorithmECDSA.h +++ b/src/bun.js/bindings/webcrypto/CryptoAlgorithmECDSA.h @@ -40,6 +40,8 @@ class CryptoAlgorithmECDSA final : public CryptoAlgorithm { static constexpr CryptoAlgorithmIdentifier s_identifier = CryptoAlgorithmIdentifier::ECDSA; static Ref create(); + static ExceptionOr> platformSign(const CryptoAlgorithmEcdsaParams&, const CryptoKeyEC&, const Vector&); + static ExceptionOr platformVerify(const CryptoAlgorithmEcdsaParams&, const CryptoKeyEC&, const Vector&, const Vector&); private: CryptoAlgorithmECDSA() = default; CryptoAlgorithmIdentifier identifier() const final; @@ -50,8 +52,6 @@ class CryptoAlgorithmECDSA final : public CryptoAlgorithm { void importKey(CryptoKeyFormat, KeyData&&, const CryptoAlgorithmParameters&, bool extractable, CryptoKeyUsageBitmap, KeyCallback&&, ExceptionCallback&&) final; void exportKey(CryptoKeyFormat, Ref&&, KeyDataCallback&&, 
ExceptionCallback&&) final; - static ExceptionOr> platformSign(const CryptoAlgorithmEcdsaParams&, const CryptoKeyEC&, const Vector&); - static ExceptionOr platformVerify(const CryptoAlgorithmEcdsaParams&, const CryptoKeyEC&, const Vector&, const Vector&); }; } // namespace WebCore diff --git a/src/bun.js/bindings/webcrypto/CryptoAlgorithmECDSAOpenSSL.cpp b/src/bun.js/bindings/webcrypto/CryptoAlgorithmECDSAOpenSSL.cpp index 46aec1869e1501..de6f773f560118 100644 --- a/src/bun.js/bindings/webcrypto/CryptoAlgorithmECDSAOpenSSL.cpp +++ b/src/bun.js/bindings/webcrypto/CryptoAlgorithmECDSAOpenSSL.cpp @@ -56,46 +56,80 @@ ExceptionOr> CryptoAlgorithmECDSA::platformSign(const CryptoAlgo if (!sig) return Exception { OperationError }; - const BIGNUM* r; - const BIGNUM* s; - ECDSA_SIG_get0(sig.get(), &r, &s); - - // Concatenate r and s, expanding r and s to keySizeInBytes. - Vector signature = convertToBytesExpand(r, keySizeInBytes); - signature.appendVector(convertToBytesExpand(s, keySizeInBytes)); - - return signature; + if (parameters.encoding == CryptoAlgorithmECDSAEncoding::DER) { + int derSigLength = i2d_ECDSA_SIG(sig.get(), nullptr); + if (derSigLength <= 0) + return Exception { OperationError }; + Vector signature(derSigLength); + uint8_t* p = signature.data(); + if(i2d_ECDSA_SIG(sig.get(), &p) != derSigLength) + return Exception { OperationError }; + return signature; + } else { + + const BIGNUM* r; + const BIGNUM* s; + ECDSA_SIG_get0(sig.get(), &r, &s); + + // Concatenate r and s, expanding r and s to keySizeInBytes. + Vector signature = convertToBytesExpand(r, keySizeInBytes); + signature.appendVector(convertToBytesExpand(s, keySizeInBytes)); + return signature; + } } ExceptionOr CryptoAlgorithmECDSA::platformVerify(const CryptoAlgorithmEcdsaParams& parameters, const CryptoKeyEC& key, const Vector& signature, const Vector& data) { - size_t keySizeInBytes = (key.keySizeInBits() + 7) / 8; - - // Bail if the signature size isn't double the key size (i.e. 
concatenated r and s components). - if (signature.size() != keySizeInBytes * 2) - return false; - - auto sig = ECDSASigPtr(ECDSA_SIG_new()); - auto r = BN_bin2bn(signature.data(), keySizeInBytes, nullptr); - auto s = BN_bin2bn(signature.data() + keySizeInBytes, keySizeInBytes, nullptr); - - if (!ECDSA_SIG_set0(sig.get(), r, s)) - return Exception { OperationError }; - - const EVP_MD* md = digestAlgorithm(parameters.hashIdentifier); - if (!md) - return Exception { NotSupportedError }; - - std::optional> digest = calculateDigest(md, data); - if (!digest) - return Exception { OperationError }; - - EC_KEY* ecKey = EVP_PKEY_get0_EC_KEY(key.platformKey()); - if (!ecKey) - return Exception { OperationError }; - - int ret = ECDSA_do_verify(digest->data(), digest->size(), sig.get(), ecKey); - return ret == 1; + if (parameters.encoding == CryptoAlgorithmECDSAEncoding::DER) { + const uint8_t* p = signature.data(); + + auto sig = ECDSASigPtr(d2i_ECDSA_SIG(nullptr, &p, signature.size())); + if (!sig) + return Exception { OperationError }; + + const EVP_MD* md = digestAlgorithm(parameters.hashIdentifier); + if (!md) + return Exception { NotSupportedError }; + + std::optional> digest = calculateDigest(md, data); + if (!digest) + return Exception { OperationError }; + + EC_KEY* ecKey = EVP_PKEY_get0_EC_KEY(key.platformKey()); + if (!ecKey) + return Exception { OperationError }; + + int ret = ECDSA_do_verify(digest->data(), digest->size(), sig.get(), ecKey); + return ret == 1; + } else { + size_t keySizeInBytes = (key.keySizeInBits() + 7) / 8; + + // Bail if the signature size isn't double the key size (i.e. concatenated r and s components). 
+ if (signature.size() != keySizeInBytes * 2) + return false; + + auto sig = ECDSASigPtr(ECDSA_SIG_new()); + auto r = BN_bin2bn(signature.data(), keySizeInBytes, nullptr); + auto s = BN_bin2bn(signature.data() + keySizeInBytes, keySizeInBytes, nullptr); + + if (!ECDSA_SIG_set0(sig.get(), r, s)) + return Exception { OperationError }; + + const EVP_MD* md = digestAlgorithm(parameters.hashIdentifier); + if (!md) + return Exception { NotSupportedError }; + + std::optional> digest = calculateDigest(md, data); + if (!digest) + return Exception { OperationError }; + + EC_KEY* ecKey = EVP_PKEY_get0_EC_KEY(key.platformKey()); + if (!ecKey) + return Exception { OperationError }; + + int ret = ECDSA_do_verify(digest->data(), digest->size(), sig.get(), ecKey); + return ret == 1; + } } } // namespace WebCore diff --git a/src/bun.js/bindings/webcrypto/CryptoAlgorithmEcdsaParams.h b/src/bun.js/bindings/webcrypto/CryptoAlgorithmEcdsaParams.h index e08de2802d2105..18155fdb2d7128 100644 --- a/src/bun.js/bindings/webcrypto/CryptoAlgorithmEcdsaParams.h +++ b/src/bun.js/bindings/webcrypto/CryptoAlgorithmEcdsaParams.h @@ -34,11 +34,17 @@ namespace WebCore { +enum CryptoAlgorithmECDSAEncoding { + IeeeP1363, + DER, +}; class CryptoAlgorithmEcdsaParams final : public CryptoAlgorithmParameters { public: // FIXME: Consider merging hash and hashIdentifier. std::variant, String> hash; CryptoAlgorithmIdentifier hashIdentifier; + // WebCrypto default is IeeeP1363. 
+ CryptoAlgorithmECDSAEncoding encoding { CryptoAlgorithmECDSAEncoding::IeeeP1363 }; Class parametersClass() const final { return Class::EcdsaParams; } @@ -47,7 +53,7 @@ class CryptoAlgorithmEcdsaParams final : public CryptoAlgorithmParameters { CryptoAlgorithmEcdsaParams result; result.identifier = identifier; result.hashIdentifier = hashIdentifier; - + result.encoding = encoding; return result; } }; diff --git a/src/bun.js/bindings/webcrypto/CryptoAlgorithmEd25519.h b/src/bun.js/bindings/webcrypto/CryptoAlgorithmEd25519.h index b96d5783173ad1..9b2eb8cc64462c 100644 --- a/src/bun.js/bindings/webcrypto/CryptoAlgorithmEd25519.h +++ b/src/bun.js/bindings/webcrypto/CryptoAlgorithmEd25519.h @@ -37,6 +37,8 @@ class CryptoAlgorithmEd25519 final : public CryptoAlgorithm { static constexpr CryptoAlgorithmIdentifier s_identifier = CryptoAlgorithmIdentifier::Ed25519; static Ref create(); + static ExceptionOr> platformSign(const CryptoKeyOKP&, const Vector&); + static ExceptionOr platformVerify(const CryptoKeyOKP&, const Vector&, const Vector&); private: CryptoAlgorithmEd25519() = default; CryptoAlgorithmIdentifier identifier() const final; @@ -46,9 +48,6 @@ class CryptoAlgorithmEd25519 final : public CryptoAlgorithm { void verify(const CryptoAlgorithmParameters&, Ref&&, Vector&& signature, Vector&&, BoolCallback&&, ExceptionCallback&&, ScriptExecutionContext&, WorkQueue&) final; void importKey(CryptoKeyFormat, KeyData&&, const CryptoAlgorithmParameters&, bool extractable, CryptoKeyUsageBitmap, KeyCallback&&, ExceptionCallback&&) final; void exportKey(CryptoKeyFormat, Ref&&, KeyDataCallback&&, ExceptionCallback&&) final; - - static ExceptionOr> platformSign(const CryptoKeyOKP&, const Vector&); - static ExceptionOr platformVerify(const CryptoKeyOKP&, const Vector&, const Vector&); }; inline CryptoAlgorithmIdentifier CryptoAlgorithmEd25519::identifier() const diff --git a/src/bun.js/bindings/webcrypto/CryptoAlgorithmHMAC.h b/src/bun.js/bindings/webcrypto/CryptoAlgorithmHMAC.h 
index 9f1084698ab43f..22064797ae8caf 100644 --- a/src/bun.js/bindings/webcrypto/CryptoAlgorithmHMAC.h +++ b/src/bun.js/bindings/webcrypto/CryptoAlgorithmHMAC.h @@ -41,7 +41,10 @@ class CryptoAlgorithmHMAC final : public CryptoAlgorithm { // Operations can be performed directly. static ExceptionOr> platformSign(const CryptoKeyHMAC&, const Vector&); + static ExceptionOr> platformSignWithAlgorithm(const CryptoKeyHMAC&, CryptoAlgorithmIdentifier, const Vector&); static ExceptionOr platformVerify(const CryptoKeyHMAC&, const Vector&, const Vector&); + static ExceptionOr platformVerifyWithAlgorithm(const CryptoKeyHMAC&, CryptoAlgorithmIdentifier, const Vector&, const Vector&); + private: CryptoAlgorithmHMAC() = default; diff --git a/src/bun.js/bindings/webcrypto/CryptoAlgorithmHMACOpenSSL.cpp b/src/bun.js/bindings/webcrypto/CryptoAlgorithmHMACOpenSSL.cpp index ad35cafe5aa7f5..c71751b3608ebd 100644 --- a/src/bun.js/bindings/webcrypto/CryptoAlgorithmHMACOpenSSL.cpp +++ b/src/bun.js/bindings/webcrypto/CryptoAlgorithmHMACOpenSSL.cpp @@ -59,6 +59,18 @@ static std::optional> calculateSignature(const EVP_MD* algorithm return cipherText; } +ExceptionOr> CryptoAlgorithmHMAC::platformSignWithAlgorithm(const CryptoKeyHMAC& key, CryptoAlgorithmIdentifier algorithmIdentifier, const Vector& data) { + + auto algorithm = digestAlgorithm(algorithmIdentifier); + if (!algorithm) + return Exception { OperationError }; + + auto result = calculateSignature(algorithm, key.key(), data.data(), data.size()); + if (!result) + return Exception { OperationError }; + return WTFMove(*result); +} + ExceptionOr> CryptoAlgorithmHMAC::platformSign(const CryptoKeyHMAC& key, const Vector& data) { auto algorithm = digestAlgorithm(key.hashAlgorithmIdentifier()); @@ -71,6 +83,18 @@ ExceptionOr> CryptoAlgorithmHMAC::platformSign(const CryptoKeyHM return WTFMove(*result); } +ExceptionOr CryptoAlgorithmHMAC::platformVerifyWithAlgorithm(const CryptoKeyHMAC& key, CryptoAlgorithmIdentifier algorithmIdentifier, const 
Vector& signature, const Vector& data) { + + auto algorithm = digestAlgorithm(algorithmIdentifier); + if (!algorithm) + return Exception { OperationError }; + + auto expectedSignature = calculateSignature(algorithm, key.key(), data.data(), data.size()); + if (!expectedSignature) + return Exception { OperationError }; + // Using a constant time comparison to prevent timing attacks. + return signature.size() == expectedSignature->size() && !constantTimeMemcmp(expectedSignature->data(), signature.data(), expectedSignature->size()); +} ExceptionOr CryptoAlgorithmHMAC::platformVerify(const CryptoKeyHMAC& key, const Vector& signature, const Vector& data) { auto algorithm = digestAlgorithm(key.hashAlgorithmIdentifier()); diff --git a/src/bun.js/bindings/webcrypto/CryptoAlgorithmRSASSA_PKCS1_v1_5.h b/src/bun.js/bindings/webcrypto/CryptoAlgorithmRSASSA_PKCS1_v1_5.h index d185d3a4bda2da..a08f08854a1a67 100644 --- a/src/bun.js/bindings/webcrypto/CryptoAlgorithmRSASSA_PKCS1_v1_5.h +++ b/src/bun.js/bindings/webcrypto/CryptoAlgorithmRSASSA_PKCS1_v1_5.h @@ -39,6 +39,13 @@ class CryptoAlgorithmRSASSA_PKCS1_v1_5 final : public CryptoAlgorithm { static constexpr CryptoAlgorithmIdentifier s_identifier = CryptoAlgorithmIdentifier::RSASSA_PKCS1_v1_5; static Ref create(); + static ExceptionOr> platformSign(const CryptoKeyRSA&, const Vector&); + static ExceptionOr> platformSignWithAlgorithm(const CryptoKeyRSA&, CryptoAlgorithmIdentifier, const Vector&); + + static ExceptionOr platformVerify(const CryptoKeyRSA&, const Vector&, const Vector&); + static ExceptionOr platformVerifyWithAlgorithm(const CryptoKeyRSA&, CryptoAlgorithmIdentifier, const Vector&, const Vector&); + + private: CryptoAlgorithmRSASSA_PKCS1_v1_5() = default; CryptoAlgorithmIdentifier identifier() const final; @@ -48,9 +55,6 @@ class CryptoAlgorithmRSASSA_PKCS1_v1_5 final : public CryptoAlgorithm { void generateKey(const CryptoAlgorithmParameters&, bool extractable, CryptoKeyUsageBitmap, KeyOrKeyPairCallback&&, 
ExceptionCallback&&, ScriptExecutionContext&) final; void importKey(CryptoKeyFormat, KeyData&&, const CryptoAlgorithmParameters&, bool extractable, CryptoKeyUsageBitmap, KeyCallback&&, ExceptionCallback&&) final; void exportKey(CryptoKeyFormat, Ref&&, KeyDataCallback&&, ExceptionCallback&&) final; - - static ExceptionOr> platformSign(const CryptoKeyRSA&, const Vector&); - static ExceptionOr platformVerify(const CryptoKeyRSA&, const Vector&, const Vector&); }; } // namespace WebCore diff --git a/src/bun.js/bindings/webcrypto/CryptoAlgorithmRSASSA_PKCS1_v1_5OpenSSL.cpp b/src/bun.js/bindings/webcrypto/CryptoAlgorithmRSASSA_PKCS1_v1_5OpenSSL.cpp index acfeee790fcd00..f31585a3a44a13 100644 --- a/src/bun.js/bindings/webcrypto/CryptoAlgorithmRSASSA_PKCS1_v1_5OpenSSL.cpp +++ b/src/bun.js/bindings/webcrypto/CryptoAlgorithmRSASSA_PKCS1_v1_5OpenSSL.cpp @@ -33,11 +33,8 @@ namespace WebCore { -ExceptionOr> CryptoAlgorithmRSASSA_PKCS1_v1_5::platformSign(const CryptoKeyRSA& key, const Vector& data) -{ - const EVP_MD* md = digestAlgorithm(key.hashAlgorithmIdentifier()); - if (!md) - return Exception { NotSupportedError }; + +static ExceptionOr> signWithEVP_MD(const CryptoKeyRSA& key, const EVP_MD* md, const Vector& data) { std::optional> digest = calculateDigest(md, data); if (!digest) @@ -68,12 +65,25 @@ ExceptionOr> CryptoAlgorithmRSASSA_PKCS1_v1_5::platformSign(cons return signature; } -ExceptionOr CryptoAlgorithmRSASSA_PKCS1_v1_5::platformVerify(const CryptoKeyRSA& key, const Vector& signature, const Vector& data) +ExceptionOr> CryptoAlgorithmRSASSA_PKCS1_v1_5::platformSignWithAlgorithm(const CryptoKeyRSA& key, CryptoAlgorithmIdentifier algorithm, const Vector& data) { + + const EVP_MD* md = digestAlgorithm(algorithm); + if (!md) + return Exception { NotSupportedError }; + + return signWithEVP_MD(key, md, data); +} +ExceptionOr> CryptoAlgorithmRSASSA_PKCS1_v1_5::platformSign(const CryptoKeyRSA& key, const Vector& data) { const EVP_MD* md = 
digestAlgorithm(key.hashAlgorithmIdentifier()); if (!md) return Exception { NotSupportedError }; + return signWithEVP_MD(key, md, data); +} + + +static ExceptionOr verifyWithEVP_MD(const CryptoKeyRSA& key, const EVP_MD* md, const Vector& signature, const Vector& data) { std::optional> digest = calculateDigest(md, data); if (!digest) return Exception { OperationError }; @@ -96,6 +106,24 @@ ExceptionOr CryptoAlgorithmRSASSA_PKCS1_v1_5::platformVerify(const CryptoK return ret == 1; } +ExceptionOr CryptoAlgorithmRSASSA_PKCS1_v1_5::platformVerifyWithAlgorithm(const CryptoKeyRSA& key, CryptoAlgorithmIdentifier algorithm, const Vector& signature, const Vector& data) { + const EVP_MD* md = digestAlgorithm(algorithm); + if (!md) + return Exception { NotSupportedError }; + + return verifyWithEVP_MD(key, md, signature, data); +} + + +ExceptionOr CryptoAlgorithmRSASSA_PKCS1_v1_5::platformVerify(const CryptoKeyRSA& key, const Vector& signature, const Vector& data) +{ + const EVP_MD* md = digestAlgorithm(key.hashAlgorithmIdentifier()); + if (!md) + return Exception { NotSupportedError }; + + return verifyWithEVP_MD(key, md, signature, data); +} + } // namespace WebCore #endif // ENABLE(WEB_CRYPTO) diff --git a/src/bun.js/bindings/webcrypto/CryptoAlgorithmRSA_PSS.h b/src/bun.js/bindings/webcrypto/CryptoAlgorithmRSA_PSS.h index b1f7f772ce4227..5b79811796dd90 100644 --- a/src/bun.js/bindings/webcrypto/CryptoAlgorithmRSA_PSS.h +++ b/src/bun.js/bindings/webcrypto/CryptoAlgorithmRSA_PSS.h @@ -40,6 +40,13 @@ class CryptoAlgorithmRSA_PSS final : public CryptoAlgorithm { static constexpr CryptoAlgorithmIdentifier s_identifier = CryptoAlgorithmIdentifier::RSA_PSS; static Ref create(); + static ExceptionOr> platformSign(const CryptoAlgorithmRsaPssParams&, const CryptoKeyRSA&, const Vector&); + static ExceptionOr> platformSignWithAlgorithm(const CryptoAlgorithmRsaPssParams&, CryptoAlgorithmIdentifier, const CryptoKeyRSA&, const Vector&); + + static ExceptionOr platformVerify(const 
CryptoAlgorithmRsaPssParams&, const CryptoKeyRSA&, const Vector&, const Vector&); + static ExceptionOr platformVerifyWithAlgorithm(const CryptoAlgorithmRsaPssParams&, CryptoAlgorithmIdentifier, const CryptoKeyRSA&, const Vector&, const Vector&); + + private: CryptoAlgorithmRSA_PSS() = default; CryptoAlgorithmIdentifier identifier() const final; @@ -50,8 +57,6 @@ class CryptoAlgorithmRSA_PSS final : public CryptoAlgorithm { void importKey(CryptoKeyFormat, KeyData&&, const CryptoAlgorithmParameters&, bool extractable, CryptoKeyUsageBitmap, KeyCallback&&, ExceptionCallback&&) final; void exportKey(CryptoKeyFormat, Ref&&, KeyDataCallback&&, ExceptionCallback&&) final; - static ExceptionOr> platformSign(const CryptoAlgorithmRsaPssParams&, const CryptoKeyRSA&, const Vector&); - static ExceptionOr platformVerify(const CryptoAlgorithmRsaPssParams&, const CryptoKeyRSA&, const Vector&, const Vector&); }; } // namespace WebCore diff --git a/src/bun.js/bindings/webcrypto/CryptoAlgorithmRSA_PSSOpenSSL.cpp b/src/bun.js/bindings/webcrypto/CryptoAlgorithmRSA_PSSOpenSSL.cpp index 149560a392fffe..c768bc519c6193 100644 --- a/src/bun.js/bindings/webcrypto/CryptoAlgorithmRSA_PSSOpenSSL.cpp +++ b/src/bun.js/bindings/webcrypto/CryptoAlgorithmRSA_PSSOpenSSL.cpp @@ -34,13 +34,12 @@ namespace WebCore { -ExceptionOr> CryptoAlgorithmRSA_PSS::platformSign(const CryptoAlgorithmRsaPssParams& parameters, const CryptoKeyRSA& key, const Vector& data) +static ExceptionOr> signWithMD(const CryptoAlgorithmRsaPssParams& parameters, const CryptoKeyRSA& key, const Vector& data, const EVP_MD* md) { -#if 1 // defined(EVP_PKEY_CTX_set_rsa_pss_saltlen) && defined(EVP_PKEY_CTX_set_rsa_mgf1_md) - const EVP_MD* md = digestAlgorithm(key.hashAlgorithmIdentifier()); - if (!md) - return Exception { NotSupportedError }; - + auto padding = parameters.padding; + if(padding == 0) { + padding = RSA_PKCS1_PSS_PADDING; + } std::optional> digest = calculateDigest(md, data); if (!digest) return Exception { OperationError }; 
@@ -52,11 +51,13 @@ ExceptionOr> CryptoAlgorithmRSA_PSS::platformSign(const CryptoAl if (EVP_PKEY_sign_init(ctx.get()) <= 0) return Exception { OperationError }; - if (EVP_PKEY_CTX_set_rsa_padding(ctx.get(), RSA_PKCS1_PSS_PADDING) <= 0) + if (EVP_PKEY_CTX_set_rsa_padding(ctx.get(), padding) <= 0) return Exception { OperationError }; - if (EVP_PKEY_CTX_set_rsa_pss_saltlen(ctx.get(), parameters.saltLength) <= 0) - return Exception { OperationError }; + if(padding == RSA_PKCS1_PSS_PADDING) { + if (EVP_PKEY_CTX_set_rsa_pss_saltlen(ctx.get(), parameters.saltLength) <= 0) + return Exception { OperationError }; + } if (EVP_PKEY_CTX_set_signature_md(ctx.get(), md) <= 0) return Exception { OperationError }; @@ -74,21 +75,40 @@ ExceptionOr> CryptoAlgorithmRSA_PSS::platformSign(const CryptoAl signature.shrink(signatureLen); return signature; +} +ExceptionOr> CryptoAlgorithmRSA_PSS::platformSignWithAlgorithm(const CryptoAlgorithmRsaPssParams& parameters, CryptoAlgorithmIdentifier hash, const CryptoKeyRSA& key, const Vector& data) +{ +#if 1 // defined(EVP_PKEY_CTX_set_rsa_pss_saltlen) && defined(EVP_PKEY_CTX_set_rsa_mgf1_md) + const EVP_MD* md = digestAlgorithm(hash); + if (!md) + return Exception { NotSupportedError }; + + return signWithMD(parameters, key, data, md); #else return Exception { NotSupportedError }; #endif } -ExceptionOr CryptoAlgorithmRSA_PSS::platformVerify(const CryptoAlgorithmRsaPssParams& parameters, const CryptoKeyRSA& key, const Vector& signature, const Vector& data) + +ExceptionOr> CryptoAlgorithmRSA_PSS::platformSign(const CryptoAlgorithmRsaPssParams& parameters, const CryptoKeyRSA& key, const Vector& data) { -#if 1 // defined(EVP_PKEY_CTX_set_rsa_pss_saltlen) && defined(EVP_PKEY_CTX_set_rsa_mgf1_md) +#if 1 // defined(EVP_PKEY_CTX_set_rsa_pss_saltlen) && defined(EVP_PKEY_CTX_set_rsa_mgf1_md) const EVP_MD* md = digestAlgorithm(key.hashAlgorithmIdentifier()); if (!md) return Exception { NotSupportedError }; - std::optional> digest = calculateDigest(md, 
data); - if (!digest) - return Exception { OperationError }; + return signWithMD(parameters, key, data, md); +#else + return Exception { NotSupportedError }; +#endif +} + +static ExceptionOr verifyWithMD(const CryptoAlgorithmRsaPssParams& parameters, const CryptoKeyRSA& key, const Vector& signature, const Vector& data, const EVP_MD* md) +{ + auto padding = parameters.padding; + if(padding == 0) { + padding = RSA_PKCS1_PSS_PADDING; + } auto ctx = EvpPKeyCtxPtr(EVP_PKEY_CTX_new(key.platformKey(), nullptr)); if (!ctx) @@ -97,11 +117,13 @@ ExceptionOr CryptoAlgorithmRSA_PSS::platformVerify(const CryptoAlgorithmRs if (EVP_PKEY_verify_init(ctx.get()) <= 0) return Exception { OperationError }; - if (EVP_PKEY_CTX_set_rsa_padding(ctx.get(), RSA_PKCS1_PSS_PADDING) <= 0) + if (EVP_PKEY_CTX_set_rsa_padding(ctx.get(), padding) <= 0) return Exception { OperationError }; - if (EVP_PKEY_CTX_set_rsa_pss_saltlen(ctx.get(), parameters.saltLength) <= 0) - return Exception { OperationError }; + if(padding == RSA_PKCS1_PSS_PADDING) { + if (EVP_PKEY_CTX_set_rsa_pss_saltlen(ctx.get(), parameters.saltLength) <= 0) + return Exception { OperationError }; + } if (EVP_PKEY_CTX_set_signature_md(ctx.get(), md) <= 0) return Exception { OperationError }; @@ -109,9 +131,32 @@ ExceptionOr CryptoAlgorithmRSA_PSS::platformVerify(const CryptoAlgorithmRs if (EVP_PKEY_CTX_set_rsa_mgf1_md(ctx.get(), md) <= 0) return Exception { OperationError }; + std::optional> digest = calculateDigest(md, data); + if (!digest) + return Exception { OperationError }; + int ret = EVP_PKEY_verify(ctx.get(), signature.data(), signature.size(), digest->data(), digest->size()); return ret == 1; +} +ExceptionOr CryptoAlgorithmRSA_PSS::platformVerifyWithAlgorithm(const CryptoAlgorithmRsaPssParams& parameters, CryptoAlgorithmIdentifier hash, const CryptoKeyRSA& key, const Vector& signature, const Vector& data) +{ + const EVP_MD* md = digestAlgorithm(hash); + if (!md) + return Exception { NotSupportedError }; + + return 
verifyWithMD(parameters, key, signature, data, md); + +} + +ExceptionOr CryptoAlgorithmRSA_PSS::platformVerify(const CryptoAlgorithmRsaPssParams& parameters, const CryptoKeyRSA& key, const Vector& signature, const Vector& data) +{ +#if 1 // defined(EVP_PKEY_CTX_set_rsa_pss_saltlen) && defined(EVP_PKEY_CTX_set_rsa_mgf1_md) + const EVP_MD* md = digestAlgorithm(key.hashAlgorithmIdentifier()); + if (!md) + return Exception { NotSupportedError }; + + return verifyWithMD(parameters, key, signature, data, md); #else return Exception { NotSupportedError }; #endif diff --git a/src/bun.js/bindings/webcrypto/CryptoAlgorithmRegistry.cpp b/src/bun.js/bindings/webcrypto/CryptoAlgorithmRegistry.cpp index 92c90fbe41144a..d8192bc9201daa 100644 --- a/src/bun.js/bindings/webcrypto/CryptoAlgorithmRegistry.cpp +++ b/src/bun.js/bindings/webcrypto/CryptoAlgorithmRegistry.cpp @@ -90,7 +90,8 @@ void CryptoAlgorithmRegistry::registerAlgorithm(const String& name, CryptoAlgori Locker locker { m_lock }; ASSERT(!m_identifiers.contains(name)); - ASSERT(!m_constructors.contains(static_cast(identifier))); + // hashs can contains 2 names (SHA-256 and SHA256) + // ASSERT(!m_constructors.contains(static_cast(identifier))); m_identifiers.add(name, identifier); m_constructors.add(static_cast(identifier), std::make_pair(name, constructor)); diff --git a/src/bun.js/bindings/webcrypto/CryptoAlgorithmRegistry.h b/src/bun.js/bindings/webcrypto/CryptoAlgorithmRegistry.h index 49d75b2afeea2b..44506300dcff56 100644 --- a/src/bun.js/bindings/webcrypto/CryptoAlgorithmRegistry.h +++ b/src/bun.js/bindings/webcrypto/CryptoAlgorithmRegistry.h @@ -60,6 +60,11 @@ class CryptoAlgorithmRegistry { { registerAlgorithm(AlgorithmClass::s_name, AlgorithmClass::s_identifier, AlgorithmClass::create); } + template void registerAlgorithmWithAlternativeName() + { + registerAlgorithm(AlgorithmClass::s_name, AlgorithmClass::s_identifier, AlgorithmClass::create); + registerAlgorithm(AlgorithmClass::s_alternative_name, 
AlgorithmClass::s_identifier, AlgorithmClass::create); + } void registerAlgorithm(const String& name, CryptoAlgorithmIdentifier, CryptoAlgorithmConstructor); diff --git a/src/bun.js/bindings/webcrypto/CryptoAlgorithmRegistryOpenSSL.cpp b/src/bun.js/bindings/webcrypto/CryptoAlgorithmRegistryOpenSSL.cpp index 8e588d8bf2a236..42dce3a00296c3 100644 --- a/src/bun.js/bindings/webcrypto/CryptoAlgorithmRegistryOpenSSL.cpp +++ b/src/bun.js/bindings/webcrypto/CryptoAlgorithmRegistryOpenSSL.cpp @@ -67,11 +67,11 @@ void CryptoAlgorithmRegistry::platformRegisterAlgorithms() registerAlgorithm(); registerAlgorithm(); registerAlgorithm(); - registerAlgorithm(); - registerAlgorithm(); - registerAlgorithm(); - registerAlgorithm(); - registerAlgorithm(); + registerAlgorithmWithAlternativeName(); + registerAlgorithmWithAlternativeName(); + registerAlgorithmWithAlternativeName(); + registerAlgorithmWithAlternativeName(); + registerAlgorithmWithAlternativeName(); registerAlgorithm(); } diff --git a/src/bun.js/bindings/webcrypto/CryptoAlgorithmRsaPssParams.h b/src/bun.js/bindings/webcrypto/CryptoAlgorithmRsaPssParams.h index 412f4ce0534e7e..a1cda97d5fd04f 100644 --- a/src/bun.js/bindings/webcrypto/CryptoAlgorithmRsaPssParams.h +++ b/src/bun.js/bindings/webcrypto/CryptoAlgorithmRsaPssParams.h @@ -34,6 +34,7 @@ namespace WebCore { class CryptoAlgorithmRsaPssParams final : public CryptoAlgorithmParameters { public: size_t saltLength; + size_t padding = 0; // 0 = default Class parametersClass() const final { return Class::RsaPssParams; } @@ -42,6 +43,7 @@ class CryptoAlgorithmRsaPssParams final : public CryptoAlgorithmParameters { CryptoAlgorithmRsaPssParams result; result.identifier = identifier; result.saltLength = saltLength; + result.padding = padding; return result; } diff --git a/src/bun.js/bindings/webcrypto/CryptoAlgorithmSHA1.h b/src/bun.js/bindings/webcrypto/CryptoAlgorithmSHA1.h index 1fd775a813643c..7728da1d5e288f 100644 --- a/src/bun.js/bindings/webcrypto/CryptoAlgorithmSHA1.h 
+++ b/src/bun.js/bindings/webcrypto/CryptoAlgorithmSHA1.h @@ -34,6 +34,8 @@ namespace WebCore { class CryptoAlgorithmSHA1 final : public CryptoAlgorithm { public: static constexpr ASCIILiteral s_name = "SHA-1"_s; + static constexpr ASCIILiteral s_alternative_name = "SHA1"_s; + static constexpr CryptoAlgorithmIdentifier s_identifier = CryptoAlgorithmIdentifier::SHA_1; static Ref create(); diff --git a/src/bun.js/bindings/webcrypto/CryptoAlgorithmSHA224.h b/src/bun.js/bindings/webcrypto/CryptoAlgorithmSHA224.h index 493e162ab3652e..d574706d478d44 100644 --- a/src/bun.js/bindings/webcrypto/CryptoAlgorithmSHA224.h +++ b/src/bun.js/bindings/webcrypto/CryptoAlgorithmSHA224.h @@ -34,6 +34,8 @@ namespace WebCore { class CryptoAlgorithmSHA224 final : public CryptoAlgorithm { public: static constexpr ASCIILiteral s_name = "SHA-224"_s; + static constexpr ASCIILiteral s_alternative_name = "SHA224"_s; + static constexpr CryptoAlgorithmIdentifier s_identifier = CryptoAlgorithmIdentifier::SHA_224; static Ref create(); diff --git a/src/bun.js/bindings/webcrypto/CryptoAlgorithmSHA256.h b/src/bun.js/bindings/webcrypto/CryptoAlgorithmSHA256.h index f5a8543c82c554..c24db6a849fab4 100644 --- a/src/bun.js/bindings/webcrypto/CryptoAlgorithmSHA256.h +++ b/src/bun.js/bindings/webcrypto/CryptoAlgorithmSHA256.h @@ -34,6 +34,8 @@ namespace WebCore { class CryptoAlgorithmSHA256 final : public CryptoAlgorithm { public: static constexpr ASCIILiteral s_name = "SHA-256"_s; + static constexpr ASCIILiteral s_alternative_name = "SHA256"_s; + static const CryptoAlgorithmIdentifier s_identifier = CryptoAlgorithmIdentifier::SHA_256; static Ref create(); diff --git a/src/bun.js/bindings/webcrypto/CryptoAlgorithmSHA384.h b/src/bun.js/bindings/webcrypto/CryptoAlgorithmSHA384.h index e5bf2232acf2da..c6dac1a524bc61 100644 --- a/src/bun.js/bindings/webcrypto/CryptoAlgorithmSHA384.h +++ b/src/bun.js/bindings/webcrypto/CryptoAlgorithmSHA384.h @@ -34,6 +34,8 @@ namespace WebCore { class CryptoAlgorithmSHA384 
final : public CryptoAlgorithm { public: static constexpr ASCIILiteral s_name = "SHA-384"_s; + static constexpr ASCIILiteral s_alternative_name = "SHA384"_s; + static constexpr CryptoAlgorithmIdentifier s_identifier = CryptoAlgorithmIdentifier::SHA_384; static Ref create(); diff --git a/src/bun.js/bindings/webcrypto/CryptoAlgorithmSHA512.h b/src/bun.js/bindings/webcrypto/CryptoAlgorithmSHA512.h index 05f98fc4a8666d..968dd4c4d6290e 100644 --- a/src/bun.js/bindings/webcrypto/CryptoAlgorithmSHA512.h +++ b/src/bun.js/bindings/webcrypto/CryptoAlgorithmSHA512.h @@ -34,6 +34,8 @@ namespace WebCore { class CryptoAlgorithmSHA512 final : public CryptoAlgorithm { public: static constexpr ASCIILiteral s_name = "SHA-512"_s; + static constexpr ASCIILiteral s_alternative_name = "SHA512"_s; + static constexpr CryptoAlgorithmIdentifier s_identifier = CryptoAlgorithmIdentifier::SHA_512; static Ref create(); diff --git a/src/bun.js/config.zig b/src/bun.js/config.zig index e8fdaeba8c48b1..079c0b8f642565 100644 --- a/src/bun.js/config.zig +++ b/src/bun.js/config.zig @@ -18,7 +18,6 @@ const Api = @import("../api/schema.zig").Api; const options = @import("../options.zig"); const Bundler = bun.bundler.ServeBundler; const js_printer = bun.js_printer; -const http = @import("../bun_dev_http_server.zig"); pub const DefaultBunDefines = struct { pub const Keys = struct { diff --git a/src/bun.js/event_loop.zig b/src/bun.js/event_loop.zig index 75f6852db6eb97..9b44db8e2dba12 100644 --- a/src/bun.js/event_loop.zig +++ b/src/bun.js/event_loop.zig @@ -21,7 +21,7 @@ const JSValue = JSC.JSValue; const js = JSC.C; pub const WorkPool = @import("../work_pool.zig").WorkPool; pub const WorkPoolTask = @import("../work_pool.zig").Task; -const NetworkThread = @import("root").bun.HTTP.NetworkThread; +const NetworkThread = @import("root").bun.http.NetworkThread; const uws = @import("root").bun.uws; const Async = bun.Async; @@ -317,6 +317,7 @@ const Write = JSC.Node.Async.write; const Truncate = 
JSC.Node.Async.truncate; const FTruncate = JSC.Node.Async.ftruncate; const Readdir = JSC.Node.Async.readdir; +const ReaddirRecursive = JSC.Node.Async.readdir_recursive; const Readv = JSC.Node.Async.readv; const Writev = JSC.Node.Async.writev; const Close = JSC.Node.Async.close; @@ -375,6 +376,7 @@ pub const Task = TaggedPointerUnion(.{ Truncate, FTruncate, Readdir, + ReaddirRecursive, Close, Rm, Rmdir, @@ -821,6 +823,10 @@ pub const EventLoop = struct { var any: *Readdir = task.get(Readdir).?; any.runFromJSThread(); }, + @field(Task.Tag, typeBaseName(@typeName(ReaddirRecursive))) => { + var any: *ReaddirRecursive = task.get(ReaddirRecursive).?; + any.runFromJSThread(); + }, @field(Task.Tag, typeBaseName(@typeName(Close))) => { var any: *Close = task.get(Close).?; any.runFromJSThread(); diff --git a/src/bun.js/javascript.zig b/src/bun.js/javascript.zig index 6f77fc08184bb0..4e8b0902d4612f 100644 --- a/src/bun.js/javascript.zig +++ b/src/bun.js/javascript.zig @@ -14,7 +14,7 @@ const StoredFileDescriptorType = bun.StoredFileDescriptorType; const ErrorableString = bun.JSC.ErrorableString; const Arena = @import("../mimalloc_arena.zig").Arena; const C = bun.C; -const NetworkThread = @import("root").bun.HTTP.NetworkThread; +const NetworkThread = @import("root").bun.http.NetworkThread; const IO = @import("root").bun.AsyncIO; const Allocator = std.mem.Allocator; const IdentityContext = @import("../identity_context.zig").IdentityContext; @@ -32,7 +32,6 @@ const ServerEntryPoint = bun.bundler.ServerEntryPoint; const js_printer = bun.js_printer; const js_parser = bun.js_parser; const js_ast = bun.JSAst; -const http = @import("../bun_dev_http_server.zig"); const NodeFallbackModules = @import("../node_fallbacks.zig"); const ImportKind = ast.ImportKind; const Analytics = @import("../analytics/analytics_thread.zig"); @@ -628,7 +627,7 @@ pub const VirtualMachine = struct { return VMHolder.vm.?; } - pub fn mimeType(this: *VirtualMachine, str: []const u8) ?bun.HTTP.MimeType { + pub 
fn mimeType(this: *VirtualMachine, str: []const u8) ?bun.http.MimeType { return this.rareData().mimeTypeFromString(this.allocator, str); } diff --git a/src/bun.js/module_loader.zig b/src/bun.js/module_loader.zig index 1d4754a0b22d10..12f0559d812461 100644 --- a/src/bun.js/module_loader.zig +++ b/src/bun.js/module_loader.zig @@ -13,7 +13,7 @@ const default_allocator = bun.default_allocator; const StoredFileDescriptorType = bun.StoredFileDescriptorType; const Arena = @import("../mimalloc_arena.zig").Arena; const C = bun.C; -const NetworkThread = @import("root").bun.HTTP.NetworkThread; +const NetworkThread = @import("root").bun.http.NetworkThread; const IO = @import("root").bun.AsyncIO; const Allocator = std.mem.Allocator; const IdentityContext = @import("../identity_context.zig").IdentityContext; @@ -31,7 +31,6 @@ const ServerEntryPoint = bun.bundler.ServerEntryPoint; const js_printer = bun.js_printer; const js_parser = bun.js_parser; const js_ast = bun.JSAst; -const http = @import("../bun_dev_http_server.zig"); const NodeFallbackModules = @import("../node_fallbacks.zig"); const ImportKind = ast.ImportKind; const Analytics = @import("../analytics/analytics_thread.zig"); @@ -1378,7 +1377,7 @@ pub const ModuleLoader = struct { .js, .jsx, .ts, .tsx, .json, .toml, .text => { jsc_vm.transpiled_count += 1; jsc_vm.bundler.resetStore(); - const hash = http.Watcher.getHash(path.text); + const hash = JSC.Watcher.getHash(path.text); const is_main = jsc_vm.main.len == path.text.len and jsc_vm.main_hash == hash and strings.eqlLong(jsc_vm.main, path.text, false); diff --git a/src/bun.js/node/node_fs.zig b/src/bun.js/node/node_fs.zig index f4c0b589a621f0..a1bac64fecf452 100644 --- a/src/bun.js/node/node_fs.zig +++ b/src/bun.js/node/node_fs.zig @@ -102,6 +102,8 @@ pub const Async = struct { pub const cp = AsyncCpTask; + pub const readdir_recursive = AsyncReaddirRecursiveTask; + fn NewAsyncFSTask(comptime ReturnType: type, comptime ArgumentType: type, comptime Function: anytype) type 
{ return struct { promise: JSC.JSPromise.Strong, @@ -317,6 +319,349 @@ pub const AsyncCpTask = struct { } }; +pub const AsyncReaddirRecursiveTask = struct { + promise: JSC.JSPromise.Strong, + args: Arguments.Readdir, + globalObject: *JSC.JSGlobalObject, + task: JSC.WorkPoolTask = .{ .callback = &workPoolCallback }, + ref: bun.Async.KeepAlive = .{}, + tracker: JSC.AsyncTaskTracker, + + // It's not 100% clear this one is necessary + has_result: std.atomic.Atomic(bool), + + subtask_count: std.atomic.Atomic(usize), + + /// The final result list + result_list: ResultListEntry.Value = undefined, + + /// When joining the result list, we use this to preallocate the joined array. + result_list_count: std.atomic.Atomic(usize) = std.atomic.Atomic(usize).init(0), + + /// A lockless queue of result lists. + /// + /// Using a lockless queue instead of mutex + joining the lists as we go was a meaningful performance improvement + result_list_queue: ResultListEntry.Queue = ResultListEntry.Queue{}, + + /// All the subtasks will use this fd to open files + root_fd: FileDescriptor = bun.invalid_fd, + + /// This isued when joining the file paths for error messages + root_path: PathString = PathString.empty, + + pending_err: ?Syscall.Error = null, + pending_err_mutex: bun.Lock = bun.Lock.init(), + + pub const ResultListEntry = struct { + pub const Value = union(Return.Readdir.Tag) { + with_file_types: std.ArrayList(Dirent), + buffers: std.ArrayList(Buffer), + files: std.ArrayList(bun.String), + + pub fn deinit(this: *@This()) void { + switch (this.*) { + .with_file_types => |*res| { + for (res.items) |item| { + item.name.deref(); + } + res.clearAndFree(); + }, + .buffers => |*res| { + for (res.items) |item| { + bun.default_allocator.free(item.buffer.byteSlice()); + } + res.clearAndFree(); + }, + .files => |*res| { + for (res.items) |item| { + item.deref(); + } + + res.clearAndFree(); + }, + } + } + }; + next: ?*ResultListEntry = null, + value: Value, + + pub const Queue = 
bun.UnboundedQueue(ResultListEntry, .next); + }; + + pub const Subtask = struct { + readdir_task: *AsyncReaddirRecursiveTask, + basename: bun.PathString = bun.PathString.empty, + task: JSC.WorkPoolTask = .{ .callback = call }, + + pub fn call(task: *JSC.WorkPoolTask) void { + var this: *Subtask = @fieldParentPtr(Subtask, "task", task); + defer { + bun.default_allocator.free(this.basename.sliceAssumeZ()); + bun.default_allocator.destroy(this); + } + var buf: [bun.MAX_PATH_BYTES]u8 = undefined; + this.readdir_task.performWork(this.basename.sliceAssumeZ(), &buf, false); + } + }; + + pub fn enqueue( + readdir_task: *AsyncReaddirRecursiveTask, + basename: [:0]const u8, + ) void { + var task = bun.default_allocator.create(Subtask) catch bun.outOfMemory(); + task.* = Subtask{ + .readdir_task = readdir_task, + .basename = bun.PathString.init(bun.default_allocator.dupeZ(u8, basename) catch bun.outOfMemory()), + }; + std.debug.assert(readdir_task.subtask_count.fetchAdd(1, .Monotonic) > 0); + JSC.WorkPool.schedule(&task.task); + } + + pub fn create( + globalObject: *JSC.JSGlobalObject, + args: Arguments.Readdir, + vm: *JSC.VirtualMachine, + ) JSC.JSValue { + if (comptime Environment.isWindows) { + globalObject.throwTODO("fs.promises.readdir is not implemented on Windows yet"); + return .zero; + } + + var task = bun.default_allocator.create(AsyncReaddirRecursiveTask) catch bun.outOfMemory(); + task.* = AsyncReaddirRecursiveTask{ + .promise = JSC.JSPromise.Strong.init(globalObject), + .args = args, + .has_result = .{ .value = false }, + .globalObject = globalObject, + .tracker = JSC.AsyncTaskTracker.init(vm), + .subtask_count = .{ .value = 1 }, + .root_path = PathString.init(bun.default_allocator.dupeZ(u8, args.path.slice()) catch bun.outOfMemory()), + .result_list = switch (args.tag()) { + .files => .{ .files = std.ArrayList(bun.String).init(bun.default_allocator) }, + .with_file_types => .{ .with_file_types = std.ArrayList(Dirent).init(bun.default_allocator) }, + .buffers => 
.{ .buffers = std.ArrayList(Buffer).init(bun.default_allocator) }, + }, + }; + task.ref.ref(vm); + task.args.toThreadSafe(); + task.tracker.didSchedule(globalObject); + + JSC.WorkPool.schedule(&task.task); + + return task.promise.value(); + } + + pub fn performWork(this: *AsyncReaddirRecursiveTask, basename: [:0]const u8, buf: *[bun.MAX_PATH_BYTES]u8, comptime is_root: bool) void { + switch (this.args.tag()) { + inline else => |tag| { + const ResultType = comptime switch (tag) { + .files => bun.String, + .with_file_types => Dirent, + .buffers => Buffer, + }; + var stack = std.heap.stackFallback(8192, bun.default_allocator); + + // This is a stack-local copy to avoid resizing heap-allocated arrays in the common case of a small directory + var entries = std.ArrayList(ResultType).init(stack.get()); + + defer entries.deinit(); + + switch (NodeFS.readdirWithEntriesRecursiveAsync( + buf, + this.args, + this, + basename, + ResultType, + &entries, + is_root, + )) { + .err => |err| { + for (entries.items) |*item| { + switch (comptime ResultType) { + bun.String => item.deref(), + Dirent => item.name.deref(), + Buffer => bun.default_allocator.free(item.buffer.byteSlice()), + else => unreachable, + } + } + + { + this.pending_err_mutex.lock(); + defer this.pending_err_mutex.unlock(); + if (this.pending_err == null) { + const err_path = if (err.path.len > 0) err.path else this.args.path.slice(); + this.pending_err = err.withPath(bun.default_allocator.dupe(u8, err_path) catch ""); + } + } + + if (this.subtask_count.fetchSub(1, .Monotonic) == 1) { + this.finishConcurrently(); + } + }, + .result => { + this.writeResults(ResultType, &entries); + }, + } + }, + } + } + + fn workPoolCallback(task: *JSC.WorkPoolTask) void { + var this: *AsyncReaddirRecursiveTask = @fieldParentPtr(AsyncReaddirRecursiveTask, "task", task); + var buf: [bun.MAX_PATH_BYTES]u8 = undefined; + this.performWork(this.root_path.sliceAssumeZ(), &buf, true); + } + + pub fn writeResults(this: 
*AsyncReaddirRecursiveTask, comptime ResultType: type, result: *std.ArrayList(ResultType)) void { + if (result.items.len > 0) { + const Field = comptime switch (ResultType) { + bun.String => .files, + Dirent => .with_file_types, + Buffer => .buffers, + else => unreachable, + }; + var list = bun.default_allocator.create(ResultListEntry) catch bun.outOfMemory(); + errdefer { + bun.default_allocator.destroy(list); + } + var clone = std.ArrayList(ResultType).initCapacity(bun.default_allocator, result.items.len) catch bun.outOfMemory(); + clone.appendSliceAssumeCapacity(result.items); + _ = this.result_list_count.fetchAdd(clone.items.len, .Monotonic); + list.* = ResultListEntry{ .next = null, .value = @unionInit(ResultListEntry.Value, @tagName(Field), clone) }; + this.result_list_queue.push(list); + } + + if (this.subtask_count.fetchSub(1, .Monotonic) == 1) { + this.finishConcurrently(); + } + } + + /// May be called from any thread (the subtasks) + pub fn finishConcurrently(this: *AsyncReaddirRecursiveTask) void { + if (this.has_result.compareAndSwap(false, true, .Monotonic, .Monotonic)) |_| { + return; + } + + std.debug.assert(this.subtask_count.load(.Monotonic) == 0); + + const root_fd = this.root_fd; + if (root_fd != bun.invalid_fd) { + this.root_fd = bun.invalid_fd; + _ = Syscall.close(root_fd); + bun.default_allocator.free(this.root_path.slice()); + this.root_path = PathString.empty; + } + + if (this.pending_err != null) { + this.clearResultList(); + } + + { + var list = this.result_list_queue.popBatch(); + var iter = list.iterator(); + + // we have to free only the previous one because the next value will + // be read by the iterator. 
+ var to_destroy: ?*ResultListEntry = null; + + switch (this.args.tag()) { + inline else => |tag| { + var results = &@field(this.result_list, @tagName(tag)); + results.ensureTotalCapacityPrecise(this.result_list_count.swap(0, .Monotonic)) catch bun.outOfMemory(); + while (iter.next()) |val| { + if (to_destroy) |dest| { + bun.default_allocator.destroy(dest); + } + to_destroy = val; + + var to_copy = &@field(val.value, @tagName(tag)); + results.appendSliceAssumeCapacity(to_copy.items); + to_copy.clearAndFree(); + } + + if (to_destroy) |dest| { + bun.default_allocator.destroy(dest); + } + }, + } + } + + this.globalObject.bunVMConcurrently().enqueueTaskConcurrent(JSC.ConcurrentTask.create(JSC.Task.init(this))); + } + + fn clearResultList(this: *AsyncReaddirRecursiveTask) void { + this.result_list.deinit(); + var batch = this.result_list_queue.popBatch(); + var iter = batch.iterator(); + var to_destroy: ?*ResultListEntry = null; + + while (iter.next()) |val| { + val.value.deinit(); + if (to_destroy) |dest| { + bun.default_allocator.destroy(dest); + } + to_destroy = val; + } + if (to_destroy) |dest| { + bun.default_allocator.destroy(dest); + } + this.result_list_count.store(0, .Monotonic); + } + + pub fn runFromJSThread(this: *AsyncReaddirRecursiveTask) void { + var globalObject = this.globalObject; + var success = this.pending_err == null; + const result = if (this.pending_err) |*err| err.toJSC(globalObject) else brk: { + const res = switch (this.result_list) { + .with_file_types => |*res| Return.Readdir{ .with_file_types = res.moveToUnmanaged().items }, + .buffers => |*res| Return.Readdir{ .buffers = res.moveToUnmanaged().items }, + .files => |*res| Return.Readdir{ .files = res.moveToUnmanaged().items }, + }; + var exceptionref: JSC.C.JSValueRef = null; + const out = res.toJS(globalObject, &exceptionref); + const exception = JSC.JSValue.c(exceptionref); + if (exception != .zero) { + success = false; + break :brk exception; + } + + break :brk out.?.value(); + }; + var 
promise_value = this.promise.value(); + var promise = this.promise.get(); + promise_value.ensureStillAlive(); + + const tracker = this.tracker; + tracker.willDispatch(globalObject); + defer tracker.didDispatch(globalObject); + + this.deinit(); + switch (success) { + false => { + promise.reject(globalObject, result); + }, + true => { + promise.resolve(globalObject, result); + }, + } + } + + pub fn deinit(this: *AsyncReaddirRecursiveTask) void { + std.debug.assert(this.root_fd == bun.invalid_fd); // should already have closed it + if (this.pending_err) |*err| { + bun.default_allocator.free(err.path); + } + + this.ref.unref(this.globalObject.bunVM()); + this.args.deinit(); + bun.default_allocator.free(this.root_path.slice()); + this.clearResultList(); + this.promise.strong.deinit(); + + bun.default_allocator.destroy(this); + } +}; + /// This task is used by `AsyncCpTask/fs.promises.cp` to copy a single file. /// When clonefile cannot be used, this task is started once per file. pub const AsyncCpSingleFileTask = struct { @@ -1644,6 +1989,7 @@ pub const Arguments = struct { path: PathLike, encoding: Encoding = Encoding.utf8, with_file_types: bool = false, + recursive: bool = false, pub fn deinit(this: Readdir) void { this.path.deinit(); @@ -1657,6 +2003,16 @@ pub const Arguments = struct { this.path.toThreadSafe(); } + pub fn tag(this: *const Readdir) Return.Readdir.Tag { + return switch (this.encoding) { + .buffer => .buffers, + else => if (this.with_file_types) + .with_file_types + else + .files, + }; + } + pub fn fromJS(ctx: JSC.C.JSContextRef, arguments: *ArgumentsSlice, exception: JSC.C.ExceptionRef) ?Readdir { const path = PathLike.fromJS(ctx, arguments, exception) orelse { if (exception.* == null) { @@ -1674,6 +2030,7 @@ pub const Arguments = struct { var encoding = Encoding.utf8; var with_file_types = false; + var recursive = false; if (arguments.next()) |val| { arguments.eat(); @@ -1688,6 +2045,13 @@ pub const Arguments = struct { encoding = 
Encoding.fromJS(encoding_, ctx.ptr()) orelse Encoding.utf8; } + if (val.getOptional(ctx.ptr(), "recursive", bool) catch { + path.deinit(); + return null; + }) |recursive_| { + recursive = recursive_; + } + if (val.getOptional(ctx.ptr(), "withFileTypes", bool) catch { path.deinit(); return null; @@ -1703,6 +2067,7 @@ pub const Arguments = struct { .path = path, .encoding = encoding, .with_file_types = with_file_types, + .recursive = recursive, }; } }; @@ -4121,62 +4486,31 @@ pub const NodeFS = struct { } pub fn readdir(this: *NodeFS, args: Arguments.Readdir, comptime flavor: Flavor) Maybe(Return.Readdir) { - return switch (args.encoding) { - .buffer => _readdir( - &this.sync_error_buf, - args, - Buffer, - flavor, - ), - else => { - if (!args.with_file_types) { - return _readdir( - &this.sync_error_buf, - args, - bun.String, - flavor, - ); - } + if (comptime flavor != .sync) { + if (args.recursive) { + @panic("Assertion failure: this code path should never be reached."); + } + } - return _readdir( - &this.sync_error_buf, - args, - Dirent, - flavor, - ); + return switch (args.recursive) { + inline else => |recursive| switch (args.tag()) { + .buffers => _readdir(&this.sync_error_buf, args, Buffer, recursive, flavor), + .with_file_types => _readdir(&this.sync_error_buf, args, Dirent, recursive, flavor), + .files => _readdir(&this.sync_error_buf, args, bun.String, recursive, flavor), }, }; } - pub fn _readdir( - buf: *[bun.MAX_PATH_BYTES]u8, + fn readdirWithEntries( args: Arguments.Readdir, + fd: bun.FileDescriptor, comptime ExpectedType: type, - comptime _: Flavor, - ) Maybe(Return.Readdir) { - const file_type = comptime switch (ExpectedType) { - Dirent => "with_file_types", - bun.String => "files", - Buffer => "buffers", - else => unreachable, - }; - - var path = args.path.sliceZ(buf); - const flags = os.O.DIRECTORY | os.O.RDONLY; - const fd = switch (Syscall.open(path, flags, 0)) { - .err => |err| return .{ - .err = err.withPath(args.path.slice()), - }, - .result => 
|fd_| fd_, - }; - defer { - _ = Syscall.close(fd); - } - - var entries = std.ArrayList(ExpectedType).init(bun.default_allocator); + entries: *std.ArrayList(ExpectedType), + ) Maybe(void) { var dir = std.fs.Dir{ .fd = bun.fdcast(fd) }; var iterator = DirIterator.iterate(dir); var entry = iterator.next(); + while (switch (entry) { .err => |err| { for (entries.items) |*item| { @@ -4208,19 +4542,335 @@ pub const NodeFS = struct { entries.append(.{ .name = bun.String.create(utf8_name), .kind = current.kind, - }) catch unreachable; + }) catch bun.outOfMemory(); }, Buffer => { - entries.append(Buffer.fromString(utf8_name, bun.default_allocator) catch unreachable) catch unreachable; + entries.append(Buffer.fromString(utf8_name, bun.default_allocator) catch bun.outOfMemory()) catch bun.outOfMemory(); }, bun.String => { - entries.append(bun.String.create(utf8_name)) catch unreachable; + entries.append(bun.String.create(utf8_name)) catch bun.outOfMemory(); }, else => unreachable, } } - return .{ .result = @unionInit(Return.Readdir, file_type, entries.items) }; + return Maybe(void).success; + } + + pub fn readdirWithEntriesRecursiveAsync( + buf: *[bun.MAX_PATH_BYTES]u8, + args: Arguments.Readdir, + async_task: *AsyncReaddirRecursiveTask, + basename: [:0]const u8, + comptime ExpectedType: type, + entries: *std.ArrayList(ExpectedType), + comptime is_root: bool, + ) Maybe(void) { + const flags = os.O.DIRECTORY | os.O.RDONLY; + const fd = switch (Syscall.openat(if (comptime is_root) bun.toFD(std.fs.cwd().fd) else async_task.root_fd, basename, flags, 0)) { + .err => |err| { + if (comptime !is_root) { + switch (err.getErrno()) { + // These things can happen and there's nothing we can do about it. + // + // This is different than what Node does, at the time of writing. + // Node doesn't gracefully handle errors like these. It fails the entire operation. 
+ .NOENT, .NOTDIR, .PERM => { + return Maybe(void).success; + }, + else => {}, + } + + const path_parts = [_]string{ async_task.root_path.slice(), basename }; + return .{ + .err = err.withPath(bun.path.joinZBuf(buf, &path_parts, .auto)), + }; + } + + return .{ + .err = err.withPath(args.path.slice()), + }; + }, + .result => |fd_| fd_, + }; + + if (comptime is_root) { + async_task.root_fd = fd; + } + + defer { + if (comptime !is_root) { + _ = Syscall.close(fd); + } + } + + var iterator = DirIterator.iterate(.{ .fd = bun.fdcast(fd) }); + var entry = iterator.next(); + + while (switch (entry) { + .err => |err| { + if (comptime !is_root) { + const path_parts = [_]string{ async_task.root_path.slice(), basename }; + return .{ + .err = err.withPath(bun.path.joinZBuf(buf, &path_parts, .auto)), + }; + } + + return .{ + .err = err.withPath(args.path.slice()), + }; + }, + .result => |ent| ent, + }) |current| : (entry = iterator.next()) { + const utf8_name = current.name.slice(); + + const name_to_copy: [:0]const u8 = brk: { + if (async_task.root_path.sliceAssumeZ().ptr == basename.ptr) { + break :brk @ptrCast(utf8_name); + } + + const path_parts = [_]string{ basename, utf8_name }; + break :brk bun.path.joinZBuf(buf, &path_parts, .auto); + }; + + enqueue: { + switch (current.kind) { + // a symlink might be a directory or might not be + // if it's not a directory, the task will fail at that point. + .sym_link, + + // we know for sure it's a directory + .directory, + => { + // if the name is too long, we can't enqueue it regardless + // the operating system would just return ENAMETOOLONG + // + // Technically, we could work around that due to the + // usage of openat, but then we risk leaving too many + // file descriptors open. 
+ if (current.name.len + 1 + name_to_copy.len > bun.MAX_PATH_BYTES) break :enqueue; + + async_task.enqueue(name_to_copy); + }, + else => {}, + } + } + + switch (comptime ExpectedType) { + Dirent => { + entries.append(.{ + .name = bun.String.create(name_to_copy), + .kind = current.kind, + }) catch bun.outOfMemory(); + }, + Buffer => { + entries.append(Buffer.fromString(name_to_copy, bun.default_allocator) catch bun.outOfMemory()) catch bun.outOfMemory(); + }, + bun.String => { + entries.append(bun.String.create(name_to_copy)) catch bun.outOfMemory(); + }, + else => bun.outOfMemory(), + } + } + + return Maybe(void).success; + } + + fn readdirWithEntriesRecursiveSync( + buf: *[bun.MAX_PATH_BYTES]u8, + args: Arguments.Readdir, + root_basename: [:0]const u8, + comptime ExpectedType: type, + entries: *std.ArrayList(ExpectedType), + ) Maybe(void) { + var iterator_stack = std.heap.stackFallback(128, bun.default_allocator); + var stack = std.fifo.LinearFifo([:0]const u8, .{ .Dynamic = {} }).init(iterator_stack.get()); + var basename_stack = std.heap.stackFallback(8192 * 2, bun.default_allocator); + const basename_allocator = basename_stack.get(); + defer { + while (stack.readItem()) |name| { + basename_allocator.free(name); + } + stack.deinit(); + } + + stack.writeItem(root_basename) catch unreachable; + var root_fd: bun.FileDescriptor = bun.invalid_fd; + + defer { + // all other paths are relative to the root directory + // so we can only close it once we're 100% done + if (root_fd != bun.invalid_fd) { + _ = Syscall.close(root_fd); + } + } + + while (stack.readItem()) |basename| { + defer { + if (root_basename.ptr != basename.ptr) { + basename_allocator.free(basename); + } + } + + const flags = os.O.DIRECTORY | os.O.RDONLY; + const fd = switch (Syscall.openat(if (root_fd == bun.invalid_fd) std.fs.cwd().fd else root_fd, basename, flags, 0)) { + .err => |err| { + if (root_fd == bun.invalid_fd) { + return .{ + .err = err.withPath(args.path.slice()), + }; + } + + switch 
(err.getErrno()) { + // These things can happen and there's nothing we can do about it. + // + // This is different than what Node does, at the time of writing. + // Node doesn't gracefully handle errors like these. It fails the entire operation. + .NOENT, .NOTDIR, .PERM => continue, + else => { + const path_parts = [_]string{ args.path.slice(), basename }; + return .{ + .err = err.withPath(bun.default_allocator.dupe(u8, bun.path.joinZBuf(buf, &path_parts, .auto)) catch ""), + }; + }, + } + }, + .result => |fd_| fd_, + }; + if (root_fd == bun.invalid_fd) { + root_fd = fd; + } + + defer { + if (fd != root_fd) { + _ = Syscall.close(fd); + } + } + + var iterator = DirIterator.iterate(.{ .fd = bun.fdcast(fd) }); + var entry = iterator.next(); + + while (switch (entry) { + .err => |err| { + return .{ + .err = err.withPath(args.path.slice()), + }; + }, + .result => |ent| ent, + }) |current| : (entry = iterator.next()) { + const utf8_name = current.name.slice(); + + const name_to_copy = brk: { + if (root_basename.ptr == basename.ptr) { + break :brk utf8_name; + } + + const path_parts = [_]string{ basename, utf8_name }; + break :brk bun.path.joinZBuf(buf, &path_parts, .auto); + }; + + enqueue: { + switch (current.kind) { + // a symlink might be a directory or might not be + // if it's not a directory, the task will fail at that point. 
+ .sym_link, + + // we know for sure it's a directory + .directory, + => { + if (current.name.len + 1 + name_to_copy.len > bun.MAX_PATH_BYTES) break :enqueue; + stack.writeItem(basename_allocator.dupeZ(u8, name_to_copy) catch break :enqueue) catch break :enqueue; + }, + else => {}, + } + } + + switch (comptime ExpectedType) { + Dirent => { + entries.append(.{ + .name = bun.String.create(name_to_copy), + .kind = current.kind, + }) catch bun.outOfMemory(); + }, + Buffer => { + entries.append(Buffer.fromString(name_to_copy, bun.default_allocator) catch bun.outOfMemory()) catch bun.outOfMemory(); + }, + bun.String => { + entries.append(bun.String.create(name_to_copy)) catch bun.outOfMemory(); + }, + else => @compileError("Impossible"), + } + } + } + + return Maybe(void).success; + } + + fn _readdir( + buf: *[bun.MAX_PATH_BYTES]u8, + args: Arguments.Readdir, + comptime ExpectedType: type, + comptime recursive: bool, + comptime flavor: Flavor, + ) Maybe(Return.Readdir) { + const file_type = comptime switch (ExpectedType) { + Dirent => "with_file_types", + bun.String => "files", + Buffer => "buffers", + else => unreachable, + }; + + var path = args.path.sliceZ(buf); + if (comptime recursive and flavor == .sync) { + var buf_to_pass: [bun.MAX_PATH_BYTES]u8 = undefined; + + var entries = std.ArrayList(ExpectedType).init(bun.default_allocator); + return switch (readdirWithEntriesRecursiveSync(&buf_to_pass, args, path, ExpectedType, &entries)) { + .err => |err| { + for (entries.items) |*result| { + switch (comptime ExpectedType) { + Dirent => { + result.name.deref(); + }, + Buffer => { + result.destroy(); + }, + bun.String => { + result.deref(); + }, + else => unreachable, + } + } + + entries.deinit(); + + return .{ + .err = err, + }; + }, + .result => .{ .result = @unionInit(Return.Readdir, file_type, entries.items) }, + }; + } + + if (comptime recursive) { + @panic("This code path should never be reached. 
It should only go through readdirWithEntriesRecursiveAsync."); + } + + const flags = os.O.DIRECTORY | os.O.RDONLY; + const fd = switch (Syscall.open(path, flags, 0)) { + .err => |err| return .{ + .err = err.withPath(args.path.slice()), + }, + .result => |fd_| fd_, + }; + + var entries = std.ArrayList(ExpectedType).init(bun.default_allocator); + return switch (readdirWithEntries(args, fd, ExpectedType, &entries)) { + .err => |err| return .{ + .err = err, + }, + .result => .{ .result = @unionInit(Return.Readdir, file_type, entries.items) }, + }; } pub const StringType = enum { diff --git a/src/bun.js/node/node_fs_binding.zig b/src/bun.js/node/node_fs_binding.zig index 967acbe5373880..228c971e7f1376 100644 --- a/src/bun.js/node/node_fs_binding.zig +++ b/src/bun.js/node/node_fs_binding.zig @@ -139,6 +139,12 @@ fn call(comptime FunctionEnum: NodeFSFunctionEnum) NodeFSFunction { if (comptime FunctionEnum == .cp) { return Task.create(globalObject, args, globalObject.bunVM(), slice.arena); } else { + if (comptime FunctionEnum == .readdir) { + if (args.recursive) { + return JSC.Node.Async.readdir_recursive.create(globalObject, args, globalObject.bunVM()); + } + } + return Task.create(globalObject, args, globalObject.bunVM()); } } diff --git a/src/bun.js/rare_data.zig b/src/bun.js/rare_data.zig index f97e94d8db3698..e4aa7a696013f6 100644 --- a/src/bun.js/rare_data.zig +++ b/src/bun.js/rare_data.zig @@ -36,7 +36,7 @@ global_dns_data: ?*JSC.DNS.GlobalData = null, spawn_ipc_usockets_context: ?*uws.SocketContext = null, -mime_types: ?bun.HTTP.MimeType.Map = null, +mime_types: ?bun.http.MimeType.Map = null, node_fs_stat_watcher_scheduler: ?*StatWatcherScheduler = null, @@ -74,9 +74,9 @@ pub fn hotMap(this: *RareData, allocator: std.mem.Allocator) *HotMap { return &this.hot_map.?; } -pub fn mimeTypeFromString(this: *RareData, allocator: std.mem.Allocator, str: []const u8) ?bun.HTTP.MimeType { +pub fn mimeTypeFromString(this: *RareData, allocator: std.mem.Allocator, str: []const 
u8) ?bun.http.MimeType { if (this.mime_types == null) { - this.mime_types = bun.HTTP.MimeType.createHashTable( + this.mime_types = bun.http.MimeType.createHashTable( allocator, ) catch @panic("Out of memory"); } diff --git a/src/bun.js/test/expect.zig b/src/bun.js/test/expect.zig index e6dc378d8894fd..2a43cb9eaa2c41 100644 --- a/src/bun.js/test/expect.zig +++ b/src/bun.js/test/expect.zig @@ -3348,29 +3348,19 @@ pub const Expect = struct { if (pass) return thisValue; // handle failure - var formatter = JSC.ZigConsoleClient.Formatter{ .globalThis = globalObject, .quote_strings = true }; if (not) { const signature = comptime getSignature("toHaveBeenCalled", "", true); - const fmt = signature ++ "\n\nExpected: not {any}\n"; - if (Output.enable_ansi_colors) { - globalObject.throw(Output.prettyFmt(fmt, true), .{calls.toFmt(globalObject, &formatter)}); - return .zero; - } - globalObject.throw(Output.prettyFmt(fmt, false), .{calls.toFmt(globalObject, &formatter)}); - return .zero; - } else { - const signature = comptime getSignature("toHaveBeenCalled", "", false); - const fmt = signature ++ "\n\nExpected {any}\n"; - if (Output.enable_ansi_colors) { - globalObject.throw(Output.prettyFmt(fmt, true), .{calls.toFmt(globalObject, &formatter)}); - return .zero; - } - globalObject.throw(Output.prettyFmt(fmt, false), .{calls.toFmt(globalObject, &formatter)}); + const fmt = signature ++ "\n\n" ++ "Expected number of calls: 0\n" ++ "Received number of calls: {any}\n"; + globalObject.throwPretty(fmt, .{calls.getLength(globalObject)}); return .zero; } - unreachable; + const signature = comptime getSignature("toHaveBeenCalled", "", false); + const fmt = signature ++ "\n\n" ++ "Expected number of calls: \\>= 1\n" ++ "Received number of calls: {any}\n"; + globalObject.throwPretty(fmt, .{calls.getLength(globalObject)}); + return .zero; } + pub fn toHaveBeenCalledTimes(this: *Expect, globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) callconv(.C) JSC.JSValue { 
JSC.markBinding(@src()); @@ -3389,8 +3379,8 @@ pub const Expect = struct { return .zero; } - if (arguments.len < 1 or !arguments[0].isAnyInt()) { - globalObject.throwInvalidArguments("toHaveBeenCalledTimes() requires 1 integer argument", .{}); + if (arguments.len < 1 or !arguments[0].isUInt32AsAnyInt()) { + globalObject.throwInvalidArguments("toHaveBeenCalledTimes() requires 1 non-negative integer argument", .{}); return .zero; } @@ -3403,28 +3393,17 @@ pub const Expect = struct { if (pass) return thisValue; // handle failure - var formatter = JSC.ZigConsoleClient.Formatter{ .globalThis = globalObject, .quote_strings = true }; if (not) { const signature = comptime getSignature("toHaveBeenCalledTimes", "expected", true); - const fmt = signature ++ "\n\nExpected: not {any}\n"; - if (Output.enable_ansi_colors) { - globalObject.throw(Output.prettyFmt(fmt, true), .{calls.toFmt(globalObject, &formatter)}); - return .zero; - } - globalObject.throw(Output.prettyFmt(fmt, false), .{calls.toFmt(globalObject, &formatter)}); - return .zero; - } else { - const signature = comptime getSignature("toHaveBeenCalledTimes", "expected", false); - const fmt = signature ++ "\n\nExpected {any}\n"; - if (Output.enable_ansi_colors) { - globalObject.throw(Output.prettyFmt(fmt, true), .{calls.toFmt(globalObject, &formatter)}); - return .zero; - } - globalObject.throw(Output.prettyFmt(fmt, false), .{calls.toFmt(globalObject, &formatter)}); + const fmt = signature ++ "\n\n" ++ "Expected number of calls: not {any}\n" ++ "Received number of calls: {any}\n"; + globalObject.throwPretty(fmt, .{ times, calls.getLength(globalObject) }); return .zero; } - unreachable; + const signature = comptime getSignature("toHaveBeenCalledTimes", "expected", false); + const fmt = signature ++ "\n\n" ++ "Expected number of calls: {any}\n" ++ "Received number of calls: {any}\n"; + globalObject.throwPretty(fmt, .{ times, calls.getLength(globalObject) }); + return .zero; } pub fn toMatchObject(this: *Expect, 
globalObject: *JSC.JSGlobalObject, callFrame: *JSC.CallFrame) callconv(.C) JSValue { @@ -3493,9 +3472,208 @@ pub const Expect = struct { return .zero; } - pub const toHaveBeenCalledWith = notImplementedJSCFn; - pub const toHaveBeenLastCalledWith = notImplementedJSCFn; - pub const toHaveBeenNthCalledWith = notImplementedJSCFn; + pub fn toHaveBeenCalledWith(this: *Expect, globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) callconv(.C) JSC.JSValue { + JSC.markBinding(@src()); + + const thisValue = callframe.this(); + const arguments_ = callframe.argumentsPtr()[0..callframe.argumentsCount()]; + const arguments: []const JSValue = arguments_.ptr[0..arguments_.len]; + defer this.postMatch(globalObject); + const value: JSValue = this.getValue(globalObject, thisValue, "toHaveBeenCalledWith", "expected") orelse return .zero; + + active_test_expectation_counter.actual += 1; + + const calls = JSMockFunction__getCalls(value); + + if (calls == .zero or !calls.jsType().isArray()) { + globalObject.throw("Expected value must be a mock function: {}", .{value}); + return .zero; + } + + var pass = false; + + if (calls.getLength(globalObject) > 0) { + var itr = calls.arrayIterator(globalObject); + while (itr.next()) |callItem| { + if (callItem == .zero or !callItem.jsType().isArray()) { + globalObject.throw("Expected value must be a mock function with calls: {}", .{value}); + return .zero; + } + + if (callItem.getLength(globalObject) != arguments.len) { + continue; + } + + var callItr = callItem.arrayIterator(globalObject); + var match = true; + while (callItr.next()) |callArg| { + if (!callArg.jestDeepEquals(arguments[callItr.i - 1], globalObject)) { + match = false; + break; + } + } + + if (match) { + pass = true; + break; + } + } + } + + const not = this.flags.not; + if (not) pass = !pass; + if (pass) return thisValue; + + // handle failure + if (not) { + const signature = comptime getSignature("toHaveBeenCalledWith", "expected", true); + const fmt = signature ++ "\n\n" 
++ "Number of calls: {any}\n"; + globalObject.throwPretty(fmt, .{calls.getLength(globalObject)}); + return .zero; + } + + const signature = comptime getSignature("toHaveBeenCalledWith", "expected", false); + const fmt = signature ++ "\n\n" ++ "Number of calls: {any}\n"; + globalObject.throwPretty(fmt, .{calls.getLength(globalObject)}); + return .zero; + } + + pub fn toHaveBeenLastCalledWith(this: *Expect, globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) callconv(.C) JSC.JSValue { + JSC.markBinding(@src()); + + const thisValue = callframe.this(); + const arguments_ = callframe.argumentsPtr()[0..callframe.argumentsCount()]; + const arguments: []const JSValue = arguments_.ptr[0..arguments_.len]; + defer this.postMatch(globalObject); + const value: JSValue = this.getValue(globalObject, thisValue, "toHaveBeenLastCalledWith", "expected") orelse return .zero; + + active_test_expectation_counter.actual += 1; + + const calls = JSMockFunction__getCalls(value); + + if (calls == .zero or !calls.jsType().isArray()) { + globalObject.throw("Expected value must be a mock function: {}", .{value}); + return .zero; + } + + const totalCalls = @as(u32, @intCast(calls.getLength(globalObject))); + var lastCallValue: JSValue = .zero; + + var pass = totalCalls > 0; + + if (pass) { + lastCallValue = calls.getIndex(globalObject, totalCalls - 1); + + if (lastCallValue == .zero or !lastCallValue.jsType().isArray()) { + globalObject.throw("Expected value must be a mock function with calls: {}", .{value}); + return .zero; + } + + if (lastCallValue.getLength(globalObject) != arguments.len) { + pass = false; + } else { + var itr = lastCallValue.arrayIterator(globalObject); + while (itr.next()) |callArg| { + if (!callArg.jestDeepEquals(arguments[itr.i - 1], globalObject)) { + pass = false; + break; + } + } + } + } + + const not = this.flags.not; + if (not) pass = !pass; + if (pass) return thisValue; + + // handle failure + var formatter = JSC.ZigConsoleClient.Formatter{ .globalThis = 
globalObject, .quote_strings = true }; + const received_fmt = lastCallValue.toFmt(globalObject, &formatter); + + if (not) { + const signature = comptime getSignature("toHaveBeenLastCalledWith", "expected", true); + const fmt = signature ++ "\n\n" ++ "Received: {any}" ++ "\n\n" ++ "Number of calls: {any}\n"; + globalObject.throwPretty(fmt, .{ received_fmt, totalCalls }); + return .zero; + } + + const signature = comptime getSignature("toHaveBeenLastCalledWith", "expected", false); + const fmt = signature ++ "\n\n" ++ "Received: {any}" ++ "\n\n" ++ "Number of calls: {any}\n"; + globalObject.throwPretty(fmt, .{ received_fmt, totalCalls }); + return .zero; + } + + pub fn toHaveBeenNthCalledWith(this: *Expect, globalObject: *JSC.JSGlobalObject, callframe: *JSC.CallFrame) callconv(.C) JSC.JSValue { + JSC.markBinding(@src()); + + const thisValue = callframe.this(); + const arguments_ = callframe.argumentsPtr()[0..callframe.argumentsCount()]; + const arguments: []const JSValue = arguments_.ptr[0..arguments_.len]; + defer this.postMatch(globalObject); + const value: JSValue = this.getValue(globalObject, thisValue, "toHaveBeenNthCalledWith", "expected") orelse return .zero; + + active_test_expectation_counter.actual += 1; + + const calls = JSMockFunction__getCalls(value); + + if (calls == .zero or !calls.jsType().isArray()) { + globalObject.throw("Expected value must be a mock function: {}", .{value}); + return .zero; + } + + const nthCallNum = if (arguments.len > 0 and arguments[0].isUInt32AsAnyInt()) arguments[0].coerce(i32, globalObject) else 0; + if (nthCallNum < 1) { + globalObject.throwInvalidArguments("toHaveBeenNthCalledWith() requires a positive integer argument", .{}); + return .zero; + } + + const totalCalls = calls.getLength(globalObject); + var nthCallValue: JSValue = .zero; + + var pass = totalCalls >= nthCallNum; + + if (pass) { + nthCallValue = calls.getIndex(globalObject, @as(u32, @intCast(nthCallNum)) - 1); + + if (nthCallValue == .zero or 
!nthCallValue.jsType().isArray()) { + globalObject.throw("Expected value must be a mock function with calls: {}", .{value}); + return .zero; + } + + if (nthCallValue.getLength(globalObject) != (arguments.len - 1)) { + pass = false; + } else { + var itr = nthCallValue.arrayIterator(globalObject); + while (itr.next()) |callArg| { + if (!callArg.jestDeepEquals(arguments[itr.i], globalObject)) { + pass = false; + break; + } + } + } + } + + const not = this.flags.not; + if (not) pass = !pass; + if (pass) return thisValue; + + // handle failure + var formatter = JSC.ZigConsoleClient.Formatter{ .globalThis = globalObject, .quote_strings = true }; + const received_fmt = nthCallValue.toFmt(globalObject, &formatter); + + if (not) { + const signature = comptime getSignature("toHaveBeenNthCalledWith", "expected", true); + const fmt = signature ++ "\n\n" ++ "n: {any}\n" ++ "Received: {any}" ++ "\n\n" ++ "Number of calls: {any}\n"; + globalObject.throwPretty(fmt, .{ nthCallNum, received_fmt, totalCalls }); + return .zero; + } + + const signature = comptime getSignature("toHaveBeenNthCalledWith", "expected", false); + const fmt = signature ++ "\n\n" ++ "n: {any}\n" ++ "Received: {any}" ++ "\n\n" ++ "Number of calls: {any}\n"; + globalObject.throwPretty(fmt, .{ nthCallNum, received_fmt, totalCalls }); + return .zero; + } + pub const toHaveReturnedTimes = notImplementedJSCFn; pub const toHaveReturnedWith = notImplementedJSCFn; pub const toHaveLastReturnedWith = notImplementedJSCFn; diff --git a/src/bun.js/test/jest.classes.ts b/src/bun.js/test/jest.classes.ts index 017c446eea0b35..5f421918aba6ed 100644 --- a/src/bun.js/test/jest.classes.ts +++ b/src/bun.js/test/jest.classes.ts @@ -146,15 +146,12 @@ export default [ }, toHaveBeenCalledWith: { fn: "toHaveBeenCalledWith", - length: 1, }, toHaveBeenLastCalledWith: { fn: "toHaveBeenLastCalledWith", - length: 1, }, toHaveBeenNthCalledWith: { fn: "toHaveBeenNthCalledWith", - length: 1, }, toHaveReturnedTimes: { fn: "toHaveReturnedTimes", 
diff --git a/src/bun.js/test/jest.zig b/src/bun.js/test/jest.zig index 751a57361e9963..9844c93edd0a12 100644 --- a/src/bun.js/test/jest.zig +++ b/src/bun.js/test/jest.zig @@ -3,9 +3,9 @@ const bun = @import("root").bun; const js_parser = bun.js_parser; const js_ast = bun.JSAst; const Api = @import("../../api/schema.zig").Api; -const MimeType = @import("../../bun_dev_http_server.zig").MimeType; +const MimeType = bun.http.MimeType; const ZigURL = @import("../../url.zig").URL; -const HTTPClient = @import("root").bun.HTTP; +const HTTPClient = @import("root").bun.http; const NetworkThread = HTTPClient.NetworkThread; const Environment = bun.Environment; diff --git a/src/bun.js/webcore/blob.zig b/src/bun.js/webcore/blob.zig index 03b6bb4881f78d..455a515008b42e 100644 --- a/src/bun.js/webcore/blob.zig +++ b/src/bun.js/webcore/blob.zig @@ -1,10 +1,10 @@ const std = @import("std"); const Api = @import("../../api/schema.zig").Api; const bun = @import("root").bun; -const MimeType = @import("../../bun_dev_http_server.zig").MimeType; +const MimeType = http.MimeType; const ZigURL = @import("../../url.zig").URL; -const HTTPClient = @import("root").bun.HTTP; -const NetworkThread = HTTPClient.NetworkThread; +const http = @import("root").bun.http; +const NetworkThread = http.NetworkThread; const AsyncIO = NetworkThread.AsyncIO; const JSC = @import("root").bun.JSC; const js = JSC.C; @@ -1433,7 +1433,7 @@ pub const Blob = struct { this.deref(); } - pub fn initFile(pathlike: JSC.Node.PathOrFileDescriptor, mime_type: ?HTTPClient.MimeType, allocator: std.mem.Allocator) !*Store { + pub fn initFile(pathlike: JSC.Node.PathOrFileDescriptor, mime_type: ?http.MimeType, allocator: std.mem.Allocator) !*Store { var store = try allocator.create(Blob.Store); store.* = .{ .data = .{ @@ -1445,7 +1445,7 @@ pub const Blob = struct { if (sliced.len > 0) { var extname = std.fs.path.extension(sliced); extname = std.mem.trim(u8, extname, "."); - if (HTTPClient.MimeType.byExtensionNoDefault(extname)) |mime| 
{ + if (http.MimeType.byExtensionNoDefault(extname)) |mime| { break :brk mime; } } @@ -1609,12 +1609,12 @@ pub const Blob = struct { } } - const WrappedOpenCallback = *const fn (*State, *HTTPClient.NetworkThread.Completion, AsyncIO.OpenError!bun.FileDescriptor) void; + const WrappedOpenCallback = *const fn (*State, *http.NetworkThread.Completion, AsyncIO.OpenError!bun.FileDescriptor) void; fn OpenCallbackWrapper(comptime Callback: OpenCallback) WrappedOpenCallback { return struct { const callback = Callback; const StateHolder = State; - pub fn onOpen(state: *State, completion: *HTTPClient.NetworkThread.Completion, result: AsyncIO.OpenError!bun.FileDescriptor) void { + pub fn onOpen(state: *State, completion: *http.NetworkThread.Completion, result: AsyncIO.OpenError!bun.FileDescriptor) void { var this = state.context; var path_buffer = completion.operation.open.path; defer bun.default_allocator.free(bun.span(path_buffer)); @@ -1712,7 +1712,7 @@ pub const Blob = struct { this.opened_fd = null_fd; } - pub fn onClose(closer: *Closer, _: *HTTPClient.NetworkThread.Completion, _: AsyncIO.CloseError!void) void { + pub fn onClose(closer: *Closer, _: *http.NetworkThread.Completion, _: AsyncIO.CloseError!void) void { bun.default_allocator.destroy(closer); } }; @@ -1725,13 +1725,13 @@ pub const Blob = struct { offset: SizeType = 0, max_length: SizeType = Blob.max_size, opened_fd: bun.FileDescriptor = null_fd, - read_completion: HTTPClient.NetworkThread.Completion = undefined, + read_completion: http.NetworkThread.Completion = undefined, read_len: SizeType = 0, read_off: SizeType = 0, read_eof: bool = false, size: SizeType = 0, buffer: []u8 = undefined, - task: HTTPClient.NetworkThread.Task = undefined, + task: http.NetworkThread.Task = undefined, system_error: ?JSC.SystemError = null, errno: ?anyerror = null, onCompleteCtx: *anyopaque = undefined, @@ -1844,10 +1844,10 @@ pub const Blob = struct { this.runAsync(task); } - pub fn onRead(this: *ReadFile, completion: 
*HTTPClient.NetworkThread.Completion, result: AsyncIO.ReadError!usize) void { + pub fn onRead(this: *ReadFile, completion: *http.NetworkThread.Completion, result: AsyncIO.ReadError!usize) void { defer this.doReadLoop(); const read_len = @as(SizeType, @truncate(result catch |err| { - if (@hasField(HTTPClient.NetworkThread.Completion, "result")) { + if (@hasField(http.NetworkThread.Completion, "result")) { this.errno = AsyncIO.asError(-completion.result); this.system_error = (bun.sys.Error{ .errno = @as(bun.sys.Error.Int, @intCast(-completion.result)), @@ -1999,8 +1999,8 @@ pub const Blob = struct { opened_fd: bun.FileDescriptor = null_fd, system_error: ?JSC.SystemError = null, errno: ?anyerror = null, - write_completion: HTTPClient.NetworkThread.Completion = undefined, - task: HTTPClient.NetworkThread.Task = undefined, + write_completion: http.NetworkThread.Completion = undefined, + task: http.NetworkThread.Task = undefined, io_task: ?*WriteFileTask = null, onCompleteCtx: *anyopaque = undefined, @@ -2104,7 +2104,7 @@ pub const Blob = struct { this.runAsync(); } - pub fn onWrite(this: *WriteFile, _: *HTTPClient.NetworkThread.Completion, result: AsyncIO.WriteError!usize) void { + pub fn onWrite(this: *WriteFile, _: *http.NetworkThread.Completion, result: AsyncIO.WriteError!usize) void { defer this.doWriteLoop(); this.wrote += @as(SizeType, @truncate(result catch |errno| { this.errno = errno; @@ -2692,7 +2692,7 @@ pub const Blob = struct { pub const FileStore = struct { pathlike: JSC.Node.PathOrFileDescriptor, - mime_type: HTTPClient.MimeType = HTTPClient.MimeType.other, + mime_type: http.MimeType = http.MimeType.other, is_atty: ?bool = null, mode: bun.Mode = 0, seekable: ?bool = null, @@ -2712,8 +2712,8 @@ pub const Blob = struct { return null; } - pub fn init(pathlike: JSC.Node.PathOrFileDescriptor, mime_type: ?HTTPClient.MimeType) FileStore { - return .{ .pathlike = pathlike, .mime_type = mime_type orelse HTTPClient.MimeType.other }; + pub fn init(pathlike: 
JSC.Node.PathOrFileDescriptor, mime_type: ?http.MimeType) FileStore { + return .{ .pathlike = pathlike, .mime_type = mime_type orelse http.MimeType.other }; } }; @@ -3074,7 +3074,7 @@ pub const Blob = struct { return blob_.toJS(globalThis); } - pub fn getMimeType(this: *const Blob) ?bun.HTTP.MimeType { + pub fn getMimeType(this: *const Blob) ?bun.http.MimeType { if (this.store) |store| { return store.mime_type; } diff --git a/src/bun.js/webcore/body.zig b/src/bun.js/webcore/body.zig index 0770cf35e248a2..ae8a3352e5089a 100644 --- a/src/bun.js/webcore/body.zig +++ b/src/bun.js/webcore/body.zig @@ -1,9 +1,9 @@ const std = @import("std"); const Api = @import("../../api/schema.zig").Api; const bun = @import("root").bun; -const MimeType = @import("../../bun_dev_http_server.zig").MimeType; +const MimeType = bun.http.MimeType; const ZigURL = @import("../../url.zig").URL; -const HTTPClient = @import("root").bun.HTTP; +const HTTPClient = @import("root").bun.http; const NetworkThread = HTTPClient.NetworkThread; const AsyncIO = NetworkThread.AsyncIO; const JSC = @import("root").bun.JSC; diff --git a/src/bun.js/webcore/encoding.zig b/src/bun.js/webcore/encoding.zig index 2f973792ed3db6..3f1a7611eb8956 100644 --- a/src/bun.js/webcore/encoding.zig +++ b/src/bun.js/webcore/encoding.zig @@ -1,8 +1,8 @@ const std = @import("std"); const Api = @import("../../api/schema.zig").Api; -const MimeType = @import("../../bun_dev_http_server.zig").MimeType; +const MimeType = bun.http.MimeType; const ZigURL = @import("../../url.zig").URL; -const HTTPClient = @import("root").bun.HTTP; +const HTTPClient = @import("root").bun.http; const NetworkThread = HTTPClient.NetworkThread; const JSC = @import("root").bun.JSC; diff --git a/src/bun.js/webcore/request.zig b/src/bun.js/webcore/request.zig index f0e3672f3993da..02d9eb07b6d616 100644 --- a/src/bun.js/webcore/request.zig +++ b/src/bun.js/webcore/request.zig @@ -1,10 +1,9 @@ const std = @import("std"); const Api = 
@import("../../api/schema.zig").Api; const bun = @import("root").bun; -const RequestContext = @import("../../bun_dev_http_server.zig").RequestContext; -const MimeType = @import("../../bun_dev_http_server.zig").MimeType; +const MimeType = bun.http.MimeType; const ZigURL = @import("../../url.zig").URL; -const HTTPClient = @import("root").bun.HTTP; +const HTTPClient = @import("root").bun.http; const NetworkThread = HTTPClient.NetworkThread; const AsyncIO = NetworkThread.AsyncIO; const JSC = @import("root").bun.JSC; @@ -189,17 +188,6 @@ pub const Request = struct { try writer.writeAll("}"); } - pub fn fromRequestContext(ctx: *RequestContext) !Request { - if (comptime Environment.isWindows) unreachable; - var req = Request{ - .url = bun.String.create(ctx.full_url), - .body = try InitRequestBodyValue(.{ .Null = {} }), - .method = ctx.method, - .headers = FetchHeaders.createFromPicoHeaders(ctx.request.headers), - }; - return req; - } - pub fn mimeType(this: *const Request) string { if (this.headers) |headers| { if (headers.fastGet(.ContentType)) |content_type| { diff --git a/src/bun.js/webcore/response.zig b/src/bun.js/webcore/response.zig index a6e090c1ee676f..7aa0e1d7c458bd 100644 --- a/src/bun.js/webcore/response.zig +++ b/src/bun.js/webcore/response.zig @@ -1,12 +1,11 @@ const std = @import("std"); const Api = @import("../../api/schema.zig").Api; const bun = @import("root").bun; -const RequestContext = @import("../../bun_dev_http_server.zig").RequestContext; -const MimeType = @import("../../bun_dev_http_server.zig").MimeType; +const MimeType = bun.http.MimeType; const ZigURL = @import("../../url.zig").URL; -const HTTPClient = @import("root").bun.HTTP; -const FetchRedirect = HTTPClient.FetchRedirect; -const NetworkThread = HTTPClient.NetworkThread; +const http = @import("root").bun.http; +const FetchRedirect = http.FetchRedirect; +const NetworkThread = http.NetworkThread; const AsyncIO = NetworkThread.AsyncIO; const JSC = @import("root").bun.JSC; const js = JSC.C; @@ 
-303,56 +302,6 @@ pub const Response = struct { allocator.destroy(this); } - pub fn mimeType(response: *const Response, request_ctx_: ?*const RequestContext) string { - if (comptime Environment.isWindows) unreachable; - return mimeTypeWithDefault(response, MimeType.other, request_ctx_); - } - - pub fn mimeTypeWithDefault(response: *const Response, default: MimeType, request_ctx_: ?*const RequestContext) string { - if (comptime Environment.isWindows) unreachable; - - if (response.header(.ContentType)) |content_type| { - return content_type; - } - - if (request_ctx_) |request_ctx| { - if (request_ctx.url.extname.len > 0) { - return MimeType.byExtension(request_ctx.url.extname).value; - } - } - - switch (response.body.value) { - .Blob => |blob| { - if (blob.content_type.len > 0) { - return blob.content_type; - } - - // auto-detect HTML if unspecified - if (strings.hasPrefixComptime(response.body.value.slice(), "")) { - return MimeType.html.value; - } - - return default.value; - }, - .WTFStringImpl => |str| { - if (bun.String.init(str).hasPrefixComptime("")) { - return MimeType.html.value; - } - - return default.value; - }, - .InternalBlob => { - // auto-detect HTML if unspecified - if (strings.hasPrefixComptime(response.body.value.slice(), "")) { - return MimeType.html.value; - } - - return response.body.value.InternalBlob.contentType(); - }, - .Null, .Used, .Locked, .Empty, .Error => return default.value, - } - } - pub fn getContentType( this: *Response, ) ?ZigString.Slice { @@ -767,9 +716,9 @@ pub const Fetch = struct { pub const FetchTasklet = struct { const log = Output.scoped(.FetchTasklet, false); - http: ?*HTTPClient.AsyncHTTP = null, - result: HTTPClient.HTTPClientResult = .{}, - metadata: ?HTTPClient.HTTPResponseMetadata = null, + http: ?*http.AsyncHTTP = null, + result: http.HTTPClientResult = .{}, + metadata: ?http.HTTPResponseMetadata = null, javascript_vm: *VirtualMachine = undefined, global_this: *JSGlobalObject = undefined, request_body: HTTPRequestBody 
= undefined, @@ -790,15 +739,15 @@ pub const Fetch = struct { /// when Content-Length is provided this represents the whole size of the request /// If chunked encoded this will represent the total received size (ignoring the chunk headers) /// If is not chunked encoded and Content-Length is not provided this will be unknown - body_size: HTTPClient.HTTPClientResult.BodySize = .unknown, + body_size: http.HTTPClientResult.BodySize = .unknown, /// This is url + proxy memory buffer and is owned by FetchTasklet /// We always clone url and proxy (if informed) url_proxy_buffer: []const u8 = "", signal: ?*JSC.WebCore.AbortSignal = null, - signals: HTTPClient.Signals = .{}, - signal_store: HTTPClient.Signals.Store = .{}, + signals: http.Signals = .{}, + signal_store: http.Signals.Store = .{}, has_schedule_callback: std.atomic.Atomic(bool) = std.atomic.Atomic(bool).init(false), // must be stored because AbortSignal stores reason weakly @@ -817,7 +766,7 @@ pub const Fetch = struct { pub const HTTPRequestBody = union(enum) { AnyBlob: AnyBlob, - Sendfile: HTTPClient.Sendfile, + Sendfile: http.Sendfile, pub fn store(this: *HTTPRequestBody) ?*JSC.WebCore.Blob.Store { return switch (this.*) { @@ -899,7 +848,7 @@ pub const Fetch = struct { var reporter = this.memory_reporter; const allocator = reporter.allocator(); - if (this.http) |http| allocator.destroy(http); + if (this.http) |http_| allocator.destroy(http_); allocator.destroy(this); // reporter.assert(); bun.default_allocator.destroy(reporter); @@ -1201,7 +1150,7 @@ pub const Fetch = struct { globalThis.bunVM().enqueueTask(JSC.Task.init(&holder.task)); } - pub fn checkServerIdentity(this: *FetchTasklet, certificate_info: HTTPClient.CertificateInfo) bool { + pub fn checkServerIdentity(this: *FetchTasklet, certificate_info: http.CertificateInfo) bool { if (this.check_server_identity.get()) |check_server_identity| { check_server_identity.ensureStillAlive(); if (certificate_info.cert.len > 0) { @@ -1229,7 +1178,7 @@ pub const Fetch 
= struct { // we need to abort the request if (this.http != null) { - HTTPClient.http_thread.scheduleShutdown(this.http.?); + http.http_thread.scheduleShutdown(this.http.?); } this.result.fail = error.ERR_TLS_CERT_ALTNAME_INVALID; return false; @@ -1268,8 +1217,8 @@ pub const Fetch = struct { // some times we don't have metadata so we also check http.url if (this.metadata) |metadata| { path = bun.String.create(metadata.url); - } else if (this.http) |http| { - path = bun.String.create(http.url.href); + } else if (this.http) |http_| { + path = bun.String.create(http_.url.href); } else { path = bun.String.empty; } @@ -1363,8 +1312,8 @@ pub const Fetch = struct { pub fn onStartStreamingRequestBodyCallback(ctx: *anyopaque) JSC.WebCore.DrainResult { const this = bun.cast(*FetchTasklet, ctx); - if (this.http) |http| { - http.enableBodyStreaming(); + if (this.http) |http_| { + http_.enableBodyStreaming(); } if (this.signal_store.aborted.load(.Monotonic)) { return JSC.WebCore.DrainResult{ @@ -1499,7 +1448,7 @@ pub const Fetch = struct { .capacity = 0, }, }, - .http = try allocator.create(HTTPClient.AsyncHTTP), + .http = try allocator.create(http.AsyncHTTP), .javascript_vm = jsc_vm, .request_body = fetch_options.body, .global_this = globalThis, @@ -1540,7 +1489,7 @@ pub const Fetch = struct { } } - fetch_tasklet.http.?.* = HTTPClient.AsyncHTTP.init( + fetch_tasklet.http.?.* = http.AsyncHTTP.init( fetch_options.memory_reporter.allocator(), fetch_options.method, fetch_options.url, @@ -1549,7 +1498,7 @@ pub const Fetch = struct { &fetch_tasklet.response_buffer, fetch_tasklet.request_body.slice(), fetch_options.timeout, - HTTPClient.HTTPClientResult.Callback.New( + http.HTTPClientResult.Callback.New( *FetchTasklet, FetchTasklet.callback, ).init( @@ -1596,7 +1545,7 @@ pub const Fetch = struct { this.tracker.didCancel(this.global_this); if (this.http != null) { - HTTPClient.http_thread.scheduleShutdown(this.http.?); + http.http_thread.scheduleShutdown(this.http.?); } } @@ -1628,7 
+1577,7 @@ pub const Fetch = struct { fetch_options: FetchOptions, promise: JSC.JSPromise.Strong, ) !*FetchTasklet { - try HTTPClient.HTTPThread.init(); + try http.HTTPThread.init(); var node = try get( allocator, global, @@ -1640,12 +1589,12 @@ pub const Fetch = struct { node.http.?.schedule(allocator, &batch); node.poll_ref.ref(global.bunVM()); - HTTPClient.http_thread.schedule(batch); + http.http_thread.schedule(batch); return node; } - pub fn callback(task: *FetchTasklet, result: HTTPClient.HTTPClientResult) void { + pub fn callback(task: *FetchTasklet, result: http.HTTPClientResult) void { task.mutex.lock(); defer task.mutex.unlock(); log("callback success {} has_more {} bytes {}", .{ result.isSuccess(), result.has_more, result.body.?.list.items.len }); @@ -1691,7 +1640,7 @@ pub const Fetch = struct { var blob = Blob.init(data, allocator, globalThis); var allocated = false; - const mime_type = bun.HTTP.MimeType.init(data_url.mime_type, allocator, &allocated); + const mime_type = bun.http.MimeType.init(data_url.mime_type, allocator, &allocated); blob.content_type = mime_type.value; if (allocated) { blob.content_type_allocated = true; @@ -2285,7 +2234,7 @@ pub const Fetch = struct { .result => |fd| fd, }; - if (proxy == null and bun.HTTP.Sendfile.isEligible(url)) { + if (proxy == null and bun.http.Sendfile.isEligible(url)) { use_sendfile: { const stat: bun.Stat = switch (bun.sys.fstat(opened_fd)) { .result => |result| result, @@ -2413,7 +2362,7 @@ pub const Fetch = struct { // https://developer.mozilla.org/en-US/docs/Web/API/Headers pub const Headers = struct { - pub usingnamespace HTTPClient.Headers; + pub usingnamespace http.Headers; entries: Headers.Entries = .{}, buf: std.ArrayListUnmanaged(u8) = .{}, allocator: std.mem.Allocator, diff --git a/src/bun.js/webcore/streams.zig b/src/bun.js/webcore/streams.zig index 04860ec1cf03c6..208fef2b6ed7d2 100644 --- a/src/bun.js/webcore/streams.zig +++ b/src/bun.js/webcore/streams.zig @@ -1,9 +1,9 @@ const std = 
@import("std"); const Api = @import("../../api/schema.zig").Api; const bun = @import("root").bun; -const MimeType = @import("../../bun_dev_http_server.zig").MimeType; +const MimeType = HTTPClient.MimeType; const ZigURL = @import("../../url.zig").URL; -const HTTPClient = @import("root").bun.HTTP; +const HTTPClient = bun.http; const NetworkThread = HTTPClient.NetworkThread; const AsyncIO = NetworkThread.AsyncIO; const JSC = @import("root").bun.JSC; diff --git a/src/bun.zig b/src/bun.zig index 74c7997ff94bce..cdee41dc80c2db 100644 --- a/src/bun.zig +++ b/src/bun.zig @@ -664,7 +664,8 @@ pub fn StringEnum(comptime Type: type, comptime Map: anytype, value: []const u8) pub const Bunfig = @import("./bunfig.zig").Bunfig; -pub const HTTPThread = @import("./http_client_async.zig").HTTPThread; +pub const HTTPThread = @import("./http.zig").HTTPThread; +pub const http = @import("./http.zig"); pub const Analytics = @import("./analytics/analytics_thread.zig"); @@ -728,7 +729,6 @@ pub const JSC = @import("root").JavaScriptCore; pub const AsyncIO = @import("async_io"); pub const logger = @import("./logger.zig"); -pub const HTTP = @import("./http_client_async.zig"); pub const ThreadPool = @import("./thread_pool.zig"); pub const picohttp = @import("./deps/picohttp.zig"); pub const uws = @import("./deps/uws.zig"); @@ -2052,3 +2052,7 @@ pub fn exitThread() noreturn { @panic("Unsupported platform"); } } + +pub fn outOfMemory() noreturn { + @panic("Out of memory"); +} diff --git a/src/bun_dev_http_server.zig b/src/bun_dev_http_server.zig deleted file mode 100644 index 1684adbc0eaf95..00000000000000 --- a/src/bun_dev_http_server.zig +++ /dev/null @@ -1,3405 +0,0 @@ -// const c = @import("./c.zig"); -const std = @import("std"); -const bun = @import("root").bun; -const string = bun.string; -const Output = bun.Output; -const Global = bun.Global; -const Environment = bun.Environment; -const strings = bun.strings; -const MutableString = bun.MutableString; -const FeatureFlags = bun.FeatureFlags; 
-const stringZ = bun.stringZ; -const StoredFileDescriptorType = bun.StoredFileDescriptorType; -const default_allocator = bun.default_allocator; -const C = bun.C; -const Api = @import("./api/schema.zig").Api; -const ApiReader = @import("./api/schema.zig").Reader; -const ApiWriter = @import("./api/schema.zig").Writer; -const ByteApiWriter = @import("./api/schema.zig").ByteWriter; -const NewApiWriter = @import("./api/schema.zig").Writer; -const js_ast = bun.JSAst; -const bundler = bun.bundler; -const logger = @import("root").bun.logger; -const Fs = @import("./fs.zig"); -const Options = @import("./options.zig"); -const Fallback = @import("./runtime.zig").Fallback; -const ErrorCSS = @import("./runtime.zig").ErrorCSS; -const ErrorJS = @import("./runtime.zig").ErrorJS; -const Runtime = @import("./runtime.zig").Runtime; -const Css = @import("css_scanner.zig"); -const resolve_path = @import("./resolver/resolve_path.zig"); -const OutputFile = Options.OutputFile; -const DotEnv = @import("./env_loader.zig"); -const mimalloc = @import("./allocators/mimalloc.zig"); -const MacroMap = @import("./resolver/package_json.zig").MacroMap; -const Analytics = @import("./analytics/analytics_thread.zig"); -const Arena = @import("root").bun.ArenaAllocator; -const ThreadlocalArena = @import("./mimalloc_arena.zig").Arena; -const JSON = bun.JSON; -const DateTime = bun.DateTime; -const ThreadPool = @import("root").bun.ThreadPool; -const SourceMap = @import("./sourcemap/sourcemap.zig"); -const ObjectPool = @import("./pool.zig").ObjectPool; -const Lock = @import("./lock.zig").Lock; -const RequestDataPool = ObjectPool([32_000]u8, null, false, 1); -const ResolveWatcher = @import("./resolver/resolver.zig").ResolveWatcher; -const constStrToU8 = bun.constStrToU8; - -pub const MutableStringAPIWriter = NewApiWriter(*MutableString); - -const os = std.os; - -const picohttp = @import("root").bun.picohttp; -const Header = picohttp.Header; -const Request = picohttp.Request; -const Response = 
picohttp.Response; -pub const Headers = picohttp.Headers; -pub const MimeType = @import("./http/mime_type.zig"); -const Bundler = bundler.Bundler; -const Websocket = @import("./http/websocket.zig"); -const JSPrinter = bun.js_printer; -const watcher = @import("./watcher.zig"); -threadlocal var req_headers_buf: [100]picohttp.Header = undefined; -threadlocal var res_headers_buf: [100]picohttp.Header = undefined; -const sync = @import("./sync.zig"); -const JavaScript = @import("root").bun.JSC; -const JavaScriptCore = JavaScriptCore.C; -const Syscall = bun.sys; -const Router = @import("./router.zig"); -pub const Watcher = watcher.NewWatcher(*Server); -const ZigURL = @import("./url.zig").URL; - -const HTTPStatusCode = u10; -const URLPath = @import("./http/url_path.zig"); -const Method = @import("./http/method.zig").Method; - -const SOCKET_FLAGS: u32 = if (Environment.isLinux) - os.SOCK.CLOEXEC | os.MSG.NOSIGNAL -else - os.SOCK.CLOEXEC; - -fn iovec(buf: []const u8) os.iovec_const { - return os.iovec_const{ - .iov_base = buf.ptr, - .iov_len = buf.len, - }; -} - -fn disableSIGPIPESoClosingTheTabDoesntCrash(conn: anytype) void { - if (comptime !Environment.isMac) return; - std.os.setsockopt( - conn.handle, - std.os.SOL.SOCKET, - std.os.SO.NOSIGPIPE, - &std.mem.toBytes(@as(c_int, 1)), - ) catch {}; -} -var http_editor_context: EditorContext = EditorContext{}; - -const PosixRequestContext = struct { - request: Request, - method: Method, - url: URLPath, - conn: std.net.Stream, - allocator: std.mem.Allocator, - arena: ThreadlocalArena, - req_body_node: *RequestDataPool.Node = undefined, - log: logger.Log, - bundler: *Bundler, - keep_alive: bool = true, - status: ?HTTPStatusCode = null, - has_written_last_header: bool = false, - has_called_done: bool = false, - mime_type: MimeType = MimeType.other, - to_plain_text: bool = false, - controlled: bool = false, - watcher: *Watcher, - timer: std.time.Timer, - matched_route: ?Router.Match = null, - origin: ZigURL, - datetime_buf: 
[512]u8 = undefined, - - full_url: [:0]const u8 = "", - res_headers_count: usize = 0, - - /// --disable-bun.js propagates here - pub var fallback_only = false; - - const default_favicon = @embedFile("favicon.png"); - const default_favicon_shasum = "68d5047bec9a8cd56e2e8999d74cad7ba448dce9"; - pub fn sendFavicon(ctx: *RequestContext) !void { - ctx.appendHeader("Content-Type", MimeType.byExtension("png").value); - ctx.appendHeader("ETag", default_favicon_shasum); - ctx.appendHeader("Age", "0"); - ctx.appendHeader("Cache-Control", "public, max-age=3600"); - - if (ctx.header("If-None-Match")) |etag_header| { - if (strings.eqlLong(default_favicon_shasum, etag_header, true)) { - try ctx.sendNotModified(); - return; - } - } - - defer ctx.done(); - - try ctx.writeStatus(200); - try ctx.prepareToSendBody(default_favicon.len, false); - try ctx.writeBodyBuf(default_favicon); - } - - fn parseOrigin(this: *RequestContext) void { - var protocol: ?string = null; - var host: ?string = null; - - // https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Forwarded - if (this.header("Forwarded")) |forwarded| { - if (strings.indexOf(forwarded, "host=")) |host_start| { - const host_i = host_start + "host=".len; - const host_ = forwarded[host_i..][0 .. strings.indexOfChar(forwarded[host_i..], ';') orelse forwarded[host_i..].len]; - if (host_.len > 0) { - host = host_; - } - } - - if (strings.indexOf(forwarded, "proto=")) |protocol_start| { - const protocol_i = protocol_start + "proto=".len; - if (strings.eqlComptime(forwarded[protocol_i..][0 .. 
strings.indexOfChar(forwarded[protocol_i..], ';') orelse forwarded[protocol_i..].len], "https")) { - protocol = "https"; - } else { - protocol = "http"; - } - } - } - - if (protocol == null) { - determine_protocol: { - // Upgrade-Insecure-Requests doesn't work - // Browsers send this header to clients that are not running HTTPS - // We need to use protocol-relative URLs in import statements and in websocket handler, we need to send the absolute URL it received - // That will be our fix - // // https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Upgrade-Insecure-Requests - // if (this.header("Upgrade-Insecure-Requests") != null) { - // protocol = "https"; - // break :determine_protocol; - // } - - // https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/X-Forwarded-Proto - if (this.header("X-Forwarded-Proto")) |proto| { - if (strings.eqlComptime(proto, "https")) { - protocol = "https"; - break :determine_protocol; - } - } - - // Microsoft IIS - if (this.header("Front-End-Https")) |proto| { - if (strings.eqlComptime(proto, "on")) { - protocol = "https"; - break :determine_protocol; - } - } - } - } - - if (host == null) { - determine_host: { - if (this.header("X-Forwarded-Host")) |_host| { - host = _host; - break :determine_host; - } - } - - if (protocol == null) { - if (this.header("Origin")) |origin| { - this.origin = ZigURL.parse(origin); - return; - } - } - } - - if (host != null or protocol != null) { - // Proxies like Caddy might only send X-Forwarded-Proto if the host matches - const display_protocol = protocol orelse @as(string, "http"); - var display_host = host orelse - (if (protocol != null) this.header("Host") else null) orelse - @as(string, this.origin.host); - - var display_port = if (this.origin.port.len > 0) this.origin.port else @as(string, "3000"); - - if (strings.indexOfChar(display_host, ':')) |colon| { - display_port = display_host[colon + 1 .. 
display_host.len]; - display_host = display_host[0..colon]; - } else if (this.bundler.options.origin.port_was_automatically_set and protocol != null) { - if (strings.eqlComptime(display_protocol, "https")) { - display_port = "443"; - } else { - display_port = "80"; - } - } - this.origin = ZigURL.parse(std.fmt.allocPrint(this.allocator, "{s}://{s}:{s}/", .{ display_protocol, display_host, display_port }) catch unreachable); - } - } - - pub fn getFullURL(this: *RequestContext) [:0]const u8 { - if (this.full_url.len == 0) { - if (this.origin.isAbsolute()) { - this.full_url = std.fmt.allocPrintZ(this.allocator, "{s}{s}", .{ this.origin.origin, this.request.path }) catch unreachable; - } else { - this.full_url = this.allocator.dupeZ(u8, this.request.path) catch unreachable; - } - } - - return this.full_url; - } - - pub fn getFullURLForSourceMap(this: *RequestContext) [:0]const u8 { - if (this.full_url.len == 0) { - if (this.origin.isAbsolute()) { - this.full_url = std.fmt.allocPrintZ(this.allocator, "{s}{s}.map", .{ this.origin.origin, this.request.path }) catch unreachable; - } else { - this.full_url = std.fmt.allocPrintZ(this.allocator, "{s}.map", .{this.request.path}) catch unreachable; - } - } - - return this.full_url; - } - - pub fn handleRedirect(this: *RequestContext, url: string) !void { - this.appendHeader("Location", url); - defer this.done(); - try this.writeStatus(302); - try this.flushHeaders(); - } - - pub fn header(ctx: *RequestContext, comptime name: anytype) ?[]const u8 { - return (ctx.headerEntry(name) orelse return null).value; - } - - pub fn headerEntry(ctx: *RequestContext, comptime name: anytype) ?Header { - for (ctx.request.headers) |head| { - if (strings.eqlCaseInsensitiveASCII(head.name, name, true)) { - return head; - } - } - - return null; - } - - pub fn headerEntryFirst(ctx: *RequestContext, comptime name: []const string) ?Header { - for (ctx.request.headers) |head| { - inline for (name) |match| { - if 
(strings.eqlCaseInsensitiveASCII(head.name, match, true)) { - return head; - } - } - } - - return null; - } - - pub fn renderFallback( - this: *RequestContext, - allocator: std.mem.Allocator, - bundler_: *Bundler, - step: Api.FallbackStep, - log: *logger.Log, - err: anyerror, - exceptions: []Api.JsException, - comptime fmt: string, - args: anytype, - ) !void { - var route_index: i32 = -1; - const routes: Api.StringMap = if (bundler_.router != null) brk: { - const router = &bundler_.router.?; - break :brk Api.StringMap{ - .keys = router.getNames() catch unreachable, - .values = router.getPublicPaths() catch unreachable, - }; - } else std.mem.zeroes(Api.StringMap); - var preload: string = ""; - - var params: Api.StringMap = std.mem.zeroes(Api.StringMap); - if (fallback_entry_point_created == false) { - defer fallback_entry_point_created = true; - defer bundler_.resetStore(); - - // You'd think: hey we're just importing a file - // Do we really need to run it through the transpiler and linking and printing? - // The answer, however, is yes. - // What if you're importing a fallback that's in node_modules? 
- try fallback_entry_point.generate(bundler_.options.framework.?.fallback.path, Bundler, bundler_); - - const bundler_parse_options = Bundler.ParseOptions{ - .allocator = default_allocator, - .path = fallback_entry_point.source.path, - .loader = .js, - .macro_remappings = .{}, - .dirname_fd = 0, - .jsx = bundler_.options.jsx, - }; - - var tmp = bundler_.parse( - bundler_parse_options, - @as(?*bundler.FallbackEntryPoint, &fallback_entry_point), - ); - if (tmp) |*result| { - try bundler_.linker.link( - fallback_entry_point.source.path, - result, - this.origin, - .absolute_url, - false, - false, - ); - - var buffer_writer = try JSPrinter.BufferWriter.init(default_allocator); - var writer = JSPrinter.BufferPrinter.init(buffer_writer); - _ = try bundler_.print( - result.*, - @TypeOf(&writer), - &writer, - .esm, - ); - var slice = writer.ctx.buffer.toOwnedSliceLeaky(); - - fallback_entry_point.built_code = try default_allocator.dupe(u8, slice); - - writer.ctx.buffer.deinit(); - } - } - - this.appendHeader("Content-Type", MimeType.html.value); - - var link_stack_buf: [2048]u8 = undefined; - - var remaining: []u8 = link_stack_buf[0..]; - - if (this.bundler.options.node_modules_bundle_url.len > 0) { - add_preload: { - const node_modules_preload_header_value = std.fmt.bufPrint(remaining, "<{s}>; rel=modulepreload", .{ - this.bundler.options.node_modules_bundle_url, - }) catch break :add_preload; - - this.appendHeader("Link", node_modules_preload_header_value); - remaining = remaining[node_modules_preload_header_value.len..]; - } - } - - if (this.matched_route) |match| { - if (match.params.len > 0) { - params.keys = match.params.items(.name); - params.values = match.params.items(.value); - } - - if (this.bundler.router.?.routeIndexByHash(match.hash)) |ind| { - route_index = @as(i32, @intCast(ind)); - } - - module_preload: { - if (strings.hasPrefix(match.file_path, Fs.FileSystem.instance.top_level_dir)) { - var stream = std.io.fixedBufferStream(remaining); - var writer = 
stream.writer(); - writer.writeAll("<") catch break :module_preload; - writer.writeAll(std.mem.trimRight(u8, this.bundler.options.origin.href, "/")) catch break :module_preload; - writer.writeAll("/") catch break :module_preload; - - if (this.bundler.options.routes.asset_prefix_path.len > 0) { - writer.writeAll(std.mem.trim(u8, this.bundler.options.routes.asset_prefix_path, "/")) catch break :module_preload; - } - - // include that trailing slash - // this should never overflow because the directory will be "/" if it's a root - if (comptime Environment.isDebug) std.debug.assert(Fs.FileSystem.instance.top_level_dir.len > 0); - - writer.writeAll(match.file_path[Fs.FileSystem.instance.top_level_dir.len - 1 ..]) catch break :module_preload; - - writer.writeAll(">; rel=modulepreload") catch break :module_preload; - - this.appendHeader( - "Link", - remaining[0..stream.pos], - ); - remaining = remaining[stream.pos..]; - } - } - } - - var fallback_container = try allocator.create(Api.FallbackMessageContainer); - defer allocator.destroy(fallback_container); - fallback_container.* = Api.FallbackMessageContainer{ - .message = try std.fmt.allocPrint(allocator, fmt, args), - .router = if (routes.keys.len > 0) - Api.Router{ .route = route_index, .params = params, .routes = routes } - else - null, - .reason = step, - .cwd = this.bundler.fs.top_level_dir, - .problems = Api.Problems{ - .code = @as(u16, @truncate(@intFromError(err))), - .name = @errorName(err), - .exceptions = exceptions, - .build = try log.toAPI(allocator), - }, - }; - defer allocator.free(fallback_container.message.?); - - defer this.done(); - - if (RequestContext.fallback_only) { - try this.writeStatus(200); - } else { - try this.writeStatus(500); - } - - if (comptime fmt.len > 0) Output.prettyErrorln(fmt, args); - Output.flush(); - - var bb = std.ArrayList(u8).init(allocator); - defer bb.deinit(); - var bb_writer = bb.writer(); - - try Fallback.render( - allocator, - fallback_container, - preload, - 
fallback_entry_point.built_code, - @TypeOf(bb_writer), - bb_writer, - ); - try this.prepareToSendBody(bb.items.len, false); - try this.writeBodyBuf(bb.items); - } - - fn matchPublicFolder(this: *RequestContext, comptime extensionless: bool) ?bundler.ServeResult { - if (!this.bundler.options.routes.static_dir_enabled) return null; - const relative_path = this.url.path; - var extension = this.url.extname; - var tmp_buildfile_buf = Bundler.tmp_buildfile_buf[0..]; - - // On Windows, we don't keep the directory handle open forever because Windows doesn't like that. - const public_dir: std.fs.Dir = this.bundler.options.routes.static_dir_handle orelse std.fs.openDirAbsolute(this.bundler.options.routes.static_dir, .{}) catch |err| { - this.bundler.log.addErrorFmt(null, logger.Loc.Empty, this.allocator, "Opening public directory failed: {s}", .{@errorName(err)}) catch unreachable; - Output.printErrorln("Opening public directory failed: {s}", .{@errorName(err)}); - this.bundler.options.routes.static_dir_enabled = false; - return null; - }; - - var relative_unrooted_path: []u8 = resolve_path.normalizeString(relative_path, false, .auto); - - var _file: ?std.fs.File = null; - - // Is it the index file? - if (relative_unrooted_path.len == 0) { - // bun.copy(u8, &tmp_buildfile_buf, relative_unrooted_path); - // bun.copy(u8, tmp_buildfile_buf[relative_unrooted_path.len..], "/" - // Search for /index.html - if (this.bundler.options.routes.single_page_app_routing and - this.bundler.options.routes.single_page_app_fd != 0) - { - this.sendSinglePageHTML() catch {}; - return null; - } else if (public_dir.openFile("index.html", .{})) |file| { - var index_path = "index.html".*; - relative_unrooted_path = &(index_path); - _file = file; - extension = "html"; - } else |_| {} - - // Okay is it actually a full path? 
- } else if (extension.len > 0 and (!extensionless or strings.eqlComptime(extension, "html"))) { - if (public_dir.openFile(relative_unrooted_path, .{})) |file| { - _file = file; - } else |_| {} - } - - // Try some weird stuff. - while (_file == null and relative_unrooted_path.len > 1) { - // When no extension is provided, it might be html - if (extension.len == 0) { - bun.copy(u8, tmp_buildfile_buf, relative_unrooted_path[0..relative_unrooted_path.len]); - bun.copy(u8, tmp_buildfile_buf[relative_unrooted_path.len..], ".html"); - - if (public_dir.openFile(tmp_buildfile_buf[0 .. relative_unrooted_path.len + ".html".len], .{})) |file| { - _file = file; - extension = "html"; - break; - } else |_| {} - - var _path: []u8 = undefined; - if (relative_unrooted_path[relative_unrooted_path.len - 1] == '/') { - bun.copy(u8, tmp_buildfile_buf, relative_unrooted_path[0 .. relative_unrooted_path.len - 1]); - bun.copy(u8, tmp_buildfile_buf[relative_unrooted_path.len - 1 ..], "/index.html"); - _path = tmp_buildfile_buf[0 .. relative_unrooted_path.len - 1 + "/index.html".len]; - } else { - bun.copy(u8, tmp_buildfile_buf, relative_unrooted_path[0..relative_unrooted_path.len]); - bun.copy(u8, tmp_buildfile_buf[relative_unrooted_path.len..], "/index.html"); - - _path = tmp_buildfile_buf[0 .. 
relative_unrooted_path.len + "/index.html".len]; - } - - if (extensionless and !strings.eqlComptime(std.fs.path.extension(_path), ".html")) { - break; - } - - if (public_dir.openFile(_path, .{})) |file| { - const __path = _path; - relative_unrooted_path = __path; - extension = "html"; - _file = file; - break; - } else |_| {} - } - - break; - } - - if (_file) |*file| { - var stat = file.stat() catch return null; - var absolute_path = resolve_path.joinAbs(this.bundler.options.routes.static_dir, .auto, relative_unrooted_path); - - if (stat.kind == .sym_link) { - file.* = std.fs.openFileAbsolute(absolute_path, .{ .mode = .read_only }) catch return null; - - absolute_path = bun.getFdPath( - file.handle, - &Bundler.tmp_buildfile_buf, - ) catch return null; - - stat = file.stat() catch return null; - } - - if (stat.kind != .file) { - file.close(); - return null; - } - - var output_file = OutputFile.initFile(file.*, absolute_path, stat.size); - output_file.value.copy.close_handle_on_complete = true; - output_file.value.copy.autowatch = false; - - // if it wasn't a symlink, we never got the absolute path - // so it could still be missing a file extension - var ext = std.fs.path.extension(absolute_path); - if (ext.len > 0) ext = ext[1..]; - - // even if it was an absolute path, the file extension could just be a dot, like "foo." 
- if (ext.len == 0) ext = extension; - - return bundler.ServeResult{ - .file = output_file, - .mime_type = MimeType.byExtension(ext), - }; - } - - return null; - } - - pub fn printStatusLine(comptime code: HTTPStatusCode) []const u8 { - const status_text = switch (code) { - 101 => "ACTIVATING WEBSOCKET", - 200 => "YAY", - 201 => "NEW", - 204 => "VERY CONTENT", - 206 => "MUCH CONTENT", - 304 => "NOT MODIFIED", - 300...303, 305...399 => "REDIRECT", - 404 => "Not Found", - 403 => "Not Allowed!", - 401 => "Login", - 402 => "Pay Me", - 400, 405...499 => "bad request :(", - 500...599 => "ERR", - else => @compileError("Invalid code passed to printStatusLine"), - }; - - return std.fmt.comptimePrint("HTTP/1.1 {d} {s}\r\n", .{ code, status_text }); - } - - pub fn printStatusLineError(err: anyerror, buf: []u8) []const u8 { - return std.fmt.bufPrint(buf, "HTTP/1.1 500 {s}\r\n", .{@errorName(err)}) catch unreachable; - } - - pub fn prepareToSendBody( - ctx: *RequestContext, - length: usize, - comptime chunked: bool, - ) !void { - var content_length_header_buf: [64]u8 = undefined; - defer { - if (Environment.allow_assert) { - std.debug.assert(!ctx.has_written_last_header); - ctx.has_written_last_header = true; - } - } - - if (chunked) { - ctx.appendHeader("Transfer-Encoding", "Chunked"); - } else { - ctx.appendHeader("Content-Length", content_length_header_buf[0..std.fmt.formatIntBuf(&content_length_header_buf, length, 10, .upper, .{})]); - } - - try ctx.flushHeaders(); - } - - pub fn clearHeaders( - this: *RequestContext, - ) !void { - this.res_headers_count = 0; - } - - pub fn appendHeaderSlow(this: *RequestContext, name: string, value: string) !void { - res_headers_buf[this.res_headers_count] = picohttp.Header{ .name = name, .value = value }; - this.res_headers_count += 1; - } - - threadlocal var resp_header_out_buf: [4096]u8 = undefined; - pub fn flushHeaders(ctx: *RequestContext) !void { - if (ctx.res_headers_count == 0) return; - - const headers: []picohttp.Header = 
res_headers_buf[0..ctx.res_headers_count]; - defer ctx.res_headers_count = 0; - var writer = std.io.fixedBufferStream(&resp_header_out_buf); - for (headers) |head| { - _ = writer.write(head.name) catch 0; - _ = writer.write(": ") catch 0; - _ = writer.write(head.value) catch 0; - _ = writer.write("\r\n") catch 0; - } - - _ = writer.write("\r\n") catch 0; - - _ = try ctx.writeSocket(writer.getWritten(), SOCKET_FLAGS); - } - - const AsyncIO = @import("root").bun.AsyncIO; - pub fn writeSocket(ctx: *RequestContext, buf_: anytype, _: anytype) !usize { - var total: usize = 0; - var buf: []const u8 = buf_; - while (buf.len > 0) { - switch (Syscall.send(ctx.conn.handle, buf, SOCKET_FLAGS)) { - .err => |err| { - const erro = AsyncIO.asError(err.getErrno()); - if (erro == error.EBADF or erro == error.ECONNABORTED or erro == error.ECONNREFUSED) { - return error.SocketClosed; - } - const msg = err.toSystemError().message.toUTF8(bun.default_allocator); - defer msg.deinit(); - Output.prettyErrorln("send() error: {s}", .{msg.slice()}); - - return erro; - }, - .result => |written| { - if (written == 0) { - return error.SocketClosed; - } - - buf = buf[written..]; - total += written; - }, - } - } - - return total; - } - - pub fn writeBodyBuf(ctx: *RequestContext, body: []const u8) !void { - _ = try ctx.writeSocket(body, SOCKET_FLAGS); - } - - pub fn writeStatus(ctx: *RequestContext, comptime code: HTTPStatusCode) !void { - _ = try ctx.writeSocket(comptime printStatusLine(code), SOCKET_FLAGS); - ctx.status = code; - } - - pub fn writeStatusError(ctx: *RequestContext, err: anyerror) !void { - var status_line_error_buf: [1024]u8 = undefined; - _ = try ctx.writeSocket(printStatusLineError(err, &status_line_error_buf), SOCKET_FLAGS); - ctx.status = @as(HTTPStatusCode, 500); - } - - threadlocal var status_buf: [std.fmt.count("HTTP/1.1 {d} {s}\r\n", .{ 200, "OK" })]u8 = undefined; - pub fn writeStatusSlow(ctx: *RequestContext, code: u16) !void { - _ = try ctx.writeSocket( - try 
std.fmt.bufPrint( - &status_buf, - "HTTP/1.1 {d} {s}\r\n", - .{ code, if (code > 299) "HM" else "OK" }, - ), - SOCKET_FLAGS, - ); - - ctx.status = @as(HTTPStatusCode, @truncate(code)); - } - - pub fn init( - this: *RequestContext, - req: Request, - arena: ThreadlocalArena, - conn: std.net.Stream, - bundler_: *Bundler, - watcher_: *Watcher, - timer: std.time.Timer, - ) !void { - this.* = RequestContext{ - .request = req, - .arena = arena, - .bundler = bundler_, - .log = undefined, - .url = try URLPath.parse(req.path), - .conn = conn, - .allocator = arena.allocator(), - .method = Method.which(req.method) orelse return error.InvalidMethod, - .watcher = watcher_, - .timer = timer, - .origin = bundler_.options.origin, - }; - } - - // not all browsers send this - pub const BrowserNavigation = enum { - yes, - no, - maybe, - }; - - pub inline fn isBrowserNavigation(req: *RequestContext) BrowserNavigation { - if (req.header("Sec-Fetch-Mode")) |mode| { - return switch (strings.eqlComptime(mode, "navigate")) { - true => BrowserNavigation.yes, - false => BrowserNavigation.no, - }; - } - - return .maybe; - } - - pub fn sendNotFound(req: *RequestContext) !void { - std.debug.assert(!req.has_called_done); - - defer req.done(); - try req.writeStatus(404); - try req.flushHeaders(); - } - - pub fn sendInternalError(ctx: *RequestContext, err: anytype) !void { - defer ctx.done(); - try ctx.writeStatusError(err); - const printed = std.fmt.bufPrint(&error_buf, "error: {s}\nPlease see your terminal for more details", .{@errorName(err)}) catch |err2| brk: { - if (Environment.isDebug or Environment.isTest) { - Global.panic("error while printing error: {s}", .{@errorName(err2)}); - } - - break :brk "Internal error"; - }; - - try ctx.prepareToSendBody(printed.len, false); - try ctx.writeBodyBuf(printed); - } - - threadlocal var error_buf: [4096]u8 = undefined; - - pub fn sendNotModified(ctx: *RequestContext) !void { - defer ctx.done(); - try ctx.writeStatus(304); - try ctx.flushHeaders(); - } 
- - pub fn sendNoContent(ctx: *RequestContext) !void { - defer ctx.done(); - try ctx.writeStatus(204); - try ctx.flushHeaders(); - } - - pub fn appendHeader(ctx: *RequestContext, comptime key: string, value: string) void { - if (comptime Environment.allow_assert) std.debug.assert(!ctx.has_written_last_header); - if (comptime Environment.allow_assert) std.debug.assert(ctx.res_headers_count < res_headers_buf.len); - res_headers_buf[ctx.res_headers_count] = Header{ .name = key, .value = value }; - ctx.res_headers_count += 1; - } - const file_chunk_size = 16384; - const chunk_preamble_len: usize = brk: { - var buf: [64]u8 = undefined; - break :brk std.fmt.bufPrintIntToSlice(&buf, file_chunk_size, 16, true, .{}).len; - }; - - threadlocal var file_chunk_buf: [chunk_preamble_len + 2]u8 = undefined; - threadlocal var symlink_buffer: [bun.MAX_PATH_BYTES]u8 = undefined; - threadlocal var weak_etag_buffer: [100]u8 = undefined; - threadlocal var strong_etag_buffer: [100]u8 = undefined; - threadlocal var weak_etag_tmp_buffer: [100]u8 = undefined; - - pub fn done(ctx: *RequestContext) void { - std.debug.assert(!ctx.has_called_done); - std.os.closeSocket(ctx.conn.handle); - ctx.has_called_done = true; - } - - pub fn sendBadRequest(ctx: *RequestContext) !void { - try ctx.writeStatus(400); - ctx.done(); - } - - pub fn sendSinglePageHTML(ctx: *RequestContext) !void { - std.debug.assert(ctx.bundler.options.routes.single_page_app_fd > 0); - const file = std.fs.File{ .handle = ctx.bundler.options.routes.single_page_app_fd }; - return try sendHTMLFile(ctx, file); - } - - pub fn sendHTMLFile(ctx: *RequestContext, file: std.fs.File) !void { - ctx.appendHeader("Content-Type", MimeType.html.value); - ctx.appendHeader("Cache-Control", "no-store, no-cache, must-revalidate, max-age=0"); - - defer ctx.done(); - - const stats = file.stat() catch |err| { - Output.prettyErrorln("Error {s} reading index.html", .{@errorName(err)}); - ctx.writeStatus(500) catch {}; - return; - }; - - const 
content_length = stats.size; - try ctx.writeStatus(200); - try ctx.prepareToSendBody(content_length, false); - - var remain = content_length; - while (remain > 0) { - const wrote = try std.os.sendfile( - ctx.conn.handle, - ctx.bundler.options.routes.single_page_app_fd, - content_length - remain, - remain, - &[_]std.os.iovec_const{}, - &[_]std.os.iovec_const{}, - 0, - ); - if (wrote == 0) { - break; - } - remain -|= wrote; - } - } - - pub const WatchBuilder = struct { - watcher: *Watcher, - bundler: *Bundler, - allocator: std.mem.Allocator, - printer: JSPrinter.BufferPrinter, - timer: std.time.Timer, - count: usize = 0, - origin: ZigURL, - pub const WatchBuildResult = struct { - value: Value, - id: u32, - timestamp: u32, - log: logger.Log, - bytes: []const u8 = "", - approximate_newline_count: usize = 0, - pub const Value = union(Tag) { - success: Api.WebsocketMessageBuildSuccess, - fail: Api.WebsocketMessageBuildFailure, - }; - pub const Tag = enum { - success, - fail, - }; - }; - pub fn build(this: *WatchBuilder, id: u32, from_timestamp: u32, allocator: std.mem.Allocator) !WatchBuildResult { - defer this.count += 1; - this.printer.ctx.reset(); - var log = logger.Log.init(allocator); - - var watchlist_slice = this.watcher.watchlist.slice(); - - const index = std.mem.indexOfScalar(u32, watchlist_slice.items(.hash), id) orelse return error.MissingWatchID; - - const file_path_str = watchlist_slice.items(.file_path)[index]; - const fd = watchlist_slice.items(.fd)[index]; - const loader = watchlist_slice.items(.loader)[index]; - const macro_remappings = this.bundler.options.macro_remap; - const path = Fs.Path.init(file_path_str); - var old_log = this.bundler.log; - this.bundler.setLog(&log); - - defer { - this.bundler.setLog(old_log); - } - - switch (loader) { - .toml, .json, .ts, .tsx, .js, .jsx => { - // Since we already have: - // - The file descriptor - // - The path - // - The loader - // We can skip resolving. 
We will need special handling for renaming where basically we: - // - Update the watch item. - // - Clear directory cache - this.bundler.resetStore(); - - var parse_result = this.bundler.parse( - Bundler.ParseOptions{ - .allocator = allocator, - .path = path, - .loader = loader, - .dirname_fd = 0, - .file_descriptor = fd, - .file_hash = id, - .macro_remappings = macro_remappings, - // TODO: make this work correctly when multiple tsconfigs define different JSX pragmas - .jsx = this.bundler.options.jsx, - }, - null, - ) orelse { - return WatchBuildResult{ - .value = .{ - .fail = .{ - .id = id, - .from_timestamp = from_timestamp, - .loader = loader.toAPI(), - .module_path = this.bundler.fs.relativeTo(file_path_str), - .log = try log.toAPI(allocator), - }, - }, - .id = id, - .log = log, - .timestamp = WebsocketHandler.toTimestamp(Server.global_start_time.read()), - }; - }; - - this.printer.ctx.reset(); - { - var old_allocator = this.bundler.linker.allocator; - this.bundler.linker.allocator = allocator; - defer this.bundler.linker.allocator = old_allocator; - this.bundler.linker.link( - Fs.Path.init(file_path_str), - &parse_result, - this.origin, - .absolute_url, - false, - false, - ) catch return WatchBuildResult{ - .value = .{ - .fail = .{ - .id = id, - .from_timestamp = from_timestamp, - .loader = loader.toAPI(), - .module_path = this.bundler.fs.relativeTo(file_path_str), - .log = try log.toAPI(allocator), - }, - }, - - .id = id, - .timestamp = WebsocketHandler.toTimestamp(Server.global_start_time.read()), - .log = log, - }; - } - - var written = this.bundler.print(parse_result, @TypeOf(&this.printer), &this.printer, .esm) catch - return WatchBuildResult{ - .value = .{ - .fail = .{ - .id = id, - .from_timestamp = from_timestamp, - .loader = loader.toAPI(), - .module_path = this.bundler.fs.relativeTo(file_path_str), - .log = try log.toAPI(allocator), - }, - }, - .id = id, - .timestamp = WebsocketHandler.toTimestamp(Server.global_start_time.read()), - .log = log, - }; 
- - return WatchBuildResult{ - .value = .{ - .success = .{ - .id = id, - .from_timestamp = from_timestamp, - .loader = parse_result.loader.toAPI(), - .module_path = this.bundler.fs.relativeTo(file_path_str), - .blob_length = @as(u32, @truncate(written)), - }, - }, - .id = id, - .bytes = this.printer.ctx.written, - .approximate_newline_count = parse_result.ast.approximate_newline_count, - .timestamp = WebsocketHandler.toTimestamp(Server.global_start_time.read()), - .log = log, - }; - }, - .css => { - const CSSBundlerHMR = Css.NewBundler( - @TypeOf(&this.printer), - @TypeOf(&this.bundler.linker), - @TypeOf(&this.bundler.resolver.caches.fs), - Watcher, - @TypeOf(this.bundler.fs), - true, - .absolute_url, - ); - - const CSSBundler = Css.NewBundler( - @TypeOf(&this.printer), - @TypeOf(&this.bundler.linker), - @TypeOf(&this.bundler.resolver.caches.fs), - Watcher, - @TypeOf(this.bundler.fs), - false, - .absolute_url, - ); - - this.printer.ctx.reset(); - - const count = brk: { - if (this.bundler.options.hot_module_reloading) { - break :brk CSSBundlerHMR.bundle( - file_path_str, - this.bundler.fs, - &this.printer, - this.watcher, - &this.bundler.resolver.caches.fs, - this.watcher.watchlist.items(.hash)[index], - fd, - this.allocator, - &log, - &this.bundler.linker, - this.origin, - ); - } else { - break :brk CSSBundler.bundle( - file_path_str, - this.bundler.fs, - &this.printer, - this.watcher, - &this.bundler.resolver.caches.fs, - this.watcher.watchlist.items(.hash)[index], - fd, - this.allocator, - &log, - &this.bundler.linker, - this.origin, - ); - } - } catch { - return WatchBuildResult{ - .value = .{ - .fail = .{ - .id = id, - .from_timestamp = from_timestamp, - .loader = loader.toAPI(), - .module_path = this.bundler.fs.relativeTo(file_path_str), - .log = try log.toAPI(allocator), - }, - }, - .id = id, - .timestamp = WebsocketHandler.toTimestamp(Server.global_start_time.read()), - .log = log, - }; - }; - - return WatchBuildResult{ - .value = .{ - .success = .{ - .id = 
id, - .from_timestamp = from_timestamp, - .loader = .css, - .module_path = this.bundler.fs.relativeTo(file_path_str), - .blob_length = @as(u32, @truncate(count.written)), - // .log = std.mem.zeroes(Api.Log), - }, - }, - .id = id, - .bytes = this.printer.ctx.written, - .approximate_newline_count = count.approximate_newline_count, - // .approximate_newline_count = parse_result.ast.approximate_newline_count, - .timestamp = WebsocketHandler.toTimestamp(Server.global_start_time.read()), - .log = log, - }; - }, - else => { - return WatchBuildResult{ - .value = .{ .fail = std.mem.zeroes(Api.WebsocketMessageBuildFailure) }, - .id = id, - .timestamp = WebsocketHandler.toTimestamp(Server.global_start_time.read()), - .log = log, - }; - }, - } - } - }; - - pub const WebsocketHandler = struct { - accept_key: [28]u8 = undefined, - ctx: RequestContext, - websocket: Websocket.Websocket, - conn: std.net.Stream, - tombstone: bool = false, - builder: WatchBuilder, - message_buffer: MutableString, - bundler: Bundler, - task: ThreadPool.Task, - pub var open_websockets: std.ArrayList(*WebsocketHandler) = undefined; - var open_websockets_lock = sync.RwLock.init(); - pub fn addWebsocket(ctx: *RequestContext, server: *Server) !*WebsocketHandler { - open_websockets_lock.lock(); - defer open_websockets_lock.unlock(); - - var clone = try server.allocator.create(WebsocketHandler); - clone.ctx = ctx.*; - clone.conn = ctx.conn; - try ctx.bundler.clone(server.allocator, &clone.bundler); - ctx.bundler = &clone.bundler; - - clone.task = .{ .callback = &onTask }; - clone.message_buffer = try MutableString.init(server.allocator, 0); - clone.ctx.conn = clone.conn; - clone.ctx.log = logger.Log.init(server.allocator); - clone.ctx.origin = ZigURL.parse(server.allocator.dupe(u8, ctx.origin.href) catch unreachable); - - clone.builder = WatchBuilder{ - .allocator = server.allocator, - .bundler = ctx.bundler, - .printer = undefined, - .timer = ctx.timer, - .watcher = ctx.watcher, - .origin = 
clone.ctx.origin, - }; - - clone.websocket = Websocket.Websocket.create(clone.conn.handle, SOCKET_FLAGS); - clone.tombstone = false; - - ctx.allocator = undefined; - ctx.arena.deinit(); - - try open_websockets.append(clone); - return clone; - } - pub var to_close_buf: [100]*WebsocketHandler = undefined; - pub var to_close: []*WebsocketHandler = &[_]*WebsocketHandler{}; - - pub fn generateTimestamp(handler: *WebsocketHandler) u32 { - return @as(u32, @truncate(handler.ctx.timer.read() / std.time.ns_per_ms)); - } - - pub fn toTimestamp(timestamp: u64) u32 { - return @as(u32, @truncate(timestamp / std.time.ns_per_ms)); - } - - pub fn broadcast(message: []const u8) !void { - { - open_websockets_lock.lockShared(); - defer open_websockets_lock.unlockShared(); - var markForClosing = false; - for (open_websockets.items) |item| { - var socket: *WebsocketHandler = item; - if (socket.tombstone) { - continue; - } - - const written = socket.websocket.writeBinary(message) catch |err| brk: { - Output.prettyError("WebSocket error: {d}", .{@errorName(err)}); - markForClosing = true; - break :brk 0; - }; - - if (socket.tombstone or written < message.len) { - markForClosing = true; - } - - if (markForClosing) { - to_close_buf[to_close.len] = item; - to_close = to_close_buf[0 .. 
to_close.len + 1]; - } - } - } - - if (to_close.len > 0) { - open_websockets_lock.lock(); - defer open_websockets_lock.unlock(); - for (to_close) |item| { - WebsocketHandler.removeBulkWebsocket(item); - } - to_close = &[_]*WebsocketHandler{}; - } - } - - pub fn removeWebsocket(socket: *WebsocketHandler) void { - open_websockets_lock.lock(); - defer open_websockets_lock.unlock(); - removeBulkWebsocket(socket); - } - - pub fn removeBulkWebsocket(socket: *WebsocketHandler) void { - if (std.mem.indexOfScalar(*WebsocketHandler, open_websockets.items, socket)) |id| { - socket.tombstone = true; - _ = open_websockets.swapRemove(id); - } - } - - pub fn onSpawnThread(_: ?*anyopaque) ?*anyopaque { - Global.setThreadName("HMR"); - Output.Source.configureThread(); - js_ast.Stmt.Data.Store.create(default_allocator); - js_ast.Expr.Data.Store.create(default_allocator); - websocket_printer = JSPrinter.BufferWriter.init(default_allocator) catch unreachable; - - return null; - } - - pub fn onTask(self: *ThreadPool.Task) void { - handle(@fieldParentPtr(WebsocketHandler, "task", self)); - } - const CacheSet = @import("./cache.zig").Set; - threadlocal var websocket_printer: JSPrinter.BufferWriter = undefined; - pub fn handle(self: *WebsocketHandler) void { - var req_body = self.ctx.req_body_node; - defer { - js_ast.Stmt.Data.Store.reset(); - js_ast.Expr.Data.Store.reset(); - Server.current.releaseRequestDataPoolNode(req_body); - } - - self.builder.printer = JSPrinter.BufferPrinter.init( - websocket_printer, - ); - - self.ctx.arena = ThreadlocalArena.init() catch unreachable; - self.ctx.allocator = self.ctx.arena.allocator(); - self.builder.bundler.resolver.caches = CacheSet.init(self.ctx.allocator); - self.builder.bundler.resolver.caches.fs.stream = true; - - _handle(self, &self.ctx) catch {}; - } - - fn _handle(handler: *WebsocketHandler, ctx: *RequestContext) !void { - var is_socket_closed = false; - const fd = ctx.conn.handle; - defer { - websocket_printer = 
handler.builder.printer.ctx; - handler.tombstone = true; - removeWebsocket(handler); - - ctx.arena.deinit(); - if (!is_socket_closed) { - _ = Syscall.close(fd); - } - bun.default_allocator.destroy(handler); - Output.flush(); - } - - handler.checkUpgradeHeaders() catch |err| { - switch (err) { - error.BadRequest => { - defer is_socket_closed = true; - - try ctx.sendBadRequest(); - }, - } - }; - - // switch (try handler.getWebsocketVersion()) { - // 7, 8, 13 => {}, - // else => { - // // Unsupported version - // // Set header to indicate to the client which versions are supported - // ctx.appendHeader("Sec-WebSocket-Version", "7,8,13"); - // try ctx.writeStatus(426); - // try ctx.flushHeaders(); - // ctx.done(); - // is_socket_closed = true; - // return; - // }, - // } - - const key = try handler.getWebsocketAcceptKey(); - - ctx.appendHeader("Connection", "Upgrade"); - ctx.appendHeader("Upgrade", "websocket"); - ctx.appendHeader("Sec-WebSocket-Accept", key); - ctx.appendHeader("Sec-WebSocket-Protocol", "bun-hmr"); - ctx.writeStatus(101) catch |err| { - if (err == error.SocketClosed) { - is_socket_closed = true; - } - - return; - }; - ctx.flushHeaders() catch |err| { - if (err == error.SocketClosed) { - is_socket_closed = true; - } - - return; - }; - // Output.prettyErrorln("101 Hot Module Reloading connected.", .{}); - // Output.flush(); - Analytics.Features.hot_module_reloading = true; - var build_file_path_buf: [bun.MAX_PATH_BYTES]u8 = undefined; - - var cmd: Api.WebsocketCommand = undefined; - var msg: Api.WebsocketMessage = .{ - .timestamp = handler.generateTimestamp(), - .kind = .welcome, - }; - var cmd_reader: ApiReader = undefined; - { - var byte_buf: [32 + bun.MAX_PATH_BYTES]u8 = undefined; - var fbs = std.io.fixedBufferStream(&byte_buf); - var writer = ByteApiWriter.init(&fbs); - - try msg.encode(&writer); - var reloader = Api.Reloader.disable; - if (ctx.bundler.options.hot_module_reloading) { - reloader = Api.Reloader.live; - if 
(ctx.bundler.options.jsx.supports_fast_refresh and ctx.bundler.env.get("BUN_FORCE_HMR") != null) { - reloader = Api.Reloader.fast_refresh; - } - } - - const welcome_message = Api.WebsocketMessageWelcome{ - .asset_prefix = handler.ctx.bundler.options.routes.asset_prefix_path, - .epoch = WebsocketHandler.toTimestamp( - @as(u64, @intCast((handler.ctx.timer.started.timestamp.tv_sec * std.time.ns_per_s))) + @as(u64, @intCast(handler.ctx.timer.started.timestamp.tv_nsec)), - ), - .javascript_reloader = reloader, - .cwd = handler.ctx.bundler.fs.top_level_dir, - }; - try welcome_message.encode(&writer); - if ((try handler.websocket.writeBinary(fbs.getWritten())) == 0) { - handler.tombstone = true; - is_socket_closed = true; - Output.prettyErrorln("ERR: Websocket failed to write.", .{}); - } - } - - while (!handler.tombstone) { - Output.flush(); - - defer Output.flush(); - std.os.getsockoptError(handler.conn.handle) catch |err| { - handler.tombstone = true; - Output.prettyErrorln("Websocket ERR: {s}", .{@errorName(err)}); - is_socket_closed = true; - }; - - var frame = handler.websocket.read() catch |err| { - switch (err) { - error.ConnectionClosed => { - // Output.prettyErrorln("Websocket closed.", .{}); - handler.tombstone = true; - is_socket_closed = true; - continue; - }, - else => { - Output.prettyErrorln("Websocket ERR: {s}", .{@errorName(err)}); - }, - } - return; - }; - switch (frame.header.opcode) { - .Close => { - // Output.prettyErrorln("Websocket closed.", .{}); - is_socket_closed = true; - return; - }, - .Text => { - _ = try handler.websocket.writeText(frame.data); - }, - .Binary => { - var cnst_frame = constStrToU8(frame.data); - cmd_reader = ApiReader.init(cnst_frame, ctx.allocator); - cmd = try Api.WebsocketCommand.decode(&cmd_reader); - switch (cmd.kind) { - .build, .build_with_file_path => { - const request_id = if (cmd.kind == .build) - (try Api.WebsocketCommandBuild.decode(&cmd_reader)).id - else brk: { - const full_build = try 
Api.WebsocketCommandBuildWithFilePath.decode(&cmd_reader); - if (ctx.watcher.indexOf(full_build.id) != null) break :brk full_build.id; - const file_path = if (std.fs.path.isAbsolute(full_build.file_path)) - full_build.file_path - else - ctx.bundler.fs.absBuf( - &[_]string{ ctx.bundler.fs.top_level_dir, full_build.file_path }, - &build_file_path_buf, - ); - - if (Watcher.getHash(file_path) != full_build.id) { - Output.prettyErrorln("ERR: File path hash mismatch for {s}.", .{full_build.file_path}); - continue; - } - // save because WebSocket's buffer is 8096 - // max file path is 4096 - var path_buf = bun.constStrToU8(file_path); - path_buf.ptr[path_buf.len] = 0; - var file_path_z: [:0]u8 = path_buf.ptr[0..path_buf.len :0]; - const file = std.fs.openFileAbsoluteZ(file_path_z, .{ .mode = .read_only }) catch |err| { - Output.prettyErrorln("ERR:{s} opening file {s} ", .{ @errorName(err), full_build.file_path }); - continue; - }; - Fs.FileSystem.setMaxFd(file.handle); - try ctx.watcher.appendFile( - file.handle, - file_path, - full_build.id, - ctx.bundler.options.loader(Fs.PathName.init(file_path).ext), - 0, - null, - true, - ); - break :brk full_build.id; - }; - - var arena = ThreadlocalArena.init() catch unreachable; - defer arena.deinit(); - - var head = Websocket.WebsocketHeader{ - .final = true, - .opcode = .Binary, - .mask = false, - .len = 0, - }; - - // theres an issue where on the 4th or 5th build - // sometimes the final byte has incorrect data - // we never end up using all those bytes - if (handler.message_buffer.list.items.len > 0) { - @memset(handler.message_buffer.list.items[0..@min(handler.message_buffer.list.items.len, 128)], 0); - } - const build_result = handler.builder.build(request_id, cmd.timestamp, arena.allocator()) catch |err| { - if (err == error.MissingWatchID) { - msg.timestamp = cmd.timestamp; - msg.kind = Api.WebsocketMessageKind.resolve_file; - - handler.message_buffer.reset(); - var buffer_writer = 
MutableStringAPIWriter.init(&handler.message_buffer); - try msg.encode(&buffer_writer); - const resolve_id = Api.WebsocketMessageResolveId{ .id = request_id }; - try resolve_id.encode(&buffer_writer); - head.len = Websocket.WebsocketHeader.packLength(handler.message_buffer.list.items.len); - var writer = buffer_writer.writable.writer(); - const body_len = handler.message_buffer.list.items.len; - try head.writeHeader(&writer, body_len); - var buffers = handler.message_buffer.toSocketBuffers(2, .{ - .{ body_len, handler.message_buffer.list.items.len }, - .{ 0, body_len }, - }); - _ = try handler.conn.writevAll(&buffers); - continue; - } - - return err; - }; - - const file_path = switch (build_result.value) { - .fail => |fail| fail.module_path, - .success => |fail| fail.module_path, - }; - - switch (build_result.value) { - .fail => { - Output.prettyErrorln( - "error: {s}", - .{ - file_path, - }, - ); - }, - .success => { - if (build_result.timestamp > cmd.timestamp) { - Output.prettyErrorln( - "{d}ms built {s} ({d}+ LOC)", - .{ - build_result.timestamp - cmd.timestamp, - file_path, - build_result.approximate_newline_count, - }, - ); - } - }, - } - - { - defer Output.flush(); - msg.timestamp = build_result.timestamp; - msg.kind = switch (build_result.value) { - .success => .build_success, - else => .build_fail, - }; - handler.message_buffer.reset(); - var buffer_writer = MutableStringAPIWriter.init(&handler.message_buffer); - try msg.encode(&buffer_writer); - var socket_buffers = std.mem.zeroes([4]std.os.iovec_const); - - var socket_buffer_count: usize = 2; - - switch (build_result.value) { - .success => |success| { - try success.encode(&buffer_writer); - const total = handler.message_buffer.list.items.len + build_result.bytes.len + (if (build_result.bytes.len > 0) @as(usize, @sizeOf(u32)) else @as(usize, 0)); - const first_message_len = handler.message_buffer.list.items.len; - head.len = Websocket.WebsocketHeader.packLength(total); - try 
head.writeHeader(&handler.message_buffer.writer(), total); - socket_buffers[0] = iovec(handler.message_buffer.list.items[first_message_len..]); - socket_buffers[1] = iovec(handler.message_buffer.list.items[0..first_message_len]); - - if (build_result.bytes.len > 0) { - socket_buffers[2] = iovec(build_result.bytes); - // we reuse the accept key buffer - // so we have a pointer that is not stack memory - handler.accept_key[0..@sizeOf(usize)].* = @as([@sizeOf(usize)]u8, @bitCast(bun.hash(build_result.bytes))); - socket_buffers[3] = iovec(handler.accept_key[0..4]); - socket_buffer_count = 4; - } - }, - .fail => |fail| { - try fail.encode(&buffer_writer); - head.len = Websocket.WebsocketHeader.packLength(handler.message_buffer.list.items.len); - const first_message_len = handler.message_buffer.list.items.len; - try head.writeHeader(&handler.message_buffer.writer(), handler.message_buffer.list.items.len); - socket_buffers[0] = iovec(handler.message_buffer.list.items[first_message_len..]); - socket_buffers[1] = iovec(handler.message_buffer.list.items[0..first_message_len]); - }, - } - - _ = try handler.conn.writevAll( - socket_buffers[0..socket_buffer_count], - ); - } - }, - else => { - Output.prettyErrorln( - "[Websocket]: Unknown cmd: {d}. This might be a version mismatch. 
Try updating your node_modules.bun", - .{@intFromEnum(cmd.kind)}, - ); - }, - } - }, - .Ping => { - var pong = frame; - pong.header.opcode = .Pong; - _ = try handler.websocket.writeDataFrame(pong); - }, - else => { - Output.prettyErrorln("Websocket unknown opcode: {s}", .{@tagName(frame.header.opcode)}); - }, - } - } - } - - fn checkUpgradeHeaders( - self: *WebsocketHandler, - ) !void { - var request: *RequestContext = &self.ctx; - const upgrade_header = request.header("Upgrade") orelse return error.BadRequest; - - if (!strings.eqlComptime(upgrade_header, "websocket")) { - return error.BadRequest; // Can only upgrade to websocket - } - - // Some proxies/load balancers will mess with the connection header - // and browsers also send multiple values here - const connection_header = request.header("Connection") orelse return error.BadRequest; - var it = std.mem.split(u8, connection_header, ","); - while (it.next()) |part| { - const conn = std.mem.trim(u8, part, " "); - if (strings.eqlCaseInsensitiveASCII(conn, "upgrade", true)) { - return; - } - } - return error.BadRequest; // Connection must be upgrade - } - - fn getWebsocketVersion( - self: *WebsocketHandler, - ) !void { - var request: *RequestContext = &self.ctx; - _ = request.header("Sec-WebSocket-Version") orelse { - Output.prettyErrorln("HMR WebSocket error: missing Sec-WebSocket-Version header", .{}); - return error.BadRequest; - }; - // this error is noisy - // return std.fmt.parseInt(u8, v, 10) catch { - // Output.prettyErrorln("HMR WebSocket error: Sec-WebSocket-Version is invalid {s}", .{v}); - // return error.BadRequest; - // }; - } - - fn getWebsocketAcceptKey( - self: *WebsocketHandler, - ) ![]const u8 { - var request: *RequestContext = &self.ctx; - const key = (request.header("Sec-WebSocket-Key") orelse return error.BadRequest); - if (key.len < 8) { - Output.prettyErrorln("HMR WebSocket error: Sec-WebSocket-Key is less than 8 characters long: {s}", .{key}); - return error.BadRequest; - } - - var hash = 
std.crypto.hash.Sha1.init(.{}); - var out: [20]u8 = undefined; - hash.update(key); - hash.update("258EAFA5-E914-47DA-95CA-C5AB0DC85B11"); - hash.final(&out); - - // Encode it - return std.base64.standard.Encoder.encode(&self.accept_key, &out); - } - }; - - pub fn writeETag(this: *RequestContext, buffer: anytype) !bool { - const strong_etag = bun.hash(buffer); - const etag_content_slice = std.fmt.bufPrintIntToSlice(strong_etag_buffer[0..49], strong_etag, 16, .upper, .{}); - - this.appendHeader("ETag", etag_content_slice); - - if (this.header("If-None-Match")) |etag_header| { - if (strings.eqlLong(etag_content_slice, etag_header, true)) { - try this.sendNotModified(); - return true; - } - } - - return false; - } - - pub fn handleWebsocket(ctx: *RequestContext, server: *Server) anyerror!void { - ctx.controlled = true; - var handler = try WebsocketHandler.addWebsocket(ctx, server); - server.websocket_threadpool.schedule(ThreadPool.Batch.from(&handler.task)); - } - - threadlocal var client_entry_point: bundler.ClientEntryPoint = undefined; - threadlocal var fallback_entry_point: bundler.FallbackEntryPoint = undefined; - threadlocal var fallback_entry_point_created: bool = false; - - pub fn renderServeResult(ctx: *RequestContext, result: bundler.ServeResult) !void { - if (ctx.keep_alive) { - ctx.appendHeader("Connection", "keep-alive"); - } - - if (result.file.value == .noop) { - return try ctx.sendNotFound(); - } - - ctx.mime_type = result.mime_type; - - const accept: MimeType = brk: { - if (ctx.header("Accept")) |accept| - break :brk MimeType.init(accept, null, null); - - break :brk ctx.mime_type; - }; - - ctx.to_plain_text = accept.category == .text and strings.eqlComptime(accept.value, "text/plain"); - - if (!ctx.to_plain_text) { - if (!ctx.url.is_source_map) { - ctx.appendHeader("Content-Type", ctx.mime_type.value); - } else { - ctx.appendHeader("Content-Type", MimeType.json.value); - } - } else { - ctx.appendHeader("Content-Type", "text/plain"); - } - - const 
send_body = ctx.method.hasBody(); - - switch (result.file.value) { - .saved => {}, - - .pending => |resolve_result| { - const path = resolve_result.pathConst() orelse { - try ctx.sendNoContent(); - return; - }; - - const hash = Watcher.getHash(result.file.src_path.text); - const input_fd = if (ctx.watcher.indexOf(hash)) |ind| - if (ind > 0) ctx.watcher.watchlist.items(.fd)[ind] else null - else - null; - - if (resolve_result.is_external) { - try ctx.sendBadRequest(); - return; - } - - const SocketPrinterInternal = struct { - const SocketPrinterInternal = @This(); - rctx: *RequestContext, - _loader: Options.Loader, - buffer: MutableString = undefined, - threadlocal var buffer: ?*MutableString = null; - - pub fn reserveNext(this: *SocketPrinterInternal, count: u32) anyerror![*]u8 { - try this.buffer.growIfNeeded(count); - return @as([*]u8, @ptrCast(&this.buffer.list.items.ptr[this.buffer.list.items.len])); - } - - pub fn advanceBy(this: *SocketPrinterInternal, count: u32) void { - if (comptime Environment.isDebug) std.debug.assert(this.buffer.list.items.len + count <= this.buffer.list.capacity); - - this.buffer.list.items = this.buffer.list.items.ptr[0 .. 
this.buffer.list.items.len + count]; - } - - pub fn init(rctx: *RequestContext, _loader: Options.Loader) SocketPrinterInternal { - if (buffer == null) { - buffer = default_allocator.create(MutableString) catch unreachable; - buffer.?.* = MutableString.init2048(default_allocator) catch unreachable; - } - - buffer.?.reset(); - - return SocketPrinterInternal{ - .rctx = rctx, - ._loader = _loader, - .buffer = buffer.?.*, - }; - } - pub fn writeByte(this: *SocketPrinterInternal, byte: u8) anyerror!usize { - try this.buffer.appendChar(byte); - return 1; - } - pub fn writeAll(this: *SocketPrinterInternal, bytes: anytype) anyerror!usize { - try this.buffer.append(bytes); - return bytes.len; - } - - pub fn slice(this: *SocketPrinterInternal) string { - return this.buffer.list.items; - } - - pub fn getLastByte(this: *const SocketPrinterInternal) u8 { - return if (this.buffer.list.items.len > 0) this.buffer.list.items[this.buffer.list.items.len - 1] else 0; - } - - pub fn getLastLastByte(this: *const SocketPrinterInternal) u8 { - return if (this.buffer.list.items.len > 1) this.buffer.list.items[this.buffer.list.items.len - 2] else 0; - } - - pub fn getWritten(this: *const SocketPrinterInternal) []u8 { - return this.buffer.list.items; - } - - const SourceMapHandler = JSPrinter.SourceMapHandler.For(SocketPrinterInternal, onSourceMapChunk); - pub fn onSourceMapChunk(this: *SocketPrinterInternal, chunk: SourceMap.Chunk, source: logger.Source) anyerror!void { - if (this.rctx.has_called_done) return; - var mutable = try chunk.printSourceMapContents( - source, - MutableString.initEmpty(this.rctx.allocator), - this.rctx.header("Mappings-Only") == null, - false, - ); - - const buf = mutable.toOwnedSliceLeaky(); - if (buf.len == 0) { - try this.rctx.sendNoContent(); - return; - } - - defer this.rctx.done(); - try this.rctx.writeStatus(200); - try this.rctx.prepareToSendBody(buf.len, false); - try this.rctx.writeBodyBuf(buf); - } - pub fn sourceMapHandler(this: *SocketPrinterInternal) 
JSPrinter.SourceMapHandler { - return SourceMapHandler.init(this); - } - - pub fn done( - chunky: *SocketPrinterInternal, - ) anyerror!void { - SocketPrinterInternal.buffer.?.* = chunky.buffer; - if (chunky.rctx.has_called_done) return; - const buf = chunky.buffer.toOwnedSliceLeaky(); - defer { - chunky.buffer.reset(); - SocketPrinterInternal.buffer.?.* = chunky.buffer; - } - - if (chunky.rctx.header("Open-In-Editor") != null) { - if (http_editor_context.editor == null) { - http_editor_context.detectEditor(chunky.rctx.bundler.env); - } - - if (http_editor_context.editor.? != .none) { - http_editor_context.openInEditor( - http_editor_context.editor.?, - buf, - std.fs.path.basename(chunky.rctx.url.path), - chunky.rctx.bundler.fs.tmpdir(), - chunky.rctx.header("Editor-Line") orelse "", - chunky.rctx.header("Editor-Column") orelse "", - ); - - if (http_editor_context.editor.? != .none) { - try chunky.rctx.sendNoContent(); - return; - } - } - } - - if (buf.len == 0) { - try chunky.rctx.sendNoContent(); - return; - } - - var source_map_url: string = ""; - const send_sourcemap_info = chunky._loader.isJavaScriptLike(); - - if (send_sourcemap_info) { - // This will be cleared by the arena - source_map_url = bun.asByteSlice(chunky.rctx.getFullURLForSourceMap()); - - chunky.rctx.appendHeader("SourceMap", source_map_url); - } - - // Failed experiment: inject "Link" tags for each import path - // Browsers ignore this header when it's coming from a script import. - // In Chrome, the header appears in the Network tab but doesn't seem to do anything - // In Firefox,the header does not appear in the Network tab. 
- // Safari was not tested - - if (FeatureFlags.strong_etags_for_built_files) { - // Always cache css & json files, even big ones - // css is especially important because we want to try and skip having the browser parse it whenever we can - if (buf.len < 16 * 16 * 16 * 16 or chunky._loader == .css or chunky._loader == .json) { - const strong_etag = bun.hash(buf); - const etag_content_slice = std.fmt.bufPrintIntToSlice(strong_etag_buffer[0..49], strong_etag, 16, .upper, .{}); - chunky.rctx.appendHeader("ETag", etag_content_slice); - - if (chunky.rctx.header("If-None-Match")) |etag_header| { - if (strings.eqlLong(etag_content_slice, etag_header, true)) { - try chunky.rctx.sendNotModified(); - return; - } - } - } - } - - defer chunky.rctx.done(); - try chunky.rctx.writeStatus(200); - const source_map_url_len: usize = if (send_sourcemap_info) - "\n//# sourceMappingURL=".len + source_map_url.len + "\n".len - else - 0; - try chunky.rctx.prepareToSendBody(buf.len + source_map_url_len, false); - - try chunky.rctx.writeBodyBuf(buf); - - if (send_sourcemap_info) { - // TODO: use an io vec - try chunky.rctx.writeBodyBuf("\n//# sourceMappingURL="); - try chunky.rctx.writeBodyBuf(source_map_url); - try chunky.rctx.writeBodyBuf("\n"); - } - } - - pub fn flush( - _: *SocketPrinterInternal, - ) anyerror!void {} - }; - - const SocketPrinter = JSPrinter.NewWriter( - SocketPrinterInternal, - SocketPrinterInternal.writeByte, - SocketPrinterInternal.writeAll, - SocketPrinterInternal.getLastByte, - SocketPrinterInternal.getLastLastByte, - SocketPrinterInternal.reserveNext, - SocketPrinterInternal.advanceBy, - ); - const loader = ctx.bundler.options.loaders.get(result.file.src_path.name.ext) orelse .file; - - var socket_printer = SocketPrinter.init( - SocketPrinterInternal.init(ctx, loader), - ); - - // It will call flush for us automatically - ctx.bundler.resetStore(); - - var client_entry_point_: ?*bundler.ClientEntryPoint = null; - if (resolve_result.import_kind == .entry_point and 
loader.supportsClientEntryPoint()) { - if (ctx.bundler.options.framework) |*framework| { - if (framework.client.isEnabled()) { - client_entry_point = bundler.ClientEntryPoint{}; - - try client_entry_point.generate(Bundler, ctx.bundler, path.name, framework.client.path); - client_entry_point_ = &client_entry_point; - } - } - } - - const written = (if (!ctx.url.is_source_map) - ctx.bundler.buildWithResolveResult( - resolve_result, - ctx.allocator, - loader, - SocketPrinter, - socket_printer, - .absolute_url, - input_fd, - hash, - Watcher, - ctx.watcher, - client_entry_point_, - ctx.origin, - false, - null, - ) - else - ctx.bundler.buildWithResolveResult( - resolve_result, - ctx.allocator, - loader, - SocketPrinter, - socket_printer, - .absolute_url, - input_fd, - hash, - Watcher, - ctx.watcher, - client_entry_point_, - ctx.origin, - true, - socket_printer.ctx.sourceMapHandler(), - )) catch |err| { - ctx.sendInternalError(err) catch {}; - return; - }; - - // CSS handles this specially - if (loader != .css and client_entry_point_ == null) { - if (written.input_fd) |written_fd| { - try ctx.watcher.addFile( - written_fd, - result.file.src_path.text, - hash, - loader, - resolve_result.dirname_fd, - resolve_result.package_json, - true, - ); - - if (ctx.watcher.watchloop_handle == null) { - ctx.watcher.start() catch {}; - } - } - } else { - if (written.written > 0) { - if (ctx.watcher.watchloop_handle == null) { - try ctx.watcher.start(); - } - } - } - - if (written.empty) { - switch (loader) { - .css => try ctx.sendNoContent(), - .toml, .js, .jsx, .ts, .tsx, .json => { - const buf = "export default {};"; - const strong_etag = comptime bun.hash(buf); - const etag_content_slice = std.fmt.bufPrintIntToSlice(strong_etag_buffer[0..49], strong_etag, 16, .upper, .{}); - ctx.appendHeader("ETag", etag_content_slice); - - if (ctx.header("If-None-Match")) |etag_header| { - if (strings.eqlLong(etag_content_slice, etag_header, true)) { - try ctx.sendNotModified(); - return; - } - } - 
defer ctx.done(); - try ctx.writeStatus(200); - try ctx.prepareToSendBody(buf.len, false); - try ctx.writeBodyBuf(buf); - }, - else => unreachable, - } - } - }, - .noop => { - try ctx.sendNotFound(); - }, - .copy, .move => |file| { - // defer std.os.close(file.fd); - defer { - // for public dir content, we close on completion - if (file.close_handle_on_complete) { - std.debug.assert(!file.autowatch); - std.os.close(file.fd); - } - - if (file.autowatch) { - // we must never autowatch a file that will be closed - std.debug.assert(!file.close_handle_on_complete); - - if (ctx.watcher.addFile( - file.fd, - result.file.src_path.text, - Watcher.getHash(result.file.src_path.text), - result.file.loader, - file.dir, - null, - true, - )) { - if (ctx.watcher.watchloop_handle == null) { - ctx.watcher.start() catch |err| { - Output.prettyErrorln("Failed to start watcher: {s}", .{@errorName(err)}); - }; - } - } else |_| {} - } - } - - // if (result.mime_type.category != .html) { - // hash(absolute_file_path, size, mtime) - var weak_etag = std.hash.Wyhash.init(0); - weak_etag_buffer[0] = 'W'; - weak_etag_buffer[1] = '/'; - weak_etag.update(result.file.src_path.text); - std.mem.writeInt(u64, weak_etag_tmp_buffer[0..8], result.file.size, .little); - weak_etag.update(weak_etag_tmp_buffer[0..8]); - - if (result.file.mtime) |mtime| { - std.mem.writeInt(i128, weak_etag_tmp_buffer[0..16], mtime, .little); - weak_etag.update(weak_etag_tmp_buffer[0..16]); - } - - const etag_content_slice = std.fmt.bufPrintIntToSlice(weak_etag_buffer[2..], weak_etag.final(), 16, .upper, .{}); - const complete_weak_etag = weak_etag_buffer[0 .. 
etag_content_slice.len + 2]; - - ctx.appendHeader("ETag", complete_weak_etag); - - if (ctx.header("If-None-Match")) |etag_header| { - if (strings.eqlLong(complete_weak_etag, etag_header, true)) { - try ctx.sendNotModified(); - return; - } - } - // } else { - // ctx.appendHeader("Cache-Control", "no-cache"); - // } - - switch (result.file.size) { - 0 => { - try ctx.sendNoContent(); - return; - }, - else => { - defer ctx.done(); - - try ctx.writeStatus(200); - try ctx.prepareToSendBody(result.file.size, false); - if (!send_body) return; - - _ = try std.os.sendfile( - ctx.conn.handle, - file.fd, - 0, - result.file.size, - &[_]std.os.iovec_const{}, - &[_]std.os.iovec_const{}, - 0, - ); - }, - } - }, - .buffer => |buffer| { - - // The version query string is only included for: - // - The runtime - // - node_modules - // For the runtime, it's a hash of the file contents - // For node modules, it's just the package version from the package.json - // It's safe to assume node_modules are immutable. In practice, they aren't. 
- // However, a lot of other stuff breaks when node_modules change so it's fine - if (strings.contains(ctx.url.query_string, "v=")) { - ctx.appendHeader("Cache-Control", "public, immutable, max-age=31556952"); - } - - if (FeatureFlags.strong_etags_for_built_files) { - const did_send = ctx.writeETag(buffer.bytes) catch false; - if (did_send) return; - } - - if (buffer.bytes.len == 0) { - return try ctx.sendNoContent(); - } - - defer ctx.done(); - try ctx.writeStatus(200); - try ctx.prepareToSendBody(buffer.bytes.len, false); - if (!send_body) return; - _ = try ctx.writeSocket(buffer.bytes, SOCKET_FLAGS); - }, - } - } - - fn handleBunURL(ctx: *RequestContext, server: *Server) !void { - const path = ctx.url.path["bun:".len..]; - - if (strings.eqlComptime(path, "_api.hmr")) { - if (ctx.header("Upgrade")) |upgrade| { - if (strings.eqlCaseInsensitiveASCII(upgrade, "websocket", true)) { - try ctx.handleWebsocket(server); - return; - } - } - } - - if (strings.eqlComptime(path, "error.js")) { - const buffer = ErrorJS.sourceContent(); - ctx.appendHeader("Content-Type", MimeType.javascript.value); - ctx.appendHeader("Cache-Control", "public, max-age=3600"); - ctx.appendHeader("Age", "0"); - - if (FeatureFlags.strong_etags_for_built_files) { - const did_send = ctx.writeETag(buffer) catch false; - if (did_send) return; - } - - if (buffer.len == 0) { - return try ctx.sendNoContent(); - } - const send_body = ctx.method.hasBody(); - defer ctx.done(); - try ctx.writeStatus(200); - try ctx.prepareToSendBody(buffer.len, false); - if (!send_body) return; - _ = try ctx.writeSocket(buffer, SOCKET_FLAGS); - return; - } - - if (strings.eqlComptime(path, "erro.css")) { - const buffer = ErrorCSS.sourceContent(); - ctx.appendHeader("Content-Type", MimeType.css.value); - ctx.appendHeader("Cache-Control", "public, max-age=3600"); - ctx.appendHeader("Age", "0"); - - if (FeatureFlags.strong_etags_for_built_files) { - const did_send = ctx.writeETag(buffer) catch false; - if (did_send) return; - } 
- - if (buffer.len == 0) { - return try ctx.sendNoContent(); - } - const send_body = ctx.method.hasBody(); - defer ctx.done(); - try ctx.writeStatus(200); - try ctx.prepareToSendBody(buffer.len, false); - if (!send_body) return; - _ = try ctx.writeSocket(buffer, SOCKET_FLAGS); - return; - } - - if (strings.eqlComptime(path, "fallback")) { - const resolved = try ctx.bundler.resolver.resolve(ctx.bundler.fs.top_level_dir, ctx.bundler.options.framework.?.fallback.path, .stmt); - const resolved_path = resolved.pathConst() orelse return try ctx.sendNotFound(); - const mime_type_ext = ctx.bundler.options.out_extensions.get(resolved_path.name.ext) orelse resolved_path.name.ext; - const loader = ctx.bundler.options.loader(resolved_path.name.ext); - try ctx.renderServeResult(bundler.ServeResult{ - .file = Options.OutputFile.initPending(loader, resolved), - .mime_type = MimeType.byLoader( - loader, - mime_type_ext[1..], - ), - }); - return; - } - - if (strings.eqlComptime(path, "wrap")) { - const buffer = Runtime.sourceContent(ctx.bundler.options.jsx.use_embedded_refresh_runtime); - ctx.appendHeader("Content-Type", MimeType.javascript.value); - ctx.appendHeader("Cache-Control", "public, max-age=3600"); - ctx.appendHeader("Age", "0"); - if (FeatureFlags.strong_etags_for_built_files) { - const did_send = ctx.writeETag(buffer) catch false; - if (did_send) return; - } - - if (buffer.len == 0) { - return try ctx.sendNoContent(); - } - const send_body = ctx.method.hasBody(); - defer ctx.done(); - try ctx.writeStatus(200); - try ctx.prepareToSendBody(buffer.len, false); - if (!send_body) return; - _ = try ctx.writeSocket(buffer, SOCKET_FLAGS); - return; - } - - if (strings.eqlComptime(path, "info")) { - return try ctx.sendBunInfoJSON(); - } - - if (strings.eqlComptime(path, "reactfsh-v0.11.0")) { - const buffer = @embedFile("react-refresh.js"); - ctx.appendHeader("Content-Type", MimeType.javascript.value); - ctx.appendHeader("Cache-Control", "public, max-age=3600"); - 
ctx.appendHeader("Age", "0"); - if (FeatureFlags.strong_etags_for_built_files) { - const did_send = ctx.writeETag(buffer) catch false; - if (did_send) return; - } - - if (buffer.len == 0) { - return try ctx.sendNoContent(); - } - const send_body = ctx.method.hasBody(); - defer ctx.done(); - try ctx.writeStatus(200); - try ctx.prepareToSendBody(buffer.len, false); - if (!send_body) return; - _ = try ctx.writeSocket(buffer, SOCKET_FLAGS); - return; - } - - try ctx.sendNotFound(); - return; - } - - fn sendBunInfoJSON(ctx: *RequestContext) anyerror!void { - defer ctx.bundler.resetStore(); - - var buffer_writer = try JSPrinter.BufferWriter.init(default_allocator); - - var writer = JSPrinter.BufferPrinter.init(buffer_writer); - defer writer.ctx.buffer.deinit(); - var source = logger.Source.initEmptyFile("info.json"); - _ = try JSPrinter.printJSON( - *JSPrinter.BufferPrinter, - &writer, - try Global.BunInfo.generate(*Bundler, ctx.bundler, ctx.allocator), - &source, - ); - const buffer = writer.ctx.written; - - ctx.appendHeader("Content-Type", MimeType.json.value); - ctx.appendHeader("Cache-Control", "public, max-age=3600"); - ctx.appendHeader("Age", "0"); - if (FeatureFlags.strong_etags_for_built_files) { - const did_send = ctx.writeETag(buffer) catch false; - if (did_send) return; - } - - if (buffer.len == 0) { - return try ctx.sendNoContent(); - } - const send_body = ctx.method.hasBody(); - defer ctx.done(); - try ctx.writeStatus(200); - try ctx.prepareToSendBody(buffer.len, false); - if (!send_body) return; - _ = try ctx.writeSocket(buffer, SOCKET_FLAGS); - } - - // https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Sec-Fetch-Dest - pub fn isScriptOrStyleRequest(ctx: *RequestContext) bool { - const header_ = ctx.header("Sec-Fetch-Dest") orelse return false; - return strings.eqlComptime(header_, "script") or - strings.eqlComptime(header_, "style"); - } - - fn handleSrcURL(ctx: *RequestContext, _: *Server) !void { - var input_path = ctx.url.path["src:".len..]; - 
var line: string = ""; - var column: string = ""; - if (std.mem.indexOfScalar(u8, input_path, ':')) |i| { - line = input_path[i + 1 ..]; - input_path = input_path[0..i]; - - if (line.len > 0) { - if (std.mem.indexOfScalar(u8, line, ':')) |j| { - column = line[j + 1 ..]; - line = line[0..j]; - } - } - } - - if (ctx.bundler.options.routes.asset_prefix_path.len > 0 and - strings.hasPrefix(input_path, ctx.bundler.options.routes.asset_prefix_path)) - { - input_path = input_path[ctx.bundler.options.routes.asset_prefix_path.len..]; - } - if (input_path.len == 0) return ctx.sendNotFound(); - - const result = ctx.buildFile(input_path) catch |err| { - if (err == error.ModuleNotFound) { - return try ctx.sendNotFound(); - } - - return err; - }; - - switch (result.file.value) { - .pending => |resolve_result| { - const path = resolve_result.pathConst() orelse return try ctx.sendNotFound(); - if (ctx.header("Open-In-Editor") != null) { - if (http_editor_context.editor == null) - http_editor_context.detectEditor(ctx.bundler.env); - - if (http_editor_context.editor) |editor| { - if (editor != .none) { - editor.open(http_editor_context.path, path.text, line, column, bun.default_allocator) catch |err| { - if (editor != .other) { - Output.prettyErrorln("Error {s} opening in {s}", .{ @errorName(err), @tagName(editor) }); - } - - http_editor_context.editor = Editor.none; - }; - - if (http_editor_context.editor.? 
!= .none) { - defer ctx.done(); - try ctx.writeStatus(200); - ctx.appendHeader("Content-Type", MimeType.html.value); - const auto_close = ""; - try ctx.prepareToSendBody(auto_close.len, false); - try ctx.writeBodyBuf(auto_close); - return; - } - } - } - } - - var needs_close = false; - const fd = if (resolve_result.file_fd != 0) - resolve_result.file_fd - else brk: { - var file = std.fs.openFileAbsoluteZ(path.textZ(), .{ .mode = .read_only }) catch |err| { - Output.prettyErrorln("Failed to open {s} due to error {s}", .{ path.text, @errorName(err) }); - return try ctx.sendInternalError(err); - }; - needs_close = true; - break :brk file.handle; - }; - defer { - if (needs_close) { - std.os.close(fd); - } - } - - const content_length = brk: { - var file = std.fs.File{ .handle = fd }; - var stat = file.stat() catch |err| { - Output.prettyErrorln("Failed to read {s} due to error {s}", .{ path.text, @errorName(err) }); - return try ctx.sendInternalError(err); - }; - break :brk stat.size; - }; - - if (content_length == 0) { - return try ctx.sendNoContent(); - } - - ctx.appendHeader("Content-Type", "text/plain"); - defer ctx.done(); - - try ctx.writeStatus(200); - try ctx.prepareToSendBody(content_length, false); - - _ = try std.os.sendfile( - ctx.conn.handle, - fd, - 0, - content_length, - &[_]std.os.iovec_const{}, - &[_]std.os.iovec_const{}, - 0, - ); - }, - else => return try ctx.sendNotFound(), - } - } - - fn handleAbsURL(ctx: *RequestContext, _: *Server) !void { - const extname = ctx.url.extname; - switch (extname.len) { - 3 => { - if (!(strings.eqlComptimeIgnoreLen(extname, "css") or strings.eqlComptimeIgnoreLen(extname, "tsx") or strings.eqlComptimeIgnoreLen(extname, "jsx") or strings.eqlComptime(extname, "mjs"))) return try ctx.sendNotFound(); - }, - 2 => { - if (!(strings.eqlComptimeIgnoreLen(extname, "js") or strings.eqlComptimeIgnoreLen(extname, "ts"))) return try ctx.sendNotFound(); - }, - 4 => { - if (!(strings.eqlComptimeIgnoreLen(extname, "json") or 
strings.eqlComptimeIgnoreLen(extname, "yaml"))) return try ctx.sendNotFound(); - }, - else => { - return try ctx.sendNotFound(); - }, - } - - switch (ctx.method) { - .GET, .HEAD => { - const result = try ctx.buildFile( - ctx.url.path["abs:".len..], - ); - try @call(.always_inline, RequestContext.renderServeResult, .{ ctx, result }); - }, - else => { - try ctx.sendNotFound(); - }, - } - } - - pub fn handleReservedRoutes(ctx: *RequestContext, server: *Server) !bool { - - // From HTTP, we serve files with a hash modkey - // The format is - // hash:${hash}/${ORIGINAL_PATH} - // hash:abcdefg123/app/foo/my-file.jpeg - // The hash exists for browser cache invalidation - if (strings.hasPrefixComptime(ctx.url.path, "hash:")) { - var current = ctx.url.path; - current = current["hash:".len..]; - if (strings.indexOfChar(current, '/')) |i| { - current = current[i + 1 ..]; - ctx.url.path = current; - return false; - } - } - - if (strings.hasPrefixComptime(ctx.url.path, "bun:")) { - try ctx.handleBunURL(server); - return true; - } - - if (strings.hasPrefixComptime(ctx.url.path, "src:")) { - try ctx.handleSrcURL(server); - return true; - } - - if (strings.hasPrefixComptime(ctx.url.path, "abs:")) { - try ctx.handleAbsURL(server); - return true; - } - - return false; - } - - pub inline fn buildFile(ctx: *RequestContext, path_name: string) !bundler.ServeResult { - if (ctx.bundler.options.isFrontendFrameworkEnabled()) { - return try ctx.bundler.buildFile( - &ctx.log, - path_name, - true, - ); - } else { - return try ctx.bundler.buildFile( - &ctx.log, - path_name, - false, - ); - } - } - pub fn handleGet(ctx: *RequestContext) !void { - const result = try ctx.buildFile( - ctx.url.pathWithoutAssetPrefix(ctx.bundler.options.routes.asset_prefix_path), - ); - try @call(.always_inline, RequestContext.renderServeResult, .{ ctx, result }); - } - - pub fn handleRequest(ctx: *RequestContext) !void { - switch (ctx.method) { - .GET, .HEAD, .OPTIONS => { - return ctx.handleGet(); - }, - else => { - 
return ctx.sendNotFound(); - }, - } - } -}; - -pub const RequestContext = if (!Environment.isPosix) struct {} else PosixRequestContext; - -// // u32 == File ID from Watcher -// pub const WatcherBuildChannel = sync.Channel(u32, .Dynamic); -// pub const WatcherBuildQueue = struct { -// channel: WatcherBuildChannel, -// bundler: *Bundler, -// watcher: *Watcher, -// allocator: std.mem.Allocator, - -// pub fn start(queue: *@This()) void { -// var stdout = std.io.getStdOut(); -// var stderr = std.io.getStdErr(); -// var output_source = Output.Source.init(stdout, stderr); - -// Output.Source.set(&output_source); -// Output.enable_ansi_colors = stderr.isTty(); -// defer Output.flush(); -// queue.loop(); -// } - -// pub fn loop(queue: *@This()) !void { -// while (true) { - -// } -// } -// }; - -// This is a tiny HTTP server. -// It needs to support: -// - Static files -// - ETags, If-Not-Modified-Since -// - Bundling -// - Content-Type header -// - Content-Range header -// Fancy things to support: -// - Server-Timings for: -// - Resolver time -// - Parsing time -// - IO read time -const Editor = @import("./open.zig").Editor; -const EditorContext = @import("./open.zig").EditorContext; - -pub const Server = struct { - log: logger.Log, - allocator: std.mem.Allocator, - bundler: *Bundler, - watcher: *Watcher, - timer: std.time.Timer = undefined, - transform_options: Api.TransformOptions, - javascript_enabled: bool = false, - fallback_only: bool = false, - req_body_release_queue_mutex: Lock = Lock.init(), - req_body_release_queue: RequestDataPool.List = RequestDataPool.List{}, - - websocket_threadpool: ThreadPool = ThreadPool.init(.{ - // on macOS, the max stack size is 65520 bytes, - // so we ask for 65519 - .stack_size = 65519, - .max_threads = std.math.maxInt(u32), - }), - - pub var current: *Server = undefined; - - pub fn releaseRequestDataPoolNode(this: *Server, node: *RequestDataPool.Node) void { - this.req_body_release_queue_mutex.lock(); - defer 
this.req_body_release_queue_mutex.unlock(); - node.next = null; - - this.req_body_release_queue.prepend(node); - } - - pub fn cleanupRequestData(this: *Server) void { - this.req_body_release_queue_mutex.lock(); - defer this.req_body_release_queue_mutex.unlock(); - var any = false; - while (this.req_body_release_queue.popFirst()) |node| { - node.next = null; - node.release(); - any = true; - } - } - - threadlocal var filechange_buf: [32]u8 = undefined; - threadlocal var filechange_buf_hinted: [32]u8 = undefined; - pub fn onError( - _: *@This(), - err: anyerror, - ) void { - Output.prettyErrorln("Watcher crashed: {s}", .{@errorName(err)}); - } - pub fn onFileUpdate( - ctx: *Server, - events: []watcher.WatchEvent, - changed_files: []?[:0]u8, - watchlist: watcher.Watchlist, - ) void { - if (Output.isEmojiEnabled()) { - _onFileUpdate(ctx, events, changed_files, watchlist, true); - } else { - _onFileUpdate(ctx, events, changed_files, watchlist, false); - } - } - - var _on_file_update_path_buf: [bun.MAX_PATH_BYTES]u8 = undefined; - fn _onFileUpdate( - ctx: *Server, - events: []watcher.WatchEvent, - changed_files: []?[:0]u8, - watchlist: watcher.Watchlist, - comptime is_emoji_enabled: bool, - ) void { - var fbs = std.io.fixedBufferStream(&filechange_buf); - var hinted_fbs = std.io.fixedBufferStream(&filechange_buf_hinted); - { - var writer = ByteApiWriter.init(&fbs); - const message_type = Api.WebsocketMessage{ - .timestamp = RequestContext.WebsocketHandler.toTimestamp(ctx.timer.read()), - .kind = .file_change_notification, - }; - - message_type.encode(&writer) catch unreachable; - } - - { - var writer = ByteApiWriter.init(&hinted_fbs); - const message_type = Api.WebsocketMessage{ - .timestamp = RequestContext.WebsocketHandler.toTimestamp(ctx.timer.read()), - .kind = Api.WebsocketMessageKind.file_change_notification_with_hint, - }; - - message_type.encode(&writer) catch unreachable; - } - - var slice = watchlist.slice(); - const file_paths = slice.items(.file_path); - var 
counts = slice.items(.count); - const kinds = slice.items(.kind); - const hashes = slice.items(.hash); - var file_descriptors = slice.items(.fd); - const header = fbs.getWritten(); - defer ctx.watcher.flushEvictions(); - defer Output.flush(); - - var rfs: *Fs.FileSystem.RealFS = &ctx.bundler.fs.fs; - - // It's important that this function does not do any memory allocations - // If this blocks, it can cause cascading bad things to happen - for (events) |event| { - const file_path = file_paths[event.index]; - const update_count = counts[event.index] + 1; - counts[event.index] = update_count; - const kind = kinds[event.index]; - - // so it's consistent with the rest - // if we use .extname we might run into an issue with whether or not the "." is included. - const path = Fs.PathName.init(file_path); - const id = hashes[event.index]; - var content_fbs = std.io.fixedBufferStream(filechange_buf[header.len..]); - var hinted_content_fbs = std.io.fixedBufferStream(filechange_buf_hinted[header.len..]); - - if (comptime Environment.isDebug) { - Output.prettyErrorln("[watcher] {s}: -- {}", .{ @tagName(kind), event.op }); - } - - switch (kind) { - .file => { - if (event.op.delete or event.op.rename) { - ctx.watcher.removeAtIndex( - event.index, - 0, - &.{}, - .file, - ); - - if (comptime FeatureFlags.verbose_watcher) { - Output.prettyErrorln("File changed: {s}", .{ctx.bundler.fs.relativeTo(file_path)}); - } - } else { - var tmp = ctx.bundler.options.loaders.get(path.ext) orelse .file; - const change_message = Api.WebsocketMessageFileChangeNotification{ - .id = id, - .loader = tmp.toAPI(), - }; - - var content_writer = ByteApiWriter.init(&content_fbs); - change_message.encode(&content_writer) catch unreachable; - const change_buf = content_fbs.getWritten(); - const written_buf = filechange_buf[0 .. 
header.len + change_buf.len]; - RequestContext.WebsocketHandler.broadcast(written_buf) catch |err| { - Output.prettyErrorln("Error writing change notification: {s}", .{@errorName(err)}); - }; - if (comptime is_emoji_enabled) { - Output.prettyErrorln("📜 File change: {s}", .{ctx.bundler.fs.relativeTo(file_path)}); - } else { - Output.prettyErrorln(" File change: {s}", .{ctx.bundler.fs.relativeTo(file_path)}); - } - } - }, - .directory => { - const affected = event.names(changed_files); - var entries_option: ?*Fs.FileSystem.RealFS.EntriesOption = null; - if (affected.len > 0) { - entries_option = rfs.entries.get(file_path); - } - - rfs.bustEntriesCache(file_path); - ctx.bundler.resolver.dir_cache.remove(file_path); - - if (entries_option) |dir_ent| { - var last_file_hash: Watcher.HashType = std.math.maxInt(Watcher.HashType); - for (affected) |changed_name_ptr| { - const changed_name: []u8 = (changed_name_ptr orelse continue)[0..]; - if (changed_name.len == 0 or changed_name[0] == '~' or changed_name[0] == '.') continue; - - const loader = (ctx.bundler.options.loaders.get(Fs.PathName.init(changed_name).ext) orelse .file); - if (loader.isJavaScriptLikeOrJSON() or loader == .css) { - var path_string: bun.PathString = undefined; - var file_hash: Watcher.HashType = last_file_hash; - const abs_path: string = brk: { - if (dir_ent.entries.get(changed_name)) |file_ent| { - // reset the file descriptor - file_ent.entry.cache.fd = 0; - file_ent.entry.need_stat = true; - path_string = file_ent.entry.abs_path; - file_hash = Watcher.getHash(path_string.slice()); - for (hashes, 0..) 
|hash, entry_id| { - if (hash == file_hash) { - file_descriptors[entry_id] = 0; - break; - } - } - - break :brk path_string.slice(); - } else { - var file_path_without_trailing_slash = std.mem.trimRight(u8, file_path, std.fs.path.sep_str); - @memcpy(_on_file_update_path_buf[0..file_path_without_trailing_slash.len], file_path_without_trailing_slash); - _on_file_update_path_buf[file_path_without_trailing_slash.len] = std.fs.path.sep; - - @memcpy(_on_file_update_path_buf[file_path_without_trailing_slash.len + 1 .. changed_name.len], changed_name); - const path_slice = _on_file_update_path_buf[0 .. file_path_without_trailing_slash.len + changed_name.len + 1]; - file_hash = Watcher.getHash(path_slice); - break :brk path_slice; - } - }; - - // skip consecutive duplicates - if (last_file_hash == file_hash) continue; - last_file_hash = file_hash; - - const change_message = Api.WebsocketMessageFileChangeNotification{ - .id = file_hash, - .loader = loader.toAPI(), - }; - - var content_writer = ByteApiWriter.init(&hinted_content_fbs); - change_message.encode(&content_writer) catch unreachable; - const change_buf = hinted_content_fbs.getWritten(); - const written_buf = filechange_buf_hinted[0 .. 
header.len + change_buf.len]; - RequestContext.WebsocketHandler.broadcast(written_buf) catch |err| { - Output.prettyErrorln("Error writing change notification: {s}", .{@errorName(err)}); - }; - if (comptime is_emoji_enabled) { - Output.prettyErrorln("📜 File change: {s}", .{ctx.bundler.fs.relativeTo(abs_path)}); - } else { - Output.prettyErrorln(" File change: {s}", .{ctx.bundler.fs.relativeTo(abs_path)}); - } - } - } - } - - // if (event.op.delete or event.op.rename) - // ctx.watcher.removeAtIndex(event.index, hashes[event.index], parent_hashes, .directory); - if (comptime is_emoji_enabled) { - Output.prettyErrorln("📁 Dir change: {s}", .{ctx.bundler.fs.relativeTo(file_path)}); - } else { - Output.prettyErrorln(" Dir change: {s}", .{ctx.bundler.fs.relativeTo(file_path)}); - } - }, - } - } - } - - fn run(server: *Server, comptime features: ConnectionFeatures) !void { - _ = Fs.FileSystem.RealFS.adjustUlimit() catch {}; - - RequestContext.WebsocketHandler.open_websockets = @TypeOf( - RequestContext.WebsocketHandler.open_websockets, - ).init(server.allocator); - var listener = std.net.StreamServer.init(.{ - .kernel_backlog = 1280, - }); - defer listener.deinit(); - server.websocket_threadpool.stack_size = @min( - @max(128_000, Fs.FileSystem.RealFS.Limit.stack), - 4_000_000, - ); - - // listener.setFastOpen(true) catch {}; - // listener.setNoDelay(true) catch {}; - // listener.setQuickACK(true) catch {}; - - // try listener.ack(true); - - var port: u16 = 3000; - - if (server.transform_options.port) |_port| { - port = _port; - } else if (server.bundler.options.origin.getPort()) |_port| { - port = _port; - } - - { - var attempts: u8 = 0; - - restart: while (attempts < 10) : (attempts += 1) { - listener.listen(std.net.Address.initIp4( - .{ 0, 0, 0, 0 }, - port, - )) catch |err| { - switch (err) { - error.AddressInUse => { - port += 1; - continue :restart; - }, - else => { - Output.prettyErrorln("{s} while trying to start listening on port {d}.\n\n", .{ @errorName(err), port 
}); - Global.exit(1); - }, - } - }; - - break :restart; - } - - if (attempts >= 10) { - var random_number = std.rand.DefaultPrng.init(@as(u64, @intCast(std.time.milliTimestamp()))); - const default_port = @as(u16, @intCast(server.bundler.options.origin.getPort() orelse 3000)); - Output.prettyErrorln( - "error: bun can't start because port {d} is already in use. Tried {d} - {d}. Try closing the other apps or manually passing bun a port\n\n bun --origin http://localhost:{d}/\n", - .{ - default_port, - default_port, - port, - random_number.random().intRangeAtMost(u16, 3011, 65535), - }, - ); - Global.exit(1); - } - } - - const addr = listener.listen_address; - if (server.bundler.options.origin.getPort() != addr.getPort()) { - server.bundler.options.origin = ZigURL.parse(try std.fmt.allocPrint(server.allocator, "{s}://{s}:{d}", .{ server.bundler.options.origin.displayProtocol(), server.bundler.options.origin.displayHostname(), addr.getPort() })); - } - - const start_time = Global.getStartTime(); - const now = std.time.nanoTimestamp(); - Output.printStartEnd(start_time, now); - - const display_path: string = brk: { - if (server.bundler.options.routes.single_page_app_routing) { - const lhs = std.mem.trimRight(u8, server.bundler.fs.top_level_dir, std.fs.path.sep_str); - const rhs = std.mem.trimRight(u8, server.bundler.options.routes.static_dir, std.fs.path.sep_str); - - if (strings.eql(lhs, rhs)) { - break :brk "."; - } - - break :brk resolve_path.relative(lhs, rhs); - } - - break :brk ""; - }; - - // This is technically imprecise. - // However, we want to optimize for easy to copy paste - // Nobody should get weird CORS errors when you go to the printed url. - if (addr.in.sa.addr == 0) { - if (server.bundler.options.routes.single_page_app_routing) { - Output.prettyError( - " bun!! v{s}\n\n\n Link: http://localhost:{d}\n {s}/index.html \n\n\n", - .{ - Global.package_json_version_with_sha, - addr.getPort(), - display_path, - }, - ); - } else { - Output.prettyError(" bun!! 
v{s}\n\n\n Link: http://localhost:{d}\n\n\n", .{ - Global.package_json_version_with_sha, - addr.getPort(), - }); - } - } else { - if (server.bundler.options.routes.single_page_app_routing) { - Output.prettyError(" bun!! v{s}\n\n\n Link: http://{any}\n {s}/index.html \n\n\n", .{ - Global.package_json_version_with_sha, - addr, - display_path, - }); - } else { - Output.prettyError(" bun!! v{s}\n\n\n Link: http://{any}\n\n\n", .{ - Global.package_json_version_with_sha, - addr, - }); - } - } - - Output.flush(); - - Analytics.Features.framework = server.bundler.options.framework != null; - Analytics.Features.filesystem_router = server.bundler.router != null; - - const UpgradeCheckerThread = @import("./cli/upgrade_command.zig").UpgradeCheckerThread; - - UpgradeCheckerThread.spawn(server.bundler.env); - - var did_init = false; - while (!did_init) { - defer Output.flush(); - var conn = listener.accept() catch - continue; - - disableSIGPIPESoClosingTheTabDoesntCrash(conn.stream); - - // We want to bind to the network socket as quickly as possible so that opening the URL works - // We use a secondary loop so that we avoid the extra branch in a hot code path - Analytics.Features.fast_refresh = server.bundler.options.jsx.supports_fast_refresh; - server.detectTSConfig(); - server.detectFastRefresh(); - try server.initWatcher(); - did_init = true; - Analytics.enqueue(Analytics.EventName.http_start); - - server.handleConnection(conn.stream, comptime features); - } - - server.cleanupRequestData(); - var counter: usize = 0; - - while (true) { - defer Output.flush(); - var conn = listener.accept() catch - continue; - - disableSIGPIPESoClosingTheTabDoesntCrash(conn.stream); - - server.handleConnection(conn.stream, comptime features); - counter +%= 1; - if (counter % 4 == 0) server.cleanupRequestData(); - } - } - - pub const ConnectionFeatures = struct { - public_folder: PublicFolderPriority = PublicFolderPriority.none, - filesystem_router: bool = false, - single_page_app_routing: bool 
= false, - pub const PublicFolderPriority = enum { - none, - first, - last, - }; - }; - - threadlocal var req_ctx_: RequestContext = undefined; - pub fn handleConnection(server: *Server, conn: std.net.Stream, comptime features: ConnectionFeatures) void { - var req_buf_node = RequestDataPool.get(server.allocator); - - // https://stackoverflow.com/questions/686217/maximum-on-http-header-values - var read_size = conn.read(&req_buf_node.data) catch { - _ = conn.write(comptime RequestContext.printStatusLine(400) ++ "\r\n\r\n") catch {}; - return; - }; - - if (read_size == 0) { - // Actually, this was not a request. - return; - } - - var req = picohttp.Request.parse(req_buf_node.data[0..read_size], &req_headers_buf) catch |err| { - _ = conn.write(comptime RequestContext.printStatusLine(400) ++ "\r\n\r\n") catch {}; - _ = Syscall.close(conn.handle); - Output.printErrorln("ERR: {s}", .{@errorName(err)}); - return; - }; - - var request_arena = ThreadlocalArena.init() catch unreachable; - var request_allocator = request_arena.allocator(); - var req_ctx = request_allocator.create(RequestContext) catch unreachable; - - req_ctx.init( - req, - request_arena, - conn, - server.bundler, - server.watcher, - server.timer, - ) catch |err| { - Output.prettyErrorln("[{s}] - {s}: {s}", .{ @errorName(err), req.method, req.path }); - _ = Syscall.close(conn.handle); - request_arena.deinit(); - return; - }; - - req_ctx.req_body_node = req_buf_node; - req_ctx.timer.reset(); - - const is_navigation_request = req_ctx.isBrowserNavigation(); - defer if (is_navigation_request == .yes) Analytics.enqueue(Analytics.EventName.http_build); - req_ctx.parseOrigin(); - // req_ctx.appendHeader("Date", value: string) - outer: { - const now = DateTime.Datetime.now(); - req_ctx.appendHeader( - "Date", - now.formatHttpBuf(&req_ctx.datetime_buf) catch brk: { - break :brk now.formatHttp(req_ctx.allocator) catch break :outer; - }, - ); - } - - if (req_ctx.url.needs_redirect) { - 
req_ctx.handleRedirect(req_ctx.url.path) catch |err| { - Output.prettyErrorln("[{s}] - {s}: {s}", .{ @errorName(err), req.method, req.path }); - conn.close(); - return; - }; - return; - } - - defer { - if (!req_ctx.controlled) { - req_ctx.req_body_node.release(); - req_ctx.arena.deinit(); - } - } - - req_ctx.log = logger.Log.init(server.allocator); - var log = &req_ctx.log; - - req_ctx.bundler.setLog(log); - // req_ctx.bundler.setAllocator(req_ctx.allocator); - - var did_print: bool = false; - - defer { - if (!req_ctx.controlled) { - if (!req_ctx.has_called_done) { - if (comptime Environment.isDebug) { - if (@errorReturnTrace()) |trace| { - std.debug.dumpStackTrace(trace.*); - Output.printError("\n", .{}); - } - } - - req_ctx.sendInternalError(error.InternalError) catch {}; - } - const status = req_ctx.status orelse @as(HTTPStatusCode, @intCast(500)); - - if (log.msgs.items.len == 0) { - if (!did_print) { - switch (status) { - // For success codes, just don't print anything. - // It's really noisy. - 200, 304, 101 => {}, - - 201...303, 305...399 => { - Output.prettyErrorln("{d} {s} {s} as {s}", .{ status, @tagName(req_ctx.method), req.path, req_ctx.mime_type.value }); - }, - 400...499 => { - Output.prettyErrorln("{d} {s} {s} as {s}", .{ status, @tagName(req_ctx.method), req.path, req_ctx.mime_type.value }); - }, - else => { - Output.prettyErrorln("{d} {s} {s} as {s}", .{ status, @tagName(req_ctx.method), req.path, req_ctx.mime_type.value }); - }, - } - } - } else { - defer Output.flush(); - defer log.deinit(); - log.printForLogLevel(Output.errorWriter()) catch {}; - - if (!did_print) { - switch (status) { - // For success codes, just don't print anything. - // It's really noisy. 
- 200, 304, 101 => {}, - - 201...303, 305...399 => { - Output.prettyErrorln("{d} {s} {s} as {s}", .{ status, @tagName(req_ctx.method), req.path, req_ctx.mime_type.value }); - }, - 400...499 => { - Output.prettyErrorln("{d} {s} {s} as {s}", .{ status, @tagName(req_ctx.method), req.path, req_ctx.mime_type.value }); - }, - else => { - Output.prettyErrorln("{d} {s} {s} as {s}", .{ status, @tagName(req_ctx.method), req.path, req_ctx.mime_type.value }); - }, - } - } - } - } - } - - if (comptime FeatureFlags.keep_alive) { - if (req_ctx.header("Connection")) |connection| { - req_ctx.keep_alive = strings.eqlInsensitive(connection, "keep-alive"); - } - } else { - req_ctx.keep_alive = false; - req_ctx.appendHeader("Connection", "close"); - } - - var finished = req_ctx.handleReservedRoutes(server) catch |err| { - Output.printErrorln("FAIL [{s}] - {s}: {s}", .{ @errorName(err), req.method, req.path }); - did_print = true; - return; - }; - - if (!finished) { - switch (comptime features.public_folder) { - .none => { - if (comptime features.single_page_app_routing) { - if (req_ctx.url.isRoot(server.bundler.options.routes.asset_prefix_path)) { - req_ctx.sendSinglePageHTML() catch |err| { - Output.printErrorln("FAIL [{s}] - {s}: {s}", .{ @errorName(err), req.method, req.path }); - did_print = true; - }; - finished = true; - } - } - }, - else => { - // Check if this is a route to an HTML file in the public folder. 
- // Note: the public folder may actually just be the root folder - // In this case, we only check if the pathname has no extension - if (!finished) { - if (req_ctx.matchPublicFolder(comptime features.public_folder == .last or features.single_page_app_routing)) |result| { - finished = true; - req_ctx.renderServeResult(result) catch |err| { - Output.printErrorln("FAIL [{s}] - {s}: {s}", .{ @errorName(err), req.method, req.path }); - did_print = true; - return; - }; - } - - finished = finished or req_ctx.has_called_done; - } - }, - } - } - - if (comptime features.filesystem_router) { - if (!finished) { - req_ctx.bundler.router.?.match(*Server, server, RequestContext, req_ctx) catch |err| { - switch (err) { - error.ModuleNotFound => {}, - else => { - Output.printErrorln("FAIL [{s}] - {s}: {s}", .{ @errorName(err), req.method, req.path }); - did_print = true; - }, - } - }; - finished = req_ctx.controlled or req_ctx.has_called_done; - } - } else { - request_handler: { - if (!finished) { - req_ctx.handleRequest() catch |err| { - switch (err) { - error.ModuleNotFound => { - break :request_handler; - }, - else => { - Output.printErrorln("FAIL [{s}] - {s}: {s}", .{ @errorName(err), req.method, req.path }); - did_print = true; - }, - } - }; - finished = finished or req_ctx.has_called_done; - } - } - } - - if (comptime features.public_folder == .last) { - if (!finished) { - if (req_ctx.matchPublicFolder(false)) |result| { - finished = true; - req_ctx.renderServeResult(result) catch |err| { - Output.printErrorln("FAIL [{s}] - {s}: {s}", .{ @errorName(err), req.method, req.path }); - did_print = true; - }; - } - - finished = finished or req_ctx.has_called_done; - } - } - - if (comptime features.single_page_app_routing or features.public_folder != .none) { - if (!finished and (req_ctx.bundler.options.routes.single_page_app_routing and req_ctx.url.extname.len == 0)) { - if (!finished) { - req_ctx.sendSinglePageHTML() catch |err| { - Output.printErrorln("FAIL [{s}] - {s}: {s}", .{ 
@errorName(err), req.method, req.path }); - did_print = true; - }; - } - finished = finished or req_ctx.has_called_done; - } - } - - if (!finished) { - // if we're about to 404 and it's the favicon, use our stand-in - if (strings.eqlComptime(req_ctx.url.path, "favicon.ico")) { - req_ctx.sendFavicon() catch |err| { - Output.printErrorln("FAIL [{s}] - {s}: {s}", .{ @errorName(err), req.method, req.path }); - did_print = true; - }; - return; - } - - req_ctx.sendNotFound() catch {}; - } - } - - pub fn initWatcher(server: *Server) !void { - server.watcher = try Watcher.init(server, server.bundler.fs, server.allocator); - - if (comptime FeatureFlags.watch_directories and !Environment.isTest) { - server.bundler.resolver.watcher = ResolveWatcher(*Watcher, onMaybeWatchDirectory).init(server.watcher); - } - } - - pub fn onMaybeWatchDirectory(watch: *Watcher, file_path: string, dir_fd: StoredFileDescriptorType) void { - // We don't want to watch: - // - Directories outside the root directory - // - Directories inside node_modules - if (std.mem.indexOf(u8, file_path, "node_modules") == null and std.mem.indexOf(u8, file_path, watch.fs.top_level_dir) != null) { - watch.addDirectory(dir_fd, file_path, Watcher.getHash(file_path), false) catch {}; - } - } - - pub fn detectFastRefresh(this: *Server) void { - if (this.bundler.options.jsx.runtime == .solid) - return; - - defer this.bundler.resetStore(); - - _ = this.bundler.resolver.resolve(this.bundler.fs.top_level_dir, this.bundler.options.jsx.importSource(), .internal) catch { - // if they don't have React, they can't use fast refresh - this.bundler.options.jsx.supports_fast_refresh = false; - return; - }; - - this.bundler.options.jsx.supports_fast_refresh = true; - this.bundler.options.jsx.refresh_runtime = "bun:reactfsh-v0.11.0"; - this.bundler.options.jsx.use_embedded_refresh_runtime = true; - this.bundler.resolver.opts = this.bundler.options; - } - - pub fn detectTSConfig(this: *Server) void { - defer this.bundler.resetStore(); 
- - const dir_info = (this.bundler.resolver.readDirInfo(this.bundler.fs.top_level_dir) catch return) orelse return; - - if (dir_info.package_json) |pkg| { - Analytics.setProjectID(dir_info.abs_path, pkg.name); - } else { - Analytics.setProjectID(dir_info.abs_path, ""); - } - - const tsconfig = dir_info.tsconfig_json orelse return; - Analytics.Features.tsconfig = true; - Analytics.Features.tsconfig_paths = tsconfig.paths.count() > 0; - } - - pub var global_start_time: std.time.Timer = undefined; - pub fn start(allocator: std.mem.Allocator, options: Api.TransformOptions, comptime DebugType: type, debug: DebugType) !void { - if (comptime Environment.isWindows) unreachable; - - var log = logger.Log.init(allocator); - var server = try allocator.create(Server); - server.* = Server{ - .allocator = allocator, - .log = log, - .bundler = undefined, - .watcher = undefined, - .transform_options = options, - .timer = try std.time.Timer.start(), - }; - global_start_time = server.timer; - server.bundler = try allocator.create(Bundler); - server.bundler.* = try Bundler.init(allocator, &server.log, options, null); - server.bundler.configureLinker(); - try server.bundler.configureRouter(true); - Server.current = server; - - if (debug.dump_environment_variables) { - server.bundler.dumpEnvironmentVariables(); - return; - } - - if (debug.dump_limits) { - Fs.FileSystem.printLimits(); - Global.exit(0); - return; - } - - http_editor_context.name = debug.editor; - - switch (debug.macros) { - .disable => { - server.bundler.options.no_macros = true; - }, - .map => |macros| { - server.bundler.options.macro_remap = macros; - }, - .unspecified => {}, - } - - RequestContext.fallback_only = true; - - Analytics.Features.filesystem_router = server.bundler.router != null; - - const public_folder_is_top_level = server.bundler.options.routes.static_dir_enabled and strings.eql( - server.bundler.fs.top_level_dir, - server.bundler.options.routes.static_dir, - ); - - 
server.websocket_threadpool.on_thread_spawn = RequestContext.WebsocketHandler.onSpawnThread; - - if (server.bundler.router != null and server.bundler.options.routes.static_dir_enabled) { - if (!public_folder_is_top_level) { - try server.run( - ConnectionFeatures{ .public_folder = .first, .filesystem_router = true }, - ); - } else { - try server.run( - ConnectionFeatures{ .public_folder = .last, .filesystem_router = true }, - ); - } - } else if (server.bundler.router != null) { - try server.run( - ConnectionFeatures{ - .filesystem_router = true, - }, - ); - } else if (server.bundler.options.routes.static_dir_enabled) { - if (server.bundler.options.routes.single_page_app_routing) { - if (!public_folder_is_top_level) { - try server.run( - ConnectionFeatures{ - .public_folder = .first, - .single_page_app_routing = true, - }, - ); - } else { - try server.run( - ConnectionFeatures{ - .public_folder = .last, - .single_page_app_routing = true, - }, - ); - } - } else { - if (!public_folder_is_top_level) { - try server.run( - ConnectionFeatures{ - .public_folder = .first, - }, - ); - } else { - try server.run( - ConnectionFeatures{ - .public_folder = .last, - }, - ); - } - } - } else if (server.bundler.options.routes.single_page_app_routing) { - try server.run( - ConnectionFeatures{ - .single_page_app_routing = true, - }, - ); - } else { - try server.run( - ConnectionFeatures{ .filesystem_router = false }, - ); - } - } -}; diff --git a/src/bun_js.zig b/src/bun_js.zig index 5698ba474ec8fa..959214960ee072 100644 --- a/src/bun_js.zig +++ b/src/bun_js.zig @@ -28,7 +28,7 @@ const bundler = bun.bundler; const DotEnv = @import("env_loader.zig"); const which = @import("which.zig").which; const JSC = @import("root").bun.JSC; -const AsyncHTTP = @import("root").bun.HTTP.AsyncHTTP; +const AsyncHTTP = @import("root").bun.http.AsyncHTTP; const Arena = @import("./mimalloc_arena.zig").Arena; const OpaqueWrap = JSC.OpaqueWrap; diff --git a/src/cli/create_command.zig 
b/src/cli/create_command.zig index 55a72ca4fb2023..acb8158dbd122e 100644 --- a/src/cli/create_command.zig +++ b/src/cli/create_command.zig @@ -29,7 +29,7 @@ const bundler = bun.bundler; const fs = @import("../fs.zig"); const URL = @import("../url.zig").URL; -const HTTP = @import("root").bun.HTTP; +const HTTP = @import("root").bun.http; const NetworkThread = HTTP.NetworkThread; const ParseJSON = @import("../json_parser.zig").ParseJSONUTF8; const Archive = @import("../libarchive/libarchive.zig").Archive; @@ -40,7 +40,7 @@ const NPMClient = @import("../which_npm_client.zig").NPMClient; const which = @import("../which.zig").which; const clap = @import("root").bun.clap; const Lock = @import("../lock.zig").Lock; -const Headers = @import("root").bun.HTTP.Headers; +const Headers = @import("root").bun.http.Headers; const CopyFile = @import("../copy_file.zig"); var bun_path_buf: [bun.MAX_PATH_BYTES]u8 = undefined; const Futex = @import("../futex.zig"); diff --git a/src/cli/dev_command.zig b/src/cli/dev_command.zig deleted file mode 100644 index 91c9fda3dc2c52..00000000000000 --- a/src/cli/dev_command.zig +++ /dev/null @@ -1,12 +0,0 @@ -const Command = @import("../cli.zig").Command; - -pub const DevCommand = struct { - pub fn exec(ctx: Command.Context) !void { - if (comptime @import("root").bun.Environment.isWindows) unreachable; - - const Server = @import("../bun_dev_http_server.zig").Server; - const Global = @import("root").bun.Global; - Global.configureAllocator(.{ .long_running = true }); - try Server.start(ctx.allocator, ctx.args, @TypeOf(ctx.debug), ctx.debug); - } -}; diff --git a/src/cli/install_completions_command.zig b/src/cli/install_completions_command.zig index 9eb253b90af10b..65548f2ea65295 100644 --- a/src/cli/install_completions_command.zig +++ b/src/cli/install_completions_command.zig @@ -37,7 +37,7 @@ const NPMClient = @import("../which_npm_client.zig").NPMClient; const which = @import("../which.zig").which; const clap = @import("root").bun.clap; const Lock 
= @import("../lock.zig").Lock; -const Headers = @import("root").bun.HTTP.Headers; +const Headers = @import("root").bun.http.Headers; const CopyFile = @import("../copy_file.zig"); const ShellCompletions = @import("./shell_completions.zig"); diff --git a/src/cli/test_command.zig b/src/cli/test_command.zig index 82fb00bf3f5ec0..77b96df8c95253 100644 --- a/src/cli/test_command.zig +++ b/src/cli/test_command.zig @@ -35,14 +35,14 @@ var path_buf: [bun.MAX_PATH_BYTES]u8 = undefined; var path_buf2: [bun.MAX_PATH_BYTES]u8 = undefined; const PathString = bun.PathString; const is_bindgen = std.meta.globalOption("bindgen", bool) orelse false; -const HTTPThread = @import("root").bun.HTTP.HTTPThread; +const HTTPThread = @import("root").bun.http.HTTPThread; const JSC = @import("root").bun.JSC; const jest = JSC.Jest; const TestRunner = JSC.Jest.TestRunner; const Snapshots = JSC.Snapshot.Snapshots; const Test = TestRunner.Test; -const NetworkThread = @import("root").bun.HTTP.NetworkThread; +const NetworkThread = @import("root").bun.http.NetworkThread; const uws = @import("root").bun.uws; fn fmtStatusTextLine(comptime status: @Type(.EnumLiteral), comptime emoji: bool) []const u8 { diff --git a/src/cli/upgrade_command.zig b/src/cli/upgrade_command.zig index 196569df88f872..82cd2c45ff80d0 100644 --- a/src/cli/upgrade_command.zig +++ b/src/cli/upgrade_command.zig @@ -28,7 +28,7 @@ const bundler = bun.bundler; const fs = @import("../fs.zig"); const URL = @import("../url.zig").URL; -const HTTP = @import("root").bun.HTTP; +const HTTP = @import("root").bun.http; const ParseJSON = @import("../json_parser.zig").ParseJSONUTF8; const Archive = @import("../libarchive/libarchive.zig").Archive; const Zlib = @import("../zlib.zig"); @@ -37,7 +37,7 @@ const DotEnv = @import("../env_loader.zig"); const which = @import("../which.zig").which; const clap = @import("root").bun.clap; const Lock = @import("../lock.zig").Lock; -const Headers = @import("root").bun.HTTP.Headers; +const Headers = 
@import("root").bun.http.Headers; const CopyFile = @import("../copy_file.zig"); const NetworkThread = HTTP.NetworkThread; diff --git a/src/deps/uws.zig b/src/deps/uws.zig index cca7bafaa9d006..cfec8132fcf9f9 100644 --- a/src/deps/uws.zig +++ b/src/deps/uws.zig @@ -298,6 +298,22 @@ pub fn NewSocketHandler(comptime is_ssl: bool) type { this.socket, ) > 0; } + + pub fn isClosedOrHasError(this: ThisSocket) bool { + if (this.isClosed() or this.isShutdown()) { + return true; + } + + return this.getError() != 0; + } + + pub fn getError(this: ThisSocket) i32 { + return us_socket_get_error( + comptime ssl_int, + this.socket, + ); + } + pub fn isClosed(this: ThisSocket) bool { return us_socket_is_closed( comptime ssl_int, @@ -2530,3 +2546,5 @@ pub fn newSocketFromFd(ctx: *SocketContext, ext_size: c_int, fd: LIBUS_SOCKET_DE .socket = us_socket_from_fd(ctx, ext_size, fd) orelse return null, }; } + +extern fn us_socket_get_error(ssl_flag: c_int, socket: *Socket) c_int; diff --git a/src/http_client_async.zig b/src/http.zig similarity index 99% rename from src/http_client_async.zig rename to src/http.zig index a66633ab07fa78..d27e7fe3c9eeed 100644 --- a/src/http_client_async.zig +++ b/src/http.zig @@ -394,7 +394,7 @@ fn NewHTTPContext(comptime ssl: bool) type { std.debug.assert(hostname.len > 0); std.debug.assert(port > 0); - if (hostname.len <= MAX_KEEPALIVE_HOSTNAME and !socket.isClosed() and !socket.isShutdown() and socket.isEstablished()) { + if (hostname.len <= MAX_KEEPALIVE_HOSTNAME and !socket.isClosedOrHasError() and socket.isEstablished()) { if (this.pending_sockets.get()) |pending| { socket.ext(**anyopaque).?.* = bun.cast(**anyopaque, ActiveSocket.init(pending).ptr()); socket.flush(); @@ -620,7 +620,7 @@ fn NewHTTPContext(comptime ssl: bool) type { continue; } - if (http_socket.isShutdown()) { + if (http_socket.isShutdown() or http_socket.getError() != 0) { http_socket.ext(**anyopaque).?.* = bun.cast(**anyopaque, ActiveSocket.init(&dead_socket).ptr()); 
http_socket.close(0, null); continue; @@ -2621,6 +2621,8 @@ pub fn onData(this: *HTTPClient, comptime is_ssl: bool, incoming_data: []const u if (this.state.content_encoding_i < response.headers.len and !this.state.did_set_content_encoding) { // if it compressed with this header, it is no longer because we will decompress it var mutable_headers = std.ArrayListUnmanaged(picohttp.Header){ .items = response.headers, .capacity = response.headers.len }; + // we remove the content encoding header + _ = mutable_headers.orderedRemove(this.state.content_encoding_i); this.state.did_set_content_encoding = true; response.headers = mutable_headers.items; this.state.content_encoding_i = std.math.maxInt(@TypeOf(this.state.content_encoding_i)); @@ -2883,7 +2885,7 @@ pub fn progressUpdate(this: *HTTPClient, comptime is_ssl: bool, ctx: *NewHTTPCon if (is_done) { socket.ext(**anyopaque).?.* = bun.cast(**anyopaque, NewHTTPContext(is_ssl).ActiveSocket.init(&dead_socket).ptr()); - if (this.isKeepAlivePossible() and !socket.isClosed()) { + if (this.isKeepAlivePossible() and !socket.isClosedOrHasError()) { ctx.releaseSocket( socket, this.connected_url.hostname, @@ -3138,6 +3140,11 @@ fn handleResponseBodyChunkedEncodingFromMultiplePackets( buffer.list.items.ptr + (buffer.list.items.len -| incoming_data.len), &bytes_decoded, ); + if (comptime Environment.allow_assert) { + if (pret == -1) { + @breakpoint(); + } + } buffer.list.items.len -|= incoming_data.len - bytes_decoded; this.state.total_body_received += bytes_decoded; @@ -3145,9 +3152,7 @@ fn handleResponseBodyChunkedEncodingFromMultiplePackets( switch (pret) { // Invalid HTTP response body - -1 => { - return error.InvalidHTTPResponse; - }, + -1 => return error.InvalidHTTPResponse, // Needs more data -2 => { if (this.progress_node) |progress| { diff --git a/src/http/header_builder.zig b/src/http/header_builder.zig index 540484655b15b4..882ea7e0f11618 100644 --- a/src/http/header_builder.zig +++ b/src/http/header_builder.zig @@ -2,7 +2,7 
@@ const HeaderBuilder = @This(); const StringBuilder = @import("../string_builder.zig"); const Headers = @import("./headers.zig"); const string = @import("root").bun.string; -const HTTPClient = @import("../http_client_async.zig"); +const HTTPClient = @import("../http.zig"); const Api = @import("../api/schema.zig").Api; const std = @import("std"); diff --git a/src/http/zlib.zig b/src/http/zlib.zig index 8144930c23798d..c69a7b48f7e730 100644 --- a/src/http/zlib.zig +++ b/src/http/zlib.zig @@ -1,7 +1,7 @@ const Lock = @import("../lock.zig").Lock; const std = @import("std"); const MutableString = @import("root").bun.MutableString; -const getAllocator = @import("../http_client_async.zig").getAllocator; +const getAllocator = @import("../http.zig").getAllocator; const ZlibPool = @This(); const Zlib = @import("../zlib.zig"); const bun = @import("root").bun; diff --git a/src/install/install.zig b/src/install/install.zig index 5e6736cc3d88d7..4d3fc08831a622 100644 --- a/src/install/install.zig +++ b/src/install/install.zig @@ -34,7 +34,7 @@ const Fs = @import("../fs.zig"); const FileSystem = Fs.FileSystem; const Lock = @import("../lock.zig").Lock; const URL = @import("../url.zig").URL; -const HTTP = bun.HTTP; +const HTTP = bun.http; const AsyncHTTP = HTTP.AsyncHTTP; const HTTPChannel = HTTP.HTTPChannel; const NetworkThread = HTTP.NetworkThread; diff --git a/src/install/lockfile.zig b/src/install/lockfile.zig index 8ea681e4a42eca..c5d77145c601e6 100644 --- a/src/install/lockfile.zig +++ b/src/install/lockfile.zig @@ -34,14 +34,14 @@ const bundler = bun.bundler; const DotEnv = @import("../env_loader.zig"); const which = @import("../which.zig").which; const Run = @import("../bun_js.zig").Run; -const HeaderBuilder = bun.HTTP.HeaderBuilder; +const HeaderBuilder = bun.http.HeaderBuilder; const Fs = @import("../fs.zig"); const FileSystem = Fs.FileSystem; const Lock = @import("../lock.zig").Lock; const URL = @import("../url.zig").URL; -const AsyncHTTP = bun.HTTP.AsyncHTTP; -const 
HTTPChannel = bun.HTTP.HTTPChannel; -const NetworkThread = bun.HTTP.NetworkThread; +const AsyncHTTP = bun.http.AsyncHTTP; +const HTTPChannel = bun.http.HTTPChannel; +const NetworkThread = bun.http.NetworkThread; const Integrity = @import("./integrity.zig").Integrity; const clap = bun.clap; diff --git a/src/install/npm.zig b/src/install/npm.zig index 5590074f4fb87f..7f444f0bb3c4d6 100644 --- a/src/install/npm.zig +++ b/src/install/npm.zig @@ -18,7 +18,7 @@ const Integrity = @import("./integrity.zig").Integrity; const Bin = @import("./bin.zig").Bin; const Environment = @import("root").bun.Environment; const Aligner = @import("./install.zig").Aligner; -const HTTPClient = @import("root").bun.HTTP; +const HTTPClient = @import("root").bun.http; const json_parser = bun.JSON; const default_allocator = @import("root").bun.default_allocator; const IdentityContext = @import("../identity_context.zig").IdentityContext; diff --git a/src/js/builtins/ReadableStreamInternals.ts b/src/js/builtins/ReadableStreamInternals.ts index 36f049f3ec2ef5..eee2c41864eb00 100644 --- a/src/js/builtins/ReadableStreamInternals.ts +++ b/src/js/builtins/ReadableStreamInternals.ts @@ -1259,10 +1259,11 @@ export function readableStreamError(stream, error) { } export function readableStreamDefaultControllerShouldCallPull(controller) { - const stream = $getByIdDirectPrivate(controller, "controlledReadableStream"); - if (!$readableStreamDefaultControllerCanCloseOrEnqueue(controller)) return false; if (!($getByIdDirectPrivate(controller, "started") === 1)) return false; + + const stream = $getByIdDirectPrivate(controller, "controlledReadableStream"); + if ( (!$isReadableStreamLocked(stream) || !$getByIdDirectPrivate($getByIdDirectPrivate(stream, "reader"), "readRequests")?.isNotEmpty()) && @@ -1482,10 +1483,17 @@ export function readableStreamReaderGenericRelease(reader) { } export function readableStreamDefaultControllerCanCloseOrEnqueue(controller) { - return ( - !$getByIdDirectPrivate(controller, 
"closeRequested") && - $getByIdDirectPrivate($getByIdDirectPrivate(controller, "controlledReadableStream"), "state") === $streamReadable - ); + if ($getByIdDirectPrivate(controller, "closeRequested")) { + return false; + } + + const controlledReadableStream = $getByIdDirectPrivate(controller, "controlledReadableStream"); + + if (!$isObject(controlledReadableStream)) { + return false; + } + + return $getByIdDirectPrivate(controlledReadableStream, "state") === $streamReadable; } export function lazyLoadStream(stream, autoAllocateChunkSize) { diff --git a/src/js/node/crypto.js b/src/js/node/crypto.js index 40d5d712c10f56..9ce1bca1c83ac6 100644 --- a/src/js/node/crypto.js +++ b/src/js/node/crypto.js @@ -11291,7 +11291,6 @@ var require_sign = __commonJS({ curves = require_curves2(); function sign(hash, key, hashType, signType, tag) { var priv = parseKeys(getKeyFrom(key, "private")); - if (priv.curve) { if (signType !== "ecdsa" && signType !== "ecdsa/rsa") throw new Error("wrong private key type"); return ecSign(hash, priv); @@ -12018,7 +12017,6 @@ const harcoded_curves = [ "secp224r1", "prime256v1", "prime192v1", - "ed25519", "secp384r1", "secp521r1", ]; @@ -12037,6 +12035,8 @@ const { createPrivateKey, generateKeySync, generateKeyPairSync, + sign: nativeSign, + verify: nativeVerify, } = $lazy("internal/crypto"); const kCryptoKey = Symbol.for("::bunKeyObjectCryptoKey::"); @@ -12293,34 +12293,95 @@ function _createPublicKey(key) { } crypto_exports.createPublicKey = _createPublicKey; crypto_exports.KeyObject = KeyObject; +var webcrypto = crypto; +var _subtle = webcrypto.subtle; const _createSign = crypto_exports.createSign; -crypto_exports.sign = function (algorithm, data, key, encoding, callback) { + +crypto_exports.sign = function (algorithm, data, key, callback) { + // TODO: move this to native + var dsaEncoding, padding, saltLength; + // key must be a KeyObject + if (!(key instanceof KeyObject)) { + if ($isObject(key) && key.key) { + padding = key.padding; + 
saltLength = key.saltLength; + dsaEncoding = key.dsaEncoding; + } + if (key.key instanceof KeyObject) { + key = key.key; + } else { + key = _createPrivateKey(key); + } + } if (typeof callback === "function") { try { - const result = _createSign(algorithm).update(data, encoding).sign(key, encoding); + let result; + if (key.asymmetricKeyType === "rsa") { + // RSA-PSS is supported by native but other RSA algorithms are not + result = _createSign(algorithm || "sha256") + .update(data) + .sign(key); + } else { + result = nativeSign(key[kCryptoKey], data, algorithm, dsaEncoding, padding, saltLength); + } callback(null, result); } catch (err) { callback(err); } } else { - return _createSign(algorithm).update(data, encoding).sign(key, encoding); + if (key.asymmetricKeyType === "rsa") { + return _createSign(algorithm || "sha256") + .update(data) + .sign(key); + } else { + return nativeSign(key[kCryptoKey], data, algorithm, dsaEncoding, padding, saltLength); + } } }; const _createVerify = crypto_exports.createVerify; + crypto_exports.verify = function (algorithm, data, key, signature, callback) { + // TODO: move this to native + var dsaEncoding, padding, saltLength; + // key must be a KeyObject + if (!(key instanceof KeyObject)) { + if ($isObject(key) && key.key) { + padding = key.padding; + saltLength = key.saltLength; + dsaEncoding = key.dsaEncoding; + } + if (key.key instanceof KeyObject && key.key.type === "public") { + key = key.key; + } else { + key = _createPublicKey(key); + } + } if (typeof callback === "function") { try { - const result = _createVerify(algorithm).update(data).verify(key, signature); + let result; + if (key.asymmetricKeyType === "rsa") { + // RSA-PSS is supported by native but other RSA algorithms are not + result = _createVerify(algorithm || "sha256") + .update(data) + .verify(key, signature); + } else { + result = nativeVerify(key[kCryptoKey], data, signature, algorithm, dsaEncoding, padding, saltLength); + } callback(null, result); } catch (err) { 
callback(err); } } else { - return _createVerify(algorithm).update(data).verify(key, signature); + if (key.asymmetricKeyType === "rsa") { + return _createVerify(algorithm || "sha256") + .update(data) + .verify(key, signature); + } else { + return nativeVerify(key[kCryptoKey], data, signature, algorithm, dsaEncoding, padding, saltLength); + } } }; -var webcrypto = crypto; __export(crypto_exports, { DEFAULT_ENCODING: () => DEFAULT_ENCODING, getRandomValues: () => getRandomValues, @@ -12331,7 +12392,7 @@ __export(crypto_exports, { scryptSync: () => scryptSync, timingSafeEqual: () => timingSafeEqual, webcrypto: () => webcrypto, - subtle: () => webcrypto.subtle, + subtle: () => _subtle, }); export default crypto_exports; diff --git a/src/js/node/fs.promises.ts b/src/js/node/fs.promises.ts index 91566d15561295..ae804935fee22c 100644 --- a/src/js/node/fs.promises.ts +++ b/src/js/node/fs.promises.ts @@ -103,8 +103,13 @@ function cp(src, dest, options) { // This is currently stubbed for Next.js support. class Dir { #entries: Dirent[]; - constructor(e: Dirent[]) { + #path: string; + constructor(e: Dirent[], path: string) { this.#entries = e; + this.#path = path; + } + get path() { + return this.#path; } readSync() { return this.#entries.shift() ?? 
null; @@ -127,7 +132,7 @@ class Dir { } async function opendir(dir: string) { const entries = await fs.readdir(dir, { withFileTypes: true }); - return new Dir(entries); + return new Dir(entries, dir); } export default { diff --git a/src/js/node/http.ts b/src/js/node/http.ts index f26182ea048a2a..65a6f8de4c44ec 100644 --- a/src/js/node/http.ts +++ b/src/js/node/http.ts @@ -694,7 +694,7 @@ class IncomingMessage extends Readable { #bodyStream: ReadableStreamDefaultReader | undefined; #fakeSocket: FakeSocket | undefined = undefined; #noBody = false; - #aborted = false; + aborted = false; #req; url; #type; @@ -720,7 +720,7 @@ class IncomingMessage extends Readable { async #consumeStream(reader: ReadableStreamDefaultReader) { while (true) { var { done, value } = await reader.readMany(); - if (this.#aborted) return; + if (this.aborted) return; if (done) { this.push(null); process.nextTick(destroyBodyStreamNT, this); @@ -747,13 +747,10 @@ class IncomingMessage extends Readable { } } - get aborted() { - return this.#aborted; - } - + //TODO: call from abort signal handler #abort() { - if (this.#aborted) return; - this.#aborted = true; + if (this.aborted) return; + this.aborted = true; var bodyStream = this.#bodyStream; if (!bodyStream) return; bodyStream.cancel(); @@ -766,30 +763,44 @@ class IncomingMessage extends Readable { return (this.#fakeSocket ??= new FakeSocket()); } + set statusCode(val) {} + get statusCode() { return this.#req.status; } + set statusMessage(val) {} + get statusMessage() { return STATUS_CODES[this.#req.status]; } + set httpVersion(val) {} + get httpVersion() { return "1.1"; } + set rawTrailers(val) {} + get rawTrailers() { return []; } + set httpVersionMajor(val) {} + get httpVersionMajor() { return 1; } + set httpVersionMinor(val) {} + get httpVersionMinor() { return 1; } + set trailers(val) {} + get trailers() { return kEmptyObject; } @@ -1365,8 +1376,8 @@ class ClientRequest extends OutgoingMessage { // Timeouts are handled via this.setTimeout. 
timeout: false, - // Disable auto gzip/deflate - decompress: false, + // should be safe to decompress by default if we remove the headers and will be faster + decompress: true, }) .then(response => { const prevIsHTTPS = isNextIncomingMessageHTTPS; diff --git a/src/js_ast.zig b/src/js_ast.zig index b9ddedbab3c3a7..00c94edc1e62fd 100644 --- a/src/js_ast.zig +++ b/src/js_ast.zig @@ -19,7 +19,6 @@ const ObjectPool = @import("./pool.zig").ObjectPool; const ImportRecord = @import("import_record.zig").ImportRecord; const allocators = @import("allocators.zig"); const JSC = @import("root").bun.JSC; -const HTTP = @import("root").bun.HTTP; const RefCtx = @import("./ast/base.zig").RefCtx; const JSONParser = bun.JSON; const is_bindgen = std.meta.globalOption("bindgen", bool) orelse false; @@ -28,6 +27,7 @@ const JSPrinter = @import("./js_printer.zig"); const js_lexer = @import("./js_lexer.zig"); const TypeScript = @import("./js_parser.zig").TypeScript; const ThreadlocalArena = @import("./mimalloc_arena.zig").Arena; +const MimeType = bun.http.MimeType; /// This is the index to the automatically-generated part containing code that /// calls "__export(exports, { ... getters ... })". 
This is used to generate @@ -3084,13 +3084,13 @@ pub const Expr = struct { pub fn fromBlob( blob: *const JSC.WebCore.Blob, allocator: std.mem.Allocator, - mime_type_: ?HTTP.MimeType, + mime_type_: ?MimeType, log: *logger.Log, loc: logger.Loc, ) !Expr { var bytes = blob.sharedView(); - const mime_type = mime_type_ orelse HTTP.MimeType.init(blob.content_type, null, null); + const mime_type = mime_type_ orelse MimeType.init(blob.content_type, null, null); if (mime_type.category == .json) { var source = logger.Source.initPathString("fetch.json", bytes); @@ -7021,7 +7021,7 @@ pub const Macro = struct { .Boolean => this.coerce(value, .Boolean), .Array => this.coerce(value, .Array), .Object => this.coerce(value, .Object), - .JSON => this.coerce(value, .JSON), + .toJSON, .JSON => this.coerce(value, .JSON), .Integer => this.coerce(value, .Integer), .Double => this.coerce(value, .Double), .String => this.coerce(value, .String), @@ -7031,7 +7031,7 @@ pub const Macro = struct { this.source, this.caller.loc, this.allocator, - "cannot coerce {s} to Bun's AST. Please return a valid macro using the JSX syntax", + "cannot coerce {s} to Bun's AST. 
Please return a simpler type", .{@tagName(value.jsType())}, ) catch unreachable; break :brk error.MacroFailed; @@ -7062,7 +7062,7 @@ pub const Macro = struct { } var blob_: ?JSC.WebCore.Blob = null; - var mime_type: ?HTTP.MimeType = null; + var mime_type: ?MimeType = null; if (value.jsType() == .DOMWrapper) { if (value.as(JSC.WebCore.Response)) |resp| { diff --git a/src/js_lexer_tables.zig b/src/js_lexer_tables.zig index 8256ff79d23ca2..60a705c3012844 100644 --- a/src/js_lexer_tables.zig +++ b/src/js_lexer_tables.zig @@ -250,11 +250,12 @@ pub const PropertyModifierKeyword = enum { }); }; -pub const TypeScriptAccessibilityModifier = ComptimeStringMap(u1, .{ - .{ "private", 1 }, - .{ "protected", 1 }, - .{ "public", 1 }, - .{ "readonly", 1 }, +pub const TypeScriptAccessibilityModifier = ComptimeStringMap(void, .{ + .{ "override", void }, + .{ "private", void }, + .{ "protected", void }, + .{ "public", void }, + .{ "readonly", void }, }); pub const TokenEnumType = std.EnumArray(T, []u8); diff --git a/src/napi/napi.zig b/src/napi/napi.zig index ffda175f28e855..082405a55c9f81 100644 --- a/src/napi/napi.zig +++ b/src/napi/napi.zig @@ -271,37 +271,99 @@ inline fn setNapiValue(result: *napi_value, value: JSValue) void { value.ensureStillAlive(); result.* = value; } -pub export fn napi_create_string_latin1(env: napi_env, str: [*]const u8, length: usize, result: *napi_value) napi_status { - log("napi_create_string_latin1", .{}); - const slice = if (NAPI_AUTO_LENGTH == length) - bun.sliceTo(@as([*:0]const u8, @ptrCast(str)), 0) - else - str[0..length]; +pub export fn napi_create_string_latin1(env: napi_env, str: ?[*]const u8, length: usize, result_: ?*napi_value) napi_status { + const result: *napi_value = result_ orelse { + return invalidArg(); + }; + + const slice: []const u8 = brk: { + if (NAPI_AUTO_LENGTH == length) { + break :brk bun.sliceTo(@as([*:0]const u8, @ptrCast(str)), 0); + } else if (length > std.math.maxInt(u32)) { + return invalidArg(); + } + + if (str) |ptr| 
+ break :brk ptr[0..length]; + + return invalidArg(); + }; + + log("napi_create_string_latin1: {s}", .{slice}); + + var string = bun.String.createUninitializedLatin1(slice.len); + if (string.tag == .Dead) { + return .generic_failure; + } - setNapiValue(result, JSC.ZigString.init(slice).toValueGC(env)); + if (slice.len > 0) { + @memcpy(@constCast(string.latin1())[0..slice.len], slice); + } + + defer string.deref(); + setNapiValue(result, string.toJS(env)); return .ok; } -pub export fn napi_create_string_utf8(env: napi_env, str: [*]const u8, length: usize, result: *napi_value) napi_status { - const slice = if (NAPI_AUTO_LENGTH == length) - bun.sliceTo(@as([*:0]const u8, @ptrCast(str)), 0) - else - str[0..length]; +pub export fn napi_create_string_utf8(env: napi_env, str: ?[*]const u8, length: usize, result_: ?*napi_value) napi_status { + const result: *napi_value = result_ orelse { + return invalidArg(); + }; + const slice: []const u8 = brk: { + if (NAPI_AUTO_LENGTH == length) { + break :brk bun.sliceTo(@as([*:0]const u8, @ptrCast(str)), 0); + } else if (length > std.math.maxInt(u32)) { + return invalidArg(); + } + + if (str) |ptr| + break :brk ptr[0..length]; + + return invalidArg(); + }; log("napi_create_string_utf8: {s}", .{slice}); var string = bun.String.create(slice); + if (string.tag == .Dead) { + return .generic_failure; + } + defer string.deref(); setNapiValue(result, string.toJS(env)); return .ok; } -pub export fn napi_create_string_utf16(env: napi_env, str: [*]const char16_t, length: usize, result: *napi_value) napi_status { - log("napi_create_string_utf16", .{}); - const slice = if (NAPI_AUTO_LENGTH == length) - bun.sliceTo(@as([*:0]const char16_t, @ptrCast(str)), 0) - else - str[0..length]; +pub export fn napi_create_string_utf16(env: napi_env, str: ?[*]const char16_t, length: usize, result_: ?*napi_value) napi_status { + const result: *napi_value = result_ orelse { + return invalidArg(); + }; + + const slice: []const u16 = brk: { + if (NAPI_AUTO_LENGTH 
== length) { + break :brk bun.sliceTo(@as([*:0]const u16, @ptrCast(str)), 0); + } else if (length > std.math.maxInt(u32)) { + return invalidArg(); + } + + if (str) |ptr| + break :brk ptr[0..length]; + + return invalidArg(); + }; + + if (comptime bun.Environment.allow_assert) + log("napi_create_string_utf16: {d} {any}", .{ slice.len, strings.FormatUTF16{ .buf = slice[0..@min(slice.len, 512)] } }); + + var string = bun.String.createUninitializedUTF16(slice.len); + if (string.tag == .Dead) { + return .generic_failure; + } + + if (slice.len > 0) { + @memcpy(@constCast(string.utf16())[0..slice.len], slice); + } - setNapiValue(result, JSC.ZigString.from16(slice.ptr, length).toValueGC(env)); + defer string.deref(); + setNapiValue(result, string.toJS(env)); return .ok; } pub extern fn napi_create_symbol(env: napi_env, description: napi_value, result: *napi_value) napi_status; @@ -710,15 +772,7 @@ pub export fn napi_is_arraybuffer(_: napi_env, value: napi_value, result: *bool) result.* = !value.isNumber() and value.jsTypeLoose() == .ArrayBuffer; return .ok; } -pub export fn napi_create_arraybuffer(env: napi_env, byte_length: usize, data: [*]const u8, result: *napi_value) napi_status { - log("napi_create_arraybuffer", .{}); - var typed_array = JSC.C.JSObjectMakeTypedArray(env.ref(), .kJSTypedArrayTypeArrayBuffer, byte_length, TODO_EXCEPTION); - var array_buffer = JSValue.c(typed_array).asArrayBuffer(env) orelse return genericFailure(); - const len = @min(array_buffer.len, @as(u32, @truncate(byte_length))); - @memcpy(array_buffer.ptr[0..len], data[0..len]); - result.* = JSValue.c(typed_array); - return .ok; -} +pub extern fn napi_create_arraybuffer(env: napi_env, byte_length: usize, data: [*]const u8, result: *napi_value) napi_status; pub extern fn napi_create_external_arraybuffer(env: napi_env, external_data: ?*anyopaque, byte_length: usize, finalize_cb: napi_finalize, finalize_hint: ?*anyopaque, result: *napi_value) napi_status; diff --git a/src/network_thread.zig 
b/src/network_thread.zig index a2353ebd28fb9d..2d0ae6ab838702 100644 --- a/src/network_thread.zig +++ b/src/network_thread.zig @@ -8,7 +8,7 @@ const std = @import("std"); pub const AsyncIO = bun.AsyncIO; const Output = bun.Output; const IdentityContext = @import("./identity_context.zig").IdentityContext; -const HTTP = @import("./http_client_async.zig"); +const HTTP = @import("./http.zig"); const NetworkThread = @This(); const Environment = bun.Environment; const Lock = @import("./lock.zig").Lock; diff --git a/src/options.zig b/src/options.zig index 6655695dee9400..f500eeb7421f70 100644 --- a/src/options.zig +++ b/src/options.zig @@ -692,13 +692,13 @@ pub const Loader = enum(u8) { }; } - pub fn toMimeType(this: Loader) bun.HTTP.MimeType { + pub fn toMimeType(this: Loader) bun.http.MimeType { return switch (this) { - .jsx, .js, .ts, .tsx => bun.HTTP.MimeType.javascript, - .css => bun.HTTP.MimeType.css, - .toml, .json => bun.HTTP.MimeType.json, - .wasm => bun.HTTP.MimeType.wasm, - else => bun.HTTP.MimeType.other, + .jsx, .js, .ts, .tsx => bun.http.MimeType.javascript, + .css => bun.http.MimeType.css, + .toml, .json => bun.http.MimeType.json, + .wasm => bun.http.MimeType.wasm, + else => bun.http.MimeType.other, }; } @@ -1701,29 +1701,6 @@ pub const BundleOptions = struct { opts.conditions = try ESMConditions.init(allocator, Target.DefaultConditions.get(opts.target)); - if (transform.serve orelse false) { - // When we're serving, we need some kind of URL. 
- if (!opts.origin.isAbsolute()) { - const protocol: string = if (opts.origin.hasHTTPLikeProtocol()) opts.origin.protocol else "http"; - - const had_valid_port = opts.origin.hasValidPort(); - const port: string = if (had_valid_port) opts.origin.port else "3000"; - - opts.origin = URL.parse( - try std.fmt.allocPrint( - allocator, - "{s}://localhost:{s}{s}", - .{ - protocol, - port, - opts.origin.path, - }, - ), - ); - opts.origin.port_was_automatically_set = !had_valid_port; - } - } - switch (opts.target) { .node => { opts.import_path_format = .relative; @@ -1758,162 +1735,9 @@ pub const BundleOptions = struct { opts.external = ExternalModules.init(allocator, &fs.fs, fs.top_level_dir, transform.external, log, opts.target); opts.out_extensions = opts.target.outExtensions(allocator); - if (comptime !bun.Environment.isWindows) { - if (transform.serve orelse false) { - opts.preserve_extensions = true; - opts.append_package_version_in_query_string = true; - if (opts.framework == null) - opts.env.behavior = .load_all; - - opts.source_map = SourceMapOption.fromApi(transform.source_map orelse Api.SourceMapMode.external); - - opts.resolve_mode = .lazy; - - var dir_to_use: string = opts.routes.static_dir; - const static_dir_set = opts.routes.static_dir_enabled or dir_to_use.len == 0; - var disabled_static = false; - - var chosen_dir = dir_to_use; - - if (!static_dir_set) { - chosen_dir = choice: { - if (fs.fs.readDirectory(fs.top_level_dir, null, 0, false)) |dir_| { - const dir: *const Fs.FileSystem.RealFS.EntriesOption = dir_; - switch (dir.*) { - .entries => { - if (dir.entries.getComptimeQuery("public")) |q| { - if (q.entry.kind(&fs.fs, true) == .dir) { - break :choice "public"; - } - } - - if (dir.entries.getComptimeQuery("static")) |q| { - if (q.entry.kind(&fs.fs, true) == .dir) { - break :choice "static"; - } - } - - break :choice "."; - }, - else => { - break :choice ""; - }, - } - } else |_| { - break :choice ""; - } - }; - - if (chosen_dir.len == 0) { - 
disabled_static = true; - opts.routes.static_dir_enabled = false; - } - } - - if (!disabled_static) { - var _dirs = [_]string{chosen_dir}; - opts.routes.static_dir = try fs.absAlloc(allocator, &_dirs); - const static_dir = std.fs.openIterableDirAbsolute(opts.routes.static_dir, .{}) catch |err| brk: { - switch (err) { - error.FileNotFound => { - opts.routes.static_dir_enabled = false; - }, - error.AccessDenied => { - Output.prettyErrorln( - "error: access denied when trying to open directory for static files: \"{s}\".\nPlease re-open bun with access to this folder or pass a different folder via \"--public-dir\". Note: --public-dir is relative to --cwd (or the process' current working directory).\n\nThe public folder is where static assets such as images, fonts, and .html files go.", - .{opts.routes.static_dir}, - ); - std.process.exit(1); - }, - else => { - Output.prettyErrorln( - "error: \"{s}\" when accessing public folder: \"{s}\"", - .{ @errorName(err), opts.routes.static_dir }, - ); - std.process.exit(1); - }, - } - - break :brk null; - }; - if (static_dir) |handle| { - opts.routes.static_dir_handle = handle.dir; - } - opts.routes.static_dir_enabled = opts.routes.static_dir_handle != null; - } - - const should_try_to_find_a_index_html_file = (opts.framework == null or !opts.framework.?.server.isEnabled()) and - !opts.routes.routes_enabled; - - if (opts.routes.static_dir_enabled and should_try_to_find_a_index_html_file) { - const dir = opts.routes.static_dir_handle.?; - var index_html_file = dir.openFile("index.html", .{ .mode = .read_only }) catch |err| brk: { - switch (err) { - error.FileNotFound => {}, - else => { - Output.prettyErrorln( - "{s} when trying to open {s}/index.html. 
single page app routing is disabled.", - .{ @errorName(err), opts.routes.static_dir }, - ); - }, - } - - opts.routes.single_page_app_routing = false; - break :brk null; - }; - - if (index_html_file) |index_dot_html| { - opts.routes.single_page_app_routing = true; - opts.routes.single_page_app_fd = index_dot_html.handle; - } - } - - if (!opts.routes.single_page_app_routing and should_try_to_find_a_index_html_file) { - attempt: { - var abs_buf: [bun.MAX_PATH_BYTES]u8 = undefined; - // If it's not in static-dir/index.html, check if it's in top level dir/index.html - var parts = [_]string{"index.html"}; - var full_path = resolve_path.joinAbsStringBuf(fs.top_level_dir, &abs_buf, &parts, .auto); - abs_buf[full_path.len] = 0; - var abs_buf_z: [:0]u8 = abs_buf[0..full_path.len :0]; - - const file = std.fs.openFileAbsoluteZ(abs_buf_z, .{ .mode = .read_only }) catch |err| { - switch (err) { - error.FileNotFound => {}, - else => { - Output.prettyErrorln( - "{s} when trying to open {s}/index.html. single page app routing is disabled.", - .{ @errorName(err), fs.top_level_dir }, - ); - }, - } - break :attempt; - }; - - opts.routes.single_page_app_routing = true; - opts.routes.single_page_app_fd = file.handle; - } - } - - // Windows has weird locking rules for file access. - // so it's a bad idea to keep a file handle open for a long time on Windows. 
- if (Environment.isWindows and opts.routes.static_dir_handle != null) { - opts.routes.static_dir_handle.?.close(); - } - opts.hot_module_reloading = opts.target.isWebLike(); - - if (transform.disable_hmr orelse false) - opts.hot_module_reloading = false; - - opts.serve = true; - } else { - opts.source_map = SourceMapOption.fromApi(transform.source_map orelse Api.SourceMapMode._none); - } - } else { - opts.source_map = SourceMapOption.fromApi(transform.source_map orelse Api.SourceMapMode._none); - } + opts.source_map = SourceMapOption.fromApi(transform.source_map orelse Api.SourceMapMode._none); - opts.tree_shaking = opts.serve or opts.target.isBun() or opts.production; + opts.tree_shaking = opts.target.isBun() or opts.production; opts.inlining = opts.tree_shaking; if (opts.inlining) opts.minify_syntax = true; diff --git a/src/panic_handler.zig b/src/panic_handler.zig index 761c25c3ab1904..8319c9d99e93c6 100644 --- a/src/panic_handler.zig +++ b/src/panic_handler.zig @@ -11,7 +11,7 @@ const default_allocator = bun.default_allocator; const C = bun.C; const CLI = @import("./cli.zig").Cli; const Features = @import("./analytics/analytics_thread.zig").Features; -const HTTP = @import("root").bun.HTTP.AsyncHTTP; +const HTTP = @import("root").bun.http.AsyncHTTP; const Report = @import("./report.zig"); pub fn NewPanicHandler(comptime panic_func: fn ([]const u8, ?*std.builtin.StackTrace, ?usize) noreturn) type { diff --git a/src/report.zig b/src/report.zig index 421bb52f2147ce..5420bb8ba7ddc5 100644 --- a/src/report.zig +++ b/src/report.zig @@ -13,7 +13,7 @@ const C = bun.C; const CLI = @import("./cli.zig").Cli; const Features = @import("./analytics/analytics_thread.zig").Features; const Platform = @import("./analytics/analytics_thread.zig").GenerateHeader.GeneratePlatform; -const HTTP = @import("root").bun.HTTP.AsyncHTTP; +const HTTP = @import("root").bun.http.AsyncHTTP; const CrashReporter = @import("./crash_reporter.zig"); const Report = @This(); diff --git 
a/src/resolver/data_url.zig b/src/resolver/data_url.zig index 771f56ffe0f1c3..9435638add0c78 100644 --- a/src/resolver/data_url.zig +++ b/src/resolver/data_url.zig @@ -113,8 +113,8 @@ pub const DataURL = struct { return parsed; } - pub fn decodeMimeType(d: DataURL) bun.HTTP.MimeType { - return bun.HTTP.MimeType.init(d.mime_type, null, null); + pub fn decodeMimeType(d: DataURL) bun.http.MimeType { + return bun.http.MimeType.init(d.mime_type, null, null); } pub fn decodeData(url: DataURL, allocator: std.mem.Allocator) ![]u8 { diff --git a/src/resolver/resolver.zig b/src/resolver/resolver.zig index 874f4cb7989445..4896e3460a8c80 100644 --- a/src/resolver/resolver.zig +++ b/src/resolver/resolver.zig @@ -26,7 +26,6 @@ const BrowserMap = @import("./package_json.zig").BrowserMap; const CacheSet = cache.Set; const DataURL = @import("./data_url.zig").DataURL; pub const DirInfo = @import("./dir_info.zig"); -const HTTPWatcher = if (Environment.isTest or Environment.isWasm) void else @import("../bun_dev_http_server.zig").Watcher; const ResolvePath = @import("./resolve_path.zig"); const NodeFallbackModules = @import("../node_fallbacks.zig"); const Mutex = @import("../lock.zig").Lock; diff --git a/src/standalone_bun.zig b/src/standalone_bun.zig index 0f36045dca176c..759f50f6da6130 100644 --- a/src/standalone_bun.zig +++ b/src/standalone_bun.zig @@ -50,7 +50,7 @@ pub const StandaloneModuleGraph = struct { blob_.* = bun.JSC.WebCore.Blob.initWithStore(store, globalObject); blob_.allocator = bun.default_allocator; - if (bun.HTTP.MimeType.byExtensionNoDefault(bun.strings.trimLeadingChar(std.fs.path.extension(this.name), '.'))) |mime| { + if (bun.http.MimeType.byExtensionNoDefault(bun.strings.trimLeadingChar(std.fs.path.extension(this.name), '.'))) |mime| { store.mime_type = mime; blob_.content_type = mime.value; blob_.content_type_was_set = true; diff --git a/src/url.zig b/src/url.zig index 0cce41a4e6aea1..4704aec87a8935 100644 --- a/src/url.zig +++ b/src/url.zig @@ -1077,13 +1077,13 
@@ pub const FormData = struct { if (filename_str.len > 0) { const extension = std.fs.path.extension(filename_str); if (extension.len > 0) { - if (bun.HTTP.MimeType.byExtensionNoDefault(extension[1..extension.len])) |mime| { + if (bun.http.MimeType.byExtensionNoDefault(extension[1..extension.len])) |mime| { break :brk mime.value; } } } - if (bun.HTTP.MimeType.sniff(value_str)) |mime| { + if (bun.http.MimeType.sniff(value_str)) |mime| { break :brk mime.value; } diff --git a/test/js/bun/http/serve.test.ts b/test/js/bun/http/serve.test.ts index db321d9e85bd3b..0592cf80a5c01d 100644 --- a/test/js/bun/http/serve.test.ts +++ b/test/js/bun/http/serve.test.ts @@ -947,6 +947,18 @@ describe("should support Content-Range with Bun.file()", () => { }, }); + const getServerWithSize = runTest.bind(null, { + fetch(req) { + const { searchParams } = new URL(req.url); + const start = Number(searchParams.get("start")); + const end = Number(searchParams.get("end")); + const file = Bun.file(fixture); + return new Response(file.slice(start, end), { + headers: { "Content-Range": "bytes " + start + "-" + end + "/" + file.size }, + }); + }, + }); + const good = [ [0, 1], [1, 2], @@ -973,6 +985,19 @@ describe("should support Content-Range with Bun.file()", () => { }); } + for (const [start, end] of good) { + it(`good range with size: ${start} - ${end}`, async () => { + await getServerWithSize(async server => { + const response = await fetch(`http://${server.hostname}:${server.port}/?start=${start}&end=${end}`, { + verbose: true, + }); + expect(parseInt(response.headers.get("Content-Range")?.split("/")[1])).toEqual(full.byteLength); + expect(await response.arrayBuffer()).toEqual(full.buffer.slice(start, end)); + expect(response.status).toBe(start > 0 || end < full.byteLength ? 
206 : 200); + }); + }); + } + const emptyRanges = [ [0, 0], [1, 1], diff --git a/test/js/bun/test/mock-fn.test.js b/test/js/bun/test/mock-fn.test.js index aef4e85ad0b4b8..7844889f88640b 100644 --- a/test/js/bun/test/mock-fn.test.js +++ b/test/js/bun/test/mock-fn.test.js @@ -65,10 +65,13 @@ describe("mock()", () => { expect(fn).toHaveBeenCalledTimes(1); expect(fn.mock.calls).toHaveLength(1); expect(fn.mock.calls[0]).toBeEmpty(); + expect(fn).toHaveBeenLastCalledWith(); expect(fn()).toBe(42); expect(fn).toHaveBeenCalledTimes(2); expect(fn.mock.calls).toHaveLength(2); expect(fn.mock.calls[1]).toBeEmpty(); + expect(fn).toHaveBeenLastCalledWith(); + expect(fn).toHaveBeenCalledWith(); }); test("passes this value", () => { const fn = jest.fn(function hey() { @@ -107,10 +110,13 @@ describe("mock()", () => { expect(fn).toHaveBeenCalledTimes(1); expect(fn.mock.calls).toHaveLength(1); expect(fn.mock.calls[0]).toBeEmpty(); + expect(fn).toHaveBeenLastCalledWith(); expect(Number(fn.call(234))).toBe(234); expect(fn).toHaveBeenCalledTimes(2); expect(fn.mock.calls).toHaveLength(2); expect(fn.mock.calls[1]).toBeEmpty(); + expect(fn).toHaveBeenLastCalledWith(); + expect(fn).toHaveBeenCalledWith(); }); test(".apply works", function () { const fn = jest.fn(function hey() { @@ -121,10 +127,13 @@ describe("mock()", () => { expect(fn).toHaveBeenCalledTimes(1); expect(fn.mock.calls).toHaveLength(1); expect(fn.mock.calls[0]).toBeEmpty(); + expect(fn).toHaveBeenLastCalledWith(); expect(Number(fn.apply(234))).toBe(234); expect(fn).toHaveBeenCalledTimes(2); expect(fn.mock.calls).toHaveLength(2); expect(fn.mock.calls[1]).toBeEmpty(); + expect(fn).toHaveBeenLastCalledWith(); + expect(fn).toHaveBeenCalledWith(); }); test(".bind works", () => { const fn = jest.fn(function hey() { @@ -135,10 +144,13 @@ describe("mock()", () => { expect(fn).toHaveBeenCalledTimes(1); expect(fn.mock.calls).toHaveLength(1); expect(fn.mock.calls[0]).toBeEmpty(); + expect(fn).toHaveBeenLastCalledWith(); 
expect(Number(fn.bind(234)())).toBe(234); expect(fn).toHaveBeenCalledTimes(2); expect(fn.mock.calls).toHaveLength(2); expect(fn.mock.calls[1]).toBeEmpty(); + expect(fn).toHaveBeenLastCalledWith(); + expect(fn).toHaveBeenCalledWith(); }); test(".name works", () => { const fn = jest.fn(function hey() { @@ -181,6 +193,10 @@ describe("mock()", () => { value: 43, }); expect(fn.mock.calls[0]).toEqual([43]); + expect(fn).toHaveBeenCalled(); + expect(fn).toHaveBeenCalledTimes(1); + expect(fn).toHaveBeenLastCalledWith(43); + expect(fn).toHaveBeenCalledWith(43); }); test("works when throwing", () => { const instance = new Error("foo"); @@ -193,6 +209,8 @@ describe("mock()", () => { value: instance, }); expect(fn.mock.calls[0]).toEqual([43]); + expect(fn).toHaveBeenLastCalledWith(43); + expect(fn).toHaveBeenCalledWith(43); }); test("mockReset works", () => { const instance = new Error("foo"); @@ -205,11 +223,15 @@ describe("mock()", () => { value: instance, }); expect(fn.mock.calls[0]).toEqual([43]); + expect(fn).toHaveBeenLastCalledWith(43); + expect(fn).toHaveBeenCalledWith(43); fn.mockReset(); expect(fn.mock.calls).toBeEmpty(); expect(fn.mock.results).toBeEmpty(); expect(fn.mock.instances).toBeEmpty(); expect(fn).not.toHaveBeenCalled(); + expect(fn).not.toHaveBeenLastCalledWith(43); + expect(fn).not.toHaveBeenCalledWith(43); expect(() => expect(fn).toHaveBeenCalled()).toThrow(); expect(fn(43)).toBe(undefined); expect(fn.mock.results).toEqual([ @@ -219,6 +241,8 @@ describe("mock()", () => { }, ]); expect(fn.mock.calls).toEqual([[43]]); + expect(fn).toHaveBeenLastCalledWith(43); + expect(fn).toHaveBeenCalledWith(43); }); test("mockClear works", () => { const instance = new Error("foo"); @@ -231,17 +255,23 @@ describe("mock()", () => { value: instance, }); expect(fn.mock.calls[0]).toEqual([43]); + expect(fn).toHaveBeenLastCalledWith(43); + expect(fn).toHaveBeenCalledWith(43); fn.mockClear(); expect(fn.mock.calls).toBeEmpty(); expect(fn.mock.results).toBeEmpty(); 
expect(fn.mock.instances).toBeEmpty(); expect(fn).not.toHaveBeenCalled(); + expect(fn).not.toHaveBeenLastCalledWith(43); + expect(fn).not.toHaveBeenCalledWith(43); expect(() => fn(43)).toThrow("foo"); expect(fn.mock.results[0]).toEqual({ type: "throw", value: instance, }); expect(fn.mock.calls[0]).toEqual([43]); + expect(fn).toHaveBeenLastCalledWith(43); + expect(fn).toHaveBeenCalledWith(43); }); // this is an implementation detail i don't think we *need* to support test("mockClear doesnt update existing object", () => { @@ -255,10 +285,14 @@ describe("mock()", () => { value: instance, }); expect(fn.mock.calls[0]).toEqual([43]); + expect(fn).toHaveBeenLastCalledWith(43); + expect(fn).toHaveBeenCalledWith(43); const stolen = fn.mock; fn.mockClear(); expect(stolen).not.toBe(fn.mock); expect(fn.mock.calls).toBeEmpty(); + expect(fn).not.toHaveBeenLastCalledWith(43); + expect(fn).not.toHaveBeenCalledWith(43); expect(stolen.calls).not.toBeEmpty(); expect(fn.mock.results).toBeEmpty(); expect(stolen.results).not.toBeEmpty(); @@ -271,22 +305,29 @@ describe("mock()", () => { value: instance, }); expect(fn.mock.calls[0]).toEqual([43]); + expect(fn).toHaveBeenLastCalledWith(43); + expect(fn).toHaveBeenCalledWith(43); }); test("multiple calls work", () => { const fn = jest.fn(f => f); expect(fn(43)).toBe(43); + expect(fn).toHaveBeenLastCalledWith(43); expect(fn(44)).toBe(44); + expect(fn).toHaveBeenLastCalledWith(44); expect(fn.mock.calls[0]).toEqual([43]); expect(fn.mock.results[0]).toEqual({ type: "return", value: 43, }); expect(fn.mock.calls[1]).toEqual([44]); + expect(fn).toHaveBeenLastCalledWith(44); expect(fn.mock.results[1]).toEqual({ type: "return", value: 44, }); expect(fn.mock.contexts).toEqual([undefined, undefined]); + expect(fn).toHaveBeenCalledWith(43); + expect(fn).toHaveBeenCalledWith(44); }); test("this arg", () => { const fn = jest.fn(function (add) { @@ -295,6 +336,8 @@ describe("mock()", () => { const obj = { foo: 42, fn }; expect(obj.fn(2)).toBe(44); 
expect(fn.mock.calls[0]).toEqual([2]); + expect(fn).toHaveBeenLastCalledWith(2); + expect(fn).toHaveBeenCalledWith(2); expect(fn.mock.results[0]).toEqual({ type: "return", value: 44, @@ -320,19 +363,26 @@ describe("mock()", () => { }); const obj = { foo: 42, fn }; expect(obj.fn(2)).toBe(44); + expect(fn).toHaveBeenLastCalledWith(2); const this2 = { foo: 43 }; expect(fn.call(this2, 2)).toBe(45); + expect(fn).toHaveBeenLastCalledWith(2); const this3 = { foo: 44 }; expect(fn.apply(this3, [2])).toBe(46); + expect(fn).toHaveBeenLastCalledWith(2); const this4 = { foo: 45 }; expect(fn.bind(this4)(3)).toBe(48); + expect(fn).toHaveBeenLastCalledWith(3); const this5 = { foo: 45 }; expect(fn.bind(this5, 2)()).toBe(47); + expect(fn).toHaveBeenLastCalledWith(2); expect(fn.mock.calls[0]).toEqual([2]); expect(fn.mock.calls[1]).toEqual([2]); expect(fn.mock.calls[2]).toEqual([2]); expect(fn.mock.calls[3]).toEqual([3]); expect(fn.mock.calls[4]).toEqual([2]); + expect(fn).toHaveBeenCalledWith(2); + expect(fn).toHaveBeenCalledWith(3); expect(fn.mock.results[0]).toEqual({ type: "return", value: 44, @@ -509,6 +559,64 @@ describe("mock()", () => { expect(fn1.mock.invocationCallOrder).toEqual([first, first + 3]); expect(fn2.mock.invocationCallOrder).toEqual([first + 1, first + 2]); }); + + test("toHaveBeenCalledWith, toHaveBeenLastCalledWith works", () => { + const fn = jest.fn(); + expect(() => expect(() => {}).not.toHaveBeenLastCalledWith()).toThrow(); + expect(() => expect(() => {}).not.toHaveBeenNthCalledWith()).toThrow(); + expect(() => expect(() => {}).not.toHaveBeenCalledWith()).toThrow(); + expect(fn).not.toHaveBeenCalled(); + expect(() => expect(fn).toHaveBeenCalledTimes(-1)).toThrow(); + expect(fn).toHaveBeenCalledTimes(0); + expect(fn).not.toHaveBeenCalledWith(); + expect(fn).not.toHaveBeenLastCalledWith(); + expect(() => expect(fn).toHaveBeenNthCalledWith(0)).toThrow(); + expect(() => expect(fn).toHaveBeenNthCalledWith(-1)).toThrow(); + expect(() => 
expect(fn).toHaveBeenNthCalledWith(1.1)).toThrow(); + expect(fn).not.toHaveBeenNthCalledWith(1); + fn(); + expect(fn).toHaveBeenCalled(); + expect(fn).toHaveBeenCalledTimes(1); + expect(fn).toHaveBeenCalledWith(); + expect(fn).toHaveBeenLastCalledWith(); + expect(fn).toHaveBeenNthCalledWith(1); + expect(fn).not.toHaveBeenNthCalledWith(1, 1); + expect(fn).not.toHaveBeenCalledWith(1); + fn(1); + expect(fn).toHaveBeenCalledWith(1); + expect(fn).toHaveBeenLastCalledWith(1); + expect(fn).toHaveBeenNthCalledWith(1); + expect(fn).toHaveBeenNthCalledWith(2, 1); + fn(1, 2, 3); + expect(fn).not.toHaveBeenCalledWith("123"); + expect(fn).not.toHaveBeenLastCalledWith(1); + expect(fn).not.toHaveBeenLastCalledWith(1, 2); + expect(fn).not.toHaveBeenLastCalledWith("123"); + expect(fn).toHaveBeenLastCalledWith(1, 2, 3); + expect(fn).not.toHaveBeenLastCalledWith(3, 2, 1); + expect(fn).toHaveBeenNthCalledWith(3, 1, 2, 3); + expect(fn).not.toHaveBeenNthCalledWith(4, 3, 2, 1); + fn("random string"); + expect(fn).toHaveBeenCalledWith(); + expect(fn).toHaveBeenNthCalledWith(1); + expect(fn).toHaveBeenCalledWith(1); + expect(fn).toHaveBeenNthCalledWith(2, 1); + expect(fn).toHaveBeenCalledWith(1, 2, 3); + expect(fn).toHaveBeenNthCalledWith(3, 1, 2, 3); + expect(fn).toHaveBeenCalledWith("random string"); + expect(fn).toHaveBeenLastCalledWith("random string"); + expect(fn).toHaveBeenNthCalledWith(4, "random string"); + expect(fn).toHaveBeenCalledWith(expect.stringMatching(/^random \w+$/)); + expect(fn).toHaveBeenLastCalledWith(expect.stringMatching(/^random \w+$/)); + expect(fn).toHaveBeenNthCalledWith(4, expect.stringMatching(/^random \w+$/)); + fn(1, undefined); + expect(fn).toHaveBeenLastCalledWith(1, undefined); + expect(fn).not.toHaveBeenLastCalledWith(1); + expect(fn).toHaveBeenCalledWith(1, undefined); + expect(fn).not.toHaveBeenCalledWith(undefined); + expect(fn).toHaveBeenNthCalledWith(5, 1, undefined); + expect(fn).not.toHaveBeenNthCalledWith(5, 1); + }); }); describe("spyOn", () => 
{ diff --git a/test/js/node/crypto/crypto.key-objects.test.ts b/test/js/node/crypto/crypto.key-objects.test.ts index b124ca4796d92e..0598c25ba6c318 100644 --- a/test/js/node/crypto/crypto.key-objects.test.ts +++ b/test/js/node/crypto/crypto.key-objects.test.ts @@ -1430,7 +1430,6 @@ describe("crypto.KeyObjects", () => { } ); - assertApproximateSize(publicKeyDER, 74); const publicKey = { @@ -1476,122 +1475,105 @@ describe("crypto.KeyObjects", () => { }); }); -test.todo("RSA-PSS should work", async () => { +test("RSA-PSS should work", async () => { // Test RSA-PSS. + const expectedKeyDetails = { + modulusLength: 2048, + publicExponent: 65537n, + }; { - // This key pair does not restrict the message digest algorithm or salt - // length. - // const publicPem = fs.readFileSync(path.join(import.meta.dir, "fixtures", "rsa_pss_public_2048.pem"), "ascii"); - // const privatePem = fs.readFileSync(path.join(import.meta.dir, "fixtures", "rsa_pss_private_2048.pem"), "ascii"); - // const publicKey = createPublicKey(publicPem); - // const privateKey = createPrivateKey(privatePem); - // // Because no RSASSA-PSS-params appears in the PEM, no defaults should be - // // added for the PSS parameters. This is different from an empty - // // RSASSA-PSS-params sequence (see test below). 
- // const expectedKeyDetails = { - // modulusLength: 2048, - // publicExponent: 65537n, - // }; - // expect(publicKey.type).toBe("public"); - // expect(publicKey.asymmetricKeyType).toBe("rsa-pss"); - // expect(publicKey.asymmetricKeyDetails).toBe(expectedKeyDetails); - // expect(privateKey.type).toBe("private"); - // expect(privateKey.asymmetricKeyType).toBe("rsa-pss"); - // expect(privateKey.asymmetricKeyDetails).toBe(expectedKeyDetails); - // assert.throws( - // () => publicKey.export({ format: 'jwk' }), - // { code: 'ERR_CRYPTO_JWK_UNSUPPORTED_KEY_TYPE' }); - // assert.throws( - // () => privateKey.export({ format: 'jwk' }), - // { code: 'ERR_CRYPTO_JWK_UNSUPPORTED_KEY_TYPE' }); - // for (const key of [privatePem, privateKey]) { - // // Any algorithm should work. - // for (const algo of ['sha1', 'sha256']) { - // // Any salt length should work. - // for (const saltLength of [undefined, 8, 10, 12, 16, 18, 20]) { - // const signature = createSign(algo) - // .update('foo') - // .sign({ key, saltLength }); - // for (const pkey of [key, publicKey, publicPem]) { - // const okay = createVerify(algo) - // .update('foo') - // .verify({ key: pkey, saltLength }, signature); - // assert.ok(okay); - // } - // } - // } - // } - // // Exporting the key using PKCS#1 should not work since this would discard - // // any algorithm restrictions. - // assert.throws(() => { - // publicKey.export({ format: 'pem', type: 'pkcs1' }); - // }, { - // code: 'ERR_CRYPTO_INCOMPATIBLE_KEY_OPTIONS' - // }); - // { - // // This key pair enforces sha1 as the message digest and the MGF1 - // // message digest and a salt length of 20 bytes. 
- // const publicPem = fixtures.readKey('rsa_pss_public_2048_sha1_sha1_20.pem'); - // const privatePem = - // fixtures.readKey('rsa_pss_private_2048_sha1_sha1_20.pem'); - // const publicKey = createPublicKey(publicPem); - // const privateKey = createPrivateKey(privatePem); - // // Unlike the previous key pair, this key pair contains an RSASSA-PSS-params - // // sequence. However, because all values in the RSASSA-PSS-params are set to - // // their defaults (see RFC 3447), the ASN.1 structure contains an empty - // // sequence. Node.js should add the default values to the key details. - // const expectedKeyDetails = { - // modulusLength: 2048, - // publicExponent: 65537n, - // hashAlgorithm: 'sha1', - // mgf1HashAlgorithm: 'sha1', - // saltLength: 20 - // }; - // assert.strictEqual(publicKey.type, 'public'); - // assert.strictEqual(publicKey.asymmetricKeyType, 'rsa-pss'); - // assert.deepStrictEqual(publicKey.asymmetricKeyDetails, expectedKeyDetails); - // assert.strictEqual(privateKey.type, 'private'); - // assert.strictEqual(privateKey.asymmetricKeyType, 'rsa-pss'); - // assert.deepStrictEqual(privateKey.asymmetricKeyDetails, expectedKeyDetails); - // } - // { - // // This key pair enforces sha256 as the message digest and the MGF1 - // // message digest and a salt length of at least 16 bytes. - // const publicPem = - // fixtures.readKey('rsa_pss_public_2048_sha256_sha256_16.pem'); - // const privatePem = - // fixtures.readKey('rsa_pss_private_2048_sha256_sha256_16.pem'); - // const publicKey = createPublicKey(publicPem); - // const privateKey = createPrivateKey(privatePem); - // assert.strictEqual(publicKey.type, 'public'); - // assert.strictEqual(publicKey.asymmetricKeyType, 'rsa-pss'); - // assert.strictEqual(privateKey.type, 'private'); - // assert.strictEqual(privateKey.asymmetricKeyType, 'rsa-pss'); - // for (const key of [privatePem, privateKey]) { - // // Signing with anything other than sha256 should fail. 
- // assert.throws(() => { - // createSign('sha1').sign(key); - // }, /digest not allowed/); - // // Signing with salt lengths less than 16 bytes should fail. - // for (const saltLength of [8, 10, 12]) { - // assert.throws(() => { - // createSign('sha1').sign({ key, saltLength }); - // }, /pss saltlen too small/); - // } - // // Signing with sha256 and appropriate salt lengths should work. - // for (const saltLength of [undefined, 16, 18, 20]) { - // const signature = createSign('sha256') - // .update('foo') - // .sign({ key, saltLength }); - // for (const pkey of [key, publicKey, publicPem]) { - // const okay = createVerify('sha256') - // .update('foo') - // .verify({ key: pkey, saltLength }, signature); - // assert.ok(okay); - // } - // } - // } - // } + const { privateKey, publicKey } = generateKeyPairSync("rsa-pss", { + modulusLength: 2048, + publicExponent: 65537, + }); + expect(publicKey.type).toBe("public"); + expect(publicKey.asymmetricKeyType).toBe("rsa-pss"); + expect(publicKey.asymmetricKeyDetails).toEqual(expectedKeyDetails); + expect(privateKey.type).toBe("private"); + expect(privateKey.asymmetricKeyType).toBe("rsa-pss"); + expect(privateKey.asymmetricKeyDetails).toEqual(expectedKeyDetails); + expect(() => publicKey.export({ format: "jwk" })).toThrow(/ERR_CRYPTO_JWK_UNSUPPORTED_KEY_TYPE/); + expect(() => privateKey.export({ format: "jwk" })).toThrow(/ERR_CRYPTO_JWK_UNSUPPORTED_KEY_TYPE/); + + for (const key of [privateKey]) { + // Any algorithm should work. + for (const algo of ["sha1", "sha256"]) { + // Any salt length should work. + for (const saltLength of [undefined, 8, 10, 12, 16, 18, 20]) { + const signature = sign(algo, Buffer.from("foo"), { key, saltLength }); + for (const pkey of [key, publicKey]) { + const okay = verify(algo, Buffer.from("foo"), { key: pkey, saltLength }, signature); + expect(okay).toBeTrue(); + } + } + } + } + // Exporting the key using PKCS#1 should not work since this would discard + // any algorithm restrictions. 
+ expect(() => { + publicKey.export({ format: "pem", type: "pkcs1" }); + }).toThrow(/ERR_CRYPTO_JWK_UNSUPPORTED_KEY_TYPE/); + + { + // Unlike the previous key pair, this key pair contains an RSASSA-PSS-params + // sequence. However, because all values in the RSASSA-PSS-params are set to + // their defaults (see RFC 3447), the ASN.1 structure contains an empty + // sequence. Node.js should add the default values to the key details. + const { privateKey, publicKey } = generateKeyPairSync("rsa-pss", { + modulusLength: 2048, + publicExponent: 65537, + hashAlgorithm: "sha1", + mgf1HashAlgorithm: "sha1", + saltLength: 20, + }); + + expect(publicKey.type).toBe("public"); + expect(publicKey.asymmetricKeyType).toBe("rsa-pss"); + // RSA_get0_pss_params returns NULL. In OpenSSL, this function retries RSA-PSS + // parameters associated with |RSA| objects, but BoringSSL does not support + // the id-RSASSA-PSS key encoding. + // We expect only modulusLength and publicExponent to be present. + expect(publicKey.asymmetricKeyDetails).toEqual(expectedKeyDetails); + expect(privateKey.type).toBe("private"); + expect(privateKey.asymmetricKeyType).toBe("rsa-pss"); + } + { + // This key pair enforces sha256 as the message digest and the MGF1 + // message digest and a salt length of at least 16 bytes. + const { privateKey, publicKey } = generateKeyPairSync("rsa-pss", { + modulusLength: 2048, + publicExponent: 65537, + hashAlgorithm: "sha256", + saltLength: 16, + }); + expect(publicKey.type).toBe("public"); + expect(publicKey.asymmetricKeyType).toBe("rsa-pss"); + expect(privateKey.type).toBe("private"); + expect(privateKey.asymmetricKeyType).toBe("rsa-pss"); + for (const key of [privateKey]) { + // Signing with anything other than sha256 should fail. + expect(() => { + sign("sha1", Buffer.from("foo"), key); + }).toThrow(/digest not allowed/); + // Signing with salt lengths less than 16 bytes should fail. + // We don't enforce this yet because of BoringSSL's limitations. 
TODO: check this + // for (const saltLength of [8, 10, 12]) { + // expect(() => { + // createSign("sha1").sign({ key, saltLength }); + // }).toThrow(/pss saltlen too small/); + // } + // Signing with sha256 and appropriate salt lengths should work. + for (const saltLength of [undefined, 16, 18, 20]) { + const signature = sign("sha256", Buffer.from("foo"), { key, saltLength }); + for (const pkey of [key, publicKey]) { + const okay = verify("sha256", Buffer.from("foo"), { key: pkey, saltLength }, signature); + expect(okay).toBeTrue(); + } + } + } + } + + // TODO: check how to use MGF1 and saltLength using BoringSSL // { // // This key enforces sha512 as the message digest and sha256 as the MGF1 // // message digest. @@ -1641,3 +1623,72 @@ test.todo("RSA-PSS should work", async () => { // } } }); + +test("Ed25519 should work", async () => { + const { publicKey, privateKey } = generateKeyPairSync("ed25519"); + + expect(publicKey.type).toBe("public"); + expect(publicKey.asymmetricKeyType).toBe("ed25519"); + expect(publicKey.asymmetricKeyDetails).toEqual({ namedCurve: "Ed25519" }); + expect(privateKey.type).toBe("private"); + expect(privateKey.asymmetricKeyType).toBe("ed25519"); + expect(privateKey.asymmetricKeyDetails).toEqual({ namedCurve: "Ed25519" }); + + { + const signature = sign(undefined, Buffer.from("foo"), privateKey); + const okay = verify(undefined, Buffer.from("foo"), publicKey, signature); + expect(okay).toBeTrue(); + } +}); + +test("ECDSA should work", async () => { + const { publicKey, privateKey } = generateKeyPairSync("ec", { namedCurve: "prime256v1" }); + + expect(publicKey.type).toBe("public"); + expect(publicKey.asymmetricKeyType).toBe("ec"); + expect(publicKey.asymmetricKeyDetails).toEqual({ namedCurve: "prime256v1" }); + expect(privateKey.type).toBe("private"); + expect(privateKey.asymmetricKeyType).toBe("ec"); + expect(privateKey.asymmetricKeyDetails).toEqual({ namedCurve: "prime256v1" }); + + // default format (DER) + { + const signature = 
sign("sha256", Buffer.from("foo"), privateKey); + expect(signature.byteLength).not.toBe(64); + const okay = verify("sha256", Buffer.from("foo"), publicKey, signature); + expect(okay).toBeTrue(); + } + // IeeeP1363 format + { + const signature = sign("sha256", Buffer.from("foo"), { key: privateKey, dsaEncoding: "ieee-p1363" }); + expect(signature.byteLength).toBe(64); + + const okay = verify("sha256", Buffer.from("foo"), { key: publicKey, dsaEncoding: "ieee-p1363" }, signature); + expect(okay).toBeTrue(); + } + // DER format + { + const signature = sign("sha256", Buffer.from("foo"), { key: privateKey, dsaEncoding: "der" }); + expect(signature.byteLength).not.toBe(64); + + const okay = verify("sha256", Buffer.from("foo"), { key: publicKey, dsaEncoding: "der" }, signature); + expect(okay).toBeTrue(); + } + + expect(() => { + //@ts-ignore + sign("sha256", Buffer.from("foo"), { key: privateKey, dsaEncoding: "kjshdakjshd" }); + }).toThrow(/invalid dsaEncoding/); + + expect(() => { + const signature = sign("sha256", Buffer.from("foo"), privateKey); + //@ts-ignore + verify("sha256", Buffer.from("foo"), { key: publicKey, dsaEncoding: "ieee-p136" }, signature); + }).toThrow(/invalid dsaEncoding/); + + expect(() => { + //@ts-ignore + const signature = sign("sha256", Buffer.from("foo"), { key: privateKey, dsaEncoding: "ieee-p136" }); + verify("sha256", Buffer.from("foo"), { key: publicKey, dsaEncoding: "der" }, signature); + }).toThrow(/invalid dsaEncoding/); +}); diff --git a/test/js/node/events/native-event-emitter.test.ts b/test/js/node/events/native-event-emitter.test.ts new file mode 100644 index 00000000000000..b82e484b86f8e3 --- /dev/null +++ b/test/js/node/events/native-event-emitter.test.ts @@ -0,0 +1,17 @@ +import { describe, test, expect } from "bun:test"; +import stream from "stream"; + +describe("Native EventEmitter", () => { + test("newListener fires before the listener is actually added", () => { + const emitter = new stream.Stream(); // stream extends native 
EventEmitters + let called = false; + emitter.on("newListener", (event: any, listener: any) => { + expect(event).toBe("foo"); + expect(emitter.listeners("foo")).toEqual([]); + expect(emitter.listenerCount("foo")).toEqual(0); + called = true; + }); + emitter.on("foo", () => {}); + expect(called).toBe(true); + }); +}); diff --git a/test/js/node/fs/fs.test.ts b/test/js/node/fs/fs.test.ts index ea0185bdfcd312..268ef8e5983193 100644 --- a/test/js/node/fs/fs.test.ts +++ b/test/js/node/fs/fs.test.ts @@ -1,5 +1,5 @@ import { describe, expect, it } from "bun:test"; -import { dirname } from "node:path"; +import { dirname, resolve, relative } from "node:path"; import { promisify } from "node:util"; import { bunEnv, bunExe, gc } from "harness"; import fs, { @@ -940,7 +940,7 @@ it("realpath async", async () => { err ? reject(err) : resolve(path); }); expect(await promise).toBe(realpathSync(import.meta.path)); -}); +}, 30_000); describe("stat", () => { it("file metadata is correct", () => { @@ -1721,6 +1721,102 @@ describe("fs/promises", () => { expect(files.length).toBeGreaterThan(0); }); + it("readdir(path, {recursive: true}) produces the same result as Node.js", async () => { + const full = resolve(import.meta.dir, "../"); + const [bun, subprocess] = await Promise.all([ + (async function () { + console.time("readdir(path, {recursive: true})"); + const files = await promises.readdir(full, { recursive: true }); + files.sort(); + console.timeEnd("readdir(path, {recursive: true})"); + return files; + })(), + (async function () { + const subprocess = Bun.spawn({ + cmd: [ + "node", + "-e", + `process.stdout.write(JSON.stringify(require("fs").readdirSync(${JSON.stringify( + full, + )}, { recursive: true }).sort()), null, 2)`, + ], + cwd: process.cwd(), + stdout: "pipe", + stderr: "inherit", + stdin: "inherit", + }); + await subprocess.exited; + return subprocess; + })(), + ]); + + expect(subprocess.exitCode).toBe(0); + const text = await new Response(subprocess.stdout).text(); + 
const node = JSON.parse(text); + expect(bun).toEqual(node as string[]); + }, 100000); + + for (let withFileTypes of [false, true] as const) { + const doIt = async () => { + const maxFD = openSync("/dev/null", "r"); + closeSync(maxFD); + const full = resolve(import.meta.dir, "../"); + + const pending = new Array(100); + for (let i = 0; i < 100; i++) { + pending[i] = promises.readdir(full, { recursive: true, withFileTypes }); + } + + const results = await Promise.all(pending); + for (let i = 0; i < 100; i++) { + results[i].sort(); + } + expect(results[0].length).toBeGreaterThan(0); + for (let i = 1; i < 100; i++) { + expect(results[i]).toEqual(results[0]); + } + + if (!withFileTypes) { + expect(results[0]).toContain(relative(full, import.meta.path)); + } + + const newMaxFD = openSync("/dev/null", "r"); + closeSync(newMaxFD); + expect(maxFD).toBe(newMaxFD); // assert we do not leak file descriptors + }; + + const fail = async () => { + const maxFD = openSync("/dev/null", "r"); + closeSync(maxFD); + + const pending = new Array(100); + for (let i = 0; i < 100; i++) { + pending[i] = promises.readdir("/notfound/i/dont/exist/for/sure/" + i, { recursive: true, withFileTypes }); + } + + const results = await Promise.allSettled(pending); + for (let i = 0; i < 100; i++) { + expect(results[i].status).toBe("rejected"); + expect(results[i].reason!.code).toBe("ENOENT"); + expect(results[i].reason!.path).toBe("/notfound/i/dont/exist/for/sure/" + i); + } + + const newMaxFD = openSync("/dev/null", "r"); + closeSync(newMaxFD); + expect(maxFD).toBe(newMaxFD); // assert we do not leak file descriptors + }; + + if (withFileTypes) { + describe("withFileTypes", () => { + it("readdir(path, {recursive: true} should work x 100", doIt, 10_000); + it("readdir(path, {recursive: true} should fail x 100", fail, 10_000); + }); + } else { + it("readdir(path, {recursive: true} should work x 100", doIt, 10_000); + it("readdir(path, {recursive: true} should fail x 100", fail, 10_000); + } + } + 
it("readdir() no args doesnt segfault", async () => { const fizz = [ [], @@ -1775,6 +1871,10 @@ describe("fs/promises", () => { expect(await exists(path)).toBe(false); }); }); + + it("opendir should have a path property, issue#4995", async () => { + expect((await fs.promises.opendir(".")).path).toBe("."); + }); }); it("stat on a large file", () => { diff --git a/test/js/third_party/got/got.test.ts b/test/js/third_party/got/got.test.ts new file mode 100644 index 00000000000000..223c6c41125f28 --- /dev/null +++ b/test/js/third_party/got/got.test.ts @@ -0,0 +1,47 @@ +import { test, expect, describe } from "bun:test"; +import got from "got"; +import { Readable } from "stream"; + +describe("got", () => { + test("should work", async () => { + const server = Bun.serve({ + fetch(request, server) { + return new Response("Hello World!"); + }, + }); + + const response = await got(`http://${server.hostname}:${server.port}/`); + expect(response.statusCode).toBe(200); + expect(response.body).toBe("Hello World!"); + expect(response.headers["content-length"]).toBe("12"); + expect(response.url).toBe(`http://${server.hostname}:${server.port}/`); + + server.stop(); + }); + + test("json response", async () => { + const server = Bun.serve({ + async fetch(request, server) { + expect(request.method).toBe("POST"); + const data = await request.json(); + expect(data).toEqual({ hello: "world" }); + + return new Response("Hello World!"); + }, + }); + + const stream = await got.post(`http://${server.hostname}:${server.port}/`, { json: { hello: "world" } }); + expect(stream.body).toBe("Hello World!"); + + server.stop(); + }); + + test("https gzip", async () => { + const stream = await got("https://bun.sh/", { + headers: { + "Accept-Encoding": "gzip", + }, + }); + expect(stream.statusCode).toBe(200); + }); +}); diff --git a/test/js/third_party/got/package.json b/test/js/third_party/got/package.json new file mode 100644 index 00000000000000..e44c5289b6c76d --- /dev/null +++ 
b/test/js/third_party/got/package.json @@ -0,0 +1,6 @@ +{ + "name": "test-got", + "dependencies": { + "got": "13.0.0" + } +} diff --git a/test/js/web/fetch/fetch.test.ts b/test/js/web/fetch/fetch.test.ts index 9689d924fb0aa2..0fc8c0dda0e404 100644 --- a/test/js/web/fetch/fetch.test.ts +++ b/test/js/web/fetch/fetch.test.ts @@ -347,7 +347,7 @@ describe("Headers", () => { expect(headers.getAll("set-cookie")).toEqual(["foo=bar; Path=/; HttpOnly"]); }); - it("presence of content-encoding header(issue #5668)", async () => { + it("presence of content-encoding header with auto decompress", async () => { startServer({ fetch(req) { const content = gzipSync(JSON.stringify({ message: "Hello world" })); @@ -360,10 +360,36 @@ describe("Headers", () => { }); }, }); + // if decompress: true, the content-encoding header should be removed + // by default decompress is true const result = await fetch(`http://${server.hostname}:${server.port}/`); - const value = result.headers.get("content-encoding"); + expect(result.headers.has("content-encoding")).toBeFalse(); + // body should be decompressed already const body = await result.json(); + expect(body).toBeDefined(); + expect(body.message).toBe("Hello world"); + }); + it("presence of content-encoding header(issue #5668)", async () => { + startServer({ + fetch(req) { + const content = gzipSync(JSON.stringify({ message: "Hello world" })); + return new Response(content, { + status: 200, + headers: { + "content-encoding": "gzip", + "content-type": "application/json", + }, + }); + }, + }); + // if decompress: true, the content-encoding header should be removed + const result = await fetch(`http://${server.hostname}:${server.port}/`, { decompress: false }); + const value = result.headers.get("content-encoding"); expect(value).toBe("gzip"); + // manually decompress and check the body + const decompressed = Bun.gunzipSync(await result.arrayBuffer()); + const decoder = new TextDecoder(); + const body = JSON.parse(decoder.decode(decompressed)); 
expect(body).toBeDefined(); expect(body.message).toBe("Hello world"); }); diff --git a/test/napi/napi-app/main.cpp b/test/napi/napi-app/main.cpp index 605510a45b4cbd..65a7fb7034ffdb 100644 --- a/test/napi/napi-app/main.cpp +++ b/test/napi/napi-app/main.cpp @@ -49,7 +49,7 @@ napi_value test_napi_get_value_string_utf8_with_buffer(const Napi::CallbackInfo std::cout << "Chars to copy: " << len << std::endl; std::cout << "Copied chars: " << copied << std::endl; std::cout << "Buffer: "; - for (int i = 0; i < BUF_SIZE; i++) { + for (size_t i = 0; i < BUF_SIZE; i++) { std::cout << (int)buf[i] << ", "; } std::cout << std::endl; diff --git a/test/package.json b/test/package.json index e56cfe57389a9d..ed0bf162782a24 100644 --- a/test/package.json +++ b/test/package.json @@ -48,7 +48,8 @@ "webpack-cli": "4.7.2", "yargs": "17.7.2", "@grpc/grpc-js": "1.9.9", - "@grpc/proto-loader": "0.7.10" + "@grpc/proto-loader": "0.7.10", + "got": "13.0.0" }, "private": true, "scripts": { diff --git a/test/transpiler/7324.test.ts b/test/transpiler/7324.test.ts new file mode 100644 index 00000000000000..24d402d2f5daa1 --- /dev/null +++ b/test/transpiler/7324.test.ts @@ -0,0 +1,21 @@ +import { test, expect } from "bun:test"; + +test("override is an accessibility modifier", () => { + class FooParent {} + + class FooChild extends FooParent {} + + class BarParent { + constructor(readonly foo: FooParent) {} + } + + class BarChild extends BarParent { + constructor(override foo: FooChild) { + super(foo); + } + } + + new BarChild(new FooChild()); + + expect().pass(); +});