diff --git a/src/bundler/bundle_v2.zig b/src/bundler/bundle_v2.zig
index 94f9966b7726b4..22476a5ba45891 100644
--- a/src/bundler/bundle_v2.zig
+++ b/src/bundler/bundle_v2.zig
@@ -2580,7 +2580,7 @@ pub const ParseTask = struct {
             .json => {
                 const trace = tracer(@src(), "ParseJSON");
                 defer trace.end();
-                const root = (try resolver.caches.json.parseJSON(log, source, allocator)) orelse Expr.init(E.Object, E.Object{}, Logger.Loc.Empty);
+                const root = (try resolver.caches.json.parsePackageJSON(log, source, allocator)) orelse Expr.init(E.Object, E.Object{}, Logger.Loc.Empty);
                 return JSAst.init((try js_parser.newLazyExportAST(allocator, bundler.options.define, opts, log, root, &source, "")).?);
             },
             .toml => {
diff --git a/src/cache.zig b/src/cache.zig
index 43273e9bb5f3f6..4cefe3b9f02d8f 100644
--- a/src/cache.zig
+++ b/src/cache.zig
@@ -314,6 +314,10 @@ pub const Json = struct {
         return try parse(cache, log, source, allocator, json_parser.ParseJSON);
     }

+    pub fn parsePackageJSON(cache: *@This(), log: *logger.Log, source: logger.Source, allocator: std.mem.Allocator) anyerror!?js_ast.Expr {
+        return try parse(cache, log, source, allocator, json_parser.ParseTSConfig);
+    }
+
     pub fn parseTSConfig(cache: *@This(), log: *logger.Log, source: logger.Source, allocator: std.mem.Allocator) anyerror!?js_ast.Expr {
         return try parse(cache, log, source, allocator, json_parser.ParseTSConfig);
     }
diff --git a/src/cli/bunx_command.zig b/src/cli/bunx_command.zig
index 79d5de7c42c420..34558a1b6a2d6a 100644
--- a/src/cli/bunx_command.zig
+++ b/src/cli/bunx_command.zig
@@ -81,7 +81,7 @@ pub const BunxCommand = struct {
         bun.JSAst.Expr.Data.Store.create(default_allocator);
         bun.JSAst.Stmt.Data.Store.create(default_allocator);

-        const expr = try bun.JSON.ParseJSONUTF8(&source, bundler.log, bundler.allocator);
+        const expr = try bun.JSON.ParsePackageJSONUTF8(&source, bundler.log, bundler.allocator);

         // choose the first package that fits
         if (expr.get("bin")) |bin_expr| {
diff --git a/src/cli/filter_arg.zig b/src/cli/filter_arg.zig
index 94f70886f45487..4ac81ae07678e4 100644
--- a/src/cli/filter_arg.zig
+++ b/src/cli/filter_arg.zig
@@ -66,7 +66,7 @@ pub fn getCandidatePackagePatterns(allocator: std.mem.Allocator, log: *bun.logge
         };
         defer allocator.free(json_source.contents);

-        const json = try json_parser.ParseJSONUTF8(&json_source, log, allocator);
+        const json = try json_parser.ParsePackageJSONUTF8(&json_source, log, allocator);

         const prop = json.asProperty("workspaces") orelse continue;
diff --git a/src/cli/init_command.zig b/src/cli/init_command.zig
index c878e28e784587..5e2baeb9f17beb 100644
--- a/src/cli/init_command.zig
+++ b/src/cli/init_command.zig
@@ -12,7 +12,7 @@ const std = @import("std");
 const open = @import("../open.zig");
 const CLI = @import("../cli.zig");
 const Fs = @import("../fs.zig");
-const ParseJSON = @import("../json_parser.zig").ParseJSONUTF8;
+const ParseJSON = @import("../json_parser.zig").ParsePackageJSONUTF8;
 const js_parser = bun.js_parser;
 const js_ast = bun.JSAst;
 const linker = @import("../linker.zig");
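Note: the mechanical renames above funnel every package.json read through `Json.parsePackageJSON` / `ParsePackageJSONUTF8`, and the cache-level function simply delegates to `json_parser.ParseTSConfig`, since the tsconfig code path already enables comments and trailing commas. A minimal sketch of the shared dialect; `ParserOptions` is an illustrative stand-in for the comptime options that `JSONLikeParser` takes, not the real `js_parser` API:

```zig
const std = @import("std");

/// Illustrative stand-in for JSONLikeParser's comptime options;
/// not the actual js_parser API surface.
const ParserOptions = struct {
    is_json: bool = false,
    allow_comments: bool = false,
    allow_trailing_commas: bool = false,
};

/// package.json and tsconfig.json want the same leniency, which is why
/// cache.Json.parsePackageJSON can delegate to the tsconfig parse function.
const package_json_options: ParserOptions = .{
    .is_json = true,
    .allow_comments = true,
    .allow_trailing_commas = true,
};

test "package.json uses the lenient JSON dialect" {
    try std.testing.expect(package_json_options.allow_comments);
    try std.testing.expect(package_json_options.allow_trailing_commas);
}
```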
diff --git a/src/install/install.zig b/src/install/install.zig
index 26eaf41a924a98..756f77092fd9eb 100644
--- a/src/install/install.zig
+++ b/src/install/install.zig
@@ -5078,7 +5078,7 @@ pub const PackageManager = struct {
             data.json_buf,
         );
         initializeStore();
-        const json = json_parser.ParseJSONUTF8(
+        const json = json_parser.ParsePackageJSONUTF8(
             &package_json_source,
             manager.log,
             manager.allocator,
@@ -7053,7 +7053,7 @@ pub const PackageManager = struct {
         const json_path = try bun.getFdPath(json_file.handle, &package_json_cwd_buf);
         const json_source = logger.Source.initPathString(json_path, json_buf[0..json_len]);
         initializeStore();
-        const json = try json_parser.ParseJSONUTF8(&json_source, ctx.log, ctx.allocator);
+        const json = try json_parser.ParsePackageJSONUTF8(&json_source, ctx.log, ctx.allocator);
         if (json.asProperty("workspaces")) |prop| {
             const json_array = switch (prop.expr.data) {
                 .e_array => |arr| arr,
@@ -8347,7 +8347,7 @@ pub const PackageManager = struct {
             current_package_json_buf[current_package_json_contents_len - 1] == '\n';

         initializeStore();
-        var current_package_json = json_parser.ParseJSONUTF8(&package_json_source, ctx.log, manager.allocator) catch |err| {
+        var current_package_json = json_parser.ParsePackageJSONUTF8(&package_json_source, ctx.log, manager.allocator) catch |err| {
             switch (Output.enable_ansi_colors) {
                 inline else => |enable_ansi_colors| {
                     ctx.log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), enable_ansi_colors) catch {};
@@ -8498,7 +8498,7 @@ pub const PackageManager = struct {
         // Now, we _re_ parse our in-memory edited package.json
         // so we can commit the version we changed from the lockfile
-        current_package_json = json_parser.ParseJSONUTF8(&source, ctx.log, manager.allocator) catch |err| {
+        current_package_json = json_parser.ParsePackageJSONUTF8(&source, ctx.log, manager.allocator) catch |err| {
             Output.prettyErrorln("error: package.json failed to parse due to error {s}", .{@errorName(err)});
             Global.exit(1);
             return;
diff --git a/src/install/lockfile.zig b/src/install/lockfile.zig
index f487e038920c52..3414db20a76242 100644
--- a/src/install/lockfile.zig
+++ b/src/install/lockfile.zig
@@ -2675,7 +2675,7 @@ pub const Package = extern struct {
             };

             initializeStore();
-            break :brk try json_parser.ParseJSONUTF8(
+            break :brk try json_parser.ParsePackageJSONUTF8(
                 &json_src,
                 log,
                 allocator,
@@ -3489,7 +3489,7 @@ pub const Package = extern struct {
         comptime features: Features,
     ) !void {
         initializeStore();
-        const json = json_parser.ParseJSONUTF8AlwaysDecode(&source, log, allocator) catch |err| {
+        const json = json_parser.ParsePackageJSONUTF8AlwaysDecode(&source, log, allocator) catch |err| {
             switch (Output.enable_ansi_colors) {
                 inline else => |enable_ansi_colors| {
                     log.printForLogLevelWithEnableAnsiColors(Output.errorWriter(), enable_ansi_colors) catch {};
@@ -3837,13 +3837,13 @@ pub const Package = extern struct {
         workspace_allocator: std.mem.Allocator,
         dir: std.fs.Dir,
         path: []const u8,
-        path_buf: *[bun.MAX_PATH_BYTES]u8,
+        path_buf: *bun.PathBuffer,
         name_to_copy: *[1024]u8,
         log: *logger.Log,
     ) !WorkspaceEntry {
         const path_to_use = if (path.len == 0) "package.json" else brk: {
             const paths = [_]string{ path, "package.json" };
-            break :brk bun.path.joinStringBuf(path_buf, &paths, .posix);
+            break :brk bun.path.joinStringBuf(path_buf, &paths, .auto);
         };

         // TODO: windows
@@ -3869,7 +3869,7 @@ pub const Package = extern struct {
             .name = name_to_copy[0..workspace_json.found_name.len],
             .name_loc = workspace_json.name_loc,
         };
-        debug("processWorkspaceName({s}) = {s}", .{ path_to_use, entry.name });
+        debug("processWorkspaceName({s}) = {s}", .{ path, entry.name });
         if (workspace_json.has_found_version) {
             entry.version = try allocator.dupe(u8, workspace_json.found_version);
         }
@@ -3896,8 +3896,9 @@ pub const Package = extern struct {
             var asterisked_workspace_paths = std.ArrayList(string).init(allocator);
             defer asterisked_workspace_paths.deinit();
-            const filepath_buf = allocator.create([bun.MAX_PATH_BYTES]u8) catch unreachable;
-            defer allocator.destroy(filepath_buf);
+            const filepath_bufOS = allocator.create(bun.PathBuffer) catch unreachable;
+            const filepath_buf = std.mem.asBytes(filepath_bufOS);
+            defer allocator.destroy(filepath_bufOS);

             for (arr.slice()) |item| {
                 defer fallback.fixed_buffer_allocator.reset();
@@ -3947,12 +3948,12 @@ pub const Package = extern struct {
                     workspace_allocator,
                     std.fs.cwd(),
                     input_path,
-                    filepath_buf,
+                    filepath_bufOS,
                     workspace_name_buf,
                     log,
                 ) catch |err| {
                     switch (err) {
-                        error.FileNotFound => {
+                        error.EISNOTDIR, error.EISDIR, error.EACCESS, error.EPERM, error.ENOENT, error.FileNotFound => {
                             log.addErrorFmt(
                                 source,
                                 item.loc,
@@ -4083,7 +4084,7 @@ pub const Package = extern struct {
                     workspace_allocator,
                     dir_fd.asDir(),
                     "",
-                    filepath_bufOS,
+                    filepath_bufOS,
                     workspace_name_buf,
                     log,
                 ) catch |err| {
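Note: `processWorkspaceName` previously tolerated only `error.FileNotFound` when a workspace's package.json could not be opened; the switch above now folds a broader set of open failures into the same "missing workspace package.json" diagnostic. A minimal sketch of that folding, with illustrative names (`OpenError` and `classify` are not in the patch; the real switch lives in the error handler shown above):

```zig
const std = @import("std");

/// Illustrative error set; mirrors the names matched in the diff above.
const OpenError = error{ EISNOTDIR, EISDIR, EACCESS, EPERM, ENOENT, FileNotFound, OutOfMemory };

/// Fold every "could not read that package.json" failure into one outcome,
/// leaving genuinely unexpected errors to be handled separately.
fn classify(err: OpenError) []const u8 {
    return switch (err) {
        error.EISNOTDIR, error.EISDIR, error.EACCESS, error.EPERM, error.ENOENT, error.FileNotFound => "missing workspace package.json",
        else => "unexpected",
    };
}

test "classify workspace open failures" {
    try std.testing.expectEqualStrings("missing workspace package.json", classify(error.ENOENT));
    try std.testing.expectEqualStrings("unexpected", classify(error.OutOfMemory));
}
```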
diff --git a/src/json_parser.zig b/src/json_parser.zig
index 2e9eff0e4c58b1..bbb7111d3bfc0c 100644
--- a/src/json_parser.zig
+++ b/src/json_parser.zig
@@ -378,6 +378,7 @@ pub const PackageJSONVersionChecker = struct {
         .is_json = true,
         .json_warn_duplicate_keys = false,
         .allow_trailing_commas = true,
+        .allow_comments = true,
     };

     pub fn init(allocator: std.mem.Allocator, source: *const logger.Source, log: *logger.Log) !Parser {
@@ -732,11 +733,12 @@ pub fn ParseJSON(
     return try parser.parseExpr(false, false);
 }

-/// Parse JSON
+/// Parse Package JSON
+/// Allow trailing commas & comments.
 /// This eagerly transcodes UTF-16 strings into UTF-8 strings
 /// Use this when the text may need to be reprinted to disk as JSON (and not as JavaScript)
 /// Eagerly converting UTF-8 to UTF-16 can cause a performance issue
-pub fn ParseJSONUTF8(
+pub fn ParsePackageJSONUTF8(
     source: *const logger.Source,
     log: *logger.Log,
     allocator: std.mem.Allocator,
@@ -761,18 +763,24 @@ pub fn ParseJSONUTF8(
         else => {},
     }

-    var parser = try JSONParser.init(allocator, source.*, log);
+    var parser = try JSONLikeParser(.{
+        .is_json = true,
+        .always_decode_escape_sequences = false,
+        .allow_comments = true,
+        .allow_trailing_commas = true,
+    }).init(allocator, source.*, log);
     bun.assert(parser.source().contents.len > 0);

     return try parser.parseExpr(false, true);
 }

-pub fn ParseJSONUTF8AlwaysDecode(
+pub fn ParsePackageJSONUTF8AlwaysDecode(
     source: *const logger.Source,
     log: *logger.Log,
     allocator: std.mem.Allocator,
 ) !Expr {
     const len = source.contents.len;
+
     switch (len) {
         // This is to be consisntent with how disabled JS files are handled
         0 => {
@@ -794,11 +802,47 @@ pub fn ParsePackageJSONUTF8AlwaysDecode(
     var parser = try JSONLikeParser(.{
         .is_json = true,
         .always_decode_escape_sequences = true,
+        .allow_comments = true,
+        .allow_trailing_commas = true,
     }).init(allocator, source.*, log);
-    if (comptime Environment.allow_assert) {
-        bun.assert(parser.source().contents.len > 0);
-    }
+    bun.assert(parser.source().contents.len > 0);
+
+    return try parser.parseExpr(false, true);
+}
+
+/// Parse JSON
+/// This eagerly transcodes UTF-16 strings into UTF-8 strings
+/// Use this when the text may need to be reprinted to disk as JSON (and not as JavaScript)
+/// Eagerly converting UTF-8 to UTF-16 can cause a performance issue
+pub fn ParseJSONUTF8(
+    source: *const logger.Source,
+    log: *logger.Log,
+    allocator: std.mem.Allocator,
+) !Expr {
+    const len = source.contents.len;
+
+    switch (len) {
+        // This is to be consistent with how disabled JS files are handled
+        0 => {
+            return Expr{ .loc = logger.Loc{ .start = 0 }, .data = empty_object_data };
+        },
+        // This is a fast pass I guess
+        2 => {
+            if (strings.eqlComptime(source.contents[0..1], "\"\"") or strings.eqlComptime(source.contents[0..1], "''")) {
+                return Expr{ .loc = logger.Loc{ .start = 0 }, .data = empty_string_data };
+            } else if (strings.eqlComptime(source.contents[0..1], "{}")) {
+                return Expr{ .loc = logger.Loc{ .start = 0 }, .data = empty_object_data };
+            } else if (strings.eqlComptime(source.contents[0..1], "[]")) {
+                return Expr{ .loc = logger.Loc{ .start = 0 }, .data = empty_array_data };
+            }
+        },
+        else => {},
     }

+    var parser = try JSONParser.init(allocator, source.*, log);
+    bun.assert(parser.source().contents.len > 0);
+
     return try parser.parseExpr(false, true);
 }
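Note: after this change json_parser exposes two UTF-8 entry points: `ParsePackageJSONUTF8` (and its `AlwaysDecode` variant) parse the lenient package.json dialect, while the re-added `ParseJSONUTF8` keeps strict JSON for every other caller. The dialect is fixed at comptime per entry point rather than checked at runtime. A minimal sketch of that selection, with hypothetical names (`JsonDialect` and `allowsComments` are illustrative, not the real API):

```zig
const std = @import("std");

const JsonDialect = enum { strict, package_json };

/// Illustrative: each entry point bakes its dialect in at comptime.
fn allowsComments(comptime dialect: JsonDialect) bool {
    return switch (dialect) {
        // ParseJSONUTF8: plain JSON stays strict
        .strict => false,
        // ParsePackageJSONUTF8: // and /* */ comments plus trailing commas
        .package_json => true,
    };
}

test "dialect flags" {
    try std.testing.expect(!allowsComments(.strict));
    try std.testing.expect(allowsComments(.package_json));
}
```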
diff --git a/src/resolver/package_json.zig b/src/resolver/package_json.zig
index ac13ca954e11a5..b99ef46aeaecc6 100644
--- a/src/resolver/package_json.zig
+++ b/src/resolver/package_json.zig
@@ -617,7 +617,7 @@ pub const PackageJSON = struct {
         var json_source = logger.Source.initPathString(key_path.text, entry.contents);
         json_source.path.pretty = r.prettyPath(json_source.path);

-        const json: js_ast.Expr = (r.caches.json.parseJSON(r.log, json_source, allocator) catch |err| {
+        const json: js_ast.Expr = (r.caches.json.parsePackageJSON(r.log, json_source, allocator) catch |err| {
             if (Environment.isDebug) {
                 Output.printError("{s}: JSON parse error: {s}", .{ package_json_path, @errorName(err) });
             }
diff --git a/src/resolver/resolve_path.zig b/src/resolver/resolve_path.zig
index 19bd7e14785e2c..8fd3cadaf10923 100644
--- a/src/resolver/resolve_path.zig
+++ b/src/resolver/resolve_path.zig
@@ -1197,11 +1197,18 @@ pub fn joinZBuf(buf: []u8, _parts: anytype, comptime _platform: Platform) [:0]co
     return buf[start_offset..][0..joined.len :0];
 }
 pub fn joinStringBuf(buf: []u8, parts: anytype, comptime _platform: Platform) []const u8 {
+    return joinStringBufT(u8, buf, parts, _platform);
+}
+
+pub fn joinStringBufW(buf: []u16, parts: anytype, comptime _platform: Platform) []const u16 {
+    return joinStringBufT(u16, buf, parts, _platform);
+}
+
+pub fn joinStringBufT(comptime T: type, buf: []T, parts: anytype, comptime _platform: Platform) []const T {
     const platform = comptime _platform.resolve();
     var written: usize = 0;
-    var temp_buf_: [4096]u8 = undefined;
-    var temp_buf: []u8 = &temp_buf_;
+    var temp_buf_: [4096]T = undefined;
+    var temp_buf: []T = &temp_buf_;
     var free_temp_buf = false;
     defer {
         if (free_temp_buf) {
@@ -1216,7 +1223,7 @@
     }

     if (count * 2 > temp_buf.len) {
-        temp_buf = bun.default_allocator.alloc(u8, count * 2) catch @panic("Out of memory");
+        temp_buf = bun.default_allocator.alloc(T, count * 2) catch @panic("Out of memory");
         free_temp_buf = true;
     }

@@ -1230,8 +1237,14 @@
             written += 1;
         }

-        bun.copy(u8, temp_buf[written..], part);
-        written += part.len;
+        const Element = std.meta.Elem(@TypeOf(part));
+        if (comptime T == u16 and Element == u8) {
+            const wrote = bun.strings.convertUTF8toUTF16InBuffer(temp_buf[written..], part);
+            written += wrote.len;
+        } else {
+            bun.copy(T, temp_buf[written..], part);
+            written += part.len;
+        }
     }

     if (written == 0) {
@@ -1239,7 +1252,7 @@
         buf[0] = '.';
         return buf[0..1];
     }

-    return normalizeStringNode(temp_buf[0..written], buf, platform);
+    return normalizeStringNodeT(T, temp_buf[0..written], buf, platform);
 }

 pub fn joinAbsStringBuf(cwd: []const u8, buf: []u8, _parts: anytype, comptime _platform: Platform) []const u8 {
@@ -1617,26 +1630,37 @@ pub fn normalizeStringNode(
     buf: []u8,
     comptime platform: Platform,
 ) []u8 {
+    return normalizeStringNodeT(u8, str, buf, platform);
+}
+
+pub fn normalizeStringNodeT(
+    comptime T: type,
+    str: []const T,
+    buf: []T,
+    comptime platform: Platform,
+) []const T {
     if (str.len == 0) {
         buf[0] = '.';
         return buf[0..1];
     }

-    const is_absolute = platform.isAbsolute(str);
-    const trailing_separator = platform.isSeparator(str[str.len - 1]);
+    const is_absolute = platform.isAbsoluteT(T, str);
+    const trailing_separator = platform.isSeparatorT(T, str[str.len - 1]);

     // `normalizeStringGeneric` handles absolute path cases for windows
     // we should not prefix with /
     var buf_ = if (platform == .windows) buf else buf[1..];

-    var out = if (!is_absolute) normalizeStringGeneric(
+    var out = if (!is_absolute) normalizeStringGenericT(
+        T,
         str,
         buf_,
         true,
         comptime platform.resolve().separator(),
         comptime platform.getSeparatorFuncT(),
         false,
-    ) else normalizeStringGeneric(
+    ) else normalizeStringGenericT(
+        T,
         str,
         buf_,
         false,
@@ -1652,7 +1676,8 @@ pub fn normalizeStringNode(
     }

     if (trailing_separator) {
-        buf[0..2].* = platform.trailingSeparator();
+        const sep = platform.trailingSeparator();
+        buf[0..2].* = .{ sep[0], sep[1] };
         return buf[0..2];
     }

@@ -1661,7 +1686,7 @@ pub fn normalizeStringNode(
     }

     if (trailing_separator) {
-        if (!platform.isSeparator(out[out.len - 1])) {
+        if (!platform.isSeparatorT(T, out[out.len - 1])) {
             buf_[out.len] = platform.separator();
             out = buf_[0 .. out.len + 1];
         }
     }
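Note: `joinStringBuf` becomes a thin wrapper over `joinStringBufT`, which is generic over the element type so Windows code can join directly into `u16` buffers; UTF-8 parts are widened on the fly instead of joining in UTF-8 and transcoding afterwards. A toy version of the idea under simplifying assumptions (`joinT` is a stand-in, ASCII-only widening; the real code uses `bun.strings.convertUTF8toUTF16InBuffer` and then normalizes the result):

```zig
const std = @import("std");

/// Toy stand-in for the T-generic join: concatenates parts with a
/// separator into `buf`, widening u8 parts when T == u16.
fn joinT(comptime T: type, buf: []T, parts: []const []const u8, sep: T) []const T {
    var written: usize = 0;
    for (parts, 0..) |part, i| {
        if (i != 0) {
            buf[written] = sep;
            written += 1;
        }
        for (part) |byte| {
            buf[written] = byte; // implicit u8 -> T widening copy
            written += 1;
        }
    }
    return buf[0..written];
}

test "joinT widens to u16" {
    var buf: [32]u16 = undefined;
    const out = joinT(u16, &buf, &.{ "packages", "foo" }, '\\');
    try std.testing.expectEqualSlices(u16, std.unicode.utf8ToUtf16LeStringLiteral("packages\\foo"), out);
}
```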
diff --git a/src/resolver/resolver.zig b/src/resolver/resolver.zig
index 1d87d1e2026c38..34dec389f1c38c 100644
--- a/src/resolver/resolver.zig
+++ b/src/resolver/resolver.zig
@@ -731,7 +731,7 @@ pub const Resolver = struct {
         // support passing a package.json or path to a package
         const pkg: *const PackageJSON = result.package_json orelse r.packageJSONForResolvedNodeModuleWithIgnoreMissingName(&result, true) orelse return error.MissingPackageJSON;

-        const json = (try r.caches.json.parseJSON(r.log, pkg.source, r.allocator)) orelse return error.JSONParseError;
+        const json = (try r.caches.json.parsePackageJSON(r.log, pkg.source, r.allocator)) orelse return error.JSONParseError;

         pkg.loadFrameworkWithPreference(pair, json, r.allocator, load_defines, preference);
         const dir = pkg.source.path.sourceDir();
diff --git a/src/sys.zig b/src/sys.zig
index 457e309aae579e..7de54be790b249 100644
--- a/src/sys.zig
+++ b/src/sys.zig
@@ -990,9 +990,23 @@ pub noinline fn openFileAtWindowsA(
 }

 pub fn openatWindowsT(comptime T: type, dir: bun.FileDescriptor, path: []const T, flags: bun.Mode) Maybe(bun.FileDescriptor) {
+    return openatWindowsTMaybeNormalize(T, dir, path, flags, true);
+}
+
+fn openatWindowsTMaybeNormalize(comptime T: type, dir: bun.FileDescriptor, path: []const T, flags: bun.Mode, comptime normalize: bool) Maybe(bun.FileDescriptor) {
     if (flags & O.DIRECTORY != 0) {
+        const windows_options: WindowsOpenDirOptions = .{ .iterable = flags & O.PATH == 0, .no_follow = flags & O.NOFOLLOW != 0, .can_rename_or_delete = false };
+        if (comptime !normalize and T == u16) {
+            return openDirAtWindowsNtPath(dir, path, windows_options);
+        }
+
         // we interpret O_PATH as meaning that we don't want iteration
-        return openDirAtWindowsT(T, dir, path, .{ .iterable = flags & O.PATH == 0, .no_follow = flags & O.NOFOLLOW != 0, .can_rename_or_delete = false });
+        return openDirAtWindowsT(
+            T,
+            dir,
+            path,
+            windows_options,
+        );
     }

     const nonblock = flags & O.NONBLOCK != 0;
@@ -1029,6 +1043,10 @@ pub fn openatWindowsT(comptime T: type, dir: bun.FileDescriptor, path: []const T
     const options: windows.ULONG = if (follow_symlinks) file_or_dir_flag | blocking_flag else file_or_dir_flag | windows.FILE_OPEN_REPARSE_POINT;

+    if (comptime !normalize and T == u16) {
+        return openFileAtWindowsNtPath(dir, path, access_mask, creation, options);
+    }
+
     return openFileAtWindowsT(T, dir, path, access_mask, creation, options);
 }

@@ -2769,8 +2787,22 @@ pub const File = struct {
     /// 1. Open a file for reading
     /// 2. Read the file to a buffer
     /// 3. Return the File handle and the buffer
-    pub fn readFileFrom(dir_fd: anytype, path: [:0]const u8, allocator: std.mem.Allocator) Maybe(struct { File, []u8 }) {
-        const this = switch (bun.sys.openat(from(dir_fd).handle, path, O.RDONLY, 0)) {
+    pub fn readFileFrom(dir_fd: anytype, path: anytype, allocator: std.mem.Allocator) Maybe(struct { File, []u8 }) {
+        const ElementType = std.meta.Elem(@TypeOf(path));
+
+        const rc = brk: {
+            if (comptime Environment.isWindows and ElementType == u16) {
+                break :brk openatWindowsTMaybeNormalize(u16, from(dir_fd).handle, path, O.RDONLY, false);
+            }
+
+            if (comptime ElementType == u8 and std.meta.sentinel(@TypeOf(path)) == null) {
+                break :brk Syscall.openatA(from(dir_fd).handle, path, O.RDONLY, 0);
+            }
+
+            break :brk Syscall.openat(from(dir_fd).handle, path, O.RDONLY, 0);
+        };
+
+        const this = switch (rc) {
             .err => |err| return .{ .err = err },
             .result => |fd| from(fd),
         };
@@ -2790,7 +2822,7 @@
     /// 2. Read the file to a buffer
     /// 3. Close the file
     /// 4. Return the buffer
-    pub fn readFrom(dir_fd: anytype, path: [:0]const u8, allocator: std.mem.Allocator) Maybe([]u8) {
+    pub fn readFrom(dir_fd: anytype, path: anytype, allocator: std.mem.Allocator) Maybe([]u8) {
         const file, const bytes = switch (readFileFrom(dir_fd, path, allocator)) {
             .err => |err| return .{ .err = err },
             .result => |result| result,
         };
@@ -2800,21 +2832,16 @@
         return .{ .result = bytes };
     }

-    pub fn toSource(path: anytype, allocator: std.mem.Allocator) Maybe(bun.logger.Source) {
-        if (std.meta.sentinel(@TypeOf(path)) == null) {
-            return toSource(
-                &(std.os.toPosixPath(path) catch return .{
-                    .err = Error.oom,
-                }),
-                allocator,
-            );
-        }
-
-        return switch (readFrom(std.fs.cwd(), path, allocator)) {
+    pub fn toSourceAt(dir_fd: anytype, path: anytype, allocator: std.mem.Allocator) Maybe(bun.logger.Source) {
+        return switch (readFrom(dir_fd, path, allocator)) {
             .err => |err| .{ .err = err },
             .result => |bytes| .{ .result = bun.logger.Source.initPathString(path, bytes) },
         };
     }
+
+    pub fn toSource(path: anytype, allocator: std.mem.Allocator) Maybe(bun.logger.Source) {
+        return toSourceAt(std.fs.cwd(), path, allocator);
+    }
 };

 pub inline fn toLibUVOwnedFD(
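Note: `File.readFileFrom`, `readFrom`, and the new `toSourceAt` now take the path as `anytype` and dispatch at comptime: `u16` slices on Windows go straight to the NT open without re-normalization, non-sentinel `u8` slices go through the null-terminating `openatA` variant, and `[:0]const u8` keeps the old direct path. A toy sketch of that comptime branching (`describePath` is hypothetical, purely to show the dispatch shape):

```zig
const std = @import("std");

/// Toy comptime dispatch mirroring readFileFrom's path handling; the real
/// code calls Syscall.openat/openatA or the wide-char Windows NT open.
fn describePath(path: anytype) []const u8 {
    const Elem = std.meta.Elem(@TypeOf(path));
    if (Elem == u16) return "utf16: Windows NT path, no re-normalization";
    if (std.meta.sentinel(@TypeOf(path)) == null) return "u8 slice: needs a null-terminated copy";
    return "u8 sentineled: passed straight to openat";
}

test "path dispatch" {
    const wide = std.unicode.utf8ToUtf16LeStringLiteral("C:\\pkg");
    try std.testing.expectEqualStrings(
        "utf16: Windows NT path, no re-normalization",
        describePath(@as([]const u16, wide)),
    );
    try std.testing.expectEqualStrings(
        "u8 sentineled: passed straight to openat",
        describePath(@as([:0]const u8, "package.json")),
    );
}
```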
diff --git a/test/bundler/esbuild/packagejson.test.ts b/test/bundler/esbuild/packagejson.test.ts
index fcb2c10eccb2a2..0b065f697b7b57 100644
--- a/test/bundler/esbuild/packagejson.test.ts
+++ b/test/bundler/esbuild/packagejson.test.ts
@@ -28,7 +28,7 @@ describe("bundler", () => {
       stdout: "123",
     },
   });
-  itBundled("packagejson/BadMain", {
+  itBundled("packagejson/trailing-comma", {
     files: {
       "/Users/user/project/src/entry.js": /* js */ `
         import fn from 'demo-pkg'
         console.log(fn())
       `,
       "/Users/user/project/node_modules/demo-pkg/package.json": /* json */ `
         {
-          "main": "./does-not-exist.js"
+          // very comment!!
+          /** even multi-line comment!! */
+          /** such feature much compatible very ecosystem */
+          "main": "./custom-main.js",
         }
       `,
-      "/Users/user/project/node_modules/demo-pkg/index.js": /* js */ `
+      "/Users/user/project/node_modules/demo-pkg/custom-main.js": /* js */ `
         module.exports = function() {
           return 123
         }
       `,
     },
     run: {
       stdout: "123",
     },
   });
@@ -49,30 +52,7 @@ describe("bundler", () => {
-  itBundled("packagejson/SyntaxErrorComment", {
-    todo: true,
-    files: {
-      "/Users/user/project/src/entry.js": /* js */ `
-        import fn from 'demo-pkg'
-        console.log(fn())
-      `,
-      "/Users/user/project/node_modules/demo-pkg/package.json": /* json */ `
-        {
-          // Single-line comment
-          "a": 1
-        }
-      `,
-      "/Users/user/project/node_modules/demo-pkg/index.js": /* js */ `
-        module.exports = function() {
-          return 123
-        }
-      `,
-    },
-    bundleErrors: {
-      "/Users/user/project/node_modules/demo-pkg/package.json": ["JSON does not support comments"],
-    },
-  });
-  itBundled("packagejson/SyntaxErrorTrailingComma", {
+  itBundled("packagejson/BadMain", {
     files: {
       "/Users/user/project/src/entry.js": /* js */ `
         import fn from 'demo-pkg'
         console.log(fn())
       `,
@@ -80,8 +60,7 @@
       "/Users/user/project/node_modules/demo-pkg/package.json": /* json */ `
         {
-          "a": 1,
-          "b": 2,
+          "main": "./does-not-exist.js"
         }
       `,
       "/Users/user/project/node_modules/demo-pkg/index.js": /* js */ `
         module.exports = function() {
           return 123
         }
       `,
     },
@@ -90,10 +69,55 @@
-    bundleErrors: {
-      "/Users/user/project/node_modules/demo-pkg/package.json": ["JSON does not support trailing commas"],
+    run: {
+      stdout: "123",
     },
   });
+  // itBundled("packagejson/SyntaxErrorComment", {
+  //   todo: true,
+  //   files: {
+  //     "/Users/user/project/src/entry.js": /* js */ `
+  //       import fn from 'demo-pkg'
+  //       console.log(fn())
+  //     `,
+  //     "/Users/user/project/node_modules/demo-pkg/package.json": /* json */ `
+  //       {
+  //         // Single-line comment
+  //         "a": 1
+  //       }
+  //     `,
+  //     "/Users/user/project/node_modules/demo-pkg/index.js": /* js */ `
+  //       module.exports = function() {
+  //         return 123
+  //       }
+  //     `,
+  //   },
+  //   bundleErrors: {
+  //     "/Users/user/project/node_modules/demo-pkg/package.json": ["JSON does not support comments"],
+  //   },
+  // });
+  // itBundled("packagejson/SyntaxErrorTrailingComma", {
+  //   files: {
+  //     "/Users/user/project/src/entry.js": /* js */ `
+  //       import fn from 'demo-pkg'
+  //       console.log(fn())
+  //     `,
+  //     "/Users/user/project/node_modules/demo-pkg/package.json": /* json */ `
+  //       {
+  //         "a": 1,
+  //         "b": 2,
+  //       }
+  //     `,
+  //     "/Users/user/project/node_modules/demo-pkg/index.js": /* js */ `
+  //       module.exports = function() {
+  //         return 123
+  //       }
+  //     `,
+  //   },
+  //   bundleErrors: {
+  //     "/Users/user/project/node_modules/demo-pkg/package.json": ["JSON does not support trailing commas"],
+  //   },
+  // });
   itBundled("packagejson/Module", {
     // GENERATED
     files: {
diff --git a/test/cli/install/bun-install.test.ts b/test/cli/install/bun-install.test.ts
index 7e8c1c1e8b6c56..4eab91b7c7818b 100644
--- a/test/cli/install/bun-install.test.ts
+++ b/test/cli/install/bun-install.test.ts
@@ -44,6 +44,47 @@ afterAll(dummyAfterAll);
 beforeEach(dummyBeforeEach);
 afterEach(dummyAfterEach);

+it("should not error when package.json has comments and trailing commas", async () => {
+  const urls: string[] = [];
+  setHandler(dummyRegistry(urls));
+  await writeFile(
+    join(package_dir, "package.json"),
+    `
+    {
+      // such comment!
+      "name": "foo",
+      /** even multi-line comment!! */
+      "version": "0.0.1",
+      "dependencies": {
+        "bar": "^1",
+      },
+    }
+`,
+  );
*/ + "version": "0.0.1", + "dependencies": { + "bar": "^1", + }, + } +`, + ); + const { stdout, stderr, exited } = spawn({ + cmd: [bunExe(), "install"], + cwd: package_dir, + stdout: "pipe", + stdin: "pipe", + stderr: "pipe", + env, + }); + expect(stderr).toBeDefined(); + const err = await new Response(stderr).text(); + expect(err).toContain('error: No version matching "^1" found for specifier "bar" (but package exists)'); + expect(stdout).toBeDefined(); + expect(await new Response(stdout).text()).toBeEmpty(); + expect(await exited).toBe(1); + expect(urls.sort()).toEqual([`${root_url}/bar`]); + expect(requested).toBe(1); + try { + await access(join(package_dir, "bun.lockb")); + expect(() => {}).toThrow(); + } catch (err: any) { + expect(err.code).toBe("ENOENT"); + } +}); + describe("chooses", () => { async function runTest(latest: string, range: string, chosen = "0.0.5") { const exeName: string = { diff --git a/test/cli/install/bun-run.test.ts b/test/cli/install/bun-run.test.ts index efbfbe0be7f23c..9b8c8cb08f02e2 100644 --- a/test/cli/install/bun-run.test.ts +++ b/test/cli/install/bun-run.test.ts @@ -17,15 +17,19 @@ beforeEach(async () => { for (let withRun of [false, true]) { describe(withRun ? "bun run" : "bun", () => { describe("should work with .", () => { - it("respecting 'main' field", async () => { + it("respecting 'main' field and allowing trailing commas/comments in package.json", async () => { await writeFile(join(run_dir, "test.js"), "console.log('Hello, world!');"); await writeFile( join(run_dir, "package.json"), - JSON.stringify({ - name: "test", - version: "0.0.0", - main: "test.js", - }), + `{ + // single-line comment + "name": "test", + /** even multi-line comment!! + * such feature much compatible very ecosystem + */ + "version": "0.0.0", + "main": "test.js", + }`, ); const { stdout, stderr, exitCode } = spawnSync({ cmd: [bunExe(), withRun ? "run" : "", "."].filter(Boolean),