install: fix issues with patching hoisted dependencies in workspaces #12141

Merged · 6 commits · Jul 2, 2024
230 changes: 160 additions & 70 deletions src/install/install.zig
@@ -10152,6 +10152,16 @@ pub const PackageManager = struct {
}
}

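// Finds the first node_modules folder whose dependencies contain any of the given
// (dependency id, package id) pairs. With hoisting, the same package can be reachable
// through several dependency ids, so every candidate pair is checked.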
fn nodeModulesFolderForDependencyIDs(iterator: *Lockfile.Tree.Iterator, ids: []const IdPair) !?Lockfile.Tree.NodeModulesFolder {
while (iterator.nextNodeModulesFolder(null)) |node_modules| {
for (ids) |id| {
_ = std.mem.indexOfScalar(DependencyID, node_modules.dependencies, id[0]) orelse continue;
return node_modules;
}
}
return null;
}

fn nodeModulesFolderForDependencyID(iterator: *Lockfile.Tree.Iterator, dependency_id: DependencyID) !?Lockfile.Tree.NodeModulesFolder {
while (iterator.nextNodeModulesFolder(null)) |node_modules| {
_ = std.mem.indexOfScalar(DependencyID, node_modules.dependencies, dependency_id) orelse continue;
@@ -10161,65 +10171,154 @@ pub const PackageManager = struct {
return null;
}

fn pkgDepIdForNameAndVersion(
const IdPair = struct { DependencyID, PackageID };

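// Resolves a package name (and optional version) from the lockfile to its package id
// and the node_modules folder that actually contains it, accounting for hoisted installs.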
fn pkgInfoForNameAndVersion(
lockfile: *Lockfile,
iterator: *Lockfile.Tree.Iterator,
pkg_maybe_version_to_patch: []const u8,
name: []const u8,
version: ?[]const u8,
) struct { PackageID, DependencyID } {
) struct { PackageID, Lockfile.Tree.NodeModulesFolder } {
var sfb = std.heap.stackFallback(@sizeOf(IdPair) * 4, lockfile.allocator);
var pairs = std.ArrayList(IdPair).initCapacity(sfb.get(), 8) catch bun.outOfMemory();
defer pairs.deinit();

const name_hash = String.Builder.stringHash(name);

const strbuf = lockfile.buffers.string_bytes.items;

const dependency_id: DependencyID, const pkg_id: PackageID = brk: {
var buf: [1024]u8 = undefined;
const dependencies = lockfile.buffers.dependencies.items;

var matches_found: u32 = 0;
var maybe_first_match: ?struct { DependencyID, PackageID } = null;
for (dependencies, 0..) |dep, dep_id| {
if (dep.name_hash != name_hash) continue;
matches_found += 1;
const pkg_id = lockfile.buffers.resolutions.items[dep_id];
if (pkg_id == invalid_package_id) continue;
const pkg = lockfile.packages.get(pkg_id);
if (version) |v| {
const label = std.fmt.bufPrint(buf[0..], "{}", .{pkg.resolution.fmt(strbuf, .posix)}) catch @panic("Resolution name too long");
if (std.mem.eql(u8, label, v)) break :brk .{ @intCast(dep_id), pkg_id };
} else maybe_first_match = .{ @intCast(dep_id), pkg_id };
var buf: [1024]u8 = undefined;
const dependencies = lockfile.buffers.dependencies.items;

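// Collect every (dependency id, package id) pair whose dependency name (and, when a
// version was supplied, resolved version) matches the requested package.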
for (dependencies, 0..) |dep, dep_id| {
if (dep.name_hash != name_hash) continue;
const pkg_id = lockfile.buffers.resolutions.items[dep_id];
if (pkg_id == invalid_package_id) continue;
const pkg = lockfile.packages.get(pkg_id);
if (version) |v| {
const label = std.fmt.bufPrint(buf[0..], "{}", .{pkg.resolution.fmt(strbuf, .posix)}) catch @panic("Resolution name too long");
if (std.mem.eql(u8, label, v)) {
pairs.append(.{ @intCast(dep_id), pkg_id }) catch bun.outOfMemory();
}
} else {
pairs.append(.{ @intCast(dep_id), pkg_id }) catch bun.outOfMemory();
}
}

if (pairs.items.len == 0) {
Output.prettyErrorln("\n<r><red>error<r>: package <b>{s}<r> not found<r>", .{pkg_maybe_version_to_patch});
Global.crash();
return;
}

const first_match = maybe_first_match orelse {
Output.prettyErrorln("\n<r><red>error<r>: package <b>{s}<r> not found<r>", .{pkg_maybe_version_to_patch});
// user supplied a version e.g. `is-even@1.0.0`
if (version != null) {
if (pairs.items.len == 1) {
const dep_id, const pkg_id = pairs.items[0];
const folder = (try nodeModulesFolderForDependencyID(iterator, dep_id)) orelse {
Output.prettyError(
"<r><red>error<r>: could not find the folder for <b>{s}<r> in node_modules<r>\n<r>",
.{pkg_maybe_version_to_patch},
);
Global.crash();
};
return .{
pkg_id,
folder,
};
}

// we found multiple dependents of the supplied pkg + version
// the final package in the node_modules might be hoisted
// so we are going to try looking for each dep id in node_modules
_, const pkg_id = pairs.items[0];
const folder = (try nodeModulesFolderForDependencyIDs(iterator, pairs.items)) orelse {
Output.prettyError(
"<r><red>error<r>: could not find the folder for <b>{s}<r> in node_modules<r>\n<r>",
.{pkg_maybe_version_to_patch},
);
Global.crash();
return;
};

if (matches_found > 1) {
Output.prettyErrorln(
"\n<r><red>error<r>: Found multiple versions of <b>{s}<r>, please specify a precise version from the following list:<r>\n",
.{name},
return .{
pkg_id,
folder,
};
}

// Otherwise the user did not supply a version, just the pkg name

// Only one match, let's use it
if (pairs.items.len == 1) {
const dep_id, const pkg_id = pairs.items[0];
const folder = (try nodeModulesFolderForDependencyID(iterator, dep_id)) orelse {
Output.prettyError(
"<r><red>error<r>: could not find the folder for <b>{s}<r> in node_modules<r>\n<r>",
.{pkg_maybe_version_to_patch},
);
var i: usize = 0;
const pkg_hashes = lockfile.packages.items(.name_hash);
while (i < lockfile.packages.len) {
if (std.mem.indexOfScalar(u64, pkg_hashes[i..], name_hash)) |idx| {
defer i += idx + 1;
const pkg_id = i + idx;
const pkg = lockfile.packages.get(pkg_id);
if (!std.mem.eql(u8, pkg.name.slice(strbuf), name)) continue;

Output.prettyError(" {s}@<blue>{}<r>\n", .{ pkg.name.slice(strbuf), pkg.resolution.fmt(strbuf, .posix) });
} else break;
}
Global.crash();
return;
}
};
return .{
pkg_id,
folder,
};
}

// Otherwise we have multiple matches
//
// There are two cases:
// a) the multiple matches are all the same underlying package (this happens because there could be multiple dependents of the same package)
// b) the matches are actually different packages, we'll prompt the user to select which one

break :brk .{ first_match[0], first_match[1] };
_, const pkg_id = pairs.items[0];
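// Count how many of the matches resolve to the same package as the first match.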
const count = count: {
var count: u32 = 0;
for (pairs.items) |pair| {
if (pair[1] == pkg_id) count += 1;
}
break :count count;
};

return .{ pkg_id, dependency_id };
// Disambiguate case a) from b)
if (count == pairs.items.len) {
// It may be hoisted, so we'll try the first one that matches
const folder = (try nodeModulesFolderForDependencyIDs(iterator, pairs.items)) orelse {
Output.prettyError(
"<r><red>error<r>: could not find the folder for <b>{s}<r> in node_modules<r>\n<r>",
.{pkg_maybe_version_to_patch},
);
Global.crash();
};
return .{
pkg_id,
folder,
};
}

Output.prettyErrorln(
"\n<r><red>error<r>: Found multiple versions of <b>{s}<r>, please specify a precise version from the following list:<r>\n",
.{name},
);
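// Print each distinct package once: after a package is printed, later pairs that
// resolve to the same package id are marked invalid so they are skipped.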
var i: usize = 0;
while (i < pairs.items.len) : (i += 1) {
_, const pkgid = pairs.items[i];
if (pkgid == invalid_package_id)
continue;

const pkg = lockfile.packages.get(pkgid);

Output.prettyError(" {s}@<blue>{}<r>\n", .{ pkg.name.slice(strbuf), pkg.resolution.fmt(strbuf, .posix) });

if (i + 1 < pairs.items.len) {
for (pairs.items[i + 1 ..]) |*p| {
if (p[1] == pkgid) {
p[1] = invalid_package_id;
}
}
}
}
Global.crash();
}

const PatchArgKind = enum {
@@ -10375,17 +10474,7 @@ pub const PackageManager = struct {
.name_and_version => brk: {
const pkg_maybe_version_to_patch = argument;
const name, const version = Dependency.splitNameAndVersion(pkg_maybe_version_to_patch);
const result = pkgDepIdForNameAndVersion(manager.lockfile, pkg_maybe_version_to_patch, name, version);
const pkg_id = result[0];
const dependency_id = result[1];

const folder = (try nodeModulesFolderForDependencyID(&iterator, dependency_id)) orelse {
Output.prettyError(
"<r><red>error<r>: could not find the folder for <b>{s}<r> in node_modules<r>\n<r>",
.{pkg_maybe_version_to_patch},
);
Global.crash();
};
const pkg_id, const folder = pkgInfoForNameAndVersion(manager.lockfile, &iterator, pkg_maybe_version_to_patch, name, version);

const pkg = manager.lockfile.packages.get(pkg_id);
const pkg_name = pkg.name.slice(strbuf);
@@ -10471,6 +10560,7 @@ pub const PackageManager = struct {
out_dir: if (bun.Environment.isWindows) []const u16 else void,
buf1: if (bun.Environment.isWindows) []u16 else void,
buf2: if (bun.Environment.isWindows) []u16 else void,
tmpdir_in_node_modules: if (bun.Environment.isWindows) std.fs.Dir else void,
) !u32 {
var real_file_count: u32 = 0;

@@ -10485,10 +10575,10 @@ pub const PackageManager = struct {
const openFile = std.fs.Dir.openFile;
const createFile = std.fs.Dir.createFile;

// - rename node_modules/$PKG/$FILE -> node_modules/$PKG/$TMPNAME
// - create node_modules/$PKG/$FILE
// - copy: cache/$PKG/$FILE -> node_modules/$PKG/$FILE
// - unlink: $TMPDIR/$FILE
// 1. rename the original file in node_modules into tmp_dir_in_node_modules
// 2. create the file again
// 3. copy the cache file to the newly re-created file
// 4. profit
if (comptime bun.Environment.isWindows) {
var tmpbuf: [1024]u8 = undefined;
const basename = bun.strings.fromWPath(pathbuf2[0..], entry.basename);
@@ -10504,7 +10594,7 @@ pub const PackageManager = struct {
if (bun.sys.renameatConcurrently(
bun.toFD(destination_dir_.fd),
entrypathZ,
bun.toFD(destination_dir_.fd),
bun.toFD(tmpdir_in_node_modules.fd),
tmpname,
.{ .move_fallback = true },
).asErr()) |e| {
@@ -10584,13 +10674,23 @@ pub const PackageManager = struct {
Global.crash();
}
out_dir = buf2[0..outlen];
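// Create a uniquely named temp folder inside node_modules to receive the originals
// while they are replaced; the deferred deleteTree below cleans it up.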
var tmpbuf: [1024]u8 = undefined;
const tmpname = bun.span(bun.fs.FileSystem.instance.tmpname("tffbp", tmpbuf[0..], bun.fastRandom()) catch |e| {
Output.prettyError("<r><red>error<r>: copying file {s}", .{@errorName(e)});
Global.crash();
});
const temp_folder_in_node_modules = try node_modules_folder.makeOpenPath(tmpname, .{});
defer {
node_modules_folder.deleteTree(tmpname) catch {};
}
_ = try FileCopier.copy(
node_modules_folder,
&walker,
in_dir,
out_dir,
&buf1,
&buf2,
temp_folder_in_node_modules,
);
} else if (Environment.isPosix) {
_ = try FileCopier.copy(
@@ -10600,6 +10700,7 @@ pub const PackageManager = struct {
{},
{},
{},
{},
);
}
}
@@ -10772,19 +10873,8 @@ pub const PackageManager = struct {
},
.name_and_version => brk: {
const name, const version = Dependency.splitNameAndVersion(argument);
const result = pkgDepIdForNameAndVersion(lockfile, argument, name, version);
const pkg_id: PackageID = result[0];
const dependency_id: DependencyID = result[1];
const node_modules = (try nodeModulesFolderForDependencyID(
&iterator,
dependency_id,
)) orelse {
Output.prettyError(
"<r><red>error<r>: could not find the folder for <b>{s}<r> in node_modules<r>\n<r>",
.{argument},
);
Global.crash();
};
const pkg_id, const node_modules = pkgInfoForNameAndVersion(lockfile, &iterator, argument, name, version);

const changes_dir = bun.path.joinZBuf(pathbuf[0..], &[_][]const u8{
node_modules.relative_path,
name,
4 changes: 2 additions & 2 deletions src/install/patch_install.zig
@@ -367,7 +367,7 @@ pub const PatchTask = struct {
)) {
.result => |fd| fd,
.err => |e| {
return try log.addErrorFmtNoLoc(this.manager.allocator, "{}", .{e});
return try log.addErrorFmtNoLoc(this.manager.allocator, "failed adding bun tag: {}", .{e.withPath(buntagbuf[0 .. bun_tag_prefix.len + hashlen :0])});
},
};
_ = bun.sys.close(buntagfd);
@@ -387,7 +387,7 @@ pub const PatchTask = struct {
bun.toFD(this.callback.apply.cache_dir.fd),
this.callback.apply.cache_dir_subpath,
.{ .move_fallback = true },
).asErr()) |e| return try log.addErrorFmtNoLoc(this.manager.allocator, "{}", .{e});
).asErr()) |e| return try log.addErrorFmtNoLoc(this.manager.allocator, "renaming changes to cache dir: {}", .{e.withPath(this.callback.apply.cache_dir_subpath)});
}

pub fn calcHash(this: *PatchTask) ?u64 {
35 changes: 17 additions & 18 deletions src/sys.zig
@@ -1831,10 +1831,6 @@ pub fn renameatConcurrentlyWithoutFallback(
to: [:0]const u8,
) Maybe(void) {
var did_atomically_replace = false;
if (comptime Environment.isWindows) {
// Windows doesn't have an equivalent
return renameat(from_dir_fd, from, to_dir_fd, to);
}

attempt_atomic_rename_and_fallback_to_racy_delete: {
{
@@ -1847,26 +1843,29 @@ pub fn renameatConcurrentlyWithoutFallback(
.result => break :attempt_atomic_rename_and_fallback_to_racy_delete,
};

// Fallback path: the folder exists in the cache dir, it might be in a strange state
// let's attempt to atomically replace it with the temporary folder's version
if (switch (err.getErrno()) {
.EXIST, .NOTEMPTY, .OPNOTSUPP => true,
else => false,
}) {
did_atomically_replace = true;
switch (bun.sys.renameat2(from_dir_fd, from, to_dir_fd, to, .{
.exchange = true,
})) {
.err => {},
.result => break :attempt_atomic_rename_and_fallback_to_racy_delete,
// Windows doesn't have an equivalent of renameat2 with swap (exchange)
if (!bun.Environment.isWindows) {
// Fallback path: the folder exists in the cache dir, it might be in a strange state
// let's attempt to atomically replace it with the temporary folder's version
if (switch (err.getErrno()) {
.EXIST, .NOTEMPTY, .OPNOTSUPP => true,
else => false,
}) {
did_atomically_replace = true;
switch (bun.sys.renameat2(from_dir_fd, from, to_dir_fd, to, .{
.exchange = true,
})) {
.err => {},
.result => break :attempt_atomic_rename_and_fallback_to_racy_delete,
}
did_atomically_replace = false;
}
did_atomically_replace = false;
}
}

// sad path: let's try to delete the folder and then rename it
var to_dir = to_dir_fd.asDir();
to_dir.deleteTree(from) catch {};
to_dir.deleteTree(to) catch {};
switch (bun.sys.renameat(from_dir_fd, from, to_dir_fd, to)) {
.err => |err| {
return .{ .err = err };