Updated project for Zig v0.12
Aandreba committed Apr 24, 2024
1 parent 3d15c44 commit b5073e0
Showing 5 changed files with 102 additions and 36 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/static.yml
@@ -36,7 +36,7 @@ jobs:
       - name: Setup Zig
         uses: goto-bus-stop/setup-zig@v2
         with:
-          version: 0.11.0
+          version: 0.12.0
       - name: Generate docs
         run: zig build
       - name: Upload artifact
2 changes: 1 addition & 1 deletion .github/workflows/tests.yml
@@ -14,5 +14,5 @@ jobs:
       - uses: actions/checkout@v3
       - uses: goto-bus-stop/setup-zig@v2
         with:
-          version: 0.11.0
+          version: 0.12.0
       - run: zig test src/tests.zig
13 changes: 13 additions & 0 deletions Justfile
@@ -0,0 +1,13 @@
+coverage: clean
+    zig build test
+    kcov --include-pattern=src/main.zig,src/tests.zig kcov-out zig-cache/o/**/test
+
+docs:
+    zig build
+
+test:
+    zig test src/tests.zig
+
+clean:
+    rm -rf zig-cache
+    rm -rf zig-out
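Reading the recipes together (an inference from the commands, not something the commit states): coverage depends on clean so that kcov instruments a freshly built test binary rather than a stale cached one. zig build test leaves that binary in the local cache, where the zig-cache/o/**/test glob picks it up; kcov itself must be installed separately.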
63 changes: 62 additions & 1 deletion build.zig
@@ -13,8 +13,10 @@ const Os = Target.Os.Tag;
 pub fn build(b: *Builder) void {
     if (comptime builtin.zig_version.minor <= 10) {
         build_v10(b);
-    } else {
+    } else if (comptime builtin.zig_version.minor <= 11) {
         build_v11(b);
+    } else {
+        build_v12(b);
     }
 }

@@ -112,6 +114,65 @@ fn build_v11(b: *std.Build) void {
     example_step.dependOn(&run_example.step);
 }
 
+fn build_v12(b: *std.Build) void {
+    // Standard target options allow the person running `zig build` to choose
+    // what target to build for. Here we do not override the defaults, which
+    // means any target is allowed, and the default is native. Other options
+    // for restricting the supported target set are available.
+    const target = b.standardTargetOptions(.{});
+
+    // Standard optimization options allow the person running `zig build` to select
+    // between Debug, ReleaseSafe, ReleaseFast, and ReleaseSmall. Here we do not
+    // set a preferred release mode, allowing the user to decide how to optimize.
+    const optimize = b.standardOptimizeOption(.{});
+
+    const coverage = b.option(bool, "coverage", "Generate test coverage") orelse false;
+
+    // Docs
+    const docs = b.addStaticLibrary(.{
+        .name = "zig-rc",
+        .root_source_file = b.path("src/main.zig"),
+        .target = target,
+        .optimize = optimize,
+    });
+
+    const docsget = b.addInstallDirectory(.{
+        .source_dir = docs.getEmittedDocs(),
+        .install_dir = .prefix,
+        .install_subdir = "docs",
+    });
+
+    b.default_step.dependOn(&docsget.step);
+
+    b.installArtifact(docs);
+
+    // Tests
+    const main_tests = b.addTest(.{
+        .root_source_file = b.path("src/tests.zig"),
+    });
+    const run_main_tests = b.addRunArtifact(main_tests);
+
+    if (coverage) {
+        main_tests.setExecCmd(&[_]?[]const u8{
+            "kcov",
+            "--include-pattern=src/main.zig,src/tests.zig",
+            "kcov-out",
+            null, // to get zig to use the --test-cmd-bin flag
+        });
+    }
+
+    const test_step = b.step("test", "Run library tests");
+    test_step.dependOn(&run_main_tests.step);
+
+    // Examples
+    const example = b.addTest(.{
+        .root_source_file = b.path("src/example.zig"),
+    });
+    const run_example = b.addRunArtifact(example);
+    const example_step = b.step("example", "Run library example");
+    example_step.dependOn(&run_example.step);
+}

 fn default_target(arch: Target.Cpu.Arch, os_tag: Target.Os.Tag) Target {
     const os = os_tag.defaultVersionRange(arch);
     return Target{
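The comptime version check at the top of build.zig is what lets one script serve several compiler releases: because the condition is known at compile time, the untaken branches are never semantically analyzed, so each build_vNN function can use APIs that only exist in its own Zig version (build_v12 above uses b.path, which Zig 0.12 introduced to replace the anonymous .{ .path = ... } LazyPath literal). A minimal sketch of the pattern, using illustrative function names (buildLegacy, buildCurrent) rather than this repository's:

```zig
const std = @import("std");
const builtin = @import("builtin");

pub fn build(b: *std.Build) void {
    // The branch condition is comptime-known, so the compiler never analyzes
    // the untaken arm; it may freely reference APIs that only exist in the
    // other compiler release.
    if (comptime builtin.zig_version.minor <= 11) {
        buildLegacy(b);
    } else {
        buildCurrent(b);
    }
}

fn buildLegacy(b: *std.Build) void {
    // Zig 0.11 spelled source paths as anonymous LazyPath literals.
    const tests = b.addTest(.{ .root_source_file = .{ .path = "src/tests.zig" } });
    b.step("test", "Run tests").dependOn(&b.addRunArtifact(tests).step);
}

fn buildCurrent(b: *std.Build) void {
    // Zig 0.12 resolves paths relative to the build root via b.path().
    const tests = b.addTest(.{ .root_source_file = b.path("src/tests.zig") });
    b.step("test", "Run tests").dependOn(&b.addRunArtifact(tests).step);
}
```

Under a 0.11 compiler the buildCurrent arm is never analyzed, so the missing b.path symbol is harmless; under 0.12 the same holds for the legacy arm.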
58 changes: 25 additions & 33 deletions src/main.zig
@@ -1,10 +1,8 @@
const std = @import("std");
const builtin = @import("builtin");

/// This variable is `true` if an atomic reference-counter is used for `Arc`, `false` otherwise.
///
/// If the target is single-threaded, `Arc` is optimized to a regular `Rc`.
pub const atomic_arc = !builtin.single_threaded or (builtin.target.isWasm() and std.Target.wasm.featureSetHas(builtin.cpu.features, .atomics));
/// DEPRECATED: It's now simply equal to `!builtin.single_threaded`
pub const atomic_arc = !builtin.single_threaded;

/// A single threaded, strong reference to a reference-counted value.
pub fn Rc(comptime T: type) type {
@@ -145,7 +143,7 @@ pub fn Rc(comptime T: type) type {
         }
 
         inline fn innerPtr(self: *const Self) *Inner {
-            return @fieldParentPtr(Inner, "value", self.value);
+            return @alignCast(@fieldParentPtr("value", self.value));
         }
 
         /// A single threaded, weak reference to a reference-counted value.
@@ -163,10 +161,7 @@ pub fn Rc(comptime T: type) type {
             /// Creates a new weak reference object from a pointer to its underlying value,
             /// without increasing the weak count.
             pub fn fromValuePtr(value: *T, alloc: std.mem.Allocator) Weak {
-                return .{
-                    .inner = @fieldParentPtr(Inner, "value", value),
-                    .alloc = alloc
-                };
+                return .{ .inner = @fieldParentPtr("value", value), .alloc = alloc };
             }
 
             /// Gets the number of strong references to this value.
@@ -296,23 +291,23 @@ pub fn Arc(comptime T: type) type {
             // otherwise.
             inner.value = data_fn(&weak);
 
-            std.debug.assert(@atomicRmw(usize, &inner.strong, .Add, 1, .Release) == 0);
+            std.debug.assert(@atomicRmw(usize, &inner.strong, .Add, 1, .release) == 0);
             return Self{ .value = &inner.value, .alloc = alloc };
         }
 
         /// Gets the number of strong references to this value.
         pub fn strongCount(self: *const Self) usize {
-            return @atomicLoad(usize, &self.innerPtr().strong, .Acquire);
+            return @atomicLoad(usize, &self.innerPtr().strong, .acquire);
         }
 
         /// Gets the number of weak references to this value.
         pub fn weakCount(self: *const Self) usize {
-            return @atomicLoad(usize, &self.innerPtr().weak, .Acquire) - 1;
+            return @atomicLoad(usize, &self.innerPtr().weak, .acquire) - 1;
         }
 
         /// Increments the strong count.
         pub fn retain(self: *Self) Self {
-            _ = @atomicRmw(usize, &self.innerPtr().strong, .Add, 1, .AcqRel);
+            _ = @atomicRmw(usize, &self.innerPtr().strong, .Add, 1, .acq_rel);
             return self.*;
         }
 
@@ -326,8 +321,8 @@ pub fn Arc(comptime T: type) type {
         pub fn release(self: Self) void {
             const ptr = self.innerPtr();
 
-            if (@atomicRmw(usize, &ptr.strong, .Sub, 1, .AcqRel) == 1) {
-                if (@atomicRmw(usize, &ptr.weak, .Sub, 1, .AcqRel) == 1) {
+            if (@atomicRmw(usize, &ptr.strong, .Sub, 1, .acq_rel) == 1) {
+                if (@atomicRmw(usize, &ptr.weak, .Sub, 1, .acq_rel) == 1) {
                     self.alloc.destroy(ptr);
                 }
             }
@@ -339,9 +334,9 @@ pub fn Arc(comptime T: type) type {
         pub fn releaseWithFn(self: Self, comptime f: fn (T) void) void {
            const ptr = self.innerPtr();
 
-            if (@atomicRmw(usize, &ptr.strong, .Sub, 1, .AcqRel) == 1) {
+            if (@atomicRmw(usize, &ptr.strong, .Sub, 1, .acq_rel) == 1) {
                 f(self.value.*);
-                if (@atomicRmw(usize, &ptr.weak, .Sub, 1, .AcqRel) == 1) {
+                if (@atomicRmw(usize, &ptr.weak, .Sub, 1, .acq_rel) == 1) {
                     self.alloc.destroy(ptr);
                 }
             }
@@ -354,10 +349,10 @@ pub fn Arc(comptime T: type) type {
         pub fn tryUnwrap(self: Self) ?T {
             const ptr = self.innerPtr();
 
-            if (@cmpxchgStrong(usize, &ptr.strong, 1, 0, .Monotonic, .Monotonic) == null) {
+            if (@cmpxchgStrong(usize, &ptr.strong, 1, 0, .monotonic, .monotonic) == null) {
                 ptr.strong = 0;
                 const tmp = self.value.*;
-                if (@atomicRmw(usize, &ptr.weak, .Sub, 1, .AcqRel) == 1) {
+                if (@atomicRmw(usize, &ptr.weak, .Sub, 1, .acq_rel) == 1) {
                     self.alloc.destroy(ptr);
                 }
                 return tmp;
@@ -379,7 +374,7 @@ pub fn Arc(comptime T: type) type {
         }
 
         inline fn innerPtr(self: *const Self) *Inner {
-            return @fieldParentPtr(Inner, "value", self.value);
+            return @alignCast(@fieldParentPtr("value", self.value));
         }
 
         /// A multi-threaded, weak reference to a reference-counted value.
@@ -390,31 +385,28 @@ pub fn Arc(comptime T: type) type {
             /// Creates a new weak reference.
             pub fn init(parent: *Arc(T)) Weak {
                 const ptr = parent.innerPtr();
-                _ = @atomicRmw(usize, &ptr.weak, .Add, 1, .AcqRel);
+                _ = @atomicRmw(usize, &ptr.weak, .Add, 1, .acq_rel);
                 return Weak{ .inner = ptr, .alloc = parent.alloc };
             }
 
             /// Creates a new weak reference object from a pointer to its underlying value,
             /// without increasing the weak count.
             pub fn fromValuePtr(value: *T, alloc: std.mem.Allocator) Weak {
-                return .{
-                    .inner = @fieldParentPtr(Inner, "value", value),
-                    .alloc = alloc
-                };
+                return .{ .inner = @fieldParentPtr("value", value), .alloc = alloc };
            }
 
             /// Gets the number of strong references to this value.
             pub fn strongCount(self: *const Weak) usize {
                 const ptr = self.innerPtr() orelse return 0;
-                return @atomicLoad(usize, &ptr.strong, .Acquire);
+                return @atomicLoad(usize, &ptr.strong, .acquire);
             }
 
             /// Gets the number of weak references to this value.
             pub fn weakCount(self: *const Weak) usize {
                 const ptr = self.innerPtr() orelse return 1;
-                const weak = @atomicLoad(usize, &ptr.weak, .Acquire);
+                const weak = @atomicLoad(usize, &ptr.weak, .acquire);
 
-                if (@atomicLoad(usize, &ptr.strong, .Acquire) == 0) {
+                if (@atomicLoad(usize, &ptr.strong, .acquire) == 0) {
                     return weak;
                 } else {
                     return weak - 1;
@@ -424,7 +416,7 @@ pub fn Arc(comptime T: type) type {
             /// Increments the weak count.
             pub fn retain(self: *Weak) Weak {
                 if (self.innerPtr()) |ptr| {
-                    _ = @atomicRmw(usize, &ptr.weak, .Add, 1, .AcqRel);
+                    _ = @atomicRmw(usize, &ptr.weak, .Add, 1, .acq_rel);
                 }
                 return self.*;
             }
@@ -436,17 +428,17 @@ pub fn Arc(comptime T: type) type {
                 const ptr = self.innerPtr() orelse return null;
 
                 while (true) {
-                    const prev = @atomicLoad(usize, &ptr.strong, .Acquire);
+                    const prev = @atomicLoad(usize, &ptr.strong, .acquire);
 
                     if (prev == 0) {
-                        if (@atomicRmw(usize, &ptr.weak, .Sub, 1, .AcqRel) == 1) {
+                        if (@atomicRmw(usize, &ptr.weak, .Sub, 1, .acq_rel) == 1) {
                             self.alloc.destroy(ptr);
                             self.inner = null;
                         }
                         return null;
                     }
 
-                    if (@cmpxchgStrong(usize, &ptr.strong, prev, prev + 1, .Acquire, .Monotonic) == null) {
+                    if (@cmpxchgStrong(usize, &ptr.strong, prev, prev + 1, .acquire, .monotonic) == null) {
                         return Arc(T){
                             .value = &ptr.value,
                             .alloc = self.alloc,
@@ -461,7 +453,7 @@ pub fn Arc(comptime T: type) type {
             /// The continued use of the pointer after calling `release` is undefined behaviour.
             pub fn release(self: Weak) void {
                 if (self.innerPtr()) |ptr| {
-                    if (@atomicRmw(usize, &ptr.weak, .Sub, 1, .AcqRel) == 1) {
+                    if (@atomicRmw(usize, &ptr.weak, .Sub, 1, .acq_rel) == 1) {
                         self.alloc.destroy(ptr);
                     }
                 }
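Nearly all of the churn in src/main.zig comes from two mechanical Zig 0.12 changes: @fieldParentPtr lost its first (parent type) argument and now infers the type from the result location (hence the added @alignCast), and the std.builtin.AtomicOrder variants were renamed from CamelCase (.Acquire, .Release, .AcqRel, .Monotonic) to snake_case (.acquire, .release, .acq_rel, .monotonic), while the @atomicRmw operation enum (.Add, .Sub) kept its casing. A self-contained sketch of both migrations; the Inner struct here is an illustrative stand-in, not this library's actual layout:

```zig
const std = @import("std");

const Inner = struct {
    strong: usize,
    value: u32,
};

fn innerFromValue(value: *u32) *Inner {
    // Zig 0.11 form: @fieldParentPtr(Inner, "value", value)
    // Zig 0.12: the parent type is taken from the result location, and the
    // conversion back from the field pointer needs @alignCast.
    return @alignCast(@fieldParentPtr("value", value));
}

test "Zig 0.12 spellings for @fieldParentPtr and atomic orderings" {
    var inner = Inner{ .strong = 1, .value = 42 };
    // Zig 0.11 wrote .AcqRel and .Acquire; 0.12 renamed the orderings to
    // .acq_rel and .acquire, while the op enum tag .Add is unchanged.
    _ = @atomicRmw(usize, &inner.strong, .Add, 1, .acq_rel);
    try std.testing.expectEqual(@as(usize, 2), @atomicLoad(usize, &inner.strong, .acquire));
    try std.testing.expectEqual(&inner, innerFromValue(&inner.value));
}
```

The commented 0.11 forms are exactly the spellings the diff above replaces.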
