//! ona/src/coral/heap.zig
//!
//! Reference-counted heap allocation built on a build-mode-dependent
//! process-wide allocator.

const builtin = @import("builtin");
const std = @import("std");
/// Returns an opaque, atomically reference-counted wrapper around `Payload`.
/// `finalize` is invoked exactly once, when the last reference is released,
/// immediately before the backing memory is freed.
pub fn RefCounting(comptime Payload: type, comptime finalize: fn (*Payload) void) type {
    const AtomicCount = std.atomic.Value(usize);

    return opaque {
        /// Heap layout backing every handle: the counter precedes the payload.
        const Layout = struct {
            ref_count: AtomicCount,
            payload: Payload,

            fn get(self: *Self) *Layout {
                return @ptrCast(@alignCast(self));
            }

            fn getConst(self: *const Self) *const Layout {
                return @ptrCast(@alignCast(self));
            }
        };

        const Self = @This();

        /// Increments the reference count and returns read access to the
        /// payload. Asserts the object is still alive (count was nonzero).
        pub fn acquire(self: *Self) *const Payload {
            const layout = Layout.get(self);

            // .monotonic suffices for increments: the caller already holds a
            // reference, so no additional synchronization is required here.
            const prior = layout.ref_count.fetchAdd(1, .monotonic);

            std.debug.assert(prior != 0);

            return &layout.payload;
        }

        /// Allocates a new object holding `payload` with a reference count of
        /// one. The returned handle must eventually be passed to `release`.
        pub fn create(payload: Payload) std.mem.Allocator.Error!*Self {
            const allocation = try allocator.create(Layout);

            errdefer allocator.destroy(allocation);

            allocation.* = .{
                .ref_count = AtomicCount.init(1),
                .payload = payload,
            };

            return @ptrCast(allocation);
        }

        /// Drops one reference. When the last reference is released the
        /// payload is finalized and the backing memory is freed; using the
        /// handle afterwards is undefined behavior.
        pub fn release(self: *Self) void {
            const layout = Layout.get(self);

            // .release publishes every prior write to the payload before the
            // count can be observed reaching zero on another thread.
            const prior = layout.ref_count.fetchSub(1, .release);

            std.debug.assert(prior != 0);

            if (prior == 1) {
                // Pairs with the .release above so that finalize observes all
                // writes made while other threads still held references.
                _ = layout.ref_count.load(.acquire);

                finalize(&layout.payload);
                allocator.destroy(layout);
            }
        }

        /// Borrows the payload without touching the reference count. The
        /// caller must already hold a strong reference for the full duration
        /// of the borrow.
        pub fn weak(self: *const Self) *const Payload {
            return &Layout.getConst(self).payload;
        }
    };
}
/// Process-wide allocator: the lock-free SMP allocator in ReleaseFast builds,
/// otherwise the leak-detecting general-purpose allocator below.
pub const allocator = if (builtin.mode == .ReleaseFast)
    std.heap.smp_allocator
else
    gpa.allocator();

// Backing allocator for non-ReleaseFast builds; thread_safe because the
// ref-counting wrappers above may release from any thread.
var gpa = std.heap.GeneralPurposeAllocator(.{ .thread_safe = true }){};
/// Runs leak detection on the debug allocator. The reporting happens inside
/// detectLeaks itself; its boolean result is intentionally discarded.
pub fn traceLeaks() void {
    const leaked = gpa.detectLeaks();
    _ = leaked;
}