Add fixed stack-backed allocator implementation

kayomn 2022-11-02 17:45:17 +00:00
parent fcd4ecd85d
commit 14b3921001
4 changed files with 102 additions and 44 deletions

View File

@ -4,11 +4,12 @@ const stack = @import("./stack.zig");
const testing = @import("./testing.zig");
///
///
/// Allocation options for an [Allocator].
///
pub const Allocation = struct {
existing: ?[*]u8,
size: usize
alignment: u29,
size: usize,
};
///
@ -17,29 +18,7 @@ pub const Allocation = struct {
pub const Allocator = meta.Function(Allocation, ?[*]u8);
///
///
///
pub const ArenaAllocator = struct {
region: []u8,
cursor: usize = 0,
///
///
///
pub fn allocator(arena_allocator: *ArenaAllocator) Allocator {
return Allocator.fromClosure(arena_allocator, struct {
fn call(context: *ArenaAllocator, allocation: Allocation) ?[*]u8 {
_ = allocation;
_ = context;
return null;
}
}.call);
}
};
///
///
/// [MakeError.OutOfMemory] if the requested amount of memory could not be allocated.
///
pub const MakeError = error {
OutOfMemory,
@ -259,6 +238,21 @@ test "Check memory is equal" {
try testing.expect(!equals(u8, bytes_sequence, &.{69, 42}));
}
///
/// Fills the contents of `target` with `source`.
///
pub fn fill(comptime Element: type, target: []Element, source: Element) void {
for (target) |_, index| target[index] = source;
}
test "Fill data" {
var buffer = [_]u32{0} ** 8;
fill(u32, &buffer, 1);
for (buffer) |element| try testing.expect(element == 1);
}
///
/// Searches for the first instance of an `Element` equal to `needle` in `haystack`, returning its
/// index or `null` if nothing was found.
@ -318,7 +312,10 @@ test "Find first of sequence" {
}
///
/// Frees `allocated_memory` using `allocator`.
///
/// *Note* that only memory known to be freeable by `allocator` should be passed via
`allocated_memory`. Anything else is considered unreachable logic.
///
pub fn free(allocator: Allocator, allocated_memory: anytype) void {
if (allocator.call(.{
@ -332,6 +329,7 @@ pub fn free(allocator: Allocator, allocated_memory: anytype) void {
}),
.size = 0,
.alignment = 0,
}) != null) unreachable;
}
@ -356,13 +354,21 @@ test "Hashing bytes" {
}
///
///
/// Attempts to allocate a buffer of `size` `Element`s using `allocator`, returning it or a
/// [MakeError] if it failed.
///
pub fn makeMany(comptime Element: type, allocator: Allocator, size: usize) MakeError![*]Element {
return @ptrCast([*]Element, @alignCast(@alignOf(Element), allocator.call(.{
const alignment = @alignOf(Element);
if (allocator.call(.{
.existing = null,
.size = size,
}) orelse return error.OutOfMemory));
.size = @sizeOf(Element) * size,
.alignment = alignment,
})) |buffer| {
return @ptrCast([*]Element, @alignCast(alignment, buffer));
}
return error.OutOfMemory;
}
///
@ -385,7 +391,8 @@ test "Data swapping" {
}
///
/// [Writer] that silently consumes all given data without failure and throws it away.
/// Thread-safe and lock-free [Writer] that silently consumes all given data without failure and
/// throws it away.
///
/// This is commonly used for testing or redirecting otherwise unwanted output data that has to be
/// sent somewhere for whatever reason.
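As a minimal usage sketch (not part of this commit), the updated `makeMany` and `fill` can be driven by the fixed stack-backed allocator added in `stack.zig` below; the test name is illustrative only:

const io = @import("./io.zig");
const stack = @import("./stack.zig");
const testing = @import("./testing.zig");

test "Allocate and fill through a fixed stack-backed allocator" {
    var buffer = [_]u8{0} ** 64;
    var fixed_stack = stack.Fixed(u8){.buffer = &buffer};
    const allocator = stack.fixedAllocator(&fixed_stack);

    // A null `existing` pointer requests a fresh allocation; failure surfaces from
    // `makeMany` as `error.OutOfMemory`.
    const values = try io.makeMany(u32, allocator, 4);

    io.fill(u32, values[0 .. 4], 42);

    for (values[0 .. 4]) |value| try testing.expect(value == 42);
}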

View File

@ -69,18 +69,16 @@ pub fn Function(comptime In: type, comptime Out: type) type {
else => @compileError("`context` must be a pointer"),
}
var function = Self{
return Self{
.context = @ptrCast(*anyopaque, context),
.callErased = struct {
fn callErased(erased: *anyopaque, input: In) Out {
return if (Context == void) invoke(input) else invoke(@ptrCast(
*Context, @alignCast(@alignOf(Context), erased)).*, input);
*Context, @alignCast(@alignOf(Context), erased)), input);
}
}.callErased,
};
return function;
}
};
}
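A brief sketch (not part of the diff) of the behaviour this fix enables: the closure now receives a `*Context` instead of a dereferenced copy, so state mutated through the pointer persists across calls. The `Counter` type and test name are illustrative only:

const meta = @import("./meta.zig");
const testing = @import("./testing.zig");

const Counter = struct {
    total: usize = 0,
};

test "Function closures receive their context by pointer" {
    var counter = Counter{};

    // The closure mutates the counter through the pointer handed to it by `callErased`.
    const accumulate = meta.Function(usize, usize).fromClosure(&counter, struct {
        fn call(context: *Counter, amount: usize) usize {
            context.total += amount;
            return context.total;
        }
    }.call);

    try testing.expect(accumulate.call(2) == 2);
    try testing.expect(accumulate.call(3) == 5);
    try testing.expect(counter.total == 5);
}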

View File

@ -39,7 +39,7 @@ pub fn Fixed(comptime Element: type) type {
}
///
/// Attempts to push `element` into `self`, returning a [FixedPushError] if it failed.
/// Attempts to push `element` into `self`, returning a [PushError] if it failed.
///
pub fn push(self: *Self, element: Element) PushError!void {
if (self.filled == self.buffer.len) return error.OutOfMemory;
@ -49,8 +49,7 @@ pub fn Fixed(comptime Element: type) type {
}
///
/// Attempts to push all of `elements` into `self`, returning a [FixedPushError] if it
/// failed.
/// Attempts to push all of `elements` into `self`, returning a [PushError] if it failed.
///
pub fn pushAll(self: *Self, elements: []const Element) PushError!void {
const filled = (self.filled + elements.len);
@ -61,6 +60,20 @@ pub fn Fixed(comptime Element: type) type {
self.filled = filled;
}
///
/// Attempts to push `count` instances of `element` into `self`, returning a [PushError] if
/// it failed.
///
pub fn pushMany(self: *Self, element: Element, count: usize) PushError!void {
const filled = (self.filled + count);
if (filled > self.buffer.len) return error.OutOfMemory;
io.fill(Element, self.buffer[self.filled ..], element);
self.filled = filled;
}
};
}
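A short sketch (not in the diff) of the behaviour `pushMany` is expected to have, assuming `filled` defaults to zero as in the existing tests:

const stack = @import("./stack.zig");
const testing = @import("./testing.zig");

test "Push many repeated elements" {
    var buffer = [_]u8{0} ** 4;
    var fixed_stack = stack.Fixed(u8){.buffer = &buffer};

    try fixed_stack.pushMany(42, 3);
    try testing.expect(fixed_stack.filled == 3);

    // Only one slot remains, so pushing two more elements must fail.
    try testing.expect(if (fixed_stack.pushMany(42, 2)) |_| false else |err| err == error.OutOfMemory);
}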
@ -115,6 +128,45 @@ test "Fixed stack of string literals" {
///
pub const PushError = io.MakeError;
///
/// Creates and returns a [io.Allocator] value wrapping `fixed_stack`.
///
/// The returned [io.Allocator] uses `fixed_stack` and its backing memory buffer as a fixed-length
/// memory pool to linearly allocate memory from.
///
pub fn fixedAllocator(fixed_stack: *Fixed(u8)) io.Allocator {
return io.Allocator.fromClosure(fixed_stack, struct {
fn alloc(stack: *Fixed(u8), allocation: io.Allocation) ?[*]u8 {
if (allocation.existing) |buffer| if (allocation.size == 0) {
// Deallocate the memory.
const buffer_address = @ptrToInt(buffer);
const stack_buffer_address = @ptrToInt(stack.buffer.ptr);
// Check the buffer is within the address space of the stack buffer. If not, it
// should just be returned to let the caller know it cannot be freed.
if ((buffer_address < stack_buffer_address) or
(buffer_address >= (stack_buffer_address + stack.filled))) return buffer;
// TODO: Investigate ways of freeing if it is the last allocation.
return null;
};
// Reallocate / allocate the memory.
// TODO: Remove stdlib dependency.
const adjusted_offset = @import("std").mem.alignPointerOffset(stack.buffer.ptr +
stack.filled, allocation.alignment) orelse return null;
const head = stack.filled + adjusted_offset;
const tail = head + allocation.size;
stack.pushMany(0, tail) catch return null;
return stack.buffer[head .. tail].ptr;
}
}.alloc);
}
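A hedged sketch (not part of the commit) of the two request shapes the closure distinguishes: a null `existing` asks for fresh memory from the stack buffer, while a zero `size` asks for a deallocation, and pointers the stack does not own are handed back untouched. The test name is illustrative only:

const io = @import("./io.zig");
const stack = @import("./stack.zig");
const testing = @import("./testing.zig");

test "Fixed stack allocator distinguishes allocation from deallocation" {
    var buffer = [_]u8{0} ** 32;
    var fixed_stack = stack.Fixed(u8){.buffer = &buffer};
    const allocator = stack.fixedAllocator(&fixed_stack);

    // Allocation request: `existing` is null, so bytes are carved from the stack buffer.
    try testing.expect(allocator.call(.{
        .existing = null,
        .alignment = 1,
        .size = 8,
    }) != null);

    try testing.expect(fixed_stack.filled >= 8);

    // Deallocation request for memory the stack never produced: the pointer is returned
    // to signal that it cannot be freed here.
    var foreign = [_]u8{0} ** 4;
    const foreign_pointer = @as([*]u8, &foreign);

    const returned = allocator.call(.{
        .existing = foreign_pointer,
        .alignment = 0,
        .size = 0,
    });

    try testing.expect(returned != null);
    try testing.expect(returned.? == foreign_pointer);
}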
///
/// Returns an [io.Writer] wrapping `fixed_stack`.
///

View File

@ -1,4 +1,5 @@
const io = @import("./io.zig");
const stack = @import("./stack.zig");
const testing = @import("./testing.zig");
///
@ -48,10 +49,10 @@ pub fn Hashed(comptime Key: type, comptime Value: type,
/// Returns a new [Self] value or an [io.MakeError] if initializing failed.
///
pub fn init(allocator: Allocator) io.MakeError!Self {
const initial_capacity = 4;
const capacity = 4;
return Self{
.buckets = (try io.makeMany(Bucket, allocator, initial_capacity))[0 .. initial_capacity],
.buckets = (try io.makeMany(Bucket, allocator, capacity))[0 .. capacity],
.filled = 0,
.allocator = allocator,
.load_limit = 0.75,
@ -197,15 +198,15 @@ pub const string_literal_context = KeyContext([]const u8){
};
test "Hash table manipulation with string literal context" {
var buffer = [_]u8{0} ** 1024;
var arena_allocator = io.ArenaAllocator{.region = &buffer};
var buffer = [_]u8{0} ** 4096;
var fixed_stack = stack.Fixed(u8){.buffer = &buffer};
var table =
try Hashed([]const u8, u32, string_literal_context).init(arena_allocator.allocator());
var table = try Hashed([]const u8, u32, string_literal_context).
init(stack.fixedAllocator(&fixed_stack));
defer table.deinit();
const foo = @as(u32, 69);
const foo = 69;
try testing.expect(table.remove("foo") == null);
try table.insert("foo", foo);