Get rid of extraneous and unused code

parent 7604594630
commit c1f174a513
@@ -2,20 +2,12 @@
 "version": "0.2.0",
 "configurations": [
 {
-"name": "Runtime",
+"name": "Runner",
 "type": "gdb",
 "request": "launch",
-"target": "${workspaceRoot}/zig-out/bin/ona-runner",
+"target": "${workspaceRoot}/zig-out/bin/runner",
 "cwd": "${workspaceRoot}/debug/",
 "valuesFormatting": "parseText"
 },
-{
-"name": "Build Script",
-"type": "python",
-"request": "launch",
-"program": "./build.py",
-"console": "integratedTerminal",
-"justMyCode": true
-}
 ]
 }
@@ -1,13 +1,13 @@
 {
+"files.insertFinalNewline": true,
+"files.trimTrailingWhitespace": true,
+
+"[zig]": {
+"editor.formatOnSave": false,
+"files.eol": "\n",
 "editor.minimap.maxColumn": 120,
 "editor.detectIndentation": false,
 "editor.insertSpaces": false,
 "editor.rulers": [120],
-"files.trimTrailingWhitespace": true,
-"files.insertFinalNewline": true,
-"zig.formattingProvider": "off",
-"zig.zls.enableAutofix": false,
-"editor.formatOnSave": false,
-"spellright.language": ["en-US-10-1."],
-"spellright.documentTypes": ["markdown"],
+}
 }
@@ -2,24 +2,23 @@
 "version": "2.0.0",
 "tasks": [
 {
-"label": "build",
-"type": "process",
-"command": "zig",
-"args": ["build"],
+"label": "Build All",
+"type": "shell",
+"command": "zig build",
+"group": {
+"kind": "build",
+"isDefault": true
+},
 "problemMatcher": "$gcc",
 "presentation": {
 "echo": true,
 "reveal": "silent",
 "focus": false,
 "panel": "shared",
-"showReuseMessage": true,
+"showReuseMessage": false,
 "clear": true,
 "revealProblems": "onProblem"
-},
-"group": {
-"kind": "build",
-"isDefault": true
-},
-}
+}
 }
 ]
 }
@@ -1,6 +1,9 @@
 const std = @import("std");

 pub fn build(b: *std.Build) void {
+const target = b.standardTargetOptions(.{});
+const optimize = b.standardOptimizeOption(.{});
+
 const coral_module = b.createModule(.{.source_file = .{.path = "./source/coral/coral.zig"}});

 const ona_module = b.createModule(.{
@@ -14,37 +17,18 @@ pub fn build(b: *std.Build) void {
 },
 });

-const cross_target = b.standardTargetOptions(.{});
-const optimize_mode = std.builtin.Mode.Debug;
-
-// Ona runner.
-{
-const ona_exe = b.addExecutable(.{
-.name = "ona-runner",
-.root_source_file = .{.path = "./source/runner.zig"},
-.target = cross_target,
-.optimize = optimize_mode,
+b.installArtifact(create: {
+const runner_exe = b.addExecutable(.{
+.name = "runner",
+.root_source_file = .{ .path = "source/runner.zig" },
+.target = target,
+.optimize = optimize,
 });

-ona_exe.addModule("coral", coral_module);
-ona_exe.addModule("ona", ona_module);
+runner_exe.addModule("ona", ona_module);
+runner_exe.linkLibC();
+runner_exe.linkSystemLibrary("SDL2");

-// ona_exe.addIncludeDir("./ext");
-ona_exe.linkSystemLibrary("SDL2");
-ona_exe.linkLibC();
-b.installArtifact(ona_exe);
-}
-
-// Test step.
-{
-const exe_tests = b.addTest(.{
-.root_source_file = .{.path = "source/test.zig"},
-.target = cross_target,
-.optimize = optimize_mode,
+break: create runner_exe;
 });

-const test_step = b.step("test", "Run unit tests");
-
-test_step.dependOn(&exe_tests.step);
-}
 }
@@ -3,4 +3,5 @@ return {
 title = "Afterglow",
 width = 1280,
 height = 800,
+tick_rate = 60,
 }
@@ -26,8 +26,6 @@ Ona is also the Catalan word for "wave".

 * Provide utilities for handling rendering but otherwise leave the higher-level game logic and data structuring to the programmer.
-
-* Provide a simple scripting interface for people who want to do something quick and a powerful plug-in API for engine-level extensions and speed-critical application logic.

 ## Technical Details

 ### Requirements
@@ -12,46 +12,23 @@ pub const Stacking = struct {
 allocations: list.Stack(usize),
 pages: list.Stack(Page),

+const AllocationsList = list.Stack(usize);
+
 const Page = struct {
-buffer: []u8,
+buffer: []io.Byte,
 used: usize,

-const Self = @This();
-
-fn available(self: Self) usize {
+fn available(self: Page) usize {
 return self.buffer.len - self.used;
 }
 };

-pub fn allocate(self: *Stacking, allocation_size: usize) io.AllocationError![]u8 {
-const alignment = @as(usize, 4);
-const aligned_allocation_size = (allocation_size + alignment - 1) & ~(alignment - 1);
-
-if (self.pages.values.len == 0) {
-const page = try self.allocate_page(math.max(self.min_page_size, aligned_allocation_size));
-
-page.used = allocation_size;
-
-return page.buffer[0 .. allocation_size];
-}
-
-var page = self.current_page() orelse unreachable;
-
-if (page.available() <= aligned_allocation_size) {
-page = try self.allocate_page(math.max(self.min_page_size, aligned_allocation_size));
-}
-
-debug.assert(page.available() >= allocation_size);
-
-defer page.used += aligned_allocation_size;
-
-return page.buffer[page.used .. (page.used + allocation_size)];
-}
-
+const PageList = list.Stack(Page);
+
 fn allocate_page(self: *Stacking, page_size: usize) io.AllocationError!*Page {
-var buffer = try io.allocate_many(self.page_allocator, page_size, u8);
+var buffer = try self.page_allocator.reallocate(null, page_size);

-errdefer io.deallocate(self.page_allocator, buffer);
+errdefer self.page_allocator.deallocate(buffer);

 try self.pages.push_one(.{
 .buffer = buffer,
@@ -62,25 +39,10 @@ pub const Stacking = struct {
 }

 pub fn as_allocator(self: *Stacking) io.Allocator {
-return io.Allocator.bind(Stacking, self, struct {
-fn reallocate(stacking: *Stacking, options: io.AllocationOptions) ?[]u8 {
-const allocation = options.allocation orelse {
-return stacking.allocate(options.size) catch null;
-};
-
-if (allocation.len == 0) {
-return null;
-}
-
-const reallocation = stacking.allocate(allocation.len) catch {
-return null;
-};
-
-io.copy(reallocation, allocation);
-
-return reallocation;
-}
-}.reallocate);
+return io.Allocator.bind(Stacking, self, .{
+.deallocate = deallocate,
+.reallocate = reallocate,
+});
 }

 fn current_page(self: Stacking) ?*Page {
@@ -91,20 +53,58 @@ pub const Stacking = struct {
 return &self.pages.values[self.pages.values.len - 1];
 }

-pub fn deinit(self: *Stacking) void {
+pub fn free(self: *Stacking) void {
 for (self.pages.values) |page| {
-io.deallocate(self.page_allocator, page.buffer);
+self.page_allocator.deallocate(page.buffer);
 }

-self.pages.deinit();
-self.allocations.deinit();
+self.pages.free();
+self.allocations.free();
 }

-pub fn init(allocator: io.Allocator, min_page_size: usize) io.AllocationError!Stacking {
+pub fn deallocate(_: *Stacking, _: []io.Byte) void {
+// TODO: Decide how to implement.
+}
+
+pub fn reallocate(self: *Stacking, return_address: usize, existing_allocation: ?[]io.Byte, size: usize) io.AllocationError![]io.Byte {
+// TODO: Safety-check existing allocation is from allocator or null.
+_ = return_address;
+
+const alignment = @as(usize, 4);
+const aligned_size = (size + alignment - 1) & ~(alignment - 1);
+
+if (self.pages.values.len == 0) {
+const page = try self.allocate_page(math.max(self.min_page_size, aligned_size));
+
+page.used = size;
+
+return page.buffer[0 .. size];
+}
+
+var page = self.current_page() orelse unreachable;
+
+if (page.available() <= aligned_size) {
+page = try self.allocate_page(math.max(self.min_page_size, aligned_size));
+}
+
+debug.assert(page.available() >= size);
+
+defer page.used += aligned_size;
+
+const reallocation = page.buffer[page.used .. (page.used + size)];
+
+if (existing_allocation) |allocation| {
+io.copy(reallocation, allocation);
+}
+
+return reallocation;
+}
+
+pub fn make(allocator: io.Allocator, min_page_size: usize) Stacking {
 return Stacking{
+.allocations = AllocationsList.make(allocator),
+.pages = PageList.make(allocator),
 .page_allocator = allocator,
-.allocations = .{.allocator = allocator},
-.pages = .{.allocator = allocator},
 .min_page_size = min_page_size,
 };
 }
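The three arena hunks above replace the old `init`/`deinit`/`allocate` surface of `Stacking` with `make`, `free`, and the `deallocate`/`reallocate` pair expected by the reworked `io.Allocator` interface. A minimal usage sketch, assuming the library is imported as `coral` and that a backing `io.Allocator` is already available (both assumptions, not shown in this commit):

```zig
const coral = @import("coral");

fn arena_example(base_allocator: coral.io.Allocator) coral.io.AllocationError!void {
    // Request pages of at least 4 KiB from the backing allocator.
    var stacking = coral.arena.Stacking.make(base_allocator, 4096);

    defer stacking.free();

    // View the arena through the generic io.Allocator interface.
    const allocator = stacking.as_allocator();

    // A null existing allocation asks for a fresh 128-byte buffer.
    const buffer = try allocator.reallocate(null, 128);

    coral.io.zero(buffer);
}
```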
@@ -1,44 +1,14 @@
-///
-/// Arena-based memory allocation strategies.
-///
 pub const arena = @import("./arena.zig");

-///
-/// Debug build-only utilities and sanity-checkers.
-///
 pub const debug = @import("./debug.zig");

-///
-/// Platform-agnostic data input and output operations.
-///
 pub const io = @import("./io.zig");

-///
-/// Data structures and utilities for sequential, "list-like" collections.
-///
 pub const list = @import("./list.zig");

-///
-/// Types and functions designed for mathematics in interactive media applications.
-///
+pub const map = @import("./map.zig");
 pub const math = @import("./math.zig");

-///
-/// Data structures and utilities for fragmented, "heap-like" collections.
-///
-pub const slab = @import("./slab.zig");
-
-///
-/// Data structures and utilities for the highly-specialized "slotmap" collection.
-///
-pub const slots = @import("./slots.zig");
-
-///
-/// Data structures and utilities for associative, "table-like" collections.
-///
-pub const table = @import("./table.zig");
-
-///
-/// Converters, parsers, and validators for sequences of bytes treated as UTF8 unicode strings.
-///
 pub const utf8 = @import("./utf8.zig");
@@ -1,8 +1,4 @@
-///
-/// Active code comment to assert that `condition` should always be true.
-///
-/// Safety-checked behavior is invoked where `condition` evaluates to false.
-///
 pub fn assert(condition: bool) void {
 if (!condition) {
 unreachable;
@@ -6,23 +6,105 @@ pub const AllocationError = error {
 OutOfMemory,
 };

-pub const AllocationOptions = struct {
-return_address: usize,
-allocation: ?[]u8 = null,
-size: usize,
+pub const Allocator = struct {
+context: *anyopaque,
+
+actions: *const struct {
+deallocate: *const fn (context: *anyopaque, allocation: []Byte) void,
+reallocate: *const fn (context: *anyopaque, return_address: usize, existing_allocation: ?[]Byte, size: usize) AllocationError![]Byte,
+},
+
+pub fn Actions(comptime State: type) type {
+return struct {
+deallocate: fn (state: *State, allocation: []Byte) void,
+reallocate: fn (state: *State, return_address: usize, existing_allocation: ?[]Byte, size: usize) AllocationError![]Byte,
+};
+}
+
+pub fn bind(comptime State: type, state: *State, comptime actions: Actions(State)) Allocator {
+const is_zero_aligned = @alignOf(State) == 0;
+
+return .{
+.context = if (is_zero_aligned) state else @ptrCast(state),
+
+.actions = &.{
+.deallocate = struct {
+fn deallocate(context: *anyopaque, allocation: []Byte) void {
+if (is_zero_aligned) {
+return actions.deallocator(@ptrCast(context), allocation);
+}
+
+return actions.deallocate(@ptrCast(@alignCast(context)), allocation);
+}
+}.deallocate,
+
+.reallocate = struct {
+fn reallocate(context: *anyopaque, return_address: usize, existing_allocation: ?[]Byte, size: usize) AllocationError![]Byte {
+if (is_zero_aligned) {
+return actions.reallocator(@ptrCast(context), return_address, existing_allocation, size);
+}
+
+return actions.reallocate(@ptrCast(@alignCast(context)), return_address, existing_allocation, size);
+}
+}.reallocate,
+}
+};
+}
+
+pub fn deallocate(self: Allocator, allocation: anytype) void {
+switch (@typeInfo(@TypeOf(allocation))) {
+.Pointer => |pointer| {
+self.actions.deallocate(self.context, switch (pointer.size) {
+.One => @as([*]Byte, @ptrCast(allocation))[0 .. @sizeOf(pointer.child)],
+.Slice => @as([*]Byte, @ptrCast(allocation.ptr))[0 .. (@sizeOf(pointer.child) * allocation.len)],
+.Many, .C => @compileError("length of allocation must be known to deallocate"),
+});
+},
+
+else => @compileError("cannot deallocate " ++ allocation),
+}
+}
+
+pub fn reallocate(self: Allocator, allocation: ?[]Byte, allocation_size: usize) AllocationError![]Byte {
+return self.actions.reallocate(self.context, @returnAddress(), allocation, allocation_size);
+}
 };

-pub const Allocator = Generator(?[]u8, AllocationOptions);
-
-///
-///
-///
 pub const Byte = u8;

-///
-/// Function pointer coupled with an immutable state context for providing dynamic dispatch over a given `Input` and
-/// `Output`.
-///
+pub const FixedBuffer = struct {
+bytes: []Byte,
+
+pub fn as_writer(self: *FixedBuffer) Writer {
+return Writer.bind(FixedBuffer, self, struct {
+fn write(writable_memory: *FixedBuffer, data: []const Byte) ?usize {
+return writable_memory.write(data);
+}
+}.write);
+}
+
+pub fn put(self: *FixedBuffer, byte: Byte) bool {
+if (self.bytes.len == 0) {
+return false;
+}
+
+self.bytes[0] = byte;
+self.bytes = self.bytes[1 ..];
+
+return true;
+}
+
+pub fn write(self: *FixedBuffer, bytes: []const Byte) usize {
+const writable = math.min(self.bytes.len, bytes.len);
+
+copy(self.bytes, bytes);
+
+self.bytes = self.bytes[writable ..];
+
+return writable;
+}
+};
+
 pub fn Functor(comptime Output: type, comptime Input: type) type {
 return struct {
 context: *const anyopaque,
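The `Allocator` rewrite above trades the old `Generator(?[]u8, AllocationOptions)` alias for an explicit context pointer plus a small vtable filled in by `bind`. A sketch of binding a custom state type, mirroring how `arena.Stacking.as_allocator` uses it in this commit; the `Counting` type and its stub behaviour are illustrative only:

```zig
const io = @import("./io.zig");

const Counting = struct {
    calls: usize = 0,

    fn deallocate(self: *Counting, allocation: []io.Byte) void {
        // Nothing to release in this stub.
        _ = self;
        _ = allocation;
    }

    fn reallocate(self: *Counting, return_address: usize, existing_allocation: ?[]io.Byte, size: usize) io.AllocationError![]io.Byte {
        _ = return_address;
        _ = existing_allocation;
        _ = size;

        self.calls += 1;

        // A real implementation would return usable memory here.
        return error.OutOfMemory;
    }
};

fn allocator_example() io.AllocationError!void {
    var counting = Counting{};

    // bind packs the state pointer and the comptime-known actions into the vtable form.
    const allocator = io.Allocator.bind(Counting, &counting, .{
        .deallocate = Counting.deallocate,
        .reallocate = Counting.reallocate,
    });

    // Fails here because the stub never hands out memory, but it shows the call shape.
    _ = try allocator.reallocate(null, 64);
}
```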
@@ -31,21 +113,20 @@ pub fn Functor(comptime Output: type, comptime Input: type) type {
 const Self = @This();

 pub fn bind(comptime State: type, state: *const State, comptime invoker: fn (capture: *const State, input: Input) Output) Self {
-const alignment = @alignOf(State);
-const is_zero_aligned = alignment == 0;
+const is_zero_aligned = @alignOf(State) == 0;

 return .{
-.context = if (is_zero_aligned) state else @ptrCast(*const anyopaque, state),
+.context = if (is_zero_aligned) state else @ptrCast(state),

 .invoker = struct {
-fn invoke_opaque(context: *const anyopaque, input: Input) Output {
+fn invoke(context: *const anyopaque, input: Input) Output {
 if (is_zero_aligned) {
-return invoker(@ptrCast(*const State, context), input);
+return invoker(@ptrCast(context), input);
 }

-return invoker(@ptrCast(*const State, @alignCast(alignment, context)), input);
+return invoker(@ptrCast(@alignCast(context)), input);
 }
-}.invoke_opaque,
+}.invoke,
 };
 }
@@ -55,10 +136,6 @@ pub fn Functor(comptime Output: type, comptime Input: type) type {
 };
 }

-///
-/// Function pointer coupled with a mutable state context for providing dynamic dispatch over a given `Input` and
-/// `Output`.
-///
 pub fn Generator(comptime Output: type, comptime Input: type) type {
 return struct {
 context: *anyopaque,
@@ -67,21 +144,20 @@ pub fn Generator(comptime Output: type, comptime Input: type) type {
 const Self = @This();

 pub fn bind(comptime State: type, state: *State, comptime invoker: fn (capture: *State, input: Input) Output) Self {
-const alignment = @alignOf(State);
-const is_zero_aligned = alignment == 0;
+const is_zero_aligned = @alignOf(State) == 0;

 return .{
-.context = if (is_zero_aligned) state else @ptrCast(*anyopaque, state),
+.context = if (is_zero_aligned) state else @ptrCast(state),

 .invoker = struct {
-fn invoke_opaque(context: *anyopaque, input: Input) Output {
+fn invoke(context: *anyopaque, input: Input) Output {
 if (is_zero_aligned) {
-return invoker(@ptrCast(*State, context), input);
+return invoker(@ptrCast(context), input);
 }

-return invoker(@ptrCast(*State, @alignCast(alignment, context)), input);
+return invoker(@ptrCast(@alignCast(context)), input);
 }
-}.invoke_opaque,
+}.invoke,
 };
 }
@@ -91,13 +167,6 @@ pub fn Generator(comptime Output: type, comptime Input: type) type {
 };
 }

-pub const Reader = Generator(?usize, []u8);
-
-pub const StreamError = error {
-ReadFailure,
-WriteFailure,
-};
-
 pub fn Tag(comptime Element: type) type {
 return switch (@typeInfo(Element)) {
 .Enum => |info| info.tag_type,
@@ -106,136 +175,80 @@ pub fn Tag(comptime Element: type) type {
 };
 }

-pub const FixedBuffer = struct {
-slice: []u8,
-
-pub fn as_writer(self: *FixedBuffer) Writer {
-return Writer.bind(FixedBuffer, self, struct {
-fn write(writable_memory: *FixedBuffer, data: []const u8) ?usize {
-return writable_memory.write(data);
-}
-}.write);
-}
-
-pub fn put(self: *FixedBuffer, byte: u8) bool {
-if (self.slice.len == 0) {
-return false;
-}
-
-self.slice[0] = byte;
-self.slice = self.slice[1 ..];
-
-return true;
-}
-
-pub fn write(self: *FixedBuffer, bytes: []const u8) usize {
-const writable = math.min(self.slice.len, bytes.len);
-
-copy(self.slice, bytes);
-
-self.slice = self.slice[writable ..];
-
-return writable;
-}
-};
-
 pub const Writer = Generator(?usize, []const Byte);

-pub fn allocate_many(allocator: Allocator, amount: usize, comptime Type: type) AllocationError![]Type {
-if (@sizeOf(Type) == 0) {
-@compileError("Cannot allocate memory for 0-byte type " ++ @typeName(Type));
-}
-
-return @ptrCast([*]Type, @alignCast(@alignOf(Type), allocator.invoke(.{
-.size = @sizeOf(Type) * amount,
-.return_address = @returnAddress(),
-}) orelse return error.OutOfMemory))[0 .. amount];
+pub fn allocate_copy(allocator: Allocator, source: []const Byte) AllocationError![]Byte {
+const allocation = try allocator.actions.reallocate(allocator.context, @returnAddress(), null, source.len);
+
+copy(allocation, source);
+
+return allocation;
 }

 pub fn allocate_one(allocator: Allocator, value: anytype) AllocationError!*@TypeOf(value) {
 const Type = @TypeOf(value);
+const typeSize = @sizeOf(Type);

-if (@sizeOf(Type) == 0) {
-@compileError("Cannot allocate memory for 0-byte type " ++ @typeName(Type));
+if (typeSize == 0) {
+@compileError("Cannot allocate memory for 0-byte sized type " ++ @typeName(Type));
 }

-const allocation = @ptrCast(*Type, @alignCast(@alignOf(Type), allocator.invoke(.{
-.size = @sizeOf(Type),
-.return_address = @returnAddress(),
-}) orelse return error.OutOfMemory));
+const allocation = @as(*Type, @ptrCast(@alignCast(try allocator.actions.reallocate(
+allocator.context,
+@returnAddress(),
+null,
+typeSize))));

 allocation.* = value;

 return allocation;
 }

-pub fn bytes_of(value: anytype) []const u8 {
+pub fn bytes_of(value: anytype) []const Byte {
 const pointer_info = @typeInfo(@TypeOf(value)).Pointer;

-debug.assert(pointer_info.size == .One);
-
-return @ptrCast([*]const u8, value)[0 .. @sizeOf(pointer_info.child)];
+return switch (pointer_info.size) {
+.One => @as([*]const Byte, @ptrCast(value))[0 .. @sizeOf(pointer_info.child)],
+.Slice => @as([*]const Byte, @ptrCast(value.ptr))[0 .. @sizeOf(pointer_info.child) * value.len],
+else => @compileError("`value` must be single-element pointer or slice type"),
+};
 }

-pub fn compare(this: []const u8, that: []const u8) isize {
-const range = math.min(this.len, that.len);
+pub fn copy(target: []Byte, source: []const Byte) void {
 var index: usize = 0;

-while (index < range) : (index += 1) {
-const difference = @intCast(isize, this[index]) - @intCast(isize, that[index]);
-
-if (difference != 0) {
-return difference;
-}
-}
-
-return @intCast(isize, this.len) - @intCast(isize, that.len);
-}
-
-pub fn copy(target: []u8, source: []const u8) void {
-var index: usize = 0;
-
-while (index < source.len) : (index += 1) target[index] = source[index];
-}
-
-pub fn deallocate(allocator: Allocator, allocation: anytype) void {
-switch (@typeInfo(@TypeOf(allocation))) {
-.Pointer => |pointer| {
-_ = allocator.invoke(.{
-.allocation = switch (pointer.size) {
-.One => @ptrCast([*]u8, allocation)[0 .. @sizeOf(pointer.child)],
-.Slice => @ptrCast([*]u8, allocation.ptr)[0 .. (@sizeOf(pointer.child) * allocation.len)],
-.Many, .C => @compileError("length of allocation must be known to deallocate"),
-},
-
-.return_address = @returnAddress(),
-.size = 0,
-});
-},
-
-else => @compileError("cannot deallocate " ++ allocation),
+while (index < source.len) : (index += 1) {
+target[index] = source[index];
 }
 }

-pub fn ends_with(target: []const u8, match: []const u8) bool {
-if (target.len < match.len) return false;
+pub fn ends_with(target: []const Byte, match: []const Byte) bool {
+if (target.len < match.len) {
+return false;
+}

+{
 var index = @as(usize, 0);

 while (index < match.len) : (index += 1) {
-if (target[target.len - (1 + index)] != match[match.len - (1 + index)]) return false;
+if (target[target.len - (1 + index)] != match[match.len - (1 + index)]) {
+return false;
+}
+}
 }

 return true;
 }

-pub fn equals(this: []const u8, that: []const u8) bool {
-if (this.len != that.len) return false;
+pub fn equals(target: []const Byte, match: []const Byte) bool {
+if (target.len != match.len) {
+return false;
+}

-{
-var index: usize = 0;
-while (index < this.len) : (index += 1) if (this[index] != that[index]) return false;
+for (0 .. target.len) |index| {
+if (target[index] != match[index]) {
+return false;
+}
 }

 return true;
@@ -243,68 +256,28 @@ pub fn equals(this: []const u8, that: []const u8) bool {

 var null_context = @as(usize, 0);

-pub const null_allocator = Allocator.bind(&null_context, struct {
-fn reallocate(context: *usize, options: AllocationOptions) ?[]u8 {
-debug.assert(context.* == 0);
-debug.assert(options.allocation == null);
-
-return null;
-}
-});
-
-pub const null_writer = Writer.bind(&null_context, struct {
-fn write(context: *usize, buffer: []const u8) usize {
+pub const null_writer = Writer.bind(usize, &null_context, struct {
+fn write(context: *usize, buffer: []const u8) ?usize {
 debug.assert(context.* == 0);

 return buffer.len;
 }
 }.write);

-pub fn reallocate(allocator: Allocator, allocation: anytype, amount: usize) AllocationError![]@typeInfo(@TypeOf(allocation)).Pointer.child {
-const pointer_info = @typeInfo(@TypeOf(allocation)).Pointer;
-const Element = pointer_info.child;
-
-return @ptrCast([*]Element, @alignCast(@alignOf(Element), (allocator.invoke(switch (pointer_info.size) {
-.Slice => .{
-.allocation = @ptrCast([*]u8, allocation.ptr)[0 .. (@sizeOf(Element) * allocation.len)],
-.size = @sizeOf(Element) * amount,
-},
-
-.Many, .C, .One => @compileError("allocation must be a slice to reallocate"),
-}) orelse return error.OutOfMemory).ptr))[0 .. amount];
-}
-
-pub fn sentinel_index(comptime element: type, comptime sentinel: element, sequence: [*:sentinel]const element) usize {
-var index: usize = 0;
-
-while (sequence[index] != sentinel) : (index += 1) {}
-
-return index;
-}
-
-pub fn stream(output: Writer, input: Reader, buffer: []u8) StreamError!u64 {
-var total_written: u64 = 0;
-var read = input.invoke(buffer) orelse return error.ReadFailure;
-
-while (read != 0) {
-total_written += output.invoke(buffer[0..read]) orelse return error.WriteFailure;
-read = input.invoke(buffer) orelse return error.ReadFailure;
+pub fn slice_sentineled(comptime sen: anytype, ptr: [*:sen]const @TypeOf(sen)) [:sen]const @TypeOf(sen) {
+var len = @as(usize, 0);
+
+while (ptr[len] != sen) {
+len += 1;
 }

-return total_written;
-}
-
-pub fn swap(comptime Element: type, this: *Element, that: *Element) void {
-const temp = this.*;
-
-this.* = that.*;
-that.* = temp;
+return ptr[0 .. len:sen];
 }

 pub fn tag_of(comptime value: anytype) Tag(@TypeOf(value)) {
 return @as(Tag(@TypeOf(value)), value);
 }

-pub fn zero(target: []u8) void {
+pub fn zero(target: []Byte) void {
 for (target) |*t| t.* = 0;
 }
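`slice_sentineled` above replaces the removed `sentinel_index` helper and hands back a sentinel-terminated slice directly. A brief sketch (the literal is only an example value):

```zig
const io = @import("./io.zig");

fn sentinel_example() void {
    const name: [*:0]const u8 = "ona";

    // The resulting slice has length 3 and keeps the 0 sentinel in its type.
    const slice = io.slice_sentineled(@as(u8, 0), name);

    _ = slice;
}
```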
@@ -1,97 +1,54 @@
-const debug = @import("./debug.zig");
-
 const io = @import("./io.zig");

 const math = @import("./math.zig");

-///
-/// Returns a dynamically sized stack capable of holding `Value`.
-///
+pub const ByteStack = Stack(io.Byte);
+
 pub fn Stack(comptime Value: type) type {
 return struct {
 allocator: io.Allocator,
-capacity: usize = 0,
-values: []Value = &.{},
+capacity: usize,
+values: []Value,

-///
-/// Stack type.
-///
 const Self = @This();

-///
-/// Clears all elements from `self` while preserving the current internal buffer.
-///
-/// To clean up memory allocations made by the stack and deinitialize it, see [deinit] instead.
-///
 pub fn clear(self: *Self) void {
 self.values = self.values[0 .. 0];
 }

-///
-/// Deinitializes `self` and sets it to an invalid state, freeing all memory allocated by `allocator`.
-///
-/// To clear all items from the stack while preserving the current internal buffer, see [clear] instead.
-///
-/// *Note* if the `capacity` field of `self` is a non-zero value, `allocator` must reference the same allocation
-/// strategy as the one originally used to allocate the current internal buffer.
-///
-pub fn deinit(self: *Self) void {
+pub fn free(self: *Self) void {
 if (self.capacity == 0) {
 return;
 }

-io.deallocate(self.allocator, self.values.ptr[0 .. self.capacity]);
+self.allocator.deallocate(self.values.ptr[0 .. self.capacity]);

 self.values = &.{};
-self.capacity = 0;
 }

-///
-/// Attempts to remove `amount` number of `Value`s from the stack, returning `bool` if it was successful,
-/// otherwise `false` if the stack contains fewer elements than `amount`.
-///
-pub fn drop(self: *Self, amount: usize) bool {
-if (amount > self.values.len) {
-return false;
-}
-
-self.values = self.values[0 .. self.values.len - amount];
-
-return true;
-}
-
-///
-/// Attempts to grow the internal buffer of `self` by `growth_amount` using `allocator`.
-///
-/// The function returns [io.AllocatorError] if `allocator` could not commit the memory required to grow the
-/// internal buffer by `growth_amount`, leaving `self` in the same state that it was in prior to starting the
-/// grow.
-///
-/// Growing ahead of multiple push operations is useful when the upper bound of pushes is well-understood, as it
-/// can reduce the number of allocations required per push.
-///
 pub fn grow(self: *Self, growth_amount: usize) io.AllocationError!void {
 const grown_capacity = self.capacity + growth_amount;
-const values = (try io.allocate_many(self.allocator, grown_capacity, Value))[0 .. self.values.len];
+const buffer = try self.allocator.reallocate(null, @sizeOf(Value) * grown_capacity);

-errdefer io.deallocate(self.allocator, values);
+errdefer self.allocator.deallocate(buffer);

 if (self.capacity != 0) {
-for (0 .. self.values.len) |index| {
-values[index] = self.values[index];
+io.copy(buffer, io.bytes_of(self.values));
+self.allocator.deallocate(self.values.ptr[0 .. self.capacity]);
 }

-io.deallocate(self.allocator, self.values.ptr[0 .. self.capacity]);
-}
-
-self.values = values;
+self.values = @as([*]Value, @ptrCast(@alignCast(buffer)))[0 .. self.values.len];
 self.capacity = grown_capacity;
 }

-///
-/// Attempts to remove the last element of `self` that was inserted, if one exists, returning it or `null` if
-/// `self` is empty.
-///
+pub fn make(allocator: io.Allocator) Self {
+return .{
+.allocator = allocator,
+.capacity = 0,
+.values = &.{},
+};
+}
+
 pub fn pop(self: *Self) ?Value {
 if (self.values.len == 0) {
 return null;
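Because `capacity` and `values` no longer carry default values, a `Stack` is now built with `make` and released with `free` rather than struct-literal initialisation and `deinit`. A short sketch of that lifecycle (the wrapper function and its allocator parameter are placeholders):

```zig
const io = @import("./io.zig");
const list = @import("./list.zig");

fn stack_example(allocator: io.Allocator) io.AllocationError!void {
    // make only records the allocator; no memory is requested until the first grow.
    var stack = list.Stack(u32).make(allocator);

    defer stack.free();

    try stack.push_one(1);
    try stack.push_one(2);

    // pop returns null once the stack is empty.
    while (stack.pop()) |value| {
        _ = value;
    }
}
```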
@@ -104,59 +61,6 @@ pub fn Stack(comptime Value: type) type {
 return self.values[last_index];
 }

-///
-/// Attempts to push every `Value` in `values` to `self` using `allocator` to grow the internal buffer as
-/// necessary.
-///
-/// The function returns [io.AllocationError] if `allocator` could not commit the memory required to grow the
-/// internal buffer of `self` when necessary.
-///
-pub fn push_all(self: *Self, values: []const Value) io.AllocationError!void {
-const new_length = self.values.len + values.len;
-
-if (new_length > self.capacity) {
-try self.grow(values.len + values.len);
-}
-
-const offset_index = self.values.len;
-
-self.values = self.values.ptr[0 .. new_length];
-
-for (0 .. values.len) |index| {
-self.values[offset_index + index] = values[index];
-}
-}
-
-///
-/// Attempts to push the `Value` in `value` to `self` by `amount` number of times using `allocator` to grow
-/// the internal buffer as necessary.
-///
-/// The function returns [io.AllocationError] if `allocator` could not commit the memory required to grow the
-/// internal buffer of `self` when necessary.
-///
-pub fn push_many(self: *Self, value: Value, amount: usize) io.AllocationError!void {
-const new_length = self.values.len + amount;
-
-if (new_length >= self.capacity) {
-try self.grow(amount + amount);
-}
-
-const offset_index = self.values.len;
-
-self.values = self.values.ptr[0 .. new_length];
-
-for (0 .. amount) |index| {
-self.values[offset_index + index] = value;
-}
-}
-
-///
-/// Attempts to push the `Value` in `value` to `self` using `allocator` to grow the internal buffer as
-/// necessary.
-///
-/// The function returns [io.AllocationError] if `allocator` could not commit the memory required to grow the
-/// internal buffer of `self` when necessary.
-///
 pub fn push_one(self: *Self, value: Value) io.AllocationError!void {
 if (self.values.len == self.capacity) {
 try self.grow(math.max(1, self.capacity));
@@ -170,21 +74,3 @@ pub fn Stack(comptime Value: type) type {
 }
 };
 }
-
-///
-///
-///
-pub const ByteStack = Stack(io.Byte);
-
-///
-/// Returns a [io.Writer] instance that binds a reference of `self` to the [write] operation.
-///
-pub fn stack_as_writer(self: *ByteStack) io.Writer {
-return io.Writer.bind(ByteStack, self, struct {
-fn write(stack: *ByteStack, bytes: []const io.Byte) ?usize {
-stack.push_all(bytes) catch return null;
-
-return bytes.len;
-}
-}.write);
-}
@@ -0,0 +1,304 @@
+const debug = @import("./debug.zig");
+
+const io = @import("./io.zig");
+
+const list = @import("./list.zig");
+
+const math = @import("./math.zig");
+
+pub fn Slab(comptime Value: type) type {
+return struct {
+next_index: usize,
+entries: EntryList,
+
+const EntryList = list.Stack(union (enum) {
+value: Value,
+next_index: usize,
+});
+
+const Self = @This();
+
+pub fn lookup(self: Self, key: usize) ?Value {
+if (key == 0 or key > self.entries.values.len) {
+return null;
+}
+
+return switch (self.entries.values[key - 1]) {
+.value => |value| value,
+.next_index => null,
+};
+}
+
+pub fn free(self: *Self) void {
+self.entries.free();
+
+self.next_index = 0;
+}
+
+pub fn insert(self: *Self, value: Value) io.AllocationError!usize {
+if (self.next_index < self.entries.values.len) {
+const index = self.next_index;
+const entry = &self.entries.values[index];
+
+debug.assert(entry.* == .next_index);
+
+self.next_index = entry.next_index;
+entry.* = .{.value = value};
+
+return index + 1;
+} else {
+try self.entries.push_one(.{.value = value});
+
+self.next_index += 1;
+
+return self.next_index;
+}
+}
+
+pub fn make(allocator: io.Allocator) Self {
+return .{
+.next_index = 0,
+.entries = EntryList.make(allocator),
+};
+}
+
+pub fn remove(self: *Self, key: usize) ?Value {
+if (key == 0 or key > self.entries.values.len) {
+return null;
+}
+
+const index = key - 1;
+const entry = &self.entries.values[index];
+
+return switch (entry.*) {
+.next_index => null,
+
+.value => get_value: {
+const value = entry.value;
+
+entry.* = .{.next_index = self.next_index};
+self.next_index = index;
+
+break: get_value value;
+},
+};
+}
+};
+}
+
+pub fn Table(comptime Key: type, comptime Value: type, comptime traits: TableTraits(Key)) type {
+const load_max = 0.75;
+
+return struct {
+allocator: io.Allocator,
+count: usize,
+entries: []?Entry,
+
+pub const Entry = struct {
+key: Key,
+value: Value,
+
+fn write_into(self: Entry, entry_table: []?Entry) bool {
+const hash_max = math.min(math.max_int(@typeInfo(usize).Int), entry_table.len);
+var hashed_key = math.wrap(traits.hash(self.key), math.min_int(@typeInfo(usize).Int), hash_max);
+var iterations = @as(usize, 0);
+
+while (true) : (iterations += 1) {
+debug.assert(iterations < entry_table.len);
+
+const table_entry = &(entry_table[hashed_key] orelse {
+entry_table[hashed_key] = .{
+.key = self.key,
+.value = self.value,
+};
+
+return true;
+});
+
+if (traits.match(table_entry.key, self.key)) {
+return false;
+}
+
+hashed_key = (hashed_key +% 1) % hash_max;
+}
+}
+};
+
+pub const Iterable = struct {
+table: *Self,
+iterations: usize = 0,
+
+pub fn next(self: *Iterable) ?Entry {
+while (self.iterations < self.table.entries.len) {
+defer self.iterations += 1;
+
+if (self.table.entries[self.iterations]) |entry| {
+return entry;
+}
+}
+
+return null;
+}
+};
+
+const Self = @This();
+
+pub fn replace(self: *Self, key: Key, value: Value) io.AllocationError!?Entry {
+try self.rehash(load_max);
+
+debug.assert(self.entries.len > self.count);
+
+{
+const hash_max = math.min(math.max_int(@typeInfo(usize).Int), self.entries.len);
+var hashed_key = math.wrap(traits.hash(key), math.min_int(@typeInfo(usize).Int), hash_max);
+
+while (true) {
+const entry = &(self.entries[hashed_key] orelse {
+self.entries[hashed_key] = .{
+.key = key,
+.value = value,
+};
+
+self.count += 1;
+
+return null;
+});
+
+if (traits.match(entry.key, key)) {
+const original_entry = entry.*;
+
+entry.* = .{
+.key = key,
+.value = value,
+};
+
+return original_entry;
+}
+
+hashed_key = (hashed_key +% 1) % hash_max;
+}
+}
+}
+
+pub fn calculate_load_factor(self: Self) f32 {
+return if (self.entries.len == 0) 1 else @as(f32, @floatFromInt(self.count)) / @as(f32, @floatFromInt(self.entries.len));
+}
+
+pub fn clear(self: *Self) void {
+for (self.entries) |*entry| {
+entry.* = null;
+}
+
+self.count = 0;
+}
+
+pub fn free(self: *Self) void {
+if (self.entries.len == 0) {
+return;
+}
+
+self.allocator.deallocate(self.entries);
+
+self.entries = &.{};
+self.count = 0;
+}
+
+pub fn insert(self: *Self, key: Key, value: Value) io.AllocationError!bool {
+try self.rehash(load_max);
+
+debug.assert(self.entries.len > self.count);
+
+defer self.count += 1;
+
+const entry = Entry{
+.key = key,
+.value = value,
+};
+
+return entry.write_into(self.entries);
+}
+
+pub fn lookup(self: Self, key: Key) ?Value {
+if (self.count == 0) {
+return null;
+}
+
+const hash_max = math.min(math.max_int(@typeInfo(usize).Int), self.entries.len);
+var hashed_key = math.wrap(traits.hash(key), math.min_int(@typeInfo(usize).Int), hash_max);
+var iterations = @as(usize, 0);
+
+while (iterations < self.count) : (iterations += 1) {
+const entry = &(self.entries[hashed_key] orelse return null);
+
+if (traits.match(entry.key, key)) {
+return entry.value;
+}
+
+hashed_key = (hashed_key +% 1) % hash_max;
+}
+
+return null;
+}
+
+pub fn make(allocator: io.Allocator) Self {
+return .{
+.allocator = allocator,
+.count = 0,
+.entries = &.{},
+};
+}
+
+pub fn rehash(self: *Self, max_load: f32) io.AllocationError!void {
+if (self.calculate_load_factor() <= max_load) {
+return;
+}
+
+const min_count = math.max(1, self.count);
+const table_size = min_count * 2;
+const allocation = @as([*]?Entry, @ptrCast(@alignCast(try self.allocator.reallocate(null, @sizeOf(?Entry) * table_size))))[0 .. table_size];
+
+errdefer self.allocator.deallocate(allocation);
+
+self.entries = replace_table: {
+for (allocation) |*entry| {
+entry.* = null;
+}
+
+if (self.entries.len != 0) {
+for (self.entries) |maybe_entry| {
+if (maybe_entry) |entry| {
+debug.assert(entry.write_into(allocation));
+}
+}
+
+self.allocator.deallocate(self.entries);
+}
+
+break: replace_table allocation;
+};
+}
+};
+}
+
+pub fn TableTraits(comptime Key: type) type {
+return struct {
+hash: fn (key: Key) usize,
+match: fn (key: Key, key: Key) bool,
+};
+}
+
+pub const string_table_traits = TableTraits([]const io.Byte){
+.hash = struct {
+fn hash(key: []const io.Byte) usize {
+var hash_code = @as(usize, 5381);
+
+for (key) |byte| {
+hash_code = ((hash_code << 5) + hash_code) + byte;
+}
+
+return hash_code;
+}
+}.hash,
+
+.match = io.equals,
+};
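The new `map` module bundles the `Slab` free-list container and an open-addressed `Table` parameterised by `TableTraits`. A usage sketch for the table with the provided string traits (the wrapper function and allocator argument are illustrative):

```zig
const io = @import("./io.zig");
const map = @import("./map.zig");

fn table_example(allocator: io.Allocator) io.AllocationError!void {
    // Keys are byte slices hashed by the djb2-style hasher in string_table_traits.
    var table = map.Table([]const io.Byte, u32, map.string_table_traits).make(allocator);

    defer table.free();

    // insert reports false when the key is already present.
    _ = try table.insert("width", 1280);
    _ = try table.insert("height", 800);

    if (table.lookup("width")) |value| {
        _ = value;
    }
}
```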
@@ -1,147 +1,21 @@
 const std = @import("std");

-///
-/// Errors that may occur during checked integer arithmetic operations.
-///
-pub const CheckedArithmeticError = error {
-IntOverflow,
-};
-
-///
-/// Returns the float type described by `float`.
-///
-pub fn Float(comptime float: std.builtin.Type.Float) type {
-return @Type(.{.Float = float});
-}
-
-///
-/// Returns the integer type described by `int`.
-///
-pub fn Int(comptime int: std.builtin.Type.Int) type {
-return @Type(.{.Int = int});
-}
-
-///
-/// Two-dimensional vector type.
-///
-pub const Vector2 = extern struct {
-x: f32,
-y: f32,
-
-///
-/// A [Vector2] with a value of `0` assigned to all of the components.
-///
-pub const zero = Vector2{.x = 0, .y = 0};
-};
-
-///
-/// Attempts to perform a checked addition between `a` and `b`, returning the result or [CheckedArithmeticError] if the
-/// operation tried to invoke safety-checked behavior.
-///
-/// `checked_add` can be seen as an alternative to the language-native addition operator (+) that exposes the safety-
-/// checked behavior in the form of an error type that may be caught or tried on.
-///
-pub fn checked_add(a: anytype, b: anytype) CheckedArithmeticError!@TypeOf(a + b) {
-const result = @addWithOverflow(a, b);
-
-if (result.@"1" != 0) {
-return error.IntOverflow;
-}
-
-return result.@"0";
-}
-
-///
-/// Attempts to perform a checked integer cast to the type expressed by `int` on `value`, returning the result or
-/// [CheckedArithmeticError] if the operation tried to invoke safety-checked behavior.
-///
-/// `checked_cast` can be seen as an alternative to the language-native `@intCast` builtin that exposes the safety-
-/// checked behavior in the form of an error type that may be caught or tried on.
-///
-pub fn checked_cast(comptime int: std.builtin.Type.Int, value: anytype) CheckedArithmeticError!Int(int) {
-if ((value < min_int(int)) or (value > max_int(int))) {
-return error.IntOverflow;
-}
-
-return @intCast(Int(int), value);
-}
-
-///
-/// Attempts to perform a checked multiplication between `a` and `b`, returning the result or [CheckedArithmeticError]
-/// if the operation tried to invoke safety-checked behavior.
-///
-/// `checked_mul` can be seen as an alternative to the language-native multiplication operator (*) that exposes the
-/// safety-checked behavior in the form of an error type that may be caught or tried on.
-///
-pub fn checked_mul(a: anytype, b: anytype) CheckedArithmeticError!@TypeOf(a * b) {
-const result = @mulWithOverflow(a, b);
-
-if (result.@"1" != 0) {
-return error.IntOverflow;
-}
-
-return result.@"0";
-}
-
-///
-/// Attempts to perform a checked subtraction between `a` and `b`, returning the result or [CheckedArithmeticError] if
-/// the operation tried to invoke safety-checked behavior.
-///
-/// `checked_sub` can be seen as an alternative to the language-native subtraction operator (-) that exposes the safety-
-/// checked behavior in the form of an error type that may be caught or tried on.
-///
-pub fn checked_sub(a: anytype, b: anytype) CheckedArithmeticError!@TypeOf(a - b) {
-const result = @subWithOverflow(a, b);
-
-if (result.@"1" != 0) {
-return error.IntOverflow;
-}
-
-return result.@"0";
-}
-
-///
-/// Returns `value` clamped between the inclusive bounds of `lower` and `upper`.
-///
-pub fn clamp(value: anytype, lower: anytype, upper: anytype) @TypeOf(value, lower, upper) {
-return max(lower, min(upper, value));
-}
-
-///
-/// Returns `true` if `value` is clamped within the inclusive bounds of `lower` and `upper`.
-///
-pub fn is_clamped(value: anytype, lower: anytype, upper: anytype) bool {
-return (value >= lower) and (value <= upper);
-}
-
-///
-/// Returns the maximum value between `a` and `b`.
-///
 pub fn max(a: anytype, b: anytype) @TypeOf(a, b) {
 return @max(a, b);
 }

-///
-/// Returns the maximum value that the integer described by `int` may express.
-///
 pub fn max_int(comptime int: std.builtin.Type.Int) comptime_int {
 const bit_count = int.bits;

 if (bit_count == 0) return 0;

-return (1 << (bit_count - @boolToInt(int.signedness == .signed))) - 1;
+return (1 << (bit_count - @intFromBool(int.signedness == .signed))) - 1;
 }

-///
-/// Returns the minimum value between `a` and `b`.
-///
 pub fn min(a: anytype, b: anytype) @TypeOf(a, b) {
 return @min(a, b);
 }

-///
-/// Returns the minimum value that the integer described by `int` may express.
-///
 pub fn min_int(comptime int: std.builtin.Type.Int) comptime_int {
 if (int.signedness == .unsigned) return 0;
@ -152,9 +26,6 @@ pub fn min_int(comptime int: std.builtin.Type.Int) comptime_int {
|
||||||
return -(1 << (bit_count - 1));
|
return -(1 << (bit_count - 1));
|
||||||
}
|
}
|
||||||
|
|
||||||
///
|
|
||||||
/// Returns `value` wrapped around the inclusive bounds of `lower` and `upper`.
|
|
||||||
///
|
|
||||||
pub fn wrap(value: anytype, lower: anytype, upper: anytype) @TypeOf(value, lower, upper) {
|
pub fn wrap(value: anytype, lower: anytype, upper: anytype) @TypeOf(value, lower, upper) {
|
||||||
const range = upper - lower;
|
const range = upper - lower;
|
||||||
|
|
||||||
|
|
|
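
The hunk cuts off inside `wrap`, but the helpers that survive are small enough that a hypothetical test (not part of this commit; written against the assumed `coral.math` import) documents their contracts:

const std = @import("std");
const math = @import("coral").math;

test "integer bounds helpers" {
	// max/min simply defer to the language builtins.
	try std.testing.expect(math.max(3, 7) == 7);
	try std.testing.expect(math.min(3, 7) == 3);

	// max_int/min_int derive the representable range from a std.builtin.Type.Int description.
	try std.testing.expect(math.max_int(@typeInfo(u8).Int) == 255);
	try std.testing.expect(math.min_int(@typeInfo(i8).Int) == -128);
}
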
@@ -1,178 +0,0 @@
const debug = @import("./debug.zig");

const io = @import("./io.zig");

const math = @import("./math.zig");

const std = @import("std");

///
/// Addressable mapping of integers described by `index_int` to values of type `Value`.
///
/// Slab maps are similar to slot maps in that they have O(1) insertion and removal, however, use a flat table layout
/// instead of parallel arrays. This reduces memory usage in some cases and can be useful for data that does not need to
/// be quickly iterated over, as values ordering is not guaranteed.
///
/// *Note* `index_int` values may be as big or as small as desired per the use-case of the consumer, however, integers
/// smaller than `usize` may result in the map reporting it is out of memory due to exhausting the addressable space
/// provided by the integer.
///
pub fn Map(comptime index_int: std.builtin.Type.Int, comptime Value: type) type {
	return struct {
		allocator: io.Allocator,
		free_index: Index = 0,
		count: Index = 0,
		table: []Entry = &.{},

		///
		/// Table entry which may either store an inserted value or an index to the next free entry in the table.
		///
		const Entry = union (enum) {
			free_index: Index,
			value: Value,
		};

		///
		/// Used for indexing into the slab map.
		///
		const Index = math.Int(index_int);

		///
		/// Slab map type.
		///
		const Self = @This();

		///
		/// Overwrites the value referenced by `index` in `self`.
		///
		pub fn assign(self: *Self, index: Index, value: Value) void {
			const entry = &self.table[index];

			debug.assert(entry.* == .value);

			entry.value = value;
		}

		///
		/// Deinitializes `self` and sets it to an invalid state, freeing all memory allocated by `allocator`.
		///
		/// *Note* if the `table` field of `self` is an allocated slice, `allocator` must reference the same allocation
		/// strategy as the one originally used to allocate the current table.
		///
		pub fn deinit(self: *Self, allocator: io.Allocator) void {
			if (self.table.len == 0) {
				return;
			}

			io.deallocate(allocator, self.table);

			self.table = &.{};
			self.count = 0;
			self.free_index = 0;
		}

		///
		/// Fetches the value referenced by `index` in `self`, returning it.
		///
		pub fn fetch(self: *Self, index: Index) *Value {
			const entry = &self.table[index];

			debug.assert(entry.* == .value);

			return &entry.value;
		}

		///
		/// Attempts to grow the internal buffer of `self` by `growth_amount` using `allocator`.
		///
		/// The function returns [io.AllocationError] if `allocator` could not commit the memory required to grow the
		/// table by `growth_amount`, leaving `self` in the same state that it was in prior to starting the grow.
		///
		/// Growing ahead of multiple insertion operations is useful when the upper bound of insertions is well-
		/// understood, as it can reduce the number of allocations required per insertion.
		///
		/// *Note* if the `table` field of `self` is an allocated slice, `allocator` must reference the same allocation
		/// strategy as the one originally used to allocate the current table.
		///
		pub fn grow(self: *Self, allocator: io.Allocator, growth_amount: usize) io.AllocationError!void {
			const grown_capacity = self.table.len + growth_amount;
			const entries = try io.allocate_many(allocator, grown_capacity, Entry);

			errdefer io.deallocate(allocator, entries);

			if (self.table.len != 0) {
				for (0 .. self.table.len) |index| {
					entries[index] = self.table[index];
				}

				for (self.table.len .. entries.len) |index| {
					entries[index] = .{.free_index = 0};
				}

				io.deallocate(allocator, self.table);
			}

			self.table = entries;
		}

		///
		/// Attempts to insert `value` into `self` as a new entry using `allocator` as the allocation strategy,
		/// returning an index value representing a reference to the inserted value that may be queried through `self`
		/// after.
		///
		/// The function returns [io.AllocationError] if `allocator` could not commit the memory required to grow the
		/// internal buffer of `self` when necessary.
		///
		/// *Note* if the `table` field of `self` is an allocated slice, `allocator` must reference the same allocation
		/// strategy as the one originally used to allocate the current table.
		///
		pub fn insert(self: *Self, value: Value) io.AllocationError!Index {
			if (self.count == self.table.len) {
				try self.grow(self.allocator, math.max(1, self.count));
			}

			if (self.free_index == self.count) {
				const entry_index = self.count;
				const entry = &self.table[entry_index];

				entry.* = .{.value = value};

				self.count += 1;
				self.free_index += 1;

				return entry_index;
			}

			const entry_index = self.free_index;
			const entry = &self.table[self.free_index];

			debug.assert(entry.* == .free_index);

			self.count += 1;
			self.free_index = entry.free_index;
			entry.* = .{.value = value};

			return entry_index;
		}

		///
		/// Returns `true` if `self` contains no values, otherwise `false`.
		///
		pub fn is_empty(self: Self) bool {
			return self.count == 0;
		}

		///
		/// Removes the value referenced by `index` from `self`.
		///
		pub fn remove(self: *Self, index: Index) void {
			const entry = &self.table[index];

			debug.assert(entry.* == .value);

			self.count -= 1;
			entry.* = .{.free_index = self.free_index};
			self.free_index = index;
		}
	};
}
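
Even though this slab map is removed wholesale here, a minimal usage sketch (hypothetical; the `slab.zig` file name and `coral` import are assumptions, not shown in the diff) makes the table-backed freelist design clearer:

const coral = @import("coral");
const slab = @import("./slab.zig");

fn slab_example(allocator: coral.io.Allocator) !void {
	// The index integer type is described via std.builtin.Type.Int.
	var map = slab.Map(@typeInfo(u32).Int, f32){.allocator = allocator};

	defer map.deinit(allocator);

	// Inserting returns an index; removing pushes that index back onto the internal freelist.
	const index = try map.insert(1.5);

	map.assign(index, 2.5);
	map.remove(index);
}
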
@@ -1,236 +0,0 @@
const debug = @import("./debug.zig");

const io = @import("./io.zig");

const math = @import("./math.zig");

///
/// Returns a dense mapping of slots that may store `Element`s indexable by a [Slot], where `key` defines how many bits
/// the [Slot] used is made from.
///
pub fn Map(comptime key: Key, comptime Element: type) type {
	const KeySlot = Slot(key);
	const Index = math.Unsigned(key.index_bits);

	return struct {
		capacity: usize,
		values: []Element,
		slots: [*]KeySlot,
		erase: [*]Index,
		next_free: Index,

		const Self = @This();

		///
		/// Clears all elements from the slots in `self`.
		///
		/// *Note* that clearing the slots is not the same as deinitializing them, as it does not deallocate any memory
		/// that has already been allocated to the slots structure.
		///
		pub fn clear(self: *Self) void {
			self.next_free = 0;
			self.values = self.values[0 .. 0];

			{
				var index = @as(usize, 0);

				while (index < self.capacity) : (index += 1) {
					const slot = &self.slots[index];

					slot.salt = math.max(slot.salt +% 1, 1);
					slot.index = index;
				}
			}
		}

		///
		/// Frees all memory allocated by `allocator` to self.
		///
		/// *Note*: if `self` already contains allocated memory then `allocator` must reference the same [io.Allocator]
		/// that was used to create the already-allocated memory.
		///
		pub fn deinit(self: *Self, allocator: io.Allocator) void {
			io.deallocate(allocator, self.values.ptr);
			io.deallocate(allocator, self.slots);
			io.deallocate(allocator, self.erase);

			self.values = &.{};
			self.slots = null;
			self.erase = null;
		}

		///
		/// Attempts to fetch the element referenced by `slot` from `self`, returning it or `null` if `slot`
		/// does not reference a valid element.
		///
		pub fn fetch(self: Self, slot: KeySlot) ?*Element {
			if (slot.index >= self.values.len) {
				return null;
			}

			const redirect = &self.slots[slot.index];

			if (slot.salt != redirect.salt) {
				return null;
			}

			return &self.values[redirect.index];
		}

		///
		/// Attempts to transactionally grow `self` by `growth_amount` using `allocator`, returning a
		/// [io.AllocationError] if it failed.
		///
		/// Should growing fail, `self` is left in an unmodified state.
		///
		/// *Note*: if `self` already contains allocated memory then `allocator` must reference the same [io.Allocator]
		/// that was used to create the already-allocated memory.
		///
		pub fn grow(self: *Self, allocator: io.Allocator, growth_amount: usize) io.AllocationError!void {
			const grown_capacity = self.capacity + growth_amount;
			const values = try io.allocate_many(Element, grown_capacity, allocator);

			errdefer io.deallocate(allocator, values);

			const slots = try io.allocate_many(KeySlot, grown_capacity, allocator);

			errdefer io.deallocate(allocator, slots);

			const erase = try io.allocate_many(Index, grown_capacity, allocator);

			errdefer io.deallocate(allocator, slots);

			self.values = values;
			self.slots = slots.ptr;
			self.erase = erase.ptr;
			self.capacity = grown_capacity;

			// Add new values to the freelist
			{
				var index = @intCast(Index, self.values.len);

				while (index < self.capacity) : (index += 1) {
					const slot = &self.slots.?[index];

					slot.salt = 1;
					slot.index = index;
				}
			}
		}

		///
		/// Attempts to return an initialized slot map with an initial capacity of `initial_capacity` and `allocator` as
		/// the memory allocation strategy.
		///
		/// Upon failure, a [io.AllocationError] is returned instead.
		///
		pub fn init(allocator: io.Allocator, initial_capacity: usize) io.AllocationError!Self {
			const values = try io.allocate_many(Element, initial_capacity, allocator);

			errdefer io.deallocate(allocator, values);

			const slots = try io.allocate_many(KeySlot, initial_capacity, allocator);

			errdefer io.deallocate(allocator, slots);

			const erase = try io.allocate_many(Index, initial_capacity, allocator);

			errdefer io.deallocate(allocator, erase);

			return Self{
				.capacity = initial_capacity,
				.values = values[0 .. 0],
				.slots = slots.ptr,
				.erase = erase.ptr,
				.next_free = 0,
			};
		}

		///
		/// Attempts to insert `value` into `self`, growing the internal buffer with `allocator` if it is full and
		/// returning a `Slot` of `key` referencing the inserted element or a [io.AllocationError] if it failed.
		///
		/// *Note*: if `self` already contains allocated memory then `allocator` must reference the same [io.Allocator]
		/// that was used to create the already-allocated memory.
		///
		pub fn insert(self: *Self, allocator: io.Allocator, value: Element) io.AllocationError!KeySlot {
			if (self.values.len == self.capacity) {
				try self.grow(allocator, math.max(usize, 1, self.capacity));
			}

			const index_of_redirect = self.next_free;
			const redirect = &self.slots.?[index_of_redirect];

			// redirect.index points to the next free slot.
			self.next_free = redirect.index;
			redirect.index = @intCast(Index, self.values.len);
			self.values = self.values.ptr[0 .. self.values.len + 1];
			self.values[redirect.index] = value;
			self.erase.?[redirect.index] = index_of_redirect;

			return KeySlot{
				.index = index_of_redirect,
				.salt = redirect.salt,
			};
		}

		///
		/// Attempts to remove the element referenced by `slot` from `self`, returning `true` if it was successful or
		/// `false` if `slot` does not reference a valid slot.
		///
		pub fn remove(self: *Self, slot: KeySlot) bool {
			const redirect = &self.slots.?[slot.index];

			if (slot.salt != redirect.salt) {
				return false;
			}

			const free_index = redirect.index;

			self.values = self.values[0 .. (self.values.len - 1)];

			if (self.values.len > 0) {
				const free_value = &self.values[free_index];
				const free_erase = &self.erase.?[free_index];
				const last_value = &self.values[self.values.len];
				const last_erase = &self.erase.?[self.values.len];

				free_value.* = last_value.*;
				free_erase.* = last_erase.*;
				self.slots.?[free_erase.*].index = free_index;
			}

			redirect.salt = math.max(Index, redirect.salt +% 1, 1);
			redirect.index = self.next_free;
			self.next_free = slot.index;

			return true;
		}
	};
}

///
/// Describes the memory layout of an element-slot mapping.
///
pub const Key = struct {
	index_bits: usize,
	salt_bits: usize,
};

///
/// References a slot in a slot mapping.
///
pub fn Slot(comptime key: Key) type {
	return extern struct {
		index: math.Unsigned(key.index_bits),
		salt: math.Unsigned(key.salt_bits),
	};
}

///
/// [Key] that uses the same number of bits as a [usize].
///
pub const addressable_key = Key{
	.index_bits = (@bitSizeOf(usize) / 2),
	.salt_bits = (@bitSizeOf(usize) / 2),
};
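
For context, a hypothetical caller of this generation-checked slot map (a sketch only; the `slots.zig` file name, the `coral` import, and the exact error handling are assumptions):

const coral = @import("coral");
const slots = @import("./slots.zig");

fn slots_example(allocator: coral.io.Allocator) !void {
	var map = try slots.Map(slots.addressable_key, u32).init(allocator, 16);

	defer map.deinit(allocator);

	// The returned Slot bundles an index with a salt, so stale handles are rejected by fetch().
	const slot = try map.insert(allocator, 42);

	if (map.fetch(slot)) |value| {
		value.* += 1;
	}

	_ = map.remove(slot);
}
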
@@ -1,299 +0,0 @@
const debug = @import("./debug.zig");

const io = @import("./io.zig");

const math = @import("./math.zig");

///
/// Hash type used by tables and their associated structures.
///
pub const Hash = u64;

///
/// Returns a table type of `Key`-`Value` pairs implementing a hash-only approach to key-value storage.
///
/// Entries are hashed using the `keyer` and collisions are resolved by looking for another empty space nearby. This
/// repeats until the load factor exceeds the implementation-defined load maximum, at which point the table will rehash
/// itself to acquire more capacity.
///
pub fn Hashed(comptime Key: type, comptime Value: type, comptime keyer: Keyer(Key)) type {
	const hash_info = @typeInfo(Hash).Int;
	const load_max = 0.75;
	const growth_factor = 0.6;

	return struct {
		allocator: io.Allocator,
		count: usize = 0,
		table: []?Entry = &.{},

		///
		/// Key-value pair bundling.
		///
		pub const Entry = struct {
			key: Key,
			value: Value,

			///
			/// Attempts to write `self` into `entry_table`, returning `true` if no identical entry already existed,
			/// otherwise `false`.
			///
			/// Note that this does not grow the memory pointed to by `entry_table` in any way, meaning that
			/// completely filled entry tables cannot perform the write at all and will invoke safety-checked behavior.
			///
			fn write_into(self: Entry, entry_table: []?Entry) bool {
				const hash_max = math.min(math.max_int(hash_info), entry_table.len);
				var hashed_key = math.wrap(keyer.hasher(self.key), math.min_int(hash_info), hash_max);
				var iterations = @as(usize, 0);

				while (true) : (iterations += 1) {
					debug.assert(iterations < entry_table.len);

					const table_entry = &(entry_table[hashed_key] orelse {
						entry_table[hashed_key] = .{
							.key = self.key,
							.value = self.value,
						};

						return true;
					});

					if (keyer.comparer(table_entry.key, self.key) == 0) {
						return false;
					}

					hashed_key = (hashed_key +% 1) % hash_max;
				}
			}
		};

		///
		/// Iterable wrapper for [Hashed] instances to make unordered traversal of key-value entries relatively trivial.
		///
		pub const Iterable = struct {
			hashed_map: *Self,
			iterations: usize = 0,

			///
			/// Attempts to move past the current iteration of `self` and onto the next key-value entry, returning it or
			/// `null` if there are no more elements in the referenced map.
			///
			pub fn next(self: *Iterable) ?Entry {
				while (self.iterations < self.hashed_map.table.len) {
					defer self.iterations += 1;

					if (self.hashed_map.table[self.iterations]) |entry| {
						return entry;
					}
				}

				return null;
			}
		};

		///
		/// Table type.
		///
		const Self = @This();

		///
		/// Attempts to write the `key`-`value` pair into `self`, using `allocator` as the memory allocation strategy,
		/// and overwriting any value stored with a matching `key` and returning it if one existed.
		///
		/// The function returns [AllocationError] instead if `allocator` cannot commit the memory required to grow the
		/// entry table of `self` when necessary.
		///
		pub fn assign(self: *Self, key: Key, value: Value) io.AllocationError!?Entry {
			if (self.calculate_load_factor() >= load_max) {
				const growth_size = @intToFloat(f64, math.max(1, self.table.len)) * growth_factor;

				if (growth_size > math.max_int(@typeInfo(usize).Int)) {
					return error.OutOfMemory;
				}

				try self.rehash(@floatToInt(usize, growth_size));
			}

			debug.assert(self.table.len > self.count);

			{
				const hash_max = math.min(math.max_int(hash_info), self.table.len);
				var hashed_key = math.wrap(keyer.hasher(key), math.min_int(hash_info), hash_max);

				while (true) {
					const entry = &(self.table[hashed_key] orelse {
						self.table[hashed_key] = .{
							.key = key,
							.value = value,
						};

						return null;
					});

					if (keyer.comparer(entry.key, key) == 0) {
						const original_entry = entry.*;

						entry.* = .{
							.key = key,
							.value = value,
						};

						return original_entry;
					}

					hashed_key = (hashed_key +% 1) % hash_max;
				}
			}

			return false;
		}

		///
		/// Returns the calculated load factor of `self` at the moment.
		///
		pub fn calculate_load_factor(self: Self) f32 {
			return if (self.table.len == 0) 1 else @intToFloat(f32, self.count) / @intToFloat(f32, self.table.len);
		}

		///
		/// Clears all entries from `self`, resetting the count to `0`.
		///
		/// To clean up memory allocations made by the table and deinitialize it, see [deinit] instead.
		///
		pub fn clear(self: *Self) void {
			for (self.table) |*entry| {
				entry.* = null;
			}

			self.count = 0;
		}

		///
		/// Deinitializes `self` and sets it to an invalid state, freeing all memory allocated by `allocator`.
		///
		/// To clear all items from the table while preserving the current capacity, see [clear] instead.
		///
		pub fn deinit(self: *Self) void {
			if (self.table.len == 0) {
				return;
			}

			io.deallocate(self.allocator, self.table);

			self.table = &.{};
			self.count = 0;
		}

		///
		/// Attempts to write the `key`-`value` pair into `self`, using `allocator` as the memory allocation strategy,
		/// if no value already exists with a matching `key`, returning `true` if it was inserted, otherwise `false`.
		///
		/// The function returns [AllocationError] instead if `allocator` cannot commit the memory required to grow the
		/// entry table of `self` when necessary.
		///
		pub fn insert(self: *Self, key: Key, value: Value) io.AllocationError!bool {
			if (self.calculate_load_factor() >= load_max) {
				const growth_amount = @intToFloat(f64, self.table.len) * growth_factor;
				const min_size = 1;

				try self.rehash(self.table.len + math.max(min_size, @floatToInt(usize, growth_amount)));
			}

			debug.assert(self.table.len > self.count);

			defer self.count += 1;

			return (Entry{
				.key = key,
				.value = value,
			}).write_into(self.table);
		}

		///
		/// Attempts to find an entry in `self` matching `key`, returning it or `null` if no matching entry was found.
		///
		pub fn lookup(self: Self, key: Key) ?Value {
			if (self.count == 0) {
				return null;
			}

			const hash_max = math.min(math.max_int(hash_info), self.table.len);
			var hashed_key = math.wrap(keyer.hasher(key), math.min_int(hash_info), hash_max);
			var iterations = @as(usize, 0);

			while (iterations < self.count) : (iterations += 1) {
				const entry = &(self.table[hashed_key] orelse return null);

				if (keyer.comparer(entry.key, key) == 0) {
					return entry.value;
				}

				hashed_key = (hashed_key +% 1) % hash_max;
			}

			return null;
		}

		///
		/// Attempts to reallocate and regenerate the table capacity in `self` using `allocator` to be equal to or
		/// greater than `requested_range`, returning [io.AllocationError] if `allocator` cannot commit the memory
		/// required for the table capacity size.
		///
		pub fn rehash(self: *Self, requested_range: usize) io.AllocationError!void {
			const old_table = self.table;

			self.table = try io.allocate_many(self.allocator, math.max(requested_range, self.count), ?Entry);

			errdefer {
				io.deallocate(self.allocator, self.table);

				self.table = old_table;
			}

			for (self.table) |*entry| {
				entry.* = null;
			}

			if (old_table.len != 0)
			{
				for (old_table) |maybe_entry| {
					if (maybe_entry) |entry| {
						debug.assert(entry.write_into(self.table));
					}
				}

				io.deallocate(self.allocator, old_table);
			}
		}
	};
}

///
/// Returns a function group for defining table keying operations performable on `Key`.
///
pub fn Keyer(comptime Key: type) type {
	return struct {
		hasher: fn (key: Key) Hash,
		comparer: fn (key_a: Key, key_b: Key) isize,
	};
}

///
/// A standard [Keyer] for `[]const u8` types that provides general-purpose string keying.
///
pub const string_keyer = Keyer([]const u8){
	.hasher = hash_string,
	.comparer = io.compare,
};

///
/// Returns a general-purpose, non-cryptographically safe hash value for `string`.
///
pub fn hash_string(string: []const u8) Hash {
	var hash_code = @as(Hash, 5381);

	for (string) |byte| {
		hash_code = ((hash_code << 5) + hash_code) + byte;
	}

	return hash_code;
}
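
A hypothetical usage sketch of this linear-probing table with the string keyer (import path and the `coral` module name are assumptions), mainly to show the shape of the API being deleted:

const coral = @import("coral");
const table = @import("./table.zig");

const Settings = table.Hashed([]const u8, i32, table.string_keyer);

fn table_example(allocator: coral.io.Allocator) !void {
	var settings = Settings{.allocator = allocator};

	defer settings.deinit();

	// insert() refuses duplicate keys, while assign() overwrites and hands back the previous entry.
	_ = try settings.insert("width", 640);
	_ = try settings.assign("width", 1280);

	if (settings.lookup("width")) |width| {
		_ = width;
	}
}
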
@@ -1,49 +1,14 @@
-const debug = @import("./debug.zig");

const io = @import("./io.zig");

const math = @import("./math.zig");

-const std = @import("std");
-
-///
-///
-///
pub const DecimalFormat = struct {
-	delimiter: []const u8 = "",
+	delimiter: []const io.Byte,
-	positive_prefix: enum {none, plus, space} = .none,
+	positive_prefix: enum {none, plus, space},
-};
-
-///
+	pub fn parse(self: DecimalFormat, utf8: []const io.Byte, comptime Decimal: type) ?Decimal {
-///
-///
-pub const HexadecimalFormat = struct {
-	delimiter: []const u8 = "",
-	positive_prefix: enum {none, plus, space} = .none,
-	casing: enum {lower, upper} = .lower,
-};
-
-///
-/// Errors that may occur during any kind of utf8-encoded parsing.
-///
-pub const ParseError = error {
-	BadSyntax,
-};
-
-///
-/// Errors that may occur during any kind of utf8-encoded printing.
-///
-pub const PrintError = error {
-	PrintFailed,
-	PrintIncomplete,
-};
-
-///
-///
-///
-pub fn parse_decimal(comptime Decimal: type, utf8: []const u8, format: DecimalFormat) !Decimal {
		if (utf8.len == 0) {
-			return error.BadSyntax;
+			return null;
		}

		switch (@typeInfo(Decimal)) {
@@ -55,7 +20,7 @@ pub fn parse_decimal(comptime Decimal: type, utf8: []const u8, format: DecimalFo
				var result = @as(Decimal, 0);

-				for (@boolToInt(has_sign) .. utf8.len) |index| {
+				for (@intFromBool(has_sign) .. utf8.len) |index| {
					const radix = 10;
					const code = utf8[index];

@@ -67,8 +32,8 @@ pub fn parse_decimal(comptime Decimal: type, utf8: []const u8, format: DecimalFo
						},

						else => {
-							if (format.delimiter.len == 0 or !io.equals(format.delimiter, utf8[index ..])) {
+							if (self.delimiter.len == 0 or !io.equals(self.delimiter, utf8[index ..])) {
-								return error.BadSyntax;
+								return null;
							}
						},
					}
@@ -81,7 +46,7 @@ pub fn parse_decimal(comptime Decimal: type, utf8: []const u8, format: DecimalFo
					.unsigned => {
						if (has_sign and utf8[0] == '-') {
-							return error.OutOfMemory;
+							return null;
						}

						return result;
@@ -90,11 +55,6 @@ pub fn parse_decimal(comptime Decimal: type, utf8: []const u8, format: DecimalFo
			},

			.Float => {
-				// ""
-				if (utf8.len == 0) {
-					return error.BadSyntax;
-				}
-
				var has_sign = switch (utf8[0]) {
					'-', '+', ' ' => true,
					else => false,
@@ -102,197 +62,47 @@ pub fn parse_decimal(comptime Decimal: type, utf8: []const u8, format: DecimalFo
				// "-"
				if (has_sign and utf8.len == 1) {
-					return error.BadSyntax;
+					return null;
				}

-				const sign_offset = @boolToInt(has_sign);
+				const sign_offset = @intFromBool(has_sign);
				var has_decimal = utf8[sign_offset] == '.';

				// "-."
				if (has_decimal and (utf8.len == 2)) {
-					return error.BadSyntax;
+					return null;
				}

				var result = @as(Decimal, 0);
				var factor = @as(Decimal, if (has_sign and utf8[0] == '-') -1 else 1);

-				for (utf8[0 .. (sign_offset + @boolToInt(has_decimal))]) |code| switch (code) {
+				for (utf8[sign_offset + @intFromBool(has_decimal) .. utf8.len]) |code| {
+					switch (code) {
					'.' => {
-						if (has_decimal) return error.BadSyntax;
+						if (has_decimal) {
+							return null;
+						}

						has_decimal = true;
					},

					'0', '1', '2', '3', '4', '5', '6', '7', '8', '9' => {
-						if (has_decimal) factor /= 10.0;
+						if (has_decimal) {
+							factor /= 10.0;
+						}

-						result = ((result * 10.0) + @intToFloat(Decimal, code - '0'));
+						result = ((result * 10.0) + @as(Decimal, @floatFromInt(code - '0')));
					},

-					else => return error.BadSyntax,
+					else => return null,
-				};
+					}
+				}

				return result * factor;
			},

			else => @compileError("`" ++ @typeName(Decimal) ++ "` cannot be formatted as a decimal string"),
		}
-}
-
-///
-/// Attempts to print `utf8` to `writer`.
-///
-/// The function returns [PrintError] if the write failed to complete partially or entirely.
-///
-pub fn print(writer: io.Writer, utf8: []const u8) PrintError!void {
-	if ((writer.invoke(utf8) orelse return error.PrintFailed) != utf8.len) {
-		return error.PrintIncomplete;
	}
-}
+};
-
-///
-///
-///
-pub fn print_formatted(writer: io.Writer, comptime format: []const u8, arguments: anytype) PrintError!void {
-	switch (@typeInfo(@TypeOf(arguments))) {
-		.Struct => |arguments_struct| {
-			comptime var arg_index = 0;
-			comptime var head = 0;
-			comptime var tail = 0;
-
-			inline while (tail < format.len) : (tail += 1) {
-				if (format[tail] == '{') {
-					if (tail > format.len) {
-						@compileError("expected an identifier after opening `{`");
-					}
-
-					tail += 1;
-
-					switch (format[tail]) {
-						'{' => {
-							try print(writer, format[head .. (tail - 1)]);
-
-							tail += 1;
-							head = tail;
-						},
-
-						'}' => {
-							if (!arguments_struct.is_tuple) {
-								@compileError("all format specifiers must be named when using a named struct");
-							}
-
-							try print(writer, arguments[arg_index]);
-
-							arg_index += 1;
-							tail += 1;
-							head = tail;
-						},
-
-						else => {
-							if (arguments_struct.is_tuple) {
-								@compileError("format specifiers cannot be named when using a tuple struct");
-							}
-
-							try print(writer, format[head .. (tail - 1)]);
-
-							head = tail;
-							tail += 1;
-
-							if (tail >= format.len) {
-								@compileError("expected closing `}` or another `{` after opening `{`");
-							}
-
-							debug.assert(tail < format.len);
-
-							inline while (format[tail] != '}') {
-								tail += 1;
-
-								debug.assert(tail < format.len);
-							}
-
-							try print_value(writer, @field(arguments, format[head .. tail]));
-
-							tail += 1;
-							head = tail;
-						}
-					}
-				}
-			}
-		},
-
-		else => @compileError("`arguments` must be a struct type"),
-	}
-}
-
-///
-///
-///
-pub fn print_decimal(writer: io.Writer, value: anytype, format: DecimalFormat) PrintError!void {
-	if (value == 0) {
-		return print(writer, switch (format.positive_prefix) {
-			.none => "0",
-			.plus => "+0",
-			.space => " 0",
-		});
-	}
-
-	switch (@typeInfo(@TypeOf(value))) {
-		.Int => |int| {
-			const radix = 10;
-			var buffer = [_]u8{0} ** (1 + math.max(int.bits, 1));
-			var buffer_start = buffer.len - 1;
-
-			{
-				var decomposable_value = value;
-
-				while (decomposable_value != 0) : (buffer_start -= 1) {
-					buffer[buffer_start] = @intCast(u8, (decomposable_value % radix) + '0');
-					decomposable_value = (decomposable_value / radix);
-				}
-			}
-
-			if (int.signedness == .unsigned and value < 0) {
-				buffer[buffer_start] = '-';
-			} else {
-				switch (format.positive_prefix) {
-					.none => buffer_start += 1,
-					.plus => buffer[buffer_start] = '+',
-					.space => buffer[buffer_start] = ' ',
-				}
-			}
-
-			try print(writer, buffer[buffer_start ..]);
-		},
-
-		else => @compileError("`arguments` must be a struct type"),
-	}
-}
-
-pub fn print_hexadecimal(writer: io.Writer, value: anytype, format: HexadecimalFormat) PrintError!void {
-	// TODO: Implement.
-	_ = writer;
-	_ = value;
-	_ = format;
-
-	unreachable;
-}
-
-noinline fn print_value(writer: io.Writer, value: anytype) PrintError!void {
-	const Value = @TypeOf(value);
-
-	return switch (@typeInfo(Value)) {
-		.Int => print_decimal(writer, value, .{}),
-		.Float => print_decimal(writer, value, .{}),
-
-		.Pointer => |pointer| switch (pointer.size) {
-			.One, .Many, .C => print_hexadecimal(writer, @ptrToInt(value), .{}),
-			.Slice => if (pointer.child == u8) print(writer, value) else @compileError(unformattableMessage(Value)),
-		},
-
-		else => @compileError(unformattableMessage(Value)),
-	};
-}
-
-fn unformattableMessage(comptime Value: type) []const u8 {
-	return "`" ++ @typeName(Value) ++ "` are not formattable";
-}
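
Since the struct fields lose their defaults in this hunk, a caller now has to spell them out; the following is a hypothetical sketch of the reworked, null-returning parse entry point (the initializer values are assumptions, not shown in the diff):

const format = DecimalFormat{
	.delimiter = "",
	.positive_prefix = .none,
};

// Failure is now signalled by null rather than a ParseError.
const tick_rate = format.parse("60.0", f32) orelse 60.0;
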
@@ -0,0 +1,128 @@
const coral = @import("coral");

const ext = @import("./ext.zig");

const file = @import("./file.zig");

const kym = @import("./kym.zig");

pub const Manifest = struct {
	title: [255:0]coral.io.Byte = [_:0]coral.io.Byte{0} ** 255,
	width: u16 = 640,
	height: u16 = 480,
	tick_rate: f32 = 60.0,

	pub fn load(self: *Manifest, env: *kym.RuntimeEnv, file_access: file.Access) kym.RuntimeError!void {
		const manifest = try env.execute_file(file_access, file.Path.from(&.{"app.ona"}));

		defer env.discard(manifest);

		const title = try env.get_field(manifest, "title");

		defer env.discard(title);

		const title_string = try env.to_string(title);

		const width = @as(u16, get: {
			const ref = try env.get_field(manifest, "width");

			defer env.discard(ref);

			break: get @intFromFloat(env.to_float(ref) catch @as(f64, @floatFromInt(self.width)));
		});

		const height = @as(u16, get: {
			const ref = try env.get_field(manifest, "height");

			defer env.discard(ref);

			break: get @intFromFloat(env.to_float(ref) catch @as(f64, @floatFromInt(self.height)));
		});

		const tick_rate = @as(f32, get: {
			const ref = try env.get_field(manifest, "tick_rate");

			defer env.discard(ref);

			break: get @floatCast(env.to_float(ref) catch self.tick_rate);
		});

		{
			const limited_title_len = coral.math.min(title_string.len, self.title.len);

			coral.io.copy(&self.title, title_string[0 .. limited_title_len]);
			coral.io.zero(self.title[limited_title_len .. self.title.len]);
		}

		self.tick_rate = tick_rate;
		self.width = width;
		self.height = height;
	}
};

pub const LogSeverity = enum {
	info,
	warn,
	fail,
};

pub const WritableLog = struct {
	severity: LogSeverity,
	write_buffer: coral.list.ByteStack,

	pub fn as_writer(self: *WritableLog) coral.io.Writer {
		return coral.io.Writer.bind(WritableLog, self, struct {
			fn write(writable_log: *WritableLog, bytes: []const coral.io.Byte) ?usize {
				writable_log.write(bytes) catch return null;

				return bytes.len;
			}
		}.write);
	}

	pub fn free(self: *WritableLog) void {
		self.write_buffer.free();
	}

	pub fn make(log_severity: LogSeverity, allocator: coral.io.Allocator) WritableLog {
		return .{
			.severity = log_severity,
			.write_buffer = coral.list.ByteStack.make(allocator),
		};
	}

	pub fn write(self: *WritableLog, bytes: []const coral.io.Byte) coral.io.AllocationError!void {
		const format_string = "%.*s";
		var line_written = @as(usize, 0);

		for (bytes) |byte| {
			if (byte == '\n') {
				ext.SDL_LogError(
					ext.SDL_LOG_CATEGORY_APPLICATION,
					format_string,
					self.write_buffer.values.len,
					self.write_buffer.values.ptr);

				self.write_buffer.clear();

				line_written = 0;

				continue;
			}

			try self.write_buffer.push_one(byte);

			line_written += 1;
		}

		if (self.write_buffer.values.len == 0) {
			ext.SDL_LogError(
				ext.SDL_LOG_CATEGORY_APPLICATION,
				format_string,
				self.write_buffer.values.len,
				self.write_buffer.values.ptr);

			self.write_buffer.clear();
		}
	}
};
@@ -1,23 +0,0 @@
const coral = @import("coral");

pub const Item = struct {
	transform: Transform,

	options: union (enum) {
		sprite: struct {

		},
	},
};

pub const Transform = extern struct {
	x: coral.math.Vector2,
	y: coral.math.Vector2,
	origin: coral.math.Vector2,

	pub const identity = Transform{
		.x = .{1, 0},
		.y = .{0, 1},
		.origin = .{0, 0},
	};
};
@@ -1,4 +1,3 @@
pub usingnamespace @cImport({
	@cInclude("SDL2/SDL.h");
});
@@ -2,53 +2,53 @@ const coral = @import("coral");

const ext = @import("./ext.zig");

-pub const Contents = struct {
+pub const Access = union (enum) {
-	allocator: coral.io.Allocator,
+	sandboxed_path: *const Path,
-	data: []u8,

-	pub const InitError = coral.io.AllocationError || Readable.ReadError;
+	pub fn open_readable(self: Access, readable_path: Path) ?*Readable {
+		switch (self) {
+			.sandboxed_path => |sandboxed_path| {
+				const readable_path_string = sandboxed_path.joined(readable_path).to_string() orelse return null;

-	pub fn deinit(self: *Contents) void {
+				return @ptrCast(ext.SDL_RWFromFile(readable_path_string.ptr, "rb"));
-		coral.io.deallocate(self.allocator, self.data);
+			},
+		}
	}

-	pub fn init(allocator: coral.io.Allocator, readable_file: *Readable) InitError!Contents {
+	pub fn query(self: Access, path: Path) ?Info {
-		const file_offset = try readable_file.skip(0);
+		switch (self) {
-		const file_size = try readable_file.seek_end();
+			.sandboxed_path => |sandboxed_path| {
+				const path_string = sandboxed_path.joined(path).to_string() orelse return null;
+				const rw_ops = ext.SDL_RWFromFile(path_string, "rb") orelse return null;
+				const file_size = ext.SDL_RWseek(rw_ops, 0, ext.RW_SEEK_END);

-		_ = try readable_file.seek(file_offset);
+				if (ext.SDL_RWclose(rw_ops) != 0 or file_size < 0) {
+					return null;
-		const allocation = try coral.io.allocate_many(u8, file_size, allocator);
-
-		errdefer coral.io.deallocate(allocator, allocation);
-
-		if (try readable_file.read(allocation) != allocation.len) {
-			// Read less than was allocated for.
-			return error.FileUnavailable;
				}

-		return Contents{
+				return Info{
-			.allocator = allocator,
+					.size = @intCast(file_size),
-			.data = allocation,
				};
+			},
+		}
	}
};

+pub const Info = struct {
+	size: u64,
+};

pub const Path = extern struct {
-	data: [4096]u8 = [_]u8{0} ** 4096,
+	data: [4096]coral.io.Byte = [_]coral.io.Byte{0} ** 4096,

	pub const cwd = Path.from(&.{"./"});

-	pub const ValidationError = error {
-		PathTooLong,
-	};
-
	pub fn from(components: []const []const u8) Path {
		// TODO: Implement proper parsing / removal of duplicate path delimiters.
		var path = Path{};

		{
-			var writable_slice = coral.io.FixedBuffer{.slice = &path.data};
+			var writable_slice = coral.io.FixedBuffer{.bytes = &path.data};

			for (components) |component| {
				if (writable_slice.write(component) != component.len) {
@@ -64,7 +64,7 @@ pub const Path = extern struct {
		var path = Path{};

		{
-			var writable = coral.io.FixedBuffer{.slice = &path.data};
+			var writable = coral.io.FixedBuffer{.bytes = &path.data};
			var written = @as(usize, 0);

			for (&self.data) |byte| {
@@ -91,32 +91,20 @@ pub const Path = extern struct {
		return path;
	}

-	pub fn to_string(self: Path) ValidationError![:0]const u8 {
+	pub fn to_string(self: Path) ?[:0]const coral.io.Byte {
-		const sentineled_data = get_sentineled_data: {
		const last_index = self.data.len - 1;

		if (self.data[last_index] != 0) {
-			return error.PathTooLong;
+			return null;
		}

-			break: get_sentineled_data self.data[0 .. last_index:0];
+		return coral.io.slice_sentineled(@as(coral.io.Byte, 0), @as([*:0]const coral.io.Byte, @ptrCast(&self.data)));
-		};
-
-		return sentineled_data[0 .. coral.io.sentinel_index(u8, 0, sentineled_data):0];
	}
};

-pub const ReadError = error {
-	FileUnavailable,
-};
-
pub const Readable = opaque {
	pub fn as_reader(self: *Readable) coral.io.Reader {
-		return coral.io.Reader.bind(Readable, self, struct {
+		return coral.io.Reader.bind(Readable, self, read_into);
-			fn read(readable: *Readable, buffer: []u8) ?usize {
-				return readable.read(buffer) catch null;
-			}
-		}.read);
	}

	pub fn close(self: *Readable) void {
@@ -125,97 +113,81 @@ pub const Readable = opaque {
		}
	}

-	pub fn read(self: *Readable, buffer: []u8) ReadError!usize {
+	pub fn read_into(self: *Readable, buffer: []coral.io.Byte) ?usize {
		ext.SDL_ClearError();

-		const bytes_read = ext.SDL_RWread(rw_ops_cast(self), buffer.ptr, @sizeOf(u8), buffer.len);
+		const bytes_read = ext.SDL_RWread(rw_ops_cast(self), buffer.ptr, @sizeOf(coral.io.Byte), buffer.len);
		const error_message = ext.SDL_GetError();

		if (bytes_read == 0 and error_message != null and error_message.* != 0) {
-			return error.FileUnavailable;
+			return null;
		}

		return bytes_read;
	}

-	pub fn seek(self: *Readable, cursor: u64) ReadError!u64 {
+	pub fn seek_head(self: *Readable, cursor: u64) ?u64 {
		// TODO: Fix safety of int cast.
-		const byte_offset = ext.SDL_RWseek(rw_ops_cast(self), @intCast(i64, cursor), ext.RW_SEEK_SET);
+		const byte_offset = ext.SDL_RWseek(rw_ops_cast(self), @intCast(cursor), ext.RW_SEEK_SET);

		if (byte_offset < 0) {
-			return error.FileUnavailable;
+			return null;
		}

-		return @intCast(u64, byte_offset);
+		return @intCast(byte_offset);
	}

-	pub fn seek_end(self: *Readable) ReadError!usize {
+	pub fn seek_tail(self: *Readable) ?usize {
		const byte_offset = ext.SDL_RWseek(rw_ops_cast(self), 0, ext.RW_SEEK_END);

		if (byte_offset < 0) {
			return error.FileUnavailable;
		}

-		return @intCast(u64, byte_offset);
+		return @intCast(byte_offset);
	}

-	pub fn skip(self: *Readable, offset: i64) ReadError!u64 {
+	pub fn skip(self: *Readable, offset: i64) ?u64 {
		const byte_offset = ext.SDL_RWseek(rw_ops_cast(self), offset, ext.RW_SEEK_CUR);

		if (byte_offset < 0) {
			return error.FileUnavailable;
		}

-		return @intCast(u64, byte_offset);
+		return @intCast(byte_offset);
	}
};

-pub const System = union (enum) {
+pub fn allocate_and_load(allocator: coral.io.Allocator, access: Access, path: Path) coral.io.AllocationError!?[]coral.io.Byte {
-	sandboxed_path: *const Path,
+	const allocation = try allocator.reallocate(null, query_file_size: {
+		const info = access.query(path) orelse return null;

-	pub const FileInfo = struct {
+		break: query_file_size info.size;
-		size: u64,
-	};
-
-	pub const OpenError = Path.ValidationError || error {
-		FileNotFound,
-	};
-
-	pub const QueryError = OpenError || ReadError;
-
-	pub fn open_readable(self: System, path: Path) OpenError!*Readable {
-		switch (self) {
-			.sandboxed_path => |sandboxed_path| {
-				return @ptrCast(*Readable, ext.SDL_RWFromFile(try sandboxed_path.joined(path).to_string(), "rb") orelse {
-					return error.FileNotFound;
	});
-			},
-		}
-	}
-
-	pub fn query_info(self: System, path: Path) QueryError!FileInfo {
+	const readable = access.open_readable(path) orelse {
-		switch (self) {
+		allocator.deallocate(allocation);
-			.sandboxed_path => |sandboxed_path| {
-				const file = ext.SDL_RWFromFile(try sandboxed_path.joined(path).to_string(), "rb") orelse {
+		return null;
-					return error.FileNotFound;
	};

-				defer coral.debug.assert(ext.SDL_RWclose(file) == 0);
+	defer _ = readable.close();

-				const file_size = ext.SDL_RWseek(file, 0, ext.RW_SEEK_END);
+	const bytes_read = readable.read_into(allocation) orelse {
+		allocator.deallocate(allocation);

-				if (file_size < 0) {
+		return null;
-					return error.FileUnavailable;
-				}
-
-				return FileInfo{
-					.size = @intCast(u64, file_size),
	};

+	if (bytes_read != allocation.len) {
+		allocator.deallocate(allocation);
+
+		return null;
	}
-			}
-		}
-	}
+	return allocation;
-};
+}

fn rw_ops_cast(ptr: *anyopaque) *ext.SDL_RWops {
-	return @ptrCast(*ext.SDL_RWops, @alignCast(@alignOf(ext.SDL_RWops), ptr));
+	return @ptrCast(@alignCast(ptr));
}
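
To show how the new union-based Access ties together with allocate_and_load, here is a hypothetical caller (the sandbox path and the manifest file name are made up for illustration):

const coral = @import("coral");
const file = @import("./file.zig");

fn read_manifest(allocator: coral.io.Allocator) ?[]coral.io.Byte {
	const access = file.Access{.sandboxed_path = &file.Path.cwd};

	// Returns null when the file cannot be found, opened, or read in full.
	return file.allocate_and_load(allocator, access, file.Path.from(&.{"app.ona"})) catch null;
}
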
@@ -6,121 +6,84 @@ const ext = @import("./ext.zig");

 const std = @import("std");

-///
-/// Recorded allocation info state.
-///
-const AllocationInfo = struct {
-    trace: AllocationTrace,
-    next_info: ?*AllocationInfo,
-    size: usize,
-};
-
-///
-/// Recorded stack trace of allocation call site.
-///
-/// *Note* this structure is reduced to zero bytes in released builds optimized for speed or size.
-///
-const AllocationTrace = std.debug.ConfigurableTrace(2, 4, switch (builtin.mode) {
-    .Debug, .ReleaseSafe => true,
-    .ReleaseFast, .ReleaseSmall => false,
-});
-
-///
-/// Heap allocation context.
-///
-const Context = struct {
-    allocation_info_head: ?*AllocationInfo = null,
-
-    ///
-    /// Attempts to allocate a buffer of `size` length from `self`, with `return_address` as the location of the
-    /// allocation request origin.
-    ///
-    /// A reference to the allocated buffer is returned via a slice if the allocation was successful, otherwise `null`
-    /// is returned.
-    ///
-    /// *Note* the returned buffer must be deallocated with [deallocate] before program exit or it will cause a memory
-    /// leak.
-    ///
-    /// *Note* allocation checks are disabled in release builds optimized for speed or size.
-    ///
-    fn allocate(self: *Context, size: usize, return_address: usize) ?[]u8 {
-        switch (builtin.mode) {
-            .Debug, .ReleaseSafe => {
-                const allocation_info_size = @sizeOf(AllocationInfo);
-                const total_allocation_size = allocation_info_size + size;
-                const allocation = ext.SDL_malloc(total_allocation_size) orelse return null;
-                const allocation_info = @ptrCast(*AllocationInfo, @alignCast(@alignOf(AllocationInfo), allocation));
-
-                allocation_info.* = .{
-                    .size = size,
-                    .next_info = self.allocation_info_head,
-                    .trace = .{},
-                };
-
-                allocation_info.trace.addAddr(return_address, "");
-
-                self.allocation_info_head = allocation_info;
-
-                return @ptrCast([*]u8, allocation)[allocation_info_size .. total_allocation_size];
-            },
-
-            .ReleaseFast, .ReleaseSmall => {
-                return @ptrCast([*]u8, ext.SDL_malloc(size) orelse return null)[0 .. size];
-            },
-        }
-    }
-
-    ///
-    /// Returns the assumed pointer to the [AllocationInfo] address of `allocation`.
-    ///
-    fn allocation_info_of(allocation: [*]u8) *AllocationInfo {
-        return @intToPtr(*AllocationInfo, @ptrToInt(allocation) - @sizeOf(AllocationInfo));
-    }
-
-    ///
-    /// Deallocates a the allocation buffer referenced by `allocation`.
-    ///
-    /// *Note* the pointer and length of `allocation` must match valid values known to `allocator` otherwise safety-
-    /// checked behavior will occur.
-    ///
-    /// *Note* allocation checks are disabled in release builds optimized for speed or size.
-    ///
+const AllocationNode = struct {
+    trace: std.debug.ConfigurableTrace(2, 4, switch (builtin.mode) {
+        .Debug, .ReleaseSafe => true,
+        .ReleaseFast, .ReleaseSmall => false,
+    }),
+
+    next: ?*AllocationNode,
+    size: usize,
+
+    fn alloc(size: usize, return_address: usize) *AllocationNode {
+        const node = @as(*AllocationNode, @ptrCast(@alignCast(ext.SDL_malloc(@sizeOf(AllocationNode) + size))));
+
+        node.* = .{
+            .size = size,
+            .next = null,
+            .trace = .{},
+        };
+
+        node.trace.addAddr(return_address, "");
+
+        return node;
+    }
+
+    fn dealloc(self: *AllocationNode) void {
+        ext.SDL_free(self);
+    }
+
+    fn realloc(self: *AllocationNode, size: usize, return_address: usize) *AllocationNode {
+        const node = @as(*AllocationNode, @ptrCast(@alignCast(ext.SDL_realloc(self, @sizeOf(AllocationNode) + size))));
+
+        node.* = .{
+            .size = size,
+            .next = null,
+            .trace = .{},
+        };
+
+        node.trace.addAddr(return_address, "");
+
+        return node;
+    }
+
+    fn owns_userdata(self: *AllocationNode, other_userdata: []const coral.io.Byte) bool {
+        const self_userdata = self.userdata();
+
+        return self_userdata.ptr == other_userdata.ptr and self_userdata.len == other_userdata.len;
+    }
+
+    fn userdata(self: *AllocationNode) []coral.io.Byte {
+        return @as([*]coral.io.Byte, @ptrFromInt(@intFromPtr(self) + @sizeOf(AllocationNode)))[0 .. self.size];
+    }
+};
+
+const Context = struct {
+    head: ?*AllocationNode = null,
+
     fn deallocate(self: *Context, allocation: []u8) void {
         switch (builtin.mode) {
             .Debug, .ReleaseSafe => {
-                const target_allocation_info = allocation_info_of(allocation.ptr);
-
-                if (target_allocation_info.size != allocation.len) {
-                    @panic("incorrect allocation length for deallocating");
-                }
-
-                if (self.allocation_info_head) |allocation_info_head| {
-                    if (target_allocation_info == allocation_info_head) {
-                        self.allocation_info_head = allocation_info_head.next_info;
-
-                        ext.SDL_free(target_allocation_info);
-
-                        return;
-                    }
-
-                    var previous_allocation_info = allocation_info_head;
-                    var current_allocation_info = allocation_info_head.next_info;
-
-                    while (current_allocation_info) |allocation_info| {
-                        if (allocation_info == target_allocation_info) {
-                            previous_allocation_info.next_info = allocation_info.next_info;
-
-                            ext.SDL_free(target_allocation_info);
-
-                            return;
-                        }
-
-                        previous_allocation_info = allocation_info;
-                        current_allocation_info = allocation_info.next_info;
-                    }
-                }
-
-                @panic("incorrect allocation address for deallocating");
+                const panic_message = "incorrect allocation address for deallocating";
+                var current_node = self.head orelse @panic(panic_message);
+
+                if (current_node.owns_userdata(allocation)) {
+                    self.head = current_node.next;
+
+                    return current_node.dealloc();
+                }
+
+                while (true) {
+                    const next_node = current_node.next orelse @panic(panic_message);
+
+                    if (next_node.owns_userdata(allocation)) {
+                        current_node.next = next_node.next;
+
+                        return next_node.dealloc();
+                    }
+
+                    current_node = next_node;
+                }
             },

             .ReleaseFast, .ReleaseSmall => {
@@ -129,111 +92,67 @@ const Context = struct {
             }
         }

-    ///
-    /// Attempts to reallocate the buffer referenced by `allocation` to be `size` length from `self`.
-    ///
-    /// A reference to the reallocated buffer is returned via a slice if the reallocation was successful, otherwise
-    /// `null` is returned.
-    ///
-    /// *Note* the returned buffer must be deallocated with [deallocate] before program exit or it will cause a memory
-    /// leak.
-    ///
-    /// *Note* the pointer and length of `allocation` must match valid values known to `allocator` otherwise safety-
-    /// checked behavior will occur.
-    ///
-    /// *Note* the allocation referenced by `allocation` should be considered invalid once the function returns,
-    /// discarding it in favor of the return value.
-    ///
-    /// *Note* allocation checks are disabled in release builds optimized for speed or size.
-    ///
-    fn reallocate(self: *Context, allocation: []u8, size: usize) ?[]u8 {
+    fn reallocate(self: *Context, return_address: usize, existing_allocation: ?[]u8, size: usize) coral.io.AllocationError![]u8 {
         switch (builtin.mode) {
             .Debug, .ReleaseSafe => {
-                const target_allocation_info = allocation_info_of(allocation.ptr);
-
-                if (target_allocation_info.size != allocation.len) {
-                    @panic("incorrect allocation length for reallocating");
-                }
-
-                const allocation_info_size = @sizeOf(AllocationInfo);
-
-                if (self.allocation_info_head) |allocation_info_head| {
-                    if (target_allocation_info == allocation_info_head) {
-                        self.allocation_info_head = allocation_info_head.next_info;
-
-                        const allocation_address = ext.SDL_realloc(target_allocation_info, size) orelse return null;
-
-                        target_allocation_info.size = size;
-
-                        return @ptrCast([*]u8, allocation_address)[
-                            allocation_info_size .. (allocation_info_size + size)];
-                    }
-
-                    var previous_allocation_info = allocation_info_head;
-                    var current_allocation_info = allocation_info_head.next_info;
-
-                    while (current_allocation_info) |allocation_info| {
-                        if (allocation_info == target_allocation_info) {
-                            previous_allocation_info.next_info = allocation_info.next_info;
-
-                            const allocation_address = ext.SDL_realloc(target_allocation_info, size) orelse return null;
-
-                            target_allocation_info.size = size;
-
-                            return @ptrCast([*]u8, allocation_address)[
-                                allocation_info_size .. (allocation_info_size + size)];
-                        }
-
-                        previous_allocation_info = allocation_info;
-                        current_allocation_info = allocation_info.next_info;
-                    }
-                }
-
-                @panic("incorrect allocation address for reallocating");
+                if (existing_allocation) |allocation| {
+                    const panic_message = "incorrect allocation address for reallocating";
+                    var current_node = self.head orelse @panic(panic_message);
+
+                    if (current_node.owns_userdata(allocation)) {
+                        const node = current_node.realloc(size, return_address);
+
+                        self.head = node;
+
+                        return node.userdata();
+                    }
+
+                    while (true) {
+                        const next_node = current_node.next orelse @panic(panic_message);
+
+                        if (next_node.owns_userdata(allocation)) {
+                            const node = next_node.realloc(size, return_address);
+
+                            current_node.next = node;
+
+                            return node.userdata();
+                        }
+
+                        current_node = next_node;
+                    }
+                } else {
+                    const node = AllocationNode.alloc(size, return_address);
+
+                    if (self.head) |head| {
+                        node.next = head;
+                    }
+
+                    self.head = node;
+
+                    return node.userdata();
+                }
             },

             .ReleaseFast, .ReleaseSmall => {
-                return @ptrCast([*]u8, ext.SDL_realloc(allocation.ptr, size) orelse return null)[0 .. size];
+                if (existing_allocation) |allocation| {
+                    return @as([*]u8, ext.SDL_realloc(allocation.ptr, size) orelse {
+                        return error.OutOfMemory;
+                    })[0 .. size];
+                }
+
+                return @as([*]u8, ext.SDL_malloc(size) orelse return error.OutOfMemory)[0 .. size];
             },
         }
     }
 };

-///
-/// Heap context.
-///
 var context = Context{};

-///
-/// Heap allocator.
-///
-pub const allocator = coral.io.Allocator.bind(Context, &context, struct {
-    fn reallocate(self: *Context, options: coral.io.AllocationOptions) ?[]u8 {
-        if (options.size == 0) {
-            if (options.allocation) |allocation| {
-                self.deallocate(allocation);
-
-                return null;
-            }
-
-            return self.allocate(0, options.return_address);
-        }
-
-        if (options.allocation) |allocation| {
-            return self.reallocate(allocation, options.size);
-        }
-
-        return self.allocate(options.size, options.return_address);
-    }
-}.reallocate);
-
-///
-/// Checks for any allocations belonging to the process heap allocated through the [allocator] interface that are still
-/// alive and reports the stack traces of any detected allocations to stderr along with the allocation address and
-/// length.
-///
-/// *Note* this function becomes a no-op in release builds optimized for speed or size.
-///
+pub const allocator = coral.io.Allocator.bind(Context, &context, .{
+    .reallocate = Context.reallocate,
+    .deallocate = Context.deallocate,
+});
+
 pub fn trace_leaks() void {
     switch (builtin.mode) {
         .Debug, .ReleaseSafe => {
@@ -242,7 +161,7 @@ pub fn trace_leaks() void {
             while (current_allocation_info) |allocation_info| : (current_allocation_info = allocation_info.next_info) {
                 std.debug.print("{d} byte leak at 0x{x} detected:\n", .{
                     allocation_info.size,
-                    @ptrToInt(allocation_info) + @sizeOf(AllocationInfo),
+                    @as(usize, allocation_info) + @sizeOf(AllocationNode),
                 });

                 allocation_info.trace.dump();
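The rewritten heap.zig replaces the old `AllocationInfo` header list with `AllocationNode`, which keeps its bookkeeping directly in front of the user buffer it hands out and is found again by walking a singly linked list. A minimal sketch of that header-plus-payload scheme using the libc allocator (names and the use of `std.c` are illustrative; the real code routes through `ext.SDL_malloc`/`ext.SDL_free` and also records stack traces per node):

const std = @import("std");

const Node = struct {
    next: ?*Node,
    size: usize,

    // The user-visible buffer lives immediately after the node header.
    fn userdata(self: *Node) []u8 {
        return @as([*]u8, @ptrFromInt(@intFromPtr(self) + @sizeOf(Node)))[0..self.size];
    }
};

var head: ?*Node = null;

fn allocate(size: usize) ?[]u8 {
    // One allocation holds both the tracking header and the payload.
    const raw = std.c.malloc(@sizeOf(Node) + size) orelse return null;
    const node: *Node = @ptrCast(@alignCast(raw));

    node.* = .{ .next = head, .size = size };
    head = node;

    return node.userdata();
}

fn deallocate(buffer: []u8) void {
    // Walk the list until a node owning `buffer` is found, then unlink and free it.
    var current = &head;

    while (current.*) |node| : (current = &node.next) {
        if (node.userdata().ptr == buffer.ptr and node.size == buffer.len) {
            current.* = node.next;
            std.c.free(node);

            return;
        }
    }

    @panic("unknown allocation");
}

Because every live buffer stays reachable from `head`, a leak report at shutdown only has to walk the remaining nodes, which is what `trace_leaks` does in the real module.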
File diff suppressed because it is too large
@@ -4,10 +4,30 @@ const tokens = @import("./tokens.zig");

 allocator: coral.io.Allocator,
 arena: coral.arena.Stacking,
-statements: StatementList,
+statements: Statement.List,
 error_message: []const u8,

-pub const BinaryOperator = enum {
+pub const Expression = union (enum) {
+    nil_literal,
+    true_literal,
+    false_literal,
+    number_literal: []const u8,
+    string_literal: []const u8,
+    table_literal: NamedList,
+    grouped_expression: *Expression,
+
+    binary_operation: struct {
+        operator: BinaryOperator,
+        lhs_expression: *Expression,
+        rhs_expression: *Expression,
+    },
+
+    unary_operation: struct {
+        operator: UnaryOperator,
+        expression: *Expression,
+    },
+
+    pub const BinaryOperator = enum {
         addition,
         subtraction,
         multiplication,
@@ -31,41 +51,18 @@ pub const BinaryOperator = enum {
             .less_equals_comparison => .symbol_less_equals,
         };
     }
 };

-pub const Expression = union (enum) {
-    nil_literal,
-    true_literal,
-    false_literal,
-    integer_literal: []const u8,
-    float_literal: []const u8,
-    string_literal: []const u8,
-    array_literal: coral.list.Stack(Expression),
-
-    table_literal: coral.list.Stack(struct {
-        identifier: []const u8,
-        expression: Expression,
-    }),
-
-    grouped_expression: *Expression,
-
-    binary_operation: struct {
-        operator: BinaryOperator,
-        lhs_expression: *Expression,
-        rhs_expression: *Expression,
-    },
-
-    unary_operation: struct {
-        operator: UnaryOperator,
-        expression: *Expression,
-    },
+pub const NamedList = coral.list.Stack(struct {
+    identifier: []const u8,
+    expression: Expression,
+});
+
+pub const List = coral.list.Stack(Expression);
 };

 const ExpressionParser = fn (self: *Self, tokenizer: *tokens.Tokenizer) ParseError!Expression;

-///
-///
-///
 pub const ParseError = error {
     OutOfMemory,
     BadSyntax,
@@ -76,16 +73,16 @@ const Self = @This();
 pub const Statement = union (enum) {
     return_expression: Expression,
     return_nothing,
-};

-const StatementList = coral.list.Stack(Statement);
+    const List = coral.list.Stack(Statement);
+};

 const UnaryOperator = enum {
     boolean_negation,
     numeric_negation,
 };

-fn binary_operation_parser(comptime parse_next: ExpressionParser, comptime operators: []const BinaryOperator) ExpressionParser {
+fn binary_operation_parser(comptime parse_next: ExpressionParser, comptime operators: []const Expression.BinaryOperator) ExpressionParser {
     return struct {
         fn parse(self: *Self, tokenizer: *tokens.Tokenizer) ParseError!Expression {
             var expression = try parse_next(self, tokenizer);
@@ -117,30 +114,30 @@ fn binary_operation_parser(comptime parse_next: ExpressionParser, comptime operators: []const Expression.BinaryOperator) ExpressionParser {
     }.parse;
 }

-fn check_syntax(self: *Self, condition: bool, error_message: []const u8) ParseError!void {
+fn check_syntax(self: *Self, condition: bool, message: []const u8) ParseError!void {
     if (condition) {
         return;
     }

-    return self.fail_syntax(error_message);
+    return self.fail_syntax(message);
 }

-pub fn deinit(self: *Self) void {
-    self.arena.deinit();
-    self.statements.deinit();
-}
-
-fn fail_syntax(self: *Self, error_message: []const u8) ParseError {
-    self.error_message = error_message;
+fn fail_syntax(self: *Self, message: []const u8) ParseError {
+    self.error_message = message;

     return error.BadSyntax;
 }

-pub fn init(allocator: coral.io.Allocator) coral.io.AllocationError!Self {
+pub fn free(self: *Self) void {
+    self.arena.free();
+    self.statements.free();
+}
+
+pub fn make(allocator: coral.io.Allocator) Self {
     return Self{
-        .arena = try coral.arena.Stacking.init(allocator, 4096),
+        .arena = coral.arena.Stacking.make(allocator, 4096),
         .allocator = allocator,
-        .statements = .{.allocator = allocator},
+        .statements = Statement.List.make(allocator),
         .error_message = "",
     };
 }
@@ -150,16 +147,14 @@ pub fn list_statements(self: Self) []const Statement {
 }

 pub fn parse(self: *Self, tokenizer: *tokens.Tokenizer) ParseError!void {
-    self.reset();
-
-    errdefer self.reset();
+    self.free();

-    var has_not_returned_yet = true;
+    var has_returned = false;

     while (tokenizer.step(.{.include_newlines = false})) {
         switch (tokenizer.current_token) {
             .keyword_return => {
-                try self.check_syntax(has_not_returned_yet, "cannot return more than once per function scope");
+                try self.check_syntax(!has_returned, "multiple returns in function scope but expected only one");

                 try self.statements.push_one(get_statement: {
                     if (tokenizer.step(.{.include_newlines = true})) {
@@ -177,7 +172,7 @@ pub fn parse(self: *Self, tokenizer: *tokens.Tokenizer) ParseError!void {
                     break: get_statement .return_nothing;
                 });

-                has_not_returned_yet = false;
+                has_returned = true;
             },

             else => return self.fail_syntax("invalid statement"),
@@ -215,16 +210,10 @@ fn parse_factor(self: *Self, tokenizer: *tokens.Tokenizer) ParseError!Expression
             return Expression{.grouped_expression = try coral.io.allocate_one(self.arena.as_allocator(), expression)};
         },

-        .integer => |value| {
-            _ = tokenizer.step(.{.include_newlines = false});
-
-            return Expression{.integer_literal = value};
-        },
-
-        .real => |value| {
+        .number => |value| {
             _ = tokenizer.step(.{.include_newlines = false});

-            return Expression{.float_literal = value};
+            return Expression{.number_literal = value};
         },

         .string => |value| {
@@ -233,48 +222,10 @@ fn parse_factor(self: *Self, tokenizer: *tokens.Tokenizer) ParseError!Expression
             return Expression{.string_literal = value};
         },

-        .symbol_bracket_left => {
-            try self.check_syntax(tokenizer.step(.{.include_newlines = false}), "unexpected end of array literal");
-
-            var expression = Expression{
-                .array_literal = .{
-                    .allocator = self.arena.as_allocator(),
-                },
-            };
-
-            coral.debug.assert(expression == .array_literal);
-
-            const array_average_maximum = 32;
-
-            try expression.array_literal.grow(array_average_maximum);
-
-            while (true) {
-                switch (tokenizer.current_token) {
-                    .symbol_bracket_right => {
-                        _ = tokenizer.step(.{.include_newlines = false});
-
-                        return expression;
-                    },
-
-                    else => {
-                        try self.check_syntax(
-                            tokenizer.step(.{.include_newlines = false}),
-                            "expected `]` or expression after `[`");
-
-                        try expression.array_literal.push_one(try self.parse_expression(tokenizer));
-                    },
-                }
-            }
-        },
-
         .symbol_brace_left => {
             try self.check_syntax(tokenizer.step(.{.include_newlines = false}), "unexpected end of table literal");

-            var expression = Expression{
-                .table_literal = .{
-                    .allocator = self.arena.as_allocator(),
-                },
-            };
+            var expression = Expression{.table_literal = Expression.NamedList.make(self.arena.as_allocator())};

             coral.debug.assert(expression == .table_literal);

@@ -356,8 +307,3 @@ const parse_term = binary_operation_parser(parse_factor, &.{
     .multiplication,
     .divsion,
 });
-
-pub fn reset(self: *Self) void {
-    self.statements.clear();
-    self.arena.deinit();
-}
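The parser builds each precedence level with `binary_operation_parser`, a comptime helper that wraps the next-higher-precedence parser and folds matching operators left-associatively. A stripped-down sketch of the same pattern over a toy token stream (the `Parser`, `Token`, and `Expr` types here are illustrative, not the repository's):

const std = @import("std");

const Token = enum { number, plus, star, end };

const Expr = union(enum) {
    number,
    binary: struct { op: Token, lhs: *const Expr, rhs: *const Expr },
};

const Parser = struct {
    tokens: []const Token,
    index: usize = 0,
    arena: std.mem.Allocator,

    fn peek(self: *Parser) Token {
        return self.tokens[self.index];
    }

    fn next(self: *Parser) Token {
        const token = self.tokens[self.index];

        self.index += 1;

        return token;
    }
};

const ParseFn = fn (*Parser) anyerror!Expr;

// Returns a parser that folds `operators` left-associatively around `parse_next`.
fn binary_parser(comptime parse_next: ParseFn, comptime operators: []const Token) ParseFn {
    return struct {
        fn parse(p: *Parser) anyerror!Expr {
            var expression = try parse_next(p);

            fold: while (true) {
                inline for (operators) |operator| {
                    if (p.peek() == operator) {
                        _ = p.next();

                        const lhs = try p.arena.create(Expr);
                        const rhs = try p.arena.create(Expr);

                        lhs.* = expression;
                        rhs.* = try parse_next(p);
                        expression = .{ .binary = .{ .op = operator, .lhs = lhs, .rhs = rhs } };

                        continue :fold;
                    }
                }

                return expression;
            }
        }
    }.parse;
}

fn parse_factor(p: *Parser) anyerror!Expr {
    std.debug.assert(p.next() == .number);

    return .number;
}

// Higher precedence binds tighter because it sits closer to parse_factor.
const parse_term = binary_parser(parse_factor, &.{.star});
const parse_sum = binary_parser(parse_term, &.{.plus});

test "folds operators left-associatively with precedence" {
    var arena_state = std.heap.ArenaAllocator.init(std.testing.allocator);
    defer arena_state.deinit();

    var parser = Parser{
        .tokens = &.{ .number, .plus, .number, .star, .number, .end },
        .arena = arena_state.allocator(),
    };

    const expr = try parse_sum(&parser);

    try std.testing.expect(expr == .binary);
}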
@@ -0,0 +1,116 @@
+const coral = @import("coral");
+
+allocator: coral.io.Allocator,
+interned: SymbolTable,
+globals: Object,
+values: DataStack,
+frames: CallStack,
+
+pub const Float = f64;
+
+const CallStack = coral.list.Stack(struct {
+    callable: *Object,
+    opcode_index: usize,
+    stack_index: usize,
+});
+
+const DataStack = coral.list.Stack(Variant);
+
+pub const Object = struct {
+    ref_count: usize,
+    userdata: []coral.io.Byte,
+    userinfo: *const anyopaque,
+};
+
+pub const PopError = error {
+    StackOverflow,
+};
+
+const Self = @This();
+
+const SymbolTable = coral.map.Table([]const coral.io.Byte, *Object, coral.map.string_table_traits);
+
+pub const Variant = union (enum) {
+    nil,
+    true,
+    false,
+    number: Float,
+    object: *Object,
+};
+
+pub fn acquire_instance(_: *Self, object: *Object) *Object {
+    // TODO: safety-check object belongs to state.
+    object.ref_count += 1;
+
+    return object;
+}
+
+pub fn acquire_interned(self: *Self, userdata: []const u8, userinfo: *const anyopaque) coral.io.AllocationError!*Object {
+    // TODO: Include userinfo in matching lookup.
+    if (self.interned.lookup(userdata)) |object| {
+        return self.acquire_instance(object);
+    } else {
+        const data_object = try self.acquire_new(userdata, userinfo);
+
+        errdefer self.release(data_object);
+
+        coral.debug.assert(try self.interned.insert(data_object.userdata, data_object));
+
+        return data_object;
+    }
+}
+
+pub fn acquire_new(self: *Self, userdata: []const u8, userinfo: *const anyopaque) coral.io.AllocationError!*Object {
+    const allocated_userdata = try coral.io.allocate_copy(self.allocator, userdata);
+
+    errdefer self.allocator.deallocate(allocated_userdata);
+
+    const allocated_object = try coral.io.allocate_one(self.allocator, Object{
+        .ref_count = 1,
+        .userdata = allocated_userdata,
+        .userinfo = userinfo,
+    });
+
+    errdefer self.allocator.deallocate(allocated_object);
+
+    return allocated_object;
+}
+
+pub fn free(self: *Self) void {
+    self.values.free();
+    self.frames.free();
+    self.interned.free();
+}
+
+pub fn make(allocator: coral.io.Allocator) Self {
+    return .{
+        .values = DataStack.make(allocator),
+        .frames = CallStack.make(allocator),
+        .interned = SymbolTable.make(allocator),
+        .allocator = allocator,
+
+        .globals = .{
+            .ref_count = 0,
+            .userdata = &.{},
+            .userinfo = &.{},
+        },
+    };
+}
+
+pub fn pop_value(self: *Self) PopError!Variant {
+    return self.values.pop() orelse error.StackOverflow;
+}
+
+pub fn push_value(self: *Self, value: Variant) coral.io.AllocationError!void {
+    return self.values.push_one(value);
+}
+
+pub fn release(self: *Self, object: *Object) void {
+    coral.debug.assert(object.ref_count != 0);
+
+    object.ref_count -= 1;
+
+    if (object.ref_count == 0) {
+        self.allocator.deallocate(object);
+    }
+}
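The new runtime state file tracks script objects by reference count: each `acquire_*` call bumps `ref_count`, and `release` frees the object once the count reaches zero. A minimal sketch of that ownership rule (simplified; the real `Object` also owns `userdata`/`userinfo` and participates in string interning):

const std = @import("std");

const Object = struct {
    ref_count: usize,
};

fn acquire(object: *Object) *Object {
    object.ref_count += 1;

    return object;
}

fn release(allocator: std.mem.Allocator, object: *Object) void {
    std.debug.assert(object.ref_count != 0);

    object.ref_count -= 1;

    // Last reference released: in this sketch the object owns no other state,
    // so destroying it is all that remains to do.
    if (object.ref_count == 0) {
        allocator.destroy(object);
    }
}

test "object lifetime follows its reference count" {
    const object = try std.testing.allocator.create(Object);

    object.* = .{ .ref_count = 1 };

    const again = acquire(object);

    release(std.testing.allocator, again);
    release(std.testing.allocator, object); // count hits zero, object is freed
}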
@@ -29,8 +29,7 @@ pub const Token = union(enum) {
     symbol_equals,
     symbol_double_equals,

-    integer: []const u8,
-    real: []const u8,
+    number: []const u8,
     string: []const u8,

     keyword_nil,
@@ -41,7 +40,7 @@ pub const Token = union(enum) {

     pub fn text(self: Token) []const u8 {
         return switch (self) {
-            .unknown => |unknown| @ptrCast([*]const u8, &unknown)[0 .. 1],
+            .unknown => |unknown| @as([*]const u8, @ptrCast(&unknown))[0 .. 1],
             .newline => "newline",

             .global => |identifier| identifier,
@@ -69,8 +68,7 @@ pub const Token = union(enum) {
             .symbol_equals => "=",
             .symbol_double_equals => "==",

-            .integer => |literal| literal,
-            .real => |literal| literal,
+            .number => |literal| literal,
             .string => |literal| literal,

             .keyword_nil => "nil",
@@ -134,7 +132,7 @@ pub const Tokenizer = struct {
                     else => break,
                 };

-                self.current_token = .{.real = self.source[begin .. cursor]};
+                self.current_token = .{.number = self.source[begin .. cursor]};

                 return true;
             },
@@ -142,7 +140,7 @@ pub const Tokenizer = struct {
                     else => break,
                 };

-                self.current_token = .{.integer = self.source[begin .. cursor]};
+                self.current_token = .{.number = self.source[begin .. cursor]};

                 return true;
             },
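With `.integer` and `.real` collapsed into a single `.number` token, the scanner only has to extend a digit run across an optional fractional part before slicing the source. A self-contained sketch of that scan (not the repository's tokenizer):

const std = @import("std");

// Returns the slice of `source` holding the number that starts at `begin`,
// covering a digit run and at most one fractional part.
fn scan_number(source: []const u8, begin: usize) []const u8 {
    var cursor = begin;

    while (cursor < source.len and std.ascii.isDigit(source[cursor])) {
        cursor += 1;
    }

    if (cursor < source.len and source[cursor] == '.') {
        cursor += 1;

        while (cursor < source.len and std.ascii.isDigit(source[cursor])) {
            cursor += 1;
        }
    }

    return source[begin..cursor];
}

test "integers and reals become one token kind" {
    try std.testing.expectEqualStrings("42", scan_number("42 + 1", 0));
    try std.testing.expectEqualStrings("3.14", scan_number("3.14)", 0));
}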
@@ -1,6 +1,4 @@
-const builtin = @import("builtin");
-
-const canvas = @import("./canvas.zig");
+const app = @import("./app.zig");

 const coral = @import("coral");

@@ -8,143 +6,87 @@ const ext = @import("./ext.zig");

 pub const file = @import("./file.zig");

-pub const heap = @import("./heap.zig");
+const heap = @import("./heap.zig");

 const kym = @import("./kym.zig");

-const AppManifest = struct {
-    title: [255:0]u8 = [_:0]u8{0} ** 255,
-    width: u16 = 640,
-    height: u16 = 480,
-
-    pub fn load_script(self: *AppManifest, env: *kym.RuntimeEnv, fs: file.System, file_path: []const u8) !void {
-        var manifest = try env.execute_file(heap.allocator, fs, file.Path.from(&.{file_path}));
-
-        defer manifest.deinit();
-
-        {
-            var title = try env.get(manifest.value, try env.intern("title"));
-
-            defer title.deinit();
-
-            const title_string = try env.string_cast(title.value);
-
-            try env.check(title_string.len <= self.title.len, "`title` cannot exceed 255 bytes in length");
-            coral.io.copy(&self.title, title_string);
-        }
-
-        const u16_max = coral.math.max_int(@typeInfo(u16).Int);
-
-        {
-            const width = try env.get(manifest.value, try env.intern("width"));
-
-            errdefer width.deinit();
-
-            if (width.value.as_number()) |value| {
-                if (value < u16_max) {
-                    self.width = @floatToInt(u16, value);
-                }
-            }
-        }
-
-        {
-            const height = try env.get(manifest.value, try env.intern("height"));
-
-            errdefer height.deinit();
-
-            if (height.value.as_number()) |value| {
-                if (value < u16_max) {
-                    self.height = @floatToInt(u16, value);
-                }
-            }
-        }
-    }
+pub const RuntimeError = error {
+    OutOfMemory,
+    InitFailure,
+    BadManifest,
 };

-fn stack_as_log_writer(self: *coral.list.ByteStack) coral.io.Writer {
-    return coral.io.Writer.bind(coral.list.ByteStack, self, struct {
-        fn write(stack: *coral.list.ByteStack, bytes: []const coral.io.Byte) ?usize {
-            var line_written = @as(usize, 0);
-
-            for (bytes) |byte| {
-                if (byte == '\n') {
-                    ext.SDL_LogError(ext.SDL_LOG_CATEGORY_APPLICATION, "%.*s", stack.values.len, stack.values.ptr);
-                    stack.clear();
-
-                    line_written = 0;
-
-                    continue;
-                }
-
-                stack.push_one(byte) catch {
-                    coral.debug.assert(stack.drop(line_written));
-
-                    return null;
-                };
-
-                line_written += 1;
-            }
-
-            return bytes.len;
-        }
-    }.write);
+fn last_sdl_error() [:0]const u8 {
+    return coral.io.slice_sentineled(@as(u8, 0), @as([*:0]const u8, @ptrCast(ext.SDL_GetError())));
 }

-pub fn run_app(base_file_system: file.System) void {
-    defer heap.trace_leaks();
-
-    var log_buffer = coral.list.ByteStack{.allocator = heap.allocator};
-
-    defer log_buffer.deinit();
-
-    var script_env = kym.RuntimeEnv.init(heap.allocator, stack_as_log_writer(&log_buffer), .{
-        .stack_max = 512,
-        .calls_max = 512,
-    }) catch {
-        return ext.SDL_LogError(ext.SDL_LOG_CATEGORY_APPLICATION, "failed to initialize Kym vm\n");
-    };
-
-    defer script_env.deinit();
-
-    const app_file_name = "app.ona";
-    var app_manifest = AppManifest{};
-
-    app_manifest.load_script(&script_env, base_file_system, app_file_name) catch {
-        return ext.SDL_LogError(ext.SDL_LOG_CATEGORY_APPLICATION, "failed to load %s\n", app_file_name);
-    };
+pub fn run_app(file_access: file.Access) RuntimeError!void {
+    var info_log = app.WritableLog.make(.info, heap.allocator);
+
+    defer info_log.free();
+
+    var fail_log = app.WritableLog.make(.fail, heap.allocator);
+
+    defer fail_log.free();

     if (ext.SDL_Init(ext.SDL_INIT_EVERYTHING) != 0) {
-        return ext.SDL_LogError(ext.SDL_LOG_CATEGORY_APPLICATION, "%s\n", ext.SDL_GetError());
+        try fail_log.write(last_sdl_error());
+
+        return error.InitFailure;
     }

     defer ext.SDL_Quit();

-    {
-        const base_prefix = ext.SDL_GetBasePath() orelse {
-            return ext.SDL_LogError(ext.SDL_LOG_CATEGORY_APPLICATION, "%s\n", ext.SDL_GetError());
-        };
-
-        defer ext.SDL_free(base_prefix);
-
-        const window_flags = 0;
-        const window_pos = ext.SDL_WINDOWPOS_CENTERED;
-
-        const window = ext.SDL_CreateWindow(&app_manifest.title, window_pos, window_pos, app_manifest.width, app_manifest.height, window_flags) orelse {
-            return ext.SDL_LogError(ext.SDL_LOG_CATEGORY_APPLICATION, "%s\n", ext.SDL_GetError());
-        };
+    var script_env = kym.RuntimeEnv.make(heap.allocator, .{
+        .out_writer = info_log.as_writer(),
+        .err_writer = fail_log.as_writer(),
+    }) catch {
+        try fail_log.write("failed to initialize script runtime");
+
+        return error.InitFailure;
+    };
+
+    defer script_env.free();
+
+    var manifest = app.Manifest{};
+
+    manifest.load(&script_env, file_access) catch {
+        fail_log.write("failed to load / execute app.ona manifest") catch {};
+
+        return error.BadManifest;
+    };
+
+    const window = create: {
+        const pos = ext.SDL_WINDOWPOS_CENTERED;
+        const flags = 0;
+
+        break: create ext.SDL_CreateWindow(&manifest.title, pos, pos, manifest.width, manifest.height, flags) orelse {
+            fail_log.write(last_sdl_error()) catch {};
+
+            return error.InitFailure;
+        };
+    };

     defer ext.SDL_DestroyWindow(window);

-    const renderer_flags = 0;
-
-    const renderer = ext.SDL_CreateRenderer(window, -1, renderer_flags) orelse {
-        return ext.SDL_LogError(ext.SDL_LOG_CATEGORY_APPLICATION, "%s\n", ext.SDL_GetError());
-    };
+    const renderer = create: {
+        const defaultDriverIndex = -1;
+        const flags = ext.SDL_RENDERER_ACCELERATED;
+
+        break: create ext.SDL_CreateRenderer(window, defaultDriverIndex, flags) orelse {
+            fail_log.write(last_sdl_error()) catch {};
+
+            return error.InitFailure;
+        };
+    };

     defer ext.SDL_DestroyRenderer(renderer);

+    {
+        var previous_ticks = ext.SDL_GetTicks64();
+
         while (true) {
-            // TODO: Delta timing.
-            var event = @as(ext.SDL_Event, undefined);
+            {
+                var event = @as(ext.SDL_Event, undefined);

                 while (ext.SDL_PollEvent(&event) != 0) {
@@ -153,18 +95,22 @@ pub fn run_app(base_file_system: file.System) void {
                     else => {},
                 }
             }

-            if (ext.SDL_SetRenderDrawColor(renderer, 0, 0, 0, 0) != 0) {
-                return ext.SDL_LogError(ext.SDL_LOG_CATEGORY_APPLICATION, "%s\n", ext.SDL_GetError());
             }

-            if (ext.SDL_RenderClear(renderer) != 0) {
-                return ext.SDL_LogError(ext.SDL_LOG_CATEGORY_APPLICATION, "%s\n", ext.SDL_GetError());
+            {
+                // Based on https://fabiensanglard.net/timer_and_framerate/index.php.
+                const current_ticks = ext.SDL_GetTicks64();
+                const milliseconds_per_second = 1000.0;
+                const tick_frequency = @as(u64, @intFromFloat(milliseconds_per_second / manifest.tick_rate));
+
+                while (previous_ticks < current_ticks) {
+                    previous_ticks += tick_frequency;
+                }
             }

-            // TODO: Render here.
-            ext.SDL_RenderPresent(renderer);
+            _ = ext.SDL_SetRenderDrawColor(renderer, 0, 0, 0, 255);
+            _ = ext.SDL_RenderClear(renderer);
+            _ = ext.SDL_RenderPresent(renderer);
         }
     }
 }
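The rewritten main loop replaces the delta-timing TODO with a fixed-timestep scheme in the style of the linked Fabien Sanglard article: real time advances at its own pace while simulation ticks are consumed in fixed steps of `1000 / tick_rate` milliseconds. A compact sketch of that accumulation logic (the function-pointer parameters and the `tick` callback are stand-ins for the SDL timer and the eventual simulation step):

// Assumes an SDL-style millisecond counter; `tick` would advance the simulation.
fn run_loop(get_ticks_ms: *const fn () u64, tick: *const fn () void, tick_rate: f64) void {
    const milliseconds_per_second = 1000.0;
    const tick_frequency: u64 = @intFromFloat(milliseconds_per_second / tick_rate);

    var previous_ticks = get_ticks_ms();

    while (true) {
        const current_ticks = get_ticks_ms();

        // Run as many fixed-length simulation steps as real time has made due,
        // so rendering speed never changes how fast the simulation advances.
        while (previous_ticks < current_ticks) {
            tick();

            previous_ticks += tick_frequency;
        }

        // ... poll events and render here ...
    }
}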
@@ -1,7 +1,5 @@
-const coral = @import("coral");
-
 const ona = @import("ona");

-pub fn main() !void {
-    ona.run_app(.{.sandboxed_path = &ona.file.Path.cwd});
+pub fn main() ona.RuntimeError!void {
+    try ona.run_app(.{.sandboxed_path = &ona.file.Path.cwd});
 }
@@ -1,3 +0,0 @@
-const _coral = @import("coral");
-
-const _ona = @import("ona");