Major rewrite to use an asynchronous run loop
continuous-integration/drone/push Build is failing Details

This commit is contained in:
kayomn 2024-05-29 19:27:02 +01:00
parent ef537bef14
commit fac0470a4a
66 changed files with 4043 additions and 7231 deletions

0
.drone.yml Executable file → Normal file
View File

3
.gitattributes vendored Normal file
View File

@ -0,0 +1,3 @@
tools/sokol-shdc filter=lfs diff=lfs merge=lfs -text
tools/sokol-shdc.exe filter=lfs diff=lfs merge=lfs -text
*.bmp filter=lfs diff=lfs merge=lfs -text

4
.gitignore vendored Executable file → Normal file
View File

@ -1,2 +1,2 @@
/zig-cache/ /zig-cache
/zig-out/ /zig-out

2
.vscode/launch.json vendored
View File

@ -5,7 +5,7 @@
"name": "Runner", "name": "Runner",
"type": "gdb", "type": "gdb",
"request": "launch", "request": "launch",
"target": "${workspaceRoot}/zig-out/bin/runner", "target": "${workspaceRoot}/zig-out/bin/main",
"cwd": "${workspaceRoot}/debug/", "cwd": "${workspaceRoot}/debug/",
"valuesFormatting": "parseText", "valuesFormatting": "parseText",
"preLaunchTask": "Build All" "preLaunchTask": "Build All"

View File

@ -1,7 +1,7 @@
{ {
"files.insertFinalNewline": true, "files.insertFinalNewline": true,
"files.trimTrailingWhitespace": true, "files.trimTrailingWhitespace": true,
"zig.initialSetupDone": true, "debug.console.collapseIdenticalLines": false,
"[zig]": { "[zig]": {
"editor.formatOnSave": false, "editor.formatOnSave": false,

6
.vscode/tasks.json vendored
View File

@ -9,7 +9,7 @@
"kind": "build", "kind": "build",
"isDefault": true "isDefault": true
}, },
"problemMatcher": "$gcc", "problemMatcher": "$zig",
"presentation": { "presentation": {
"echo": true, "echo": true,
"reveal": "silent", "reveal": "silent",
@ -17,8 +17,8 @@
"panel": "shared", "panel": "shared",
"showReuseMessage": false, "showReuseMessage": false,
"clear": true, "clear": true,
"revealProblems": "onProblem" "revealProblems": "onProblem",
} }
} }
] ]
} }

137
build.zig
View File

@ -1,44 +1,137 @@
const builtin = @import("builtin");
const std = @import("std"); const std = @import("std");
pub fn build(b: *std.Build) void { pub fn build(b: *std.Build) !void {
const target = b.standardTargetOptions(.{}); const target = b.standardTargetOptions(.{});
const optimize = b.standardOptimizeOption(.{}); const optimize = b.standardOptimizeOption(.{});
const coral_module = b.createModule(.{.source_file = .{.path = "./source/coral/coral.zig"}}); const coral_module = b.createModule(.{
.root_source_file = b.path("src/coral/coral.zig"),
const ona_module = b.createModule(.{
.source_file = .{.path = "./source/ona/ona.zig"},
.dependencies = &.{
.{
.name = "coral",
.module = coral_module,
},
},
}); });
b.installArtifact(create: { const ona_module = add: {
const compile_step = b.addExecutable(.{ const sokol_dependency = b.dependency("sokol", .{
.name = "runner",
.root_source_file = .{ .path = "source/runner.zig" },
.target = target, .target = target,
.optimize = optimize, .optimize = optimize,
}); });
compile_step.addModule("ona", ona_module); const module = b.addModule("ona", .{
compile_step.linkLibC(); .root_source_file = b.path("src/ona/ona.zig"),
compile_step.linkSystemLibrary("SDL2");
break: create compile_step; .imports = &.{
}); .{
.name = "sokol",
.module = sokol_dependency.module("sokol"),
},
.{
.name = "coral",
.module = coral_module,
},
},
});
break: add module;
};
b.step("test", "Run unit tests").dependOn(create: { b.step("test", "Run unit tests").dependOn(create: {
const tests = b.addTest(.{ const tests = b.addTest(.{
.root_source_file = .{.path = "source/test.zig"}, .root_source_file = b.path("src/test.zig"),
.target = target, .target = target,
.optimize = optimize, .optimize = optimize,
}); });
break: create &tests.step; break: create &tests.step;
}); });
b.installArtifact(add: {
const compile_step = b.addExecutable(.{
.name = "main",
.root_source_file = b.path("src/main.zig"),
.target = target,
.optimize = optimize,
});
compile_step.root_module.addImport("ona", ona_module);
compile_step.root_module.addImport("coral", coral_module);
compile_step.linkLibC();
compile_step.linkSystemLibrary("SDL2");
try depend_on_shaders(b, target, "src/ona/gfx/shaders/", &compile_step.step);
break: add compile_step;
});
}
/// Walks `shader_dir_path` for `.glsl` sources and registers a sokol-shdc
/// build step for each shader that has no generated `.zig` output yet, or
/// whose output is older than the source.
///
/// `step` gains a dependency on every scheduled shader compilation.
fn depend_on_shaders(
    b: *std.Build,
    target: std.Build.ResolvedTarget,
    shader_dir_path: []const u8,
    step: *std.Build.Step,
) !void {
    var dir = try std.fs.cwd().openDir(shader_dir_path, .{ .iterate = true });

    defer dir.close();

    var walker = try dir.walk(b.allocator);

    defer walker.deinit();

    const shdc_path = switch (builtin.os.tag) {
        .windows => "./tools/sokol-shdc.exe",
        .linux => "./tools/sokol-shdc",
        else => @compileError("cannot compile sokol shaders on this platform"),
    };

    const path_buffer_max = 255;
    var input_path_buffer = [_]u8{undefined} ** path_buffer_max;
    var output_path_buffer = [_]u8{undefined} ** path_buffer_max;

    // macOS caps desktop OpenGL at 4.1; everywhere else use 4.3.
    const glsl = if (target.result.isDarwin()) "glsl410" else "glsl430";
    const slang = glsl ++ ":metal_macos:hlsl5:glsl300es:wgsl";

    while (try walker.next()) |entry| {
        if (entry.kind != .file or !std.mem.endsWith(u8, entry.path, ".glsl")) {
            continue;
        }

        const input_path = try std.fmt.bufPrint(&input_path_buffer, "{s}{s}", .{shader_dir_path, entry.path});
        const output_path = try std.fmt.bufPrint(&output_path_buffer, "{s}.zig", .{input_path});

        // Path of the generated file relative to `dir`. Fix: the previous
        // code used only the basename here, which looked in the directory
        // root and so mis-handled shaders in nested sub-directories.
        const relative_output = output_path[shader_dir_path.len ..];

        // Recompile when the output is missing or stale.
        const needs_compile = needs: {
            dir.access(relative_output, .{.mode = .read_only}) catch break: needs true;

            break: needs (try dir.statFile(entry.path)).mtime > (try dir.statFile(relative_output)).mtime;
        };

        if (needs_compile) {
            const cmd = b.addSystemCommand(&.{
                shdc_path,
                "-i",
                input_path,
                "-o",
                output_path,
                "-l",
                slang,
                "-f",
                "sokol_zig",
            });

            step.dependOn(&cmd.step);
        }
    }
}
} }

17
build.zig.zon Normal file
View File

@ -0,0 +1,17 @@
.{
.name = "Ona",
.version = "0.0.1",
.paths = .{
"src",
"build.zig",
"build.zig.zon",
"LICENSE",
"README.md",
},
.dependencies = .{
.sokol = .{
.url = "git+https://github.com/floooh/sokol-zig.git#796a3d3d54c22d20da9e91a9a9120d5423d1e700",
.hash = "12209a187e071d76af00c02865677d170f844376866d062b1b5f82e4ecbd750c4e18",
},
},
}

BIN
debug/actor.bmp (Stored with Git LFS) Executable file

Binary file not shown.

View File

@ -1,23 +0,0 @@
let tool = "wrench"
var test_param = `monkey {tool} {2 + 1 - 1}`
let printer = lambda (pfx):
@print(test_param)
return lambda (msg):
@print(pfx)
@print(msg)
end
end
let pr = printer("this is a final closure")
pr("goodbye")
return {
.title = "Game",
.width = 1280,
.height = 800,
.tick_rate = 60,
}

BIN
debug/test.bmp (Stored with Git LFS) Normal file

Binary file not shown.

0
readme.md Executable file → Normal file
View File

View File

@ -1,116 +0,0 @@
const debug = @import("./debug.zig");
const io = @import("./io.zig");
const list = @import("./list.zig");
const math = @import("./math.zig");
/// Stack-like region ("arena") allocator: allocations bump a cursor inside
/// fixed-size regions that are chained together, and memory is only ever
/// reclaimed all at once via `deinit`.
pub const Stacking = struct {
    allocator: io.Allocator,
    min_region_size: usize,
    head_region: ?*Region,
    tail_region: ?*Region,

    /// One chunk of backing memory: this header is immediately followed by
    /// `capacity` usize-sized words of allocatable payload.
    const Region = struct {
        next: ?*Region,
        count: usize,
        capacity: usize,

        /// Bumps the cursor by `region_size` words and returns the carved-out
        /// slice. Caller must check `can_allocate` first.
        fn allocate(self: *Region, region_size: usize) []usize {
            debug.assert(self.can_allocate(region_size));

            // Skip the header (measured in usize words), then the words
            // already in use.
            const offset = (@sizeOf(Region) / @alignOf(usize)) + self.count;
            const allocation = @as([*]usize, @ptrCast(self))[offset .. (offset + region_size)];

            self.count += region_size;

            return allocation;
        }

        /// The entire backing buffer (header + payload) as words, suitable
        /// for handing back to the underlying allocator.
        fn as_raw(self: *Region) []usize {
            return @as([*]usize, @ptrCast(self))[0 .. ((@sizeOf(Region) / @alignOf(usize)) + self.capacity)];
        }

        /// Whether `region_size` more words still fit in this region.
        fn can_allocate(self: Region, region_size: usize) bool {
            return (self.count + region_size) <= self.capacity;
        }
    };

    /// Allocates a fresh region of at least `min_region_size` words from the
    /// backing allocator and initializes its header.
    fn allocate_region(self: *Stacking, requested_region_size: usize) io.AllocationError!*Region {
        const region_size = @max(requested_region_size, self.min_region_size);
        const region = @as(*Region, @ptrCast(@alignCast(try self.allocator.reallocate(null, @alignOf(Region) + (@alignOf(usize) * region_size)))));

        region.* = .{
            .next = null,
            .count = 0,
            .capacity = region_size,
        };

        return region;
    }

    /// Type-erased `io.Allocator` view over this stacking allocator.
    pub fn as_allocator(self: *Stacking) io.Allocator {
        return io.Allocator.bind(Stacking, self, .{
            .deallocate = deallocate,
            .reallocate = reallocate,
        });
    }

    /// Frees every region; all outstanding allocations become invalid.
    pub fn deinit(self: *Stacking) void {
        while (self.head_region) |region| {
            const next_region = region.next;

            self.allocator.deallocate(region.as_raw());

            self.head_region = next_region;
        }

        self.head_region = null;
        self.tail_region = null;
    }

    /// Individual deallocation is unsupported; memory is reclaimed only by
    /// `deinit`. Reaching this is a programmer error.
    fn deallocate(_: *Stacking, _: []io.Byte) void {
        // TODO: Decide how to implement.
        unreachable;
    }

    /// Creates an empty stacking allocator; regions are created lazily on
    /// first allocation.
    pub fn init(allocator: io.Allocator, min_region_size: usize) Stacking {
        return Stacking{
            .allocator = allocator,
            .min_region_size = min_region_size,
            .head_region = null,
            .tail_region = null,
        };
    }

    /// Bump-allocates `byte_size` bytes, chaining on a new region when the
    /// tail is full. Shrinking an existing allocation happens in place;
    /// growing returns fresh memory.
    /// NOTE(review): on growth the old bytes are NOT copied into the new
    /// allocation — confirm callers never rely on grow-with-contents here.
    fn reallocate(self: *Stacking, _: usize, allocation: ?[]io.Byte, byte_size: usize) io.AllocationError![]io.Byte {
        if (allocation) |buffer| {
            if (byte_size < buffer.len) {
                return buffer[0 .. byte_size];
            }
        }

        // Round the byte count up to whole usize words.
        const region_size = (byte_size + @sizeOf(usize) - 1) / @sizeOf(usize);

        const tail_region = self.tail_region orelse {
            // First allocation ever: create the initial region.
            const region = try self.allocate_region(region_size);

            self.tail_region = region;
            self.head_region = self.tail_region;

            return @as([*]io.Byte, @ptrCast(region.allocate(region_size)))[0 .. byte_size];
        };

        if (!tail_region.can_allocate(region_size)) {
            // Tail is full: chain a new region onto the list.
            const region = try self.allocate_region(region_size);

            tail_region.next = region;
            self.tail_region = region;

            return @as([*]io.Byte, @ptrCast(region.allocate(region_size)))[0 .. byte_size];
        }

        return @as([*]io.Byte, @ptrCast(tail_region.allocate(region_size)))[0 .. byte_size];
    }
};

View File

@ -1,14 +0,0 @@
pub const arena = @import("./arena.zig");
pub const debug = @import("./debug.zig");
pub const io = @import("./io.zig");
pub const list = @import("./list.zig");
pub const map = @import("./map.zig");
pub const math = @import("./math.zig");
pub const utf8 = @import("./utf8.zig");

View File

@ -1,6 +0,0 @@
/// Traps (safety-checked illegal behavior) when `condition` is false;
/// otherwise does nothing.
pub fn assert(condition: bool) void {
    if (condition) {
        return;
    }

    unreachable;
}

View File

@ -1,383 +0,0 @@
const debug = @import("./debug.zig");
const math = @import("./math.zig");
const std = @import("std");
pub const AllocationError = error {
OutOfMemory,
};
pub const Allocator = struct {
context: *anyopaque,
actions: *const struct {
deallocate: *const fn (context: *anyopaque, allocation: []Byte) void,
reallocate: *const fn (context: *anyopaque, return_address: usize, existing_allocation: ?[]Byte, size: usize) AllocationError![]Byte,
},
pub fn Actions(comptime State: type) type {
return struct {
deallocate: fn (state: *State, allocation: []Byte) void,
reallocate: fn (state: *State, return_address: usize, existing_allocation: ?[]Byte, size: usize) AllocationError![]Byte,
};
}
pub fn bind(comptime State: type, state: *State, comptime actions: Actions(State)) Allocator {
const is_zero_aligned = @alignOf(State) == 0;
const ErasedActions = struct {
fn deallocate(context: *anyopaque, allocation: []Byte) void {
if (is_zero_aligned) {
return actions.deallocate(@ptrCast(context), allocation);
}
return actions.deallocate(@ptrCast(@alignCast(context)), allocation);
}
fn reallocate(context: *anyopaque, return_address: usize, existing_allocation: ?[]Byte, size: usize) AllocationError![]Byte {
if (is_zero_aligned) {
return actions.reallocate(@ptrCast(context), return_address, existing_allocation, size);
}
return actions.reallocate(@ptrCast(@alignCast(context)), return_address, existing_allocation, size);
}
};
return .{
.context = if (is_zero_aligned) state else @ptrCast(state),
.actions = &.{
.deallocate = ErasedActions.deallocate,
.reallocate = ErasedActions.reallocate,
}
};
}
pub fn deallocate(self: Allocator, allocation: anytype) void {
switch (@typeInfo(@TypeOf(allocation))) {
.Pointer => |pointer| {
self.actions.deallocate(self.context, switch (pointer.size) {
.One => @as([*]Byte, @ptrCast(allocation))[0 .. @sizeOf(pointer.child)],
.Slice => @as([*]Byte, @ptrCast(allocation.ptr))[0 .. (@sizeOf(pointer.child) * allocation.len)],
.Many, .C => @compileError("length of allocation must be known to deallocate"),
});
},
else => @compileError("cannot deallocate " ++ allocation),
}
}
pub fn reallocate(self: Allocator, allocation: ?[]Byte, allocation_size: usize) AllocationError![]Byte {
return self.actions.reallocate(self.context, @returnAddress(), allocation, allocation_size);
}
};
pub const Byte = u8;
/// Non-owning write cursor over a caller-provided byte slice. Each write
/// consumes the front of `bytes`; once exhausted, writes are truncated
/// (`write`) or rejected (`put`).
pub const FixedBuffer = struct {
    bytes: []Byte,

    /// Type-erased `Writer` view over this buffer.
    pub fn as_writer(self: *FixedBuffer) Writer {
        return Writer.bind(FixedBuffer, self, struct {
            fn write(writable_memory: *FixedBuffer, data: []const Byte) ?usize {
                return writable_memory.write(data);
            }
        }.write);
    }

    /// Writes a single byte, returning false when the buffer is full.
    pub fn put(self: *FixedBuffer, byte: Byte) bool {
        if (self.bytes.len == 0) {
            return false;
        }

        self.bytes[0] = byte;
        self.bytes = self.bytes[1 ..];

        return true;
    }

    /// Writes as much of `bytes` as fits and returns the number of bytes
    /// actually written.
    pub fn write(self: *FixedBuffer, bytes: []const Byte) usize {
        const writable = @min(self.bytes.len, bytes.len);

        // Fix: copy only the prefix that fits. Copying all of `bytes`
        // indexed past the end of the destination whenever the source was
        // longer than the remaining buffer space.
        copy(self.bytes, bytes[0 .. writable]);

        self.bytes = self.bytes[writable ..];

        return writable;
    }
};
pub fn Functor(comptime Output: type, comptime Input: type) type {
return struct {
context: *const anyopaque,
invoker: *const fn (capture: *const anyopaque, input: Input) Output,
const Self = @This();
pub fn bind(comptime State: type, state: *const State, comptime invoker: fn (capture: *const State, input: Input) Output) Self {
const is_zero_aligned = @alignOf(State) == 0;
const Invoker = struct {
fn invoke(context: *const anyopaque, input: Input) Output {
if (is_zero_aligned) {
return invoker(@ptrCast(context), input);
}
return invoker(@ptrCast(@alignCast(context)), input);
}
};
return .{
.context = if (is_zero_aligned) state else @ptrCast(state),
.invoker = Invoker.invoke,
};
}
pub fn from(comptime invoker: fn (input: Input) Output) Self {
const Invoker = struct {
fn invoke(_: *const anyopaque, input: Input) Output {
return invoker(input);
}
};
return .{
.context = &.{},
.invoker = Invoker.invoke,
};
}
pub fn invoke(self: Self, input: Input) Output {
return self.invoker(self.context, input);
}
};
}
pub fn Generator(comptime Output: type, comptime Input: type) type {
return struct {
context: *anyopaque,
invoker: *const fn (capture: *anyopaque, input: Input) Output,
const Self = @This();
pub fn bind(comptime State: type, state: *State, comptime invoker: fn (capture: *State, input: Input) Output) Self {
const is_zero_aligned = @alignOf(State) == 0;
return .{
.context = if (is_zero_aligned) state else @ptrCast(state),
.invoker = struct {
fn invoke(context: *anyopaque, input: Input) Output {
if (is_zero_aligned) {
return invoker(@ptrCast(context), input);
}
return invoker(@ptrCast(@alignCast(context)), input);
}
}.invoke,
};
}
pub fn from(comptime invoker: fn (input: Input) Output) Self {
const Invoker = struct {
fn invoke(_: *const anyopaque, input: Input) Output {
return invoker(input);
}
};
return .{
.context = &.{},
.invoker = Invoker.invoke,
};
}
pub fn invoke(self: Self, input: Input) Output {
return self.invoker(self.context, input);
}
};
}
pub fn Tag(comptime Element: type) type {
return switch (@typeInfo(Element)) {
.Enum => |info| info.tag_type,
.Union => |info| info.tag_type orelse @compileError(@typeName(Element) ++ " has no tag type"),
else => @compileError("expected enum or union type, found '" ++ @typeName(Element) ++ "'"),
};
}
pub const Writer = Generator(?usize, []const Byte);
/// Returns true when every byte in `target` equals `match`
/// (vacuously true for an empty slice).
pub fn all_equals(target: []const Byte, match: Byte) bool {
    for (target) |byte| {
        if (byte != match) {
            return false;
        }
    }

    return true;
}
pub fn allocate_copy(comptime Element: type, allocator: Allocator, source: []const Element) AllocationError![]Element {
const allocation = try allocator.actions.reallocate(allocator.context, @returnAddress(), null, @sizeOf(Element) * source.len);
copy(allocation, bytes_of(source));
return @as([*]Element, @ptrCast(@alignCast(allocation.ptr)))[0 .. source.len];
}
pub fn allocate_many(allocator: Allocator, count: usize, value: anytype) AllocationError![]@TypeOf(value) {
const Type = @TypeOf(value);
const typeSize = @sizeOf(Type);
if (typeSize == 0) {
@compileError("Cannot allocate memory for 0-byte sized type " ++ @typeName(Type));
}
const allocations = @as([*]Type, @ptrCast(@alignCast(try allocator.actions.reallocate(
allocator.context,
@returnAddress(),
null,
typeSize * count))))[0 .. count];
for (allocations) |*allocation| {
allocation.* = value;
}
return allocations;
}
pub fn allocate_one(allocator: Allocator, value: anytype) AllocationError!*@TypeOf(value) {
const Type = @TypeOf(value);
const typeSize = @sizeOf(Type);
if (typeSize == 0) {
@compileError("Cannot allocate memory for 0-byte sized type " ++ @typeName(Type));
}
const allocation = @as(*Type, @ptrCast(@alignCast(try allocator.actions.reallocate(
allocator.context,
@returnAddress(),
null,
typeSize))));
allocation.* = value;
return allocation;
}
pub fn allocate_string(allocator: Allocator, source: []const Byte) AllocationError![:0]Byte {
const allocation = try allocator.actions.reallocate(allocator.context, @returnAddress(), null, source.len + 1);
copy(allocation[0 .. source.len], source);
allocation[source.len] = 0;
return @ptrCast(allocation);
}
/// Returns true when `target` and `match` have identical length and
/// contents. Delegates to the standard library instead of a hand-rolled
/// length check plus element loop.
pub fn are_equal(target: []const Byte, match: []const Byte) bool {
    return std.mem.eql(Byte, target, match);
}
pub fn bytes_of(value: anytype) []const Byte {
const pointer_info = @typeInfo(@TypeOf(value)).Pointer;
return switch (pointer_info.size) {
.One => @as([*]const Byte, @ptrCast(value))[0 .. @sizeOf(pointer_info.child)],
.Slice => @as([*]const Byte, @ptrCast(value.ptr))[0 .. @sizeOf(pointer_info.child) * value.len],
else => @compileError("`value` must be single-element pointer or slice type"),
};
}
/// Copies `source` into the front of `target`; `target` must be at least as
/// long as `source`.
pub fn copy(target: []Byte, source: []const Byte) void {
    for (source, 0..) |byte, index| {
        target[index] = byte;
    }
}
pub fn compare(this: []const Byte, that: []const Byte) isize {
const range = @min(this.len, that.len);
var index: usize = 0;
while (index < range) : (index += 1) {
const difference = @as(isize, @intCast(this[index])) - @as(isize, @intCast(that[index]));
if (difference != 0) {
return difference;
}
}
return @as(isize, @intCast(this.len)) - @as(isize, @intCast(that.len));
}
/// DJB2 string hash (hash * 33 + byte) over `target`, computed in the
/// integer type described by `int` with wrapping arithmetic.
pub fn djb2_hash(comptime int: std.builtin.Type.Int, target: []const Byte) math.Int(int) {
    // 5381 is the conventional DJB2 seed.
    var hash_code = @as(math.Int(int), 5381);

    for (target) |byte| {
        // (hash << 5) + hash == hash * 33; +% wraps on overflow.
        hash_code = ((hash_code << 5) +% hash_code) +% byte;
    }

    return hash_code;
}
/// Returns the index of the first occurrence of `needle` in `haystack`,
/// or null when absent.
pub fn find_first(haystack: []const Byte, needle: Byte) ?usize {
    for (haystack, 0..) |byte, index| {
        if (byte == needle) {
            return index;
        }
    }

    return null;
}
pub fn jenkins_hash(comptime int: std.builtin.Type.Int, bytes: []const Byte) math.Int(int) {
var hash = @as(math.Int(int), 0);
for (bytes) |byte| {
hash +%= byte;
hash +%= (hash << 10);
hash ^= (hash >> 6);
}
hash +%= (hash << 3);
hash ^= (hash >> 11);
hash +%= (hash << 15);
return hash;
}
pub const null_writer = Writer.from(write_null);
pub fn slice_sentineled(comptime sen: anytype, ptr: [*:sen]const @TypeOf(sen)) [:sen]const @TypeOf(sen) {
var len = @as(usize, 0);
while (ptr[len] != sen) {
len += 1;
}
return ptr[0 .. len:sen];
}
pub fn tag_of(comptime value: anytype) Tag(@TypeOf(value)) {
return @as(Tag(@TypeOf(value)), value);
}
fn write_null(buffer: []const u8) ?usize {
return buffer.len;
}
/// Sets every byte of `target` to zero.
pub fn zero(target: []Byte) void {
    @memset(target, 0);
}

View File

@ -1,140 +0,0 @@
const io = @import("./io.zig");
const math = @import("./math.zig");
pub const ByteStack = Stack(io.Byte);
pub fn Stack(comptime Value: type) type {
return struct {
allocator: io.Allocator,
capacity: usize,
values: []Value,
const Self = @This();
pub fn clear(self: *Self) void {
self.values = self.values[0 .. 0];
}
pub fn deinit(self: *Self) void {
if (self.capacity == 0) {
return;
}
self.allocator.deallocate(self.values.ptr[0 .. self.capacity]);
self.values = &.{};
}
pub fn drop(self: *Self, count: usize) bool {
if (math.checked_sub(self.values.len, count)) |updated_count| {
self.values = self.values[0 .. updated_count];
return true;
}
return false;
}
pub fn grow(self: *Self, growth_amount: usize) io.AllocationError!void {
const grown_capacity = self.capacity + growth_amount;
const buffer = try self.allocator.reallocate(null, @sizeOf(Value) * grown_capacity);
errdefer self.allocator.deallocate(buffer);
if (self.capacity != 0) {
io.copy(buffer, io.bytes_of(self.values));
self.allocator.deallocate(self.values.ptr[0 .. self.capacity]);
}
self.values = @as([*]Value, @ptrCast(@alignCast(buffer)))[0 .. self.values.len];
self.capacity = grown_capacity;
}
pub fn init(allocator: io.Allocator) Self {
return .{
.allocator = allocator,
.capacity = 0,
.values = &.{},
};
}
pub fn is_empty(self: Self) bool {
return self.values.len == 0;
}
pub fn pack(self: *Self) io.AllocationError![]Value {
const packed_size = self.values.len;
const buffer = try self.allocator.reallocate(null, @sizeOf(Value) * self.values.len);
io.copy(buffer, io.bytes_of(self.values));
if (self.capacity != 0) {
self.allocator.deallocate(self.values.ptr[0 .. self.capacity]);
}
self.values = @as([*]Value, @ptrCast(@alignCast(buffer)))[0 .. packed_size];
self.capacity = packed_size;
return self.values;
}
pub fn peek(self: Self) ?Value {
if (self.values.len == 0) {
return null;
}
return self.values[self.values.len - 1];
}
pub fn pop(self: *Self) ?Value {
if (self.values.len == 0) {
return null;
}
const last_index = self.values.len - 1;
defer self.values = self.values[0 .. last_index];
return self.values[last_index];
}
pub fn push_all(self: *Self, values: []const Value) io.AllocationError!void {
const new_length = self.values.len + values.len;
if (new_length > self.capacity) {
try self.grow(values.len + values.len);
}
const offset_index = self.values.len;
self.values = self.values.ptr[0 .. new_length];
for (0 .. values.len) |index| {
self.values[offset_index + index] = values[index];
}
}
pub fn push_one(self: *Self, value: Value) io.AllocationError!void {
if (self.values.len == self.capacity) {
try self.grow(@max(1, self.capacity));
}
const offset_index = self.values.len;
self.values = self.values.ptr[0 .. self.values.len + 1];
self.values[offset_index] = value;
}
};
}
/// Wraps a byte stack as a type-erased `io.Writer` sink.
pub fn stack_as_writer(self: *ByteStack) io.Writer {
    return io.Writer.bind(ByteStack, self, write_stack);
}

/// Writer callback: appends `bytes` to `stack`, reporting null on
/// allocation failure as the `io.Writer` contract expects.
fn write_stack(stack: *ByteStack, bytes: []const io.Byte) ?usize {
    stack.push_all(bytes) catch return null;

    return bytes.len;
}

View File

@ -1,274 +0,0 @@
const debug = @import("./debug.zig");
const io = @import("./io.zig");
const list = @import("./list.zig");
const math = @import("./math.zig");
pub fn StringTable(comptime Value: type) type {
return Table([]const io.Byte, Value, struct {
const Self = @This();
fn equals(key_a: []const io.Byte, key_b: []const io.Byte) bool {
return io.are_equal(key_a, key_b);
}
fn hash(key: []const io.Byte) usize {
return io.djb2_hash(@typeInfo(usize).Int, key);
}
});
}
pub fn Table(comptime Key: type, comptime Value: type, comptime Traits: type) type {
const load_max = 0.75;
const max_int = math.max_int(@typeInfo(usize).Int);
return struct {
allocator: io.Allocator,
traits: Traits,
count: usize,
entries: []?Entry,
pub const Entry = struct {
key: Key,
value: Value,
fn write_into(self: Entry, table: *Self) bool {
const hash_max = @min(max_int, table.entries.len);
var hashed_key = table.hash_key(self.key) % hash_max;
var iterations = @as(usize, 0);
while (true) : (iterations += 1) {
debug.assert(iterations < table.entries.len);
const table_entry = &(table.entries[hashed_key] orelse {
table.entries[hashed_key] = .{
.key = self.key,
.value = self.value,
};
table.count += 1;
return true;
});
if (table.keys_equal(table_entry.key, self.key)) {
return false;
}
hashed_key = (hashed_key +% 1) % hash_max;
}
}
};
pub const Iterable = struct {
table: *Self,
iterations: usize,
pub fn next(self: *Iterable) ?Entry {
while (self.iterations < self.table.entries.len) {
defer self.iterations += 1;
if (self.table.entries[self.iterations]) |entry| {
return entry;
}
}
return null;
}
};
const Self = @This();
pub fn as_iterable(self: *Self) Iterable {
return .{
.table = self,
.iterations = 0,
};
}
fn hash_key(self: Self, key: Key) usize {
return if (@sizeOf(Traits) == 0) Traits.hash(key) else self.traits.hash(key);
}
pub fn remove(self: *Self, key: Key) ?Entry {
const hash_max = @min(max_int, self.entries.len);
var hashed_key = self.hash_key(key) % hash_max;
while (true) {
const entry = &(self.entries[hashed_key] orelse continue);
if (self.keys_equal(entry.key, key)) {
const original_entry = entry.*;
self.entries[hashed_key] = null;
return original_entry;
}
hashed_key = (hashed_key +% 1) % hash_max;
}
}
pub fn replace(self: *Self, key: Key, value: Value) io.AllocationError!?Entry {
try self.rehash(load_max);
debug.assert(self.entries.len > self.count);
{
const hash_max = @min(max_int, self.entries.len);
const has_context = @sizeOf(Traits) != 0;
var hashed_key = (if (has_context) self.traits.hash(key) else Traits.hash(key)) % hash_max;
while (true) {
const entry = &(self.entries[hashed_key] orelse {
self.entries[hashed_key] = .{
.key = key,
.value = value,
};
self.count += 1;
return null;
});
if (has_context) {
if (self.traits.equals(entry.key, key)) {
const original_entry = entry.*;
entry.* = .{
.key = key,
.value = value,
};
return original_entry;
}
} else {
if (Traits.equals(entry.key, key)) {
const original_entry = entry.*;
entry.* = .{
.key = key,
.value = value,
};
return original_entry;
}
}
hashed_key = (hashed_key +% 1) % hash_max;
}
}
}
pub fn calculate_load_factor(self: Self) f32 {
return if (self.entries.len == 0) 1 else @as(f32, @floatFromInt(self.count)) / @as(f32, @floatFromInt(self.entries.len));
}
pub fn clear(self: *Self) void {
for (self.entries) |*entry| {
entry.* = null;
}
self.count = 0;
}
pub fn deinit(self: *Self) void {
if (self.entries.len == 0) {
return;
}
self.allocator.deallocate(self.entries);
self.entries = &.{};
self.count = 0;
}
pub fn init(allocator: io.Allocator, traits: Traits) Self {
return .{
.allocator = allocator,
.count = 0,
.entries = &.{},
.traits = traits,
};
}
pub fn insert(self: *Self, key: Key, value: Value) io.AllocationError!bool {
try self.rehash(load_max);
debug.assert(self.entries.len > self.count);
const entry = Entry{
.key = key,
.value = value,
};
return entry.write_into(self);
}
fn keys_equal(self: Self, key_a: Key, key_b: Key) bool {
if (@sizeOf(Traits) == 0) {
return Traits.equals(key_a, key_b);
} else {
return self.traits.equals(key_a, key_b);
}
}
pub fn lookup(self: Self, key: Key) ?Value {
if (self.count == 0) {
return null;
}
const hash_max = @min(max_int, self.entries.len);
const has_context = @sizeOf(Traits) != 0;
var hashed_key = (if (has_context) self.traits.hash(key) else Traits.hash(key)) % hash_max;
var iterations = @as(usize, 0);
while (iterations < self.count) : (iterations += 1) {
const entry = &(self.entries[hashed_key] orelse return null);
if (has_context) {
if (self.traits.equals(entry.key, key)) {
return entry.value;
}
} else {
if (Traits.equals(entry.key, key)) {
return entry.value;
}
}
hashed_key = (hashed_key +% 1) % hash_max;
}
return null;
}
pub fn rehash(self: *Self, max_load: f32) io.AllocationError!void {
if (self.calculate_load_factor() <= max_load) {
return;
}
var table = init(self.allocator, self.traits);
errdefer table.deinit();
table.entries = allocate: {
const min_count = @max(1, self.count);
const table_size = min_count * 2;
break: allocate try io.allocate_many(self.allocator, table_size, @as(?Entry, null));
};
for (self.entries) |maybe_entry| {
if (maybe_entry) |entry| {
debug.assert(entry.write_into(&table));
}
}
self.deinit();
self.* = table;
}
};
}

View File

@ -1,62 +0,0 @@
const std = @import("std");
/// Builds the concrete integer type described by `int`
/// (e.g. `.{.bits = 8, .signedness = .unsigned}` yields `u8`).
pub fn Int(comptime int: std.builtin.Type.Int) type {
    return @Type(.{.Int = int});
}
/// Clamps `value` into the inclusive range [`lower`, `upper`].
pub fn clamp(value: anytype, lower: anytype, upper: anytype) @TypeOf(value, lower, upper) {
    const capped_above = @min(upper, value);

    return @max(lower, capped_above);
}
/// Adds `a` and `b`, returning null instead of overflowing.
pub fn checked_add(a: anytype, b: anytype) ?@TypeOf(a + b) {
    const result = @addWithOverflow(a, b);

    if (result.@"1" != 0) {
        return null;
    }

    return result.@"0";
}

/// Multiplies `a` by `b`, returning null instead of overflowing.
pub fn checked_mul(a: anytype, b: anytype) ?@TypeOf(a * b) {
    const result = @mulWithOverflow(a, b);

    if (result.@"1" != 0) {
        return null;
    }

    return result.@"0";
}

/// Subtracts `b` from `a`, returning null instead of overflowing.
pub fn checked_sub(a: anytype, b: anytype) ?@TypeOf(a - b) {
    const result = @subWithOverflow(a, b);

    if (result.@"1" != 0) {
        return null;
    }

    return result.@"0";
}
/// Casts an integer or float `value` into the integer type described by
/// `dest_int`, clamping out-of-range values to the destination's bounds
/// rather than hitting a checked-cast failure.
pub fn clamped_cast(comptime dest_int: std.builtin.Type.Int, value: anytype) Int(dest_int) {
    const Value = @TypeOf(value);

    return switch (@typeInfo(Value)) {
        .Int => |int| switch (int.signedness) {
            .signed => @intCast(clamp(value, min_int(dest_int), max_int(dest_int))),
            // Unsigned sources cannot underflow, so only cap the top end.
            .unsigned => @intCast(@min(value, max_int(dest_int))),
        },
        .Float => @intFromFloat(clamp(value, min_int(dest_int), max_int(dest_int))),
        else => @compileError("`" ++ @typeName(Value) ++ "` cannot be cast to an int"),
    };
}
/// Largest value representable by the integer type described by `int`
/// (0 for zero-bit types).
pub fn max_int(comptime int: std.builtin.Type.Int) comptime_int {
    const bit_count = int.bits;

    if (bit_count == 0) return 0;

    // Signed types spend one bit on the sign.
    return (1 << (bit_count - @intFromBool(int.signedness == .signed))) - 1;
}

/// Smallest value representable by the integer type described by `int`
/// (0 for unsigned and zero-bit types).
pub fn min_int(comptime int: std.builtin.Type.Int) comptime_int {
    if (int.signedness == .unsigned) {
        return 0;
    }

    const bit_count = int.bits;

    if (bit_count == 0) return 0;

    return -(1 << (bit_count - 1));
}

View File

@ -1,318 +0,0 @@
const debug = @import("./debug.zig");
const io = @import("./io.zig");
const math = @import("./math.zig");
pub const DecimalFormat = struct {
delimiter: []const io.Byte,
positive_prefix: enum {none, plus, space},
pub const default = DecimalFormat{
.delimiter = "",
.positive_prefix = .none,
};
pub fn parse(self: DecimalFormat, utf8: []const io.Byte, comptime Decimal: type) ?Decimal {
if (utf8.len == 0) {
return null;
}
switch (@typeInfo(Decimal)) {
.Int => |int| {
var has_sign = switch (utf8[0]) {
'-', '+', ' ' => true,
else => false,
};
var result = @as(Decimal, 0);
for (@intFromBool(has_sign) .. utf8.len) |index| {
const radix = 10;
const code = utf8[index];
switch (code) {
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9' => {
const offset_code = math.checked_sub(code, '0') orelse return null;
result = math.checked_mul(result, radix) orelse return null;
result = math.checked_add(result, offset_code) orelse return null;
},
else => {
if (self.delimiter.len == 0 or !io.are_equal(self.delimiter, utf8[index ..])) {
return null;
}
},
}
}
switch (int.signedness) {
.signed => {
return result * @as(Decimal, if (has_sign and utf8[0] == '-') -1 else 1);
},
.unsigned => {
if (has_sign and utf8[0] == '-') {
return null;
}
return result;
},
}
},
.Float => {
var has_sign = switch (utf8[0]) {
'-', '+', ' ' => true,
else => false,
};
// "-"
if (has_sign and utf8.len == 1) {
return null;
}
const sign_offset = @intFromBool(has_sign);
var has_decimal = utf8[sign_offset] == '.';
// "-."
if (has_decimal and (utf8.len == 2)) {
return null;
}
var result = @as(Decimal, 0);
var factor = @as(Decimal, if (has_sign and utf8[0] == '-') -1 else 1);
for (utf8[sign_offset + @intFromBool(has_decimal) .. utf8.len]) |code| {
switch (code) {
'.' => {
if (has_decimal) {
return null;
}
has_decimal = true;
},
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9' => {
if (has_decimal) {
factor /= 10.0;
}
result = ((result * 10.0) + @as(Decimal, @floatFromInt(code - '0')));
},
else => return null,
}
}
return result * factor;
},
else => @compileError("`" ++ @typeName(Decimal) ++ "` cannot be formatted as a decimal string"),
}
}
pub fn print(self: DecimalFormat, writer: io.Writer, value: anytype) ?usize {
	// Writes `value` as a base-10 string, honouring `self.positive_prefix`.
	// Returns the number of bytes written, or `null` if the writer failed.
	if (value == 0) {
		return print_string(writer, switch (self.positive_prefix) {
			.none => "0",
			.plus => "+0",
			.space => " 0",
		});
	}

	const ValueType = @TypeOf(value);

	switch (@typeInfo(ValueType)) {
		.Int => |int| {
			const radix = 10;
			// Worst case: one digit per bit plus a sign / prefix byte (overallocates, which is fine).
			var buffer = [_]u8{0} ** (1 + @max(int.bits, 1));
			var buffer_start = buffer.len - 1;

			{
				var decomposable_value = value;

				while (decomposable_value != 0) : (buffer_start -= 1) {
					// `@rem` keeps the dividend's sign, so negative values yield digits in
					// `-9 ... 0`; `-%` flips them without risking overflow (unlike negating
					// the whole value, which traps at `minInt`).
					const digit = @rem(decomposable_value, radix);

					buffer[buffer_start] = @as(u8, @intCast(if (digit < 0) -%digit else digit)) + '0';
					decomposable_value = @divTrunc(decomposable_value, radix);
				}
			}

			// Fixed: the original tested `.unsigned and value < 0`, which can never be
			// true, so negative signed values were printed without their sign.
			if (int.signedness == .signed and value < 0) {
				buffer[buffer_start] = '-';
			} else {
				switch (self.positive_prefix) {
					.none => buffer_start += 1,
					.plus => buffer[buffer_start] = '+',
					.space => buffer[buffer_start] = ' ',
				}
			}

			return print_string(writer, buffer[buffer_start ..]);
		},

		.Float => |float| {
			var length = @as(usize, 0);

			if (value < 0) {
				length += print_string(writer, "-") orelse return null;
			}

			const Float = @TypeOf(value);
			// Work on the magnitude so the casts to an unsigned integer below cannot
			// trap on negative input (the sign has already been written above).
			const magnitude = if (value < 0) -value else value;

			const Int = math.Int(.{
				.bits = float.bits,
				.signedness = .unsigned,
			});

			const integer = @as(Int, @intFromFloat(magnitude));

			// Use the default format for the components so `positive_prefix` is not
			// repeated after the sign emitted above.
			length += DecimalFormat.default.print(writer, integer) orelse return null;
			length += print_string(writer, ".") orelse return null;

			// TODO: the fraction is truncated to two digits and drops leading zeroes
			// (e.g. 1.05 prints as "1.5").
			length += DecimalFormat.default.print(writer, @as(Int, @intFromFloat((magnitude - @as(Float, @floatFromInt(integer))) * 100))) orelse return null;

			return length;
		},

		else => @compileError("`" ++ @typeName(ValueType) ++ "` cannot be formatted as a decimal string"),
	}
}
};
pub const HexadecimalFormat = struct {
	// Byte sequence expected to terminate parsed values (unused while `print` is a stub).
	delimiter: []const u8 = "",
	// Prefix emitted before non-negative values.
	positive_prefix: enum {none, plus, space} = .none,
	// Digit casing for `a`-`f`.
	casing: enum {lower, upper} = .lower,

	const default = HexadecimalFormat{
		.delimiter = "",
		.positive_prefix = .none,
		.casing = .lower,
	};

	/// Prints `value` as hexadecimal. Currently unimplemented — reaching this
	/// function is illegal behavior until the TODO below is resolved.
	pub fn print(self: HexadecimalFormat, writer: io.Writer, value: anytype) ?usize {
		// TODO: Implement.
		_ = self;
		_ = writer;
		_ = value;

		unreachable;
	}
};
pub fn alloc_formatted(allocator: io.Allocator, comptime format: []const u8, args: anytype) io.AllocationError![]io.Byte {
	// First pass against a null writer only measures the formatted length; the
	// null writer cannot fail, so a `null` result here is unreachable.
	const formatted_len = print_formatted(io.null_writer, format, args) orelse unreachable;
	const allocation = try allocator.reallocate(null, formatted_len);

	errdefer allocator.deallocate(allocation);

	{
		var fixed_buffer = io.FixedBuffer{.bytes = allocation};

		// Second pass renders into the exact-sized buffer. Keep the write outside
		// the assert so the render is never a side effect of an assertion.
		const rendered_len = print_formatted(fixed_buffer.as_writer(), format, args) orelse unreachable;

		debug.assert(rendered_len == formatted_len);
	}

	return allocation;
}
/// Writes `utf8` verbatim, returning the number of bytes written or `null`
/// if the writer failed.
pub fn print_string(writer: io.Writer, utf8: []const u8) ?usize {
	return writer.invoke(utf8);
}
pub fn print_formatted(writer: io.Writer, comptime format: []const u8, args: anytype) ?usize {
	// Renders `format`, substituting `{}` specifiers from tuple args or
	// `{name}` specifiers from named-struct args, with `{{` as an escaped
	// literal `{`. Returns total bytes written or `null` on writer failure.
	var length = @as(usize, 0);

	switch (@typeInfo(@TypeOf(args))) {
		.Struct => |arguments_struct| {
			comptime var arg_index = 0;
			comptime var head = 0;
			comptime var tail = 0;

			inline while (tail < format.len) : (tail += 1) {
				if (format[tail] == '{') {
					tail += 1;

					// Fixed: the original checked `tail > format.len` *before*
					// advancing, which could never trigger and allowed an
					// out-of-bounds read on a trailing `{`.
					if (tail >= format.len) {
						@compileError("expected an identifier after opening `{`");
					}

					switch (format[tail]) {
						'{' => {
							// Escaped `{{`: flush the run up to and including the first
							// brace (the original sliced to `tail - 1`, dropping it).
							length += print_string(writer, format[head .. tail]) orelse return null;
							head = tail + 1;
						},

						'}' => {
							if (!arguments_struct.is_tuple) {
								@compileError("all format specifiers must be named when using a named struct");
							}

							// Flush the literal text preceding this `{}` (the original
							// skipped this, silently dropping it).
							length += print_string(writer, format[head .. (tail - 1)]) orelse return null;
							length += print_string(writer, args[arg_index]) orelse return null;
							arg_index += 1;
							head = tail + 1;
						},

						else => {
							if (arguments_struct.is_tuple) {
								@compileError("format specifiers cannot be named when using a tuple struct");
							}

							length += print_string(writer, format[head .. (tail - 1)]) orelse return null;
							head = tail;

							inline while (format[tail] != '}') {
								tail += 1;

								if (tail >= format.len) {
									@compileError("expected closing `}` after format specifier");
								}
							}

							length += print_value(writer, @field(args, format[head .. tail])) orelse return null;
							head = tail + 1;
						},
					}

					// Note: every branch leaves `tail` on the last consumed byte; the
					// loop continuation advances it onto `head`, so the original
					// double-increment (which skipped scanning the byte after a
					// specifier and broke back-to-back `{}{}`) is gone.
				}
			}

			length += print_string(writer, format[head .. ]) orelse return null;
		},

		else => @compileError("`arguments` must be a struct type"),
	}

	return length;
}
noinline fn print_value(writer: io.Writer, value: anytype) ?usize {
	// Dispatches `value` to an appropriate default formatter. Returns the
	// number of bytes written or `null` on writer failure.
	const Value = @TypeOf(value);

	return switch (@typeInfo(Value)) {
		.Int, .Float => DecimalFormat.default.print(writer, value),

		.Pointer => |pointer| switch (pointer.size) {
			.Many, .C => HexadecimalFormat.default.print(writer, @intFromPtr(value)),

			// Single-item pointers: print u8 arrays (string literals) as text,
			// dereference string slices (the original used the invalid syntax
			// `*value` instead of `value.*`), and fall back to the address.
			.One => switch (@typeInfo(pointer.child)) {
				.Array => |array| if (array.child == u8) print_string(writer, value) else HexadecimalFormat.default.print(writer, @intFromPtr(value)),
				else => if (pointer.child == []const u8) print_string(writer, value.*) else HexadecimalFormat.default.print(writer, @intFromPtr(value)),
			},

			.Slice => if (pointer.child == u8) print_string(writer, value) else @compileError(unformattableMessage(Value)),
		},

		else => @compileError(unformattableMessage(Value)),
	};
}
/// Builds the compile-error text used when `Value` has no formatter.
fn unformattableMessage(comptime Value: type) []const u8 {
	return "type `" ++ @typeName(Value) ++ "` is not formattable with this formatter";
}

View File

@ -1,101 +0,0 @@
const coral = @import("coral");
const ext = @import("./ext.zig");
const file = @import("./file.zig");
const kym = @import("./kym.zig");
pub const Manifest = struct {
	// Null-padded window title.
	title: [255:0]coral.io.Byte = [_:0]coral.io.Byte{0} ** 255,
	// Window dimensions in pixels.
	width: u16 = 640,
	height: u16 = 480,
	// Fixed-step simulation rate in ticks per second.
	tick_rate: f32 = 60.0,

	/// Loads overrides from the `app.ona` script. Any field that is absent, of
	/// the wrong type, or out of range keeps its current value. All candidate
	/// values are read and validated before `self` is mutated.
	pub fn load(self: *Manifest, env: *kym.RuntimeEnv) kym.RuntimeError!void {
		// No manifest script means the defaults stand.
		const manifest = (try env.import(file.Path.from(&.{"app.ona"}))).pop() orelse return;

		defer env.release(manifest);

		const width = @as(u16, get: {
			if (try kym.get_field(env, manifest, "width")) |ref| {
				defer env.release(ref);

				const fixed = try env.expect_fixed(ref);

				// Only accept values that fit in a u16; otherwise fall back to the default.
				if (fixed > 0 and fixed < coral.math.max_int(@typeInfo(@TypeOf(self.width)).Int)) {
					break: get @intCast(fixed);
				}
			}

			break: get self.width;
		});

		const height = @as(u16, get: {
			if (try kym.get_field(env, manifest, "height")) |ref| {
				defer env.release(ref);

				const fixed = try env.expect_fixed(ref);

				if (fixed > 0 and fixed < coral.math.max_int(@typeInfo(@TypeOf(self.height)).Int)) {
					break: get @intCast(fixed);
				}
			}

			break: get self.height;
		});

		const tick_rate = @as(f32, get: {
			if (try kym.get_field(env, manifest, "tick_rate")) |ref| {
				defer env.release(ref);

				break: get @floatCast(try env.expect_float(ref));
			}

			break: get self.tick_rate;
		});

		if (try kym.get_field(env, manifest, "title")) |ref| {
			defer env.release(ref);

			const title_string = try env.expect_string(ref);
			// Truncate over-long titles and zero the remainder of the buffer.
			const limited_title_len = @min(title_string.len, self.title.len);

			coral.io.copy(&self.title, title_string[0 .. limited_title_len]);
			coral.io.zero(self.title[limited_title_len .. self.title.len]);
		} else {
			coral.io.zero(&self.title);
		}

		self.tick_rate = tick_rate;
		self.width = width;
		self.height = height;
	}
};
/// Logs `message` to SDL's application log category at info severity.
pub fn log_info(message: []const coral.io.Byte) void {
	ext.SDL_LogInfo(
		ext.SDL_LOG_CATEGORY_APPLICATION,
		"%.*s",
		// Length-prefixed printf keeps non-null-terminated slices safe to log.
		coral.math.clamped_cast(@typeInfo(c_int).Int, message.len),
		message.ptr,
	);
}
/// Logs `message` to SDL's application log category at warn severity.
pub fn log_warn(message: []const coral.io.Byte) void {
	ext.SDL_LogWarn(
		ext.SDL_LOG_CATEGORY_APPLICATION,
		"%.*s",
		// Length-prefixed printf keeps non-null-terminated slices safe to log.
		coral.math.clamped_cast(@typeInfo(c_int).Int, message.len),
		message.ptr,
	);
}
/// Logs `message` to SDL's application log category at error severity.
pub fn log_fail(message: []const coral.io.Byte) void {
	ext.SDL_LogError(
		ext.SDL_LOG_CATEGORY_APPLICATION,
		"%.*s",
		// Length-prefixed printf keeps non-null-terminated slices safe to log.
		coral.math.clamped_cast(@typeInfo(c_int).Int, message.len),
		message.ptr,
	);
}

View File

@ -1,194 +0,0 @@
const coral = @import("coral");
const ext = @import("./ext.zig");
pub const Access = union (enum) {
	null,
	sandboxed_path: *const Path,

	/// Opens `readable_path` for binary reading, returning `null` when the
	/// access policy forbids it or the file cannot be opened.
	pub fn open_readable(self: Access, readable_path: Path) ?*Readable {
		switch (self) {
			.null => return null,

			.sandboxed_path => |sandboxed_path| {
				const path_string = sandboxed_path.joined(readable_path).get_string();

				return @ptrCast(ext.SDL_RWFromFile(path_string.ptr, "rb"));
			},
		}
	}

	/// Queries metadata for `path`, returning `null` when the access policy
	/// forbids it or the file cannot be opened / measured.
	pub fn query(self: Access, path: Path) ?Info {
		switch (self) {
			.null => return null,

			.sandboxed_path => |sandboxed_path| {
				const path_string = sandboxed_path.joined(path).get_string();
				// Pass the raw C pointer, matching `open_readable` (the original
				// passed the slice itself, which does not coerce to a C pointer).
				const rw_ops = ext.SDL_RWFromFile(path_string.ptr, "rb") orelse return null;
				// Seek to the end to measure the file size.
				const file_size = ext.SDL_RWseek(rw_ops, 0, ext.RW_SEEK_END);

				if (ext.SDL_RWclose(rw_ops) != 0 or file_size < 0) {
					return null;
				}

				return Info{
					.size = @intCast(file_size),
				};
			},
		}
	}
};
/// File metadata reported by `Access.query`.
pub const Info = struct {
	// File size in bytes.
	size: u64,
};
pub const Path = extern struct {
	data: [4096]coral.io.Byte = [_]coral.io.Byte{0} ** 4096,

	pub const cwd = Path.from(&.{"./"});

	/// Builds a path from `components`. Components that would overflow the
	/// buffer are silently truncated. The final byte is never written, so the
	/// null-terminator invariant asserted by `get_string` always holds.
	pub fn from(components: []const []const u8) Path {
		// TODO: Implement proper parsing / removal of duplicate path delimiters.
		var path = Path{};

		{
			// Reserve the last byte as the guaranteed null terminator (the
			// original allowed all 4096 bytes to be filled, breaking `get_string`).
			var writable_slice = coral.io.FixedBuffer{.bytes = path.data[0 .. path.data.len - 1]};

			for (components) |component| {
				if (writable_slice.write(component) != component.len) {
					break;
				}
			}
		}

		return path;
	}

	/// Returns `self` joined with `other`, inserting a `/` between them when
	/// `self` does not already end with one. Overflowing bytes are truncated;
	/// the final byte always stays `0`.
	pub fn joined(self: Path, other: Path) Path {
		var path = Path{};

		{
			// Reserve the last byte as the guaranteed null terminator.
			var writable = coral.io.FixedBuffer{.bytes = path.data[0 .. path.data.len - 1]};
			var written = @as(usize, 0);

			for (&self.data) |byte| {
				if ((byte == 0) or !(writable.put(byte))) {
					break;
				}

				written += 1;
			}

			if ((written > 0) and (path.data[written - 1] != '/') and writable.put('/')) {
				written += 1;
			}

			for (&other.data) |byte| {
				if ((byte == 0) or !(writable.put(byte))) {
					break;
				}

				written += 1;
			}
		}

		return path;
	}

	/// Returns the path as a null-terminated string view.
	/// NOTE(review): the view points into the by-value `self` copy; calling
	/// this on a temporary (e.g. `a.joined(b).get_string()`) yields a dangling
	/// slice once the temporary dies — consider taking `self: *const Path`.
	pub fn get_string(self: Path) [:0]const coral.io.Byte {
		coral.debug.assert(self.data[self.data.len - 1] == 0);

		return coral.io.slice_sentineled(@as(coral.io.Byte, 0), @as([*:0]const coral.io.Byte, @ptrCast(&self.data)));
	}
};
pub const Readable = opaque {
	/// Adapts this file handle to the generic `coral.io.Reader` interface.
	pub fn as_reader(self: *Readable) coral.io.Reader {
		return coral.io.Reader.bind(Readable, self, read_into);
	}

	/// Closes the underlying stream, panicking if SDL reports a failure
	/// (deallocation must not fail silently).
	pub fn close(self: *Readable) void {
		if (ext.SDL_RWclose(rw_ops_cast(self)) != 0) {
			@panic("Failed to close file");
		}
	}

	/// Reads up to `buffer.len` bytes, returning the count read or `null` on
	/// error. A `0` result with no pending SDL error means end-of-file.
	pub fn read_into(self: *Readable, buffer: []coral.io.Byte) ?usize {
		// Clear any stale error so the post-read check below is meaningful.
		ext.SDL_ClearError();

		const bytes_read = ext.SDL_RWread(rw_ops_cast(self), buffer.ptr, @sizeOf(coral.io.Byte), buffer.len);
		const error_message = ext.SDL_GetError();

		if (bytes_read == 0 and error_message != null and error_message.* != 0) {
			return null;
		}

		return bytes_read;
	}

	/// Seeks to `cursor` bytes from the start of the stream, returning the new
	/// offset or `null` on failure.
	pub fn seek_head(self: *Readable, cursor: u64) ?u64 {
		// TODO: Fix safety of int cast.
		const byte_offset = ext.SDL_RWseek(rw_ops_cast(self), @intCast(cursor), ext.RW_SEEK_SET);

		if (byte_offset < 0) {
			return null;
		}

		return @intCast(byte_offset);
	}

	/// Seeks to the end of the stream, returning the resulting offset or
	/// `null` on failure.
	pub fn seek_tail(self: *Readable) ?usize {
		const byte_offset = ext.SDL_RWseek(rw_ops_cast(self), 0, ext.RW_SEEK_END);

		if (byte_offset < 0) {
			// Fixed: the original returned `error.FileUnavailable`, which cannot
			// coerce to the declared `?usize` return type.
			return null;
		}

		return @intCast(byte_offset);
	}

	/// Seeks `offset` bytes relative to the current position, returning the
	/// new offset or `null` on failure.
	pub fn skip(self: *Readable, offset: i64) ?u64 {
		const byte_offset = ext.SDL_RWseek(rw_ops_cast(self), offset, ext.RW_SEEK_CUR);

		if (byte_offset < 0) {
			// Fixed: the original returned `error.FileUnavailable`, which cannot
			// coerce to the declared `?u64` return type.
			return null;
		}

		return @intCast(byte_offset);
	}
};
/// Loads the entire file at `path` into a freshly allocated buffer.
/// Returns `null` when the file is inaccessible or a short/failed read
/// occurs; caller owns the returned allocation on success.
pub fn allocate_and_load(allocator: coral.io.Allocator, access: Access, path: Path) coral.io.AllocationError!?[]coral.io.Byte {
	const info = access.query(path) orelse return null;
	const buffer = try allocator.reallocate(null, info.size);

	const readable = access.open_readable(path) orelse {
		allocator.deallocate(buffer);

		return null;
	};

	defer _ = readable.close();

	const read_count = readable.read_into(buffer) orelse {
		allocator.deallocate(buffer);

		return null;
	};

	// A short read (e.g. the file shrank between query and read) is a failure.
	if (read_count != buffer.len) {
		allocator.deallocate(buffer);

		return null;
	}

	return buffer;
}
/// Recovers the `SDL_RWops` pointer behind an opaque `Readable` handle.
fn rw_ops_cast(ptr: *anyopaque) *ext.SDL_RWops {
	return @ptrCast(@alignCast(ptr));
}

View File

@ -1,2 +0,0 @@
pub const lina = @import("./gfx/lina.zig");

View File

@ -1,181 +0,0 @@
pub const Vector2 = struct {
	x: f32,
	y: f32,

	pub const Scalars = [2]f32;

	/// Component-wise equality.
	pub fn equals(self: Vector2, vector: Vector2) bool {
		return (self.x == vector.x) and (self.y == vector.y);
	}

	/// Broadcasts `scalar` into both components.
	pub fn from_scalar(scalar: f32) Vector2 {
		return .{.x = scalar, .y = scalar};
	}

	/// Builds a vector from an `{x, y}` array.
	pub fn from_scalars(scalars: Scalars) Vector2 {
		return .{.x = scalars[0], .y = scalars[1]};
	}

	/// Adds `scalar` to both components.
	pub fn scalar_added(self: Vector2, scalar: f32) Vector2 {
		return self.vector_added(from_scalar(scalar));
	}

	/// Divides both components by `scalar`.
	pub fn scalar_divided(self: Vector2, scalar: f32) Vector2 {
		return self.vector_divided(from_scalar(scalar));
	}

	/// Multiplies both components by `scalar`.
	pub fn scalar_multiplied(self: Vector2, scalar: f32) Vector2 {
		return self.vector_multiplied(from_scalar(scalar));
	}

	/// Subtracts `scalar` from both components.
	pub fn scalar_subtracted(self: Vector2, scalar: f32) Vector2 {
		return self.vector_subtracted(from_scalar(scalar));
	}

	/// Returns the components as an `{x, y}` array.
	pub fn to_scalars(self: Vector2) Scalars {
		return .{self.x, self.y};
	}

	/// Component-wise addition.
	pub fn vector_added(self: Vector2, vector: Vector2) Vector2 {
		return .{
			.x = self.x + vector.x,
			.y = self.y + vector.y,
		};
	}

	/// Component-wise division.
	pub fn vector_divided(self: Vector2, vector: Vector2) Vector2 {
		return .{
			.x = self.x / vector.x,
			.y = self.y / vector.y,
		};
	}

	/// Component-wise multiplication.
	pub fn vector_multiplied(self: Vector2, vector: Vector2) Vector2 {
		return .{
			.x = self.x * vector.x,
			.y = self.y * vector.y,
		};
	}

	/// Component-wise subtraction.
	pub fn vector_subtracted(self: Vector2, vector: Vector2) Vector2 {
		return .{
			.x = self.x - vector.x,
			.y = self.y - vector.y,
		};
	}
};
pub const Vector3 = struct {
	x: f32,
	y: f32,
	z: f32,

	pub const Scalars = [3]f32;

	/// Component-wise equality.
	pub fn equals(self: Vector3, vector: Vector3) bool {
		return (self.x == vector.x) and (self.y == vector.y) and (self.z == vector.z);
	}

	/// Broadcasts `scalar` into all three components.
	pub fn from_scalar(scalar: f32) Vector3 {
		return .{.x = scalar, .y = scalar, .z = scalar};
	}

	/// Builds a vector from an `{x, y, z}` array.
	pub fn from_scalars(scalars: Scalars) Vector3 {
		return .{.x = scalars[0], .y = scalars[1], .z = scalars[2]};
	}

	/// Adds `scalar` to every component.
	pub fn scalar_added(self: Vector3, scalar: f32) Vector3 {
		return self.vector_added(from_scalar(scalar));
	}

	/// Divides every component by `scalar`.
	pub fn scalar_divided(self: Vector3, scalar: f32) Vector3 {
		return self.vector_divided(from_scalar(scalar));
	}

	/// Multiplies every component by `scalar`.
	pub fn scalar_multiplied(self: Vector3, scalar: f32) Vector3 {
		return self.vector_multiplied(from_scalar(scalar));
	}

	/// Subtracts `scalar` from every component.
	pub fn scalar_subtracted(self: Vector3, scalar: f32) Vector3 {
		return self.vector_subtracted(from_scalar(scalar));
	}

	/// Returns the components as an `{x, y, z}` array.
	pub fn to_scalars(self: Vector3) Scalars {
		return .{self.x, self.y, self.z};
	}

	/// Component-wise addition.
	pub fn vector_added(self: Vector3, other: Vector3) Vector3 {
		return .{
			.x = self.x + other.x,
			.y = self.y + other.y,
			.z = self.z + other.z,
		};
	}

	/// Component-wise division.
	pub fn vector_divided(self: Vector3, other: Vector3) Vector3 {
		return .{
			.x = self.x / other.x,
			.y = self.y / other.y,
			.z = self.z / other.z,
		};
	}

	/// Component-wise multiplication.
	pub fn vector_multiplied(self: Vector3, other: Vector3) Vector3 {
		return .{
			.x = self.x * other.x,
			.y = self.y * other.y,
			.z = self.z * other.z,
		};
	}

	/// Component-wise subtraction.
	pub fn vector_subtracted(self: Vector3, other: Vector3) Vector3 {
		return .{
			.x = self.x - other.x,
			.y = self.y - other.y,
			.z = self.z - other.z,
		};
	}
};

View File

@ -1,171 +0,0 @@
const builtin = @import("builtin");
const coral = @import("coral");
const ext = @import("./ext.zig");
const std = @import("std");
const AllocationNode = struct {
trace: std.debug.ConfigurableTrace(2, 4, switch (builtin.mode) {
.Debug, .ReleaseSafe => true,
.ReleaseFast, .ReleaseSmall => false,
}),
next: ?*AllocationNode,
size: usize,
fn alloc(size: usize, return_address: usize) *AllocationNode {
const node = @as(*AllocationNode, @ptrCast(@alignCast(ext.SDL_malloc(@sizeOf(AllocationNode) + size))));
node.* = .{
.size = size,
.next = null,
.trace = .{},
};
node.trace.addAddr(return_address, "");
return node;
}
fn dealloc(self: *AllocationNode) void {
ext.SDL_free(self);
}
fn realloc(self: *AllocationNode, size: usize, return_address: usize) *AllocationNode {
const node = @as(*AllocationNode, @ptrCast(@alignCast(ext.SDL_realloc(self, @sizeOf(AllocationNode) + size))));
node.* = .{
.size = size,
.next = null,
.trace = .{},
};
node.trace.addAddr(return_address, "");
return node;
}
fn owns_userdata(self: *AllocationNode, other_userdata: []const coral.io.Byte) bool {
const self_userdata = self.userdata();
return self_userdata.ptr == other_userdata.ptr and self_userdata.len == other_userdata.len;
}
fn userdata(self: *AllocationNode) []coral.io.Byte {
return @as([*]coral.io.Byte, @ptrFromInt(@intFromPtr(self) + @sizeOf(AllocationNode)))[0 .. self.size];
}
};
const Context = struct {
head: ?*AllocationNode = null,
fn deallocate(_: *Context, allocation: []coral.io.Byte) void {
// switch (builtin.mode) {
// .Debug, .ReleaseSafe => {
// const panic_message = "incorrect allocation address for deallocating";
// var current_node = self.head orelse @panic(panic_message);
// if (current_node.owns_userdata(allocation)) {
// self.head = current_node.next;
// return current_node.dealloc();
// }
// while (true) {
// const next_node = current_node.next orelse @panic(panic_message);
// if (next_node.owns_userdata(allocation)) {
// current_node.next = next_node.next;
// return next_node.dealloc();
// }
// current_node = next_node;
// }
// },
// .ReleaseFast, .ReleaseSmall => {
ext.SDL_free(allocation.ptr);
// },
// }
}
fn reallocate(_: *Context, _: usize, existing_allocation: ?[]coral.io.Byte, size: usize) coral.io.AllocationError![]coral.io.Byte {
// switch (builtin.mode) {
// .Debug, .ReleaseSafe => {
// if (existing_allocation) |allocation| {
// const panic_message = "incorrect allocation address for reallocating";
// var current_node = self.head orelse @panic(panic_message);
// if (current_node.owns_userdata(allocation)) {
// const node = current_node.realloc(size, return_address);
// self.head = node;
// return node.userdata();
// }
// while (true) {
// const next_node = current_node.next orelse @panic(panic_message);
// if (next_node.owns_userdata(allocation)) {
// const node = next_node.realloc(size, return_address);
// current_node.next = node;
// return node.userdata();
// }
// current_node = next_node;
// }
// } else {
// const node = AllocationNode.alloc(size, return_address);
// if (self.head) |head| {
// node.next = head;
// }
// self.head = node;
// return node.userdata();
// }
// },
// .ReleaseFast, .ReleaseSmall => {
if (existing_allocation) |allocation | {
return @as([*]coral.io.Byte, @ptrCast(ext.SDL_realloc(allocation.ptr, size) orelse return error.OutOfMemory))[0 .. size];
}
return @as([*]u8, @ptrCast(ext.SDL_malloc(size) orelse return error.OutOfMemory))[0 .. size];
// },
// }
}
};
var context = Context{};
pub const allocator = coral.io.Allocator.bind(Context, &context, .{
.reallocate = Context.reallocate,
.deallocate = Context.deallocate,
});
/// In Debug/ReleaseSafe builds, prints every allocation still tracked by
/// `context`, with its captured stack trace. No-op in fast/small releases.
/// NOTE(review): `context.head` is only populated by allocation-tracking code
/// that is currently commented out in `Context`, so this reports nothing at
/// present — confirm whether the tracking is meant to be re-enabled.
pub fn trace_leaks() void {
	switch (builtin.mode) {
		.Debug, .ReleaseSafe => {
			var current_node = context.head;

			while (current_node) |node| : (current_node = node.next) {
				std.debug.print("{d} byte leak at 0x{x} detected", .{
					node.size,
					// The user data sits immediately after the tracking header.
					@intFromPtr(node) + @sizeOf(AllocationNode),
				});

				node.trace.dump();
			}
		},

		.ReleaseFast, .ReleaseSmall => {},
	}
}

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -1,128 +0,0 @@
const coral = @import("coral");
const kym = @import("../kym.zig");
associative: RefTable,
contiguous: RefList,
const RefList = coral.list.Stack(?*kym.RuntimeObj);
const RefTable = coral.map.Table(*kym.RuntimeObj, *kym.RuntimeObj, struct {
pub const hash = kym.RuntimeObj.hash;
pub const equals = kym.RuntimeObj.equals;
});
const Self = @This();
pub fn deinit(self: *Self, env: *kym.RuntimeEnv) void {
{
var field_iterable = self.associative.as_iterable();
while (field_iterable.next()) |entry| {
env.release(entry.key);
env.release(entry.value);
}
}
self.associative.deinit();
while (self.contiguous.pop()) |value| {
if (value) |ref| {
env.release(ref);
}
}
self.contiguous.deinit();
}
pub fn init(env: *kym.RuntimeEnv) Self {
return .{
.associative = RefTable.init(env.allocator, .{}),
.contiguous = RefList.init(env.allocator),
};
}
pub const typeinfo = &kym.Typeinfo{
.size = @sizeOf(Self),
.name = "table",
.destruct = typeinfo_destruct,
.get = typeinfo_get,
.set = typeinfo_set,
};
fn typeinfo_destruct(context: kym.Typeinfo.DestructContext) void {
@as(*Self, @ptrCast(@alignCast(context.userdata))).deinit(context.env);
}
fn typeinfo_get(context: kym.Typeinfo.GetContext) kym.RuntimeError!?*kym.RuntimeObj {
const table = @as(*Self, @ptrCast(@alignCast(context.userdata)));
const index = (try context.push_index()).pop().?;
defer context.env.release(index);
if (index.is_fixed()) |fixed| {
if (fixed < 0) {
// TODO: Negative indexing.
unreachable;
}
if (fixed < table.contiguous.values.len) {
return table.contiguous.values[@intCast(fixed)];
}
}
if (table.associative.lookup(index)) |value| {
return value;
}
return null;
}
fn typeinfo_set(context: kym.Typeinfo.SetContext) kym.RuntimeError!void {
const table = @as(*Self, @ptrCast(@alignCast(context.userdata)));
const index = (try context.push_index()).pop().?;
errdefer context.env.release(index);
if (index.is_fixed()) |fixed| {
if (fixed < 0) {
// TODO: Negative indexing.
unreachable;
}
if (fixed < table.contiguous.values.len) {
const maybe_replacing = &table.contiguous.values[@intCast(fixed)];
if (maybe_replacing.*) |replacing| {
context.env.release(replacing);
}
if ((try context.push_value()).pop()) |value| {
errdefer context.env.release(value);
maybe_replacing.* = value;
} else {
maybe_replacing.* = null;
}
return;
}
}
const value = (try context.push_value()).pop() orelse {
if (table.associative.remove(index)) |removed| {
context.env.release(removed.key);
context.env.release(removed.value);
}
return;
};
errdefer context.env.release(value);
if (try table.associative.replace(index, value)) |replaced| {
context.env.release(replaced.key);
context.env.release(replaced.value);
}
}

View File

@ -1,515 +0,0 @@
const coral = @import("coral");
pub const Line = struct {
number: u32,
};
pub const Token = union(enum) {
end,
unknown: coral.io.Byte,
newline,
identifier: []const coral.io.Byte,
builtin: []const coral.io.Byte,
symbol_plus,
symbol_minus,
symbol_asterisk,
symbol_forward_slash,
symbol_paren_left,
symbol_paren_right,
symbol_bang,
symbol_comma,
symbol_at,
symbol_brace_left,
symbol_brace_right,
symbol_bracket_left,
symbol_bracket_right,
symbol_period,
symbol_colon,
symbol_less_than,
symbol_less_equals,
symbol_greater_than,
symbol_greater_equals,
symbol_equals,
symbol_double_equals,
number: []const coral.io.Byte,
string: []const coral.io.Byte,
template_string: []const coral.io.Byte,
keyword_nil,
keyword_false,
keyword_true,
keyword_return,
keyword_self,
keyword_const,
keyword_if,
keyword_do,
keyword_end,
keyword_while,
keyword_else,
keyword_elif,
keyword_var,
keyword_let,
keyword_lambda,
pub fn text(self: Token) []const coral.io.Byte {
return switch (self) {
.end => "end",
.unknown => |unknown| @as([*]const coral.io.Byte, @ptrCast(&unknown))[0 .. 1],
.newline => "newline",
.identifier => |identifier| identifier,
.builtin => |identifier| identifier,
.symbol_plus => "+",
.symbol_minus => "-",
.symbol_asterisk => "*",
.symbol_forward_slash => "/",
.symbol_paren_left => "(",
.symbol_paren_right => ")",
.symbol_bang => "!",
.symbol_comma => ",",
.symbol_at => "@",
.symbol_brace_left => "{",
.symbol_brace_right => "}",
.symbol_bracket_left => "[",
.symbol_bracket_right => "]",
.symbol_period => ".",
.symbol_colon => ":",
.symbol_less_than => "<",
.symbol_less_equals => "<=",
.symbol_greater_than => ">",
.symbol_greater_equals => ">=",
.symbol_equals => "=",
.symbol_double_equals => "==",
.number => |literal| literal,
.string => |literal| literal,
.template_string => |literal| literal,
.keyword_const => "const",
.keyword_nil => "nil",
.keyword_false => "false",
.keyword_true => "true",
.keyword_return => "return",
.keyword_self => "self",
.keyword_if => "if",
.keyword_do => "do",
.keyword_end => "end",
.keyword_while => "while",
.keyword_elif => "elif",
.keyword_else => "else",
.keyword_var => "var",
.keyword_let => "let",
.keyword_lambda => "lambda",
};
}
};
pub const Stream = struct {
source: []const coral.io.Byte,
line: Line = .{.number = 1},
token: Token = .newline,
pub fn skip_newlines(self: *Stream) void {
self.step();
while (self.token == .newline) {
self.step();
}
}
pub fn step(self: *Stream) void {
var cursor = @as(usize, 0);
defer self.source = self.source[cursor ..];
while (cursor < self.source.len) {
switch (self.source[cursor]) {
'#' => {
cursor += 1;
while (cursor < self.source.len and self.source[cursor] != '\n') {
cursor += 1;
}
},
' ', '\t' => cursor += 1,
'\n' => {
cursor += 1;
self.token = .newline;
self.line.number += 1;
return;
},
'0' ... '9' => {
const begin = cursor;
cursor += 1;
while (cursor < self.source.len) switch (self.source[cursor]) {
'0' ... '9' => cursor += 1,
'.' => {
cursor += 1;
while (cursor < self.source.len) switch (self.source[cursor]) {
'0' ... '9' => cursor += 1,
else => break,
};
self.token = .{.number = self.source[begin .. cursor]};
return;
},
else => break,
};
self.token = .{.number = self.source[begin .. cursor]};
return;
},
'A' ... 'Z', 'a' ... 'z', '_' => {
const begin = cursor;
cursor += 1;
while (cursor < self.source.len) switch (self.source[cursor]) {
'0'...'9', 'A'...'Z', 'a'...'z', '_' => cursor += 1,
else => break,
};
const identifier = self.source[begin .. cursor];
coral.debug.assert(identifier.len != 0);
switch (identifier[0]) {
'c' => {
if (coral.io.are_equal(identifier[1 ..], "onst")) {
self.token = .keyword_const;
return;
}
},
'd' => {
if (coral.io.are_equal(identifier[1 ..], "o")) {
self.token = .keyword_do;
return;
}
},
'e' => {
if (coral.io.are_equal(identifier[1 ..], "lse")) {
self.token = .keyword_else;
return;
}
if (coral.io.are_equal(identifier[1 ..], "lif")) {
self.token = .keyword_elif;
return;
}
if (coral.io.are_equal(identifier[1 ..], "nd")) {
self.token = .keyword_end;
return;
}
},
'f' => {
if (coral.io.are_equal(identifier[1 ..], "alse")) {
self.token = .keyword_false;
return;
}
},
'i' => {
if (coral.io.are_equal(identifier[1 ..], "f")) {
self.token = .keyword_if;
return;
}
},
'l' => {
if (coral.io.are_equal(identifier[1 ..], "ambda")) {
self.token = .keyword_lambda;
return;
}
if (coral.io.are_equal(identifier[1 ..], "et")) {
self.token = .keyword_let;
return;
}
},
'n' => {
if (coral.io.are_equal(identifier[1 ..], "il")) {
self.token = .keyword_nil;
return;
}
},
'r' => {
if (coral.io.are_equal(identifier[1 ..], "eturn")) {
self.token = .keyword_return;
return;
}
},
's' => {
if (coral.io.are_equal(identifier[1 ..], "elf")) {
self.token = .keyword_self;
return;
}
},
't' => {
if (coral.io.are_equal(identifier[1 ..], "rue")) {
self.token = .keyword_true;
return;
}
},
'v' => {
if (coral.io.are_equal(identifier[1 ..], "ar")) {
self.token = .keyword_var;
return;
}
},
'w' => {
if (coral.io.are_equal(identifier[1 ..], "hile")) {
self.token = .keyword_while;
return;
}
},
else => {},
}
self.token = .{.identifier = identifier};
return;
},
'`' => {
cursor += 1;
const begin = cursor;
while (cursor < self.source.len) switch (self.source[cursor]) {
'`' => break,
else => cursor += 1,
};
self.token = .{.template_string = self.source[begin .. cursor]};
cursor += 1;
return;
},
'"' => {
cursor += 1;
const begin = cursor;
while (cursor < self.source.len) switch (self.source[cursor]) {
'"' => break,
else => cursor += 1,
};
self.token = .{.string = self.source[begin .. cursor]};
cursor += 1;
return;
},
'{' => {
self.token = .symbol_brace_left;
cursor += 1;
return;
},
'}' => {
self.token = .symbol_brace_right;
cursor += 1;
return;
},
'[' => {
self.token = .symbol_bracket_left;
cursor += 1;
return;
},
']' => {
self.token = .symbol_bracket_right;
cursor += 1;
return;
},
',' => {
self.token = .symbol_comma;
cursor += 1;
return;
},
'!' => {
self.token = .symbol_bang;
cursor += 1;
return;
},
')' => {
self.token = .symbol_paren_right;
cursor += 1;
return;
},
'(' => {
self.token = .symbol_paren_left;
cursor += 1;
return;
},
'/' => {
self.token = .symbol_forward_slash;
cursor += 1;
return;
},
'*' => {
self.token = .symbol_asterisk;
cursor += 1;
return;
},
'-' => {
self.token = .symbol_minus;
cursor += 1;
return;
},
'+' => {
self.token = .symbol_plus;
cursor += 1;
return;
},
':' => {
self.token = .symbol_colon;
cursor += 1;
return;
},
'=' => {
cursor += 1;
if (cursor < self.source.len) {
switch (self.source[cursor]) {
'=' => {
cursor += 1;
self.token = .symbol_double_equals;
return;
},
else => {},
}
}
self.token = .symbol_equals;
return;
},
'<' => {
cursor += 1;
if (cursor < self.source.len and (self.source[cursor] == '=')) {
cursor += 1;
self.token = .symbol_less_equals;
return;
}
self.token = .symbol_less_than;
return;
},
'>' => {
cursor += 1;
if (cursor < self.source.len and (self.source[cursor] == '=')) {
cursor += 1;
self.token = .symbol_greater_equals;
return;
}
self.token = .symbol_greater_than;
return;
},
'.' => {
self.token = .symbol_period;
cursor += 1;
return;
},
'@' => {
self.token = .symbol_at;
cursor += 1;
return;
},
else => {
self.token = .{.unknown = self.source[cursor]};
cursor += 1;
return;
},
}
}
self.token = .end;
return;
}
};

View File

@ -1,226 +0,0 @@
pub const Expr = @import("./tree/Expr.zig");
pub const Stmt = @import("./tree/Stmt.zig");
const coral = @import("coral");
const tokens = @import("./tokens.zig");
pub const Declaration = struct {
identifier: []const coral.io.Byte,
is: packed struct {
readonly: bool = false,
captured: bool = false,
} = .{},
};
pub const Environment = struct {
captures: [capture_max]Capture = [_]Capture{.{.declaration_index = 0}} ** capture_max,
capture_count: u8 = 0,
declarations: [declaration_max]Declaration = [_]Declaration{.{.identifier = ""}} ** declaration_max,
declaration_count: u8 = 0,
argument_count: u8 = 0,
statement: ?*const Stmt = null,
enclosing: ?*Environment = null,
pub const Capture = union (enum) {
declaration_index: u8,
capture_index: u8,
};
pub const DeclareError = coral.io.AllocationError || error {
DeclarationExists,
};
const capture_max = coral.math.max_int(@typeInfo(u8).Int);
const declaration_max = coral.math.max_int(@typeInfo(u8).Int);
pub fn create_enclosed(self: *Environment, root: *Root) coral.io.AllocationError!*Environment {
return coral.io.allocate_one(root.arena.as_allocator(), Environment{
.enclosing = self,
});
}
fn declare(self: *Environment, declaration: Declaration) DeclareError!*const Declaration {
if (self.declaration_count == self.declarations.len) {
return error.OutOfMemory;
}
{
var environment = self;
while (true) {
var remaining_count = environment.declaration_count;
while (remaining_count != 0) {
remaining_count -= 1;
if (coral.io.are_equal(environment.declarations[remaining_count].identifier, declaration.identifier)) {
return error.DeclarationExists;
}
}
environment = environment.enclosing orelse break;
}
}
const declaration_slot = &self.declarations[self.declaration_count];
declaration_slot.* = declaration;
self.declaration_count += 1;
return declaration_slot;
}
pub fn declare_argument(self: *Environment, identifier: []const coral.io.Byte) DeclareError!*const Declaration {
coral.debug.assert(self.declaration_count <= self.argument_count);
defer self.argument_count += 1;
return self.declare(.{
.identifier = identifier,
.is = .{.readonly = true},
});
}
pub fn declare_constant(self: *Environment, identifier: []const coral.io.Byte) DeclareError!*const Declaration {
return self.declare(.{
.identifier = identifier,
.is = .{.readonly = true},
});
}
pub fn declare_variable(self: *Environment, identifier: []const coral.io.Byte) DeclareError!*const Declaration {
return self.declare(.{.identifier = identifier});
}
pub fn resolve_declaration(self: *Environment, identifier: []const coral.io.Byte) coral.io.AllocationError!?*const Declaration {
var environment = self;
var ancestry = @as(u32, 0);
while (true) : (ancestry += 1) {
var remaining_count = environment.declaration_count;
while (remaining_count != 0) {
remaining_count -= 1;
const declaration = &environment.declarations[remaining_count];
if (coral.io.are_equal(declaration.identifier, identifier)) {
if (ancestry != 0) {
declaration.is.captured = true;
environment = self;
ancestry -= 1;
while (ancestry != 0) : (ancestry -= 1) {
if (environment.capture_count == environment.captures.len) {
return error.OutOfMemory;
}
coral.debug.assert(environment.enclosing != null);
const enclosing_environment = environment.enclosing.?;
environment.captures[environment.capture_count] = .{
.capture_index = enclosing_environment.capture_count
};
environment.capture_count += 1;
environment = enclosing_environment;
}
environment.captures[environment.capture_count] = .{.declaration_index = remaining_count};
environment.capture_count += 1;
}
return declaration;
}
}
environment = environment.enclosing orelse return null;
}
}
pub fn get_captures(self: *const Environment) []const Capture {
return self.captures[0 .. self.capture_count];
}
pub fn get_declarations(self: *const Environment) []const Declaration {
return self.declarations[0 .. self.declaration_count];
}
};
pub const ParseError = coral.io.AllocationError || error {
BadSyntax,
};
/// Owner of a parsed syntax tree: all nodes and error messages are allocated
/// from `arena`, and the top-level statements hang off `environment`.
pub const Root = struct {
arena: coral.arena.Stacking,
environment: Environment,
error_messages: MessageList,
const MessageList = coral.list.Stack([]coral.io.Byte);
/// Formats and records a syntax error for `line`, then returns
/// `error.BadSyntax`. The message memory lives in `arena`.
pub fn report_error(self: *Root, line: tokens.Line, comptime format: []const u8, args: anytype) ParseError {
const allocator = self.arena.as_allocator();
try self.error_messages.push_one(try coral.utf8.alloc_formatted(allocator, "{line_number}: {message}", .{
.message = try coral.utf8.alloc_formatted(allocator, format, args),
.line_number = line.number,
}));
return error.BadSyntax;
}
/// Converts an `Environment.DeclareError` into a reported `ParseError`,
/// preserving `error.OutOfMemory` as-is.
pub fn report_declare_error(self: *Root, line: tokens.Line, identifier: []const coral.io.Byte, @"error": Environment.DeclareError) ParseError {
return switch (@"error") {
error.OutOfMemory => error.OutOfMemory,
error.DeclarationExists => self.report_error(line, "declaration `{identifier}` already exists", .{
.identifier = identifier,
}),
};
}
/// Allocates an expression node in the tree's arena.
pub fn create_expr(self: *Root, expr: Expr) coral.io.AllocationError!*Expr {
return coral.io.allocate_one(self.arena.as_allocator(), expr);
}
/// Allocates a statement node in the tree's arena.
pub fn create_stmt(self: *Root, stmt: Stmt) coral.io.AllocationError!*Stmt {
return coral.io.allocate_one(self.arena.as_allocator(), stmt);
}
/// Releases the error-message list and every arena-allocated node/message.
pub fn deinit(self: *Root) void {
self.error_messages.deinit();
self.arena.deinit();
}
pub fn init(allocator: coral.io.Allocator) coral.io.AllocationError!Root {
// Page size for the node arena; nodes are small, so 4 KiB pages amortize well.
const arena_page_size = 4096;
return .{
.arena = coral.arena.Stacking.init(allocator, arena_page_size),
.error_messages = MessageList.init(allocator),
.environment = .{},
};
}
/// Parses the whole token stream into a statement chain rooted at
/// `self.environment.statement`.
/// NOTE(review): at least one statement is always parsed, so an empty
/// stream reaches `Stmt.parse` with `.end` — confirm that erroring on empty
/// input is intended.
pub fn parse(self: *Root, stream: *tokens.Stream) ParseError!void {
stream.skip_newlines();
const first_statement = try Stmt.parse(self, stream, &self.environment);
var current_statement = first_statement;
while (stream.token != .end) {
const next_statement = try Stmt.parse(self, stream, &self.environment);
current_statement.next = next_statement;
current_statement = next_statement;
}
self.environment.statement = first_statement;
}
};

View File

@ -1,789 +0,0 @@
const Stmt = @import("./Stmt.zig");
const coral = @import("coral");
const tokens = @import("../tokens.zig");
const tree = @import("../tree.zig");
// Next expression in a sibling chain (call arguments, table entries and
// template parts all link through `next`).
next: ?*const Self = null,
// Source line the expression started on, for error reporting.
line: tokens.Line,
// Discriminated payload describing what kind of expression this node is.
kind: union (enum) {
nil_literal,
true_literal,
false_literal,
number_literal: []const coral.io.Byte,
string_literal: []const coral.io.Byte,
string_template,
symbol_literal: []const coral.io.Byte,
table_construct: TableConstruct,
key_value: KeyValue,
group: *Self,
lambda_construct: LambdaConstruct,
declaration_get: DeclarationGet,
declaration_set: DeclarationSet,
field_get: FieldGet,
field_set: FieldSet,
subscript_get: SubscriptGet,
subscript_set: SubscriptSet,
binary_op: BinaryOp,
unary_op: UnaryOp,
invoke: Invoke,
import_builtin,
print_builtin,
vec2_builtin,
vec3_builtin,
},
/// Payload of a binary operation node plus a comptime generator for
/// precedence-level parsers.
pub const BinaryOp = struct {
rhs_operand: *Self,
lhs_operand: *Self,
operation: Operation,
pub const Operation = enum {
addition,
subtraction,
multiplication,
// NOTE(review): "divsion" is a typo of "division"; it is referenced
// from several parser tables, so renaming must be done in one sweep.
divsion,
equals_comparison,
greater_than_comparison,
greater_equals_comparison,
less_than_comparison,
less_equals_comparison,
};
/// Builds a parser for one precedence level: parses a `parse_next`
/// operand, then, for each operation in `operations`, optionally one
/// `<op> <operand>` continuation.
/// NOTE(review): each operation is tested at most once per call (this is
/// an unrolled `inline for`, not a loop), so chains such as `a + b + c`
/// may not parse as expected — confirm against the language spec.
fn parser(comptime parse_next: Parser, comptime operations: []const BinaryOp.Operation) Parser {
const BinaryOpParser = struct {
fn parse(root: *tree.Root, stream: *tokens.Stream, environment: *tree.Environment) tree.ParseError!*Self {
var expression = try parse_next(root, stream, environment);
inline for (operations) |operation| {
// Map each operation onto the token that introduces it.
const token = comptime @as(tokens.Token, switch (operation) {
.addition => .symbol_plus,
.subtraction => .symbol_minus,
.multiplication => .symbol_asterisk,
.divsion => .symbol_forward_slash,
.equals_comparison => .symbol_double_equals,
.greater_than_comparison => .symbol_greater_than,
.greater_equals_comparison => .symbol_greater_equals,
.less_than_comparison => .symbol_less_than,
.less_equals_comparison => .symbol_less_equals,
});
if (stream.token == coral.io.tag_of(token)) {
stream.step();
if (stream.token == .end) {
return root.report_error(stream.line, "expected other half of expression after `" ++ comptime token.text() ++ "`", .{});
}
// TODO: Remove once Zig has fixed struct self-reassignment.
const unnecessary_temp = expression;
expression = try root.create_expr(.{
.line = stream.line,
.kind = .{
.binary_op = .{
.rhs_operand = try parse_next(root, stream, environment),
.operation = operation,
.lhs_operand = unnecessary_temp,
},
},
});
}
}
return expression;
}
};
return BinaryOpParser.parse;
}
};
/// Read of a resolved declaration (local or captured).
pub const DeclarationGet = struct {
declaration: *const tree.Declaration,
};
/// Assignment to a resolved declaration.
pub const DeclarationSet = struct {
declaration: *const tree.Declaration,
assign: *const Self,
};
/// Read of `object.identifier`.
pub const FieldGet = struct {
identifier: []const coral.io.Byte,
object: *const Self,
};
/// Assignment to `object.identifier`.
pub const FieldSet = struct {
identifier: []const coral.io.Byte,
object: *const Self,
assign: *const Self,
};
/// Call of `object` with an optional argument chain (linked via `next`).
pub const Invoke = struct {
argument: ?*const Self,
object: *const Self,
};
/// A `key = value` entry inside a table construct.
pub const KeyValue = struct {
key: *const Self,
value: *const Self,
};
/// A lambda literal; its body and parameters live in the nested environment.
pub const LambdaConstruct = struct {
environment: *const tree.Environment,
};
// Signature shared by every precedence-level parser in this file.
const Parser = fn (root: *tree.Root, stream: *tokens.Stream, environment: *tree.Environment) tree.ParseError!*Self;
const Self = @This();
/// Read of `object[index]`.
pub const SubscriptGet = struct {
index: *const Self,
object: *const Self,
};
/// Assignment to `object[index]`.
pub const SubscriptSet = struct {
index: *const Self,
object: *const Self,
assign: *const Self,
};
/// Table literal; `entry` heads a chain of entries linked via `next`.
pub const TableConstruct = struct {
entry: ?*const Self,
};
/// One lexed piece of a template string: a literal run, an `{expression}`
/// span, or an invalid (unterminated) span.
const TemplateToken = union (enum) {
invalid: []const coral.io.Byte,
literal: []const coral.io.Byte,
expression: []const coral.io.Byte,
/// Pops the next token off the front of `source`, advancing it in place.
/// Returns `null` once `source` is exhausted.
fn extract(source: *[]const coral.io.Byte) ?TemplateToken {
var cursor = @as(usize, 0);
// Whatever we consumed is removed from the caller's slice on exit.
defer source.* = source.*[cursor ..];
while (cursor < source.len) {
switch (source.*[cursor]) {
'{' => {
// Expression span: scan for the matching `}`.
cursor += 1;
while (true) : (cursor += 1) {
if (cursor == source.len) {
// Unterminated `{...` — surface it as invalid.
return .{.invalid = source.*[0 .. cursor]};
}
if (source.*[cursor] == '}') {
const token = TemplateToken{.expression = source.*[1 .. cursor]};
cursor += 1;
return token;
}
}
},
else => {
// Literal run: scan until the next `{` or end of input.
cursor += 1;
while (true) : (cursor += 1) {
if (cursor == source.len) {
return .{.literal = source.*[0 .. cursor]};
}
if (source.*[cursor] == '{') {
const cursor_next = cursor + 1;
if (cursor_next == source.len) {
return .{.invalid = source.*[0 .. cursor]};
}
// NOTE(review): on `{{` the literal is ended *after* the
// first brace and scanning resumes at the second one, which
// then starts an expression span — confirm this is the
// intended escape behaviour for doubled braces.
if (source.*[cursor_next] == '{') {
cursor = cursor_next;
return .{.literal = source.*[0 .. cursor]};
}
return .{.literal = source.*[0 .. cursor]};
}
}
}
}
}
return null;
}
};
/// Payload of a unary operation node (`-x` or `!x`).
pub const UnaryOp = struct {
operand: *Self,
operation: Operation,
pub const Operation = enum {
numeric_negation,
boolean_negation,
};
};
pub fn parse(root: *tree.Root, stream: *tokens.Stream, environment: *tree.Environment) tree.ParseError!*Self {
const expression = try parse_additive(root, stream, environment);
if (stream.token == .symbol_equals) {
stream.skip_newlines();
if (stream.token == .end) {
return root.report_error(stream.line, "expected assignment after `=`", .{});
}
return root.create_expr(.{
.line = stream.line,
.kind = switch (expression.kind) {
.declaration_get => |declaration_get| convert: {
if (declaration_get.declaration.is.readonly) {
return root.report_error(stream.line, "readonly declarations cannot be re-assigned", .{});
}
break: convert .{
.declaration_set = .{
.assign = try parse(root, stream, environment),
.declaration = declaration_get.declaration,
},
};
},
.field_get => |field_get| .{
.field_set = .{
.assign = try parse(root, stream, environment),
.object = field_get.object,
.identifier = field_get.identifier,
},
},
.subscript_get => |subscript_get| .{
.subscript_set = .{
.assign = try parse(root, stream, environment),
.object = subscript_get.object,
.index = subscript_get.index,
},
},
else => return root.report_error(stream.line, "expected local or field on left-hand side of expression", .{}),
},
});
}
return expression;
}
// Precedence chain (outermost first): parse -> additive -> equality ->
// comparison -> term -> factor -> operand.
// NOTE(review): this order makes `==` bind tighter than `+`/`-`, which is
// unconventional — confirm it matches the intended language grammar.
const parse_additive = BinaryOp.parser(parse_equality, &.{
.addition,
.subtraction,
});
const parse_comparison = BinaryOp.parser(parse_term, &.{
.greater_than_comparison,
.greater_equals_comparison,
.less_than_comparison,
.less_equals_comparison
});
const parse_equality = BinaryOp.parser(parse_comparison, &.{
.equals_comparison,
});
/// Parses a postfix chain on top of an operand: `.field` accesses,
/// `[index]` subscripts, and `(args)` invocations, left to right.
fn parse_factor(root: *tree.Root, stream: *tokens.Stream, environment: *tree.Environment) tree.ParseError!*Self {
var expression = try parse_operand(root, stream, environment);
while (true) {
switch (stream.token) {
// Field access: `.identifier`.
.symbol_period => {
stream.skip_newlines();
// TODO: Remove when Zig fixes miscompilation with in-place struct re-assignment.
const unnecessary_temp = expression;
expression = try root.create_expr(.{
.line = stream.line,
.kind = .{
.field_get = .{
.identifier = switch (stream.token) {
.identifier => |field_identifier| field_identifier,
else => return root.report_error(stream.line, "expected identifier after `.`", .{}),
},
.object = unnecessary_temp,
},
},
});
stream.skip_newlines();
},
// Subscript: `[expression]`.
.symbol_bracket_left => {
stream.skip_newlines();
// TODO: Remove when Zig fixes miscompilation with in-place struct re-assignment.
const unnecessary_temp = expression;
expression = try root.create_expr(.{
.line = stream.line,
.kind = .{
.subscript_get = .{
.index = try parse(root, stream, environment),
.object = unnecessary_temp,
},
},
});
if (stream.token != .symbol_bracket_right) {
return root.report_error(stream.line, "expected closing `]` on subscript", .{});
}
stream.skip_newlines();
},
// Invocation: `(arg, arg, ...)` — arguments chain via `next`.
.symbol_paren_left => {
const lines_stepped = stream.line;
stream.skip_newlines();
var first_argument = @as(?*Self, null);
if (stream.token != .symbol_paren_right) {
var argument = try parse(root, stream, environment);
first_argument = argument;
while (true) {
switch (stream.token) {
.symbol_comma => stream.skip_newlines(),
.symbol_paren_right => break,
else => return root.report_error(stream.line, "expected `,` or `)` after lambda argument", .{}),
}
const next_argument = try parse(root, stream, environment);
argument.next = next_argument;
argument = next_argument;
}
}
stream.skip_newlines();
// TODO: Remove when Zig fixes miscompilation with in-place struct re-assignment.
const unnecessary_temp = expression;
expression = try root.create_expr(.{
.line = lines_stepped,
.kind = .{
.invoke = .{
.argument = first_argument,
.object = unnecessary_temp,
},
},
});
},
else => break,
}
}
return expression;
}
/// Parses a primary operand: literals, groups, identifiers, builtins,
/// lambdas, table constructs, and unary prefix operators.
fn parse_operand(root: *tree.Root, stream: *tokens.Stream, environment: *tree.Environment) tree.ParseError!*Self {
switch (stream.token) {
// Parenthesised group: `( expression )`.
.symbol_paren_left => {
stream.skip_newlines();
const expression = try parse(root, stream, environment);
if (stream.token != .symbol_paren_right) {
return root.report_error(stream.line, "expected a closing `)` after expression", .{});
}
stream.skip_newlines();
return root.create_expr(.{
.line = stream.line,
.kind = .{.group = expression},
});
},
.keyword_nil => {
stream.skip_newlines();
return root.create_expr(.{
.line = stream.line,
.kind = .nil_literal,
});
},
.keyword_true => {
stream.skip_newlines();
return root.create_expr(.{
.line = stream.line,
.kind = .true_literal,
});
},
.keyword_false => {
stream.skip_newlines();
return root.create_expr(.{
.line = stream.line,
.kind = .false_literal,
});
},
.number => |value| {
stream.skip_newlines();
return root.create_expr(.{
.line = stream.line,
.kind = .{.number_literal = value},
});
},
.string => |value| {
stream.skip_newlines();
return root.create_expr(.{
.line = stream.line,
.kind = .{.string_literal = value},
});
},
// Template string: re-lexed into literal/expression pieces.
.template_string => |value| {
const line = stream.line;
stream.skip_newlines();
return parse_template(root, value, line, environment);
},
// Builtin reference: `@identifier`.
.symbol_at => {
stream.step();
const identifier = switch (stream.token) {
.identifier => |identifier| identifier,
else => return root.report_error(stream.line, "expected identifier after `@`", .{}),
};
stream.skip_newlines();
if (coral.io.are_equal(identifier, "import")) {
return root.create_expr(.{
.line = stream.line,
.kind = .import_builtin,
});
}
if (coral.io.are_equal(identifier, "print")) {
return root.create_expr(.{
.line = stream.line,
.kind = .print_builtin,
});
}
if (coral.io.are_equal(identifier, "vec2")) {
return root.create_expr(.{
.line = stream.line,
.kind = .vec2_builtin,
});
}
if (coral.io.are_equal(identifier, "vec3")) {
return root.create_expr(.{
.line = stream.line,
.kind = .vec3_builtin,
});
}
return root.report_error(stream.line, "unexpected identifier after `@`", .{});
},
// Symbol literal: `.identifier`.
.symbol_period => {
stream.step();
const identifier = switch (stream.token) {
.identifier => |identifier| identifier,
else => return root.report_error(stream.line, "expected identifier after `.`", .{}),
};
stream.skip_newlines();
return root.create_expr(.{
.line = stream.line,
.kind = .{.symbol_literal = identifier},
});
},
// Declaration read; resolution may record captures (see Environment).
.identifier => |identifier| {
stream.skip_newlines();
return root.create_expr(.{
.line = stream.line,
.kind = .{
.declaration_get = .{
.declaration = (try environment.resolve_declaration(identifier)) orelse {
return root.report_error(stream.line, "undefined identifier `{identifier}`", .{
.identifier = identifier,
});
}
},
},
});
},
// Lambda literal: `lambda (params):` ... `end`.
.keyword_lambda => {
stream.skip_newlines();
if (stream.token != .symbol_paren_left) {
return root.report_error(stream.line, "expected `(` after opening lambda block", .{});
}
stream.skip_newlines();
var lambda_environment = try environment.create_enclosed(root);
while (stream.token != .symbol_paren_right) {
const identifier = switch (stream.token) {
.identifier => |identifier| identifier,
else => return root.report_error(stream.line, "expected identifier", .{}),
};
_ = lambda_environment.declare_argument(identifier) catch |declare_error| {
return root.report_declare_error(stream.line, identifier, declare_error);
};
stream.skip_newlines();
switch (stream.token) {
.symbol_comma => stream.skip_newlines(),
.symbol_paren_right => break,
else => return root.report_error(stream.line, "expected `,` or `)` after identifier", .{}),
}
}
stream.skip_newlines();
if (stream.token != .symbol_colon) {
return root.report_error(stream.line, "expected `:` after closing `)` of lambda identifiers", .{});
}
stream.skip_newlines();
// Body statements chain via `next` and hang off the lambda environment.
if (stream.token != .keyword_end) {
const first_statement = try Stmt.parse(root, stream, lambda_environment);
var current_statement = first_statement;
while (stream.token != .keyword_end) {
const next_statement = try Stmt.parse(root, stream, lambda_environment);
current_statement.next = next_statement;
current_statement = next_statement;
}
lambda_environment.statement = first_statement;
}
stream.skip_newlines();
return root.create_expr(.{
.line = stream.line,
.kind = .{.lambda_construct = .{.environment = lambda_environment}},
});
},
// Table literal: `{ entry, entry, ... }` (trailing comma allowed).
.symbol_brace_left => {
stream.skip_newlines();
if (stream.token == .symbol_brace_right) {
return root.create_expr(.{
.line = stream.line,
.kind = .{.table_construct = .{.entry = null}},
});
}
const first_entry = try parse_table_entry(root, stream, environment);
var entry = first_entry;
while (stream.token == .symbol_comma) {
stream.skip_newlines();
if (stream.token == .symbol_brace_right) {
break;
}
const expression = try parse_table_entry(root, stream, environment);
entry.next = expression;
entry = expression;
}
if (stream.token != .symbol_brace_right) {
return root.report_error(stream.line, "expected closing `}` on table construct", .{});
}
stream.skip_newlines();
return root.create_expr(.{
.line = stream.line,
.kind = .{.table_construct = .{.entry = first_entry}},
});
},
// Unary numeric negation: `-operand`.
.symbol_minus => {
stream.skip_newlines();
return root.create_expr(.{
.line = stream.line,
.kind = .{
.unary_op = .{
.operand = try parse_factor(root, stream, environment),
.operation = .numeric_negation,
},
},
});
},
// Unary boolean negation: `!operand`.
.symbol_bang => {
stream.skip_newlines();
return root.create_expr(.{
.line = stream.line,
.kind = .{
.unary_op = .{
.operand = try parse_factor(root, stream, environment),
.operation = .boolean_negation,
},
},
});
},
else => return root.report_error(stream.line, "unexpected token in expression", .{}),
}
}
/// Parses a single table-construct entry: `.symbol = value`,
/// `[expression] = value`, or a plain positional expression.
fn parse_table_entry(root: *tree.Root, stream: *tokens.Stream, environment: *tree.Environment) tree.ParseError!*Self {
switch (stream.token) {
// Symbol-keyed entry: `.field = value`.
.symbol_period => {
stream.step();
const field = switch (stream.token) {
.identifier => |identifier| identifier,
else => return root.report_error(stream.line, "expected identifier in field symbol literal", .{}),
};
stream.skip_newlines();
if (stream.token != .symbol_equals) {
return root.report_error(stream.line, "expected `=` after table symbol key", .{});
}
stream.skip_newlines();
return root.create_expr(.{
.line = stream.line,
.kind = .{
.key_value = .{
.value = try parse(root, stream, environment),
.key = try root.create_expr(.{
.line = stream.line,
.kind = .{.symbol_literal = field},
}),
},
},
});
},
// Expression-keyed entry: `[key] = value`.
.symbol_bracket_left => {
stream.skip_newlines();
const key = try parse(root, stream, environment);
if (stream.token != .symbol_bracket_right) {
return root.report_error(stream.line, "expected `]` after subscript index expression", .{});
}
stream.skip_newlines();
if (stream.token != .symbol_equals) {
return root.report_error(stream.line, "expected `=` after table expression key", .{});
}
stream.skip_newlines();
return root.create_expr(.{
.line = stream.line,
.kind = .{
.key_value = .{
.value = try parse(root, stream, environment),
.key = key,
},
},
});
},
// Positional entry: any plain expression.
else => return parse(root, stream, environment),
}
}
/// Parses a template string's contents into a `string_template` head node
/// followed by a `next`-linked chain of literal and embedded-expression nodes.
fn parse_template(root: *tree.Root, template: []const coral.io.Byte, line: tokens.Line, environment: *tree.Environment) tree.ParseError!*Self {
const expression_head = try root.create_expr(.{
.line = line,
.kind = .string_template,
});
var expression_tail = expression_head;
var source = template;
while (TemplateToken.extract(&source)) |token| {
const expression = try switch (token) {
.invalid => |invalid| root.report_error(line, "invalid template format: `{invalid}`", .{
.invalid = invalid,
}),
.literal => |literal| root.create_expr(.{
.line = line,
.kind = .{.string_literal = literal},
}),
// Embedded expression: lex and parse the span with a fresh stream.
.expression => |expression| create: {
var stream = tokens.Stream{
.source = expression,
.line = line,
};
stream.step();
break: create try parse(root, &stream, environment);
},
};
expression_tail.next = expression;
expression_tail = expression;
}
return expression_head;
}
// Multiplicative precedence level (see the chain comment near parse_additive).
const parse_term = BinaryOp.parser(parse_factor, &.{
.multiplication,
.divsion,
});

View File

@ -1,242 +0,0 @@
const Expr = @import("./Expr.zig");
const coral = @import("coral");
const tokens = @import("../tokens.zig");
const tree = @import("../tree.zig");
// Next statement in the enclosing block's chain.
next: ?*const Self = null,
// Source line the statement started on, for error reporting.
line: tokens.Line,
// Discriminated payload describing what kind of statement this node is.
kind: union (enum) {
top_expression: *const Expr,
@"return": Return,
declare: Declare,
@"if": If,
@"while": While,
},
/// `var`/`let` declaration with its initializer.
pub const Declare = struct {
declaration: *const tree.Declaration,
initial_expression: *const Expr,
};
/// `if`/`elif`/`else` branch; `else` is null when absent.
pub const If = struct {
then_expression: *const Expr,
@"then": *const Self,
@"else": ?*const Self,
};
/// `return` with an optional value.
pub const Return = struct {
returned_expression: ?*const Expr,
};
const Self = @This();
/// `while` loop: condition plus body chain.
pub const While = struct {
loop_expression: *const Expr,
loop: *const Self,
};
/// Parses a single statement from `stream`: `return`, `while`, `var`/`let`
/// declarations, `if` branches, or a bare top-level expression.
pub fn parse(root: *tree.Root, stream: *tokens.Stream, environment: *tree.Environment) tree.ParseError!*Self {
    switch (stream.token) {
        .keyword_return => {
            stream.step();

            // A non-terminator after `return` means a value is being returned.
            if (stream.token != .end and stream.token != .newline) {
                return root.create_stmt(.{
                    .line = stream.line,
                    .kind = .{.@"return" = .{.returned_expression = try Expr.parse(root, stream, environment)}},
                });
            }

            // NOTE(review): the original re-tested the same terminator
            // condition here and reported "expected end or newline after
            // return statement", but that branch was unreachable (the
            // identical condition had just returned above), so it has been
            // removed. It likely intended to validate the token *after* the
            // returned expression instead — confirm before adding that check.
            return root.create_stmt(.{
                .line = stream.line,
                .kind = .{.@"return" = .{.returned_expression = null}},
            });
        },

        .keyword_while => {
            defer stream.skip_newlines();

            stream.step();

            const condition_expression = try Expr.parse(root, stream, environment);

            if (stream.token != .symbol_colon) {
                return root.report_error(stream.line, "expected `:` after `while` statement", .{});
            }

            stream.skip_newlines();

            // Body statements chain via `next` until the closing `end`.
            const first_statement = try parse(root, stream, environment);

            {
                var current_statement = first_statement;

                while (stream.token != .keyword_end) {
                    const next_statement = try parse(root, stream, environment);

                    current_statement.next = next_statement;
                    current_statement = next_statement;
                }
            }

            return root.create_stmt(.{
                .line = stream.line,
                .kind = .{
                    .@"while" = .{
                        .loop = first_statement,
                        .loop_expression = condition_expression,
                    },
                },
            });
        },

        .keyword_var, .keyword_let => {
            // `let` declares a read-only binding, `var` a mutable one.
            const is_constant = stream.token == .keyword_let;

            stream.skip_newlines();

            const identifier = switch (stream.token) {
                .identifier => |identifier| identifier,
                else => return root.report_error(stream.line, "expected identifier after declaration", .{}),
            };

            stream.skip_newlines();

            if (stream.token != .symbol_equals) {
                return root.report_error(stream.line, "expected `=` after declaration `{identifier}`", .{
                    .identifier = identifier,
                });
            }

            stream.skip_newlines();

            return root.create_stmt(.{
                .line = stream.line,
                .kind = .{
                    .declare = .{
                        .initial_expression = try Expr.parse(root, stream, environment),

                        .declaration = declare: {
                            if (is_constant) {
                                break: declare environment.declare_constant(identifier) catch |declaration_error| {
                                    return root.report_declare_error(stream.line, identifier, declaration_error);
                                };
                            }

                            break: declare environment.declare_variable(identifier) catch |declaration_error| {
                                return root.report_declare_error(stream.line, identifier, declaration_error);
                            };
                        },
                    },
                },
            });
        },

        .keyword_if => return parse_branch(root, stream, environment),

        // Anything else is a bare expression used as a statement.
        else => return root.create_stmt(.{
            .line = stream.line,
            .kind = .{.top_expression = try Expr.parse(root, stream, environment)},
        }),
    }
}
/// Parses one `if`/`elif` branch: condition, `:`-introduced body, then either
/// `end`, an `else:` block, or a chained `elif` (parsed recursively into the
/// `@"else"` slot).
fn parse_branch(root: *tree.Root, stream: *tokens.Stream, environment: *tree.Environment) tree.ParseError!*Self {
stream.step();
const expression = try Expr.parse(root, stream, environment);
if (stream.token != .symbol_colon) {
return root.report_error(stream.line, "expected `:` after `{token}`", .{.token = stream.token.text()});
}
stream.skip_newlines();
const first_then_statement = try parse(root, stream, environment);
var current_then_statement = first_then_statement;
while (true) {
switch (stream.token) {
// `end`: branch without an else.
.keyword_end => {
stream.skip_newlines();
return root.create_stmt(.{
.line = stream.line,
.kind = .{
.@"if" = .{
.then_expression = expression,
.@"then" = first_then_statement,
.@"else" = null,
},
},
});
},
// `else:` block runs until the matching `end`.
.keyword_else => {
stream.step();
if (stream.token != .symbol_colon) {
return root.report_error(stream.line, "expected `:` after `if` statement condition", .{});
}
stream.skip_newlines();
const first_else_statement = try parse(root, stream, environment);
var current_else_statement = first_else_statement;
while (stream.token != .keyword_end) {
const next_statement = try parse(root, stream, environment);
current_else_statement.next = next_statement;
current_else_statement = next_statement;
}
stream.skip_newlines();
return root.create_stmt(.{
.line = stream.line,
.kind = .{
.@"if" = .{
.@"else" = first_else_statement,
.@"then" = first_then_statement,
.then_expression = expression,
},
}
});
},
// `elif`: recurse; the nested branch becomes this branch's else.
.keyword_elif => {
return root.create_stmt(.{
.line = stream.line,
.kind = .{
.@"if" = .{
.@"else" = try parse_branch(root, stream, environment),
.@"then" = first_then_statement,
.then_expression = expression,
},
},
});
},
// Otherwise keep appending statements to the `then` body.
else => {
const next_statement = try parse(root, stream, environment);
current_then_statement.next = next_statement;
current_then_statement = next_statement;
},
}
}
}

View File

@ -1,93 +0,0 @@
const app = @import("./app.zig");
const coral = @import("coral");
const ext = @import("./ext.zig");
pub const file = @import("./file.zig");
const heap = @import("./heap.zig");
const kym = @import("./kym.zig");
/// Returns SDL's current error message as a zero-terminated byte slice.
fn last_sdl_error() [:0]const u8 {
return coral.io.slice_sentineled(@as(u8, 0), @as([*:0]const u8, @ptrCast(ext.SDL_GetError())));
}
/// Boots SDL, loads the app manifest via the script runtime, creates the
/// window and renderer, and runs the event/render loop until an SDL quit
/// event arrives. Failures are logged via `app.log_fail` and abort startup.
pub fn run_app(file_access: file.Access) void {
defer heap.trace_leaks();
if (ext.SDL_Init(ext.SDL_INIT_EVERYTHING) != 0) {
return app.log_fail(last_sdl_error());
}
defer ext.SDL_Quit();
var script_env = kym.RuntimeEnv.init(heap.allocator, 255, .{
.print = app.log_info,
.print_error = app.log_fail,
.import_access = file_access,
}) catch {
return app.log_fail("failed to initialize script runtime");
};
defer script_env.deinit();
// Manifest (title, size, tick rate) is produced by the script runtime.
var manifest = app.Manifest{};
manifest.load(&script_env) catch return;
const window = create: {
const pos = ext.SDL_WINDOWPOS_CENTERED;
const flags = 0;
break: create ext.SDL_CreateWindow(&manifest.title, pos, pos, manifest.width, manifest.height, flags) orelse {
return app.log_fail(last_sdl_error());
};
};
defer ext.SDL_DestroyWindow(window);
const renderer = create: {
const default_driver_index = -1;
const flags = ext.SDL_RENDERER_ACCELERATED;
break: create ext.SDL_CreateRenderer(window, default_driver_index, flags) orelse {
return app.log_fail(last_sdl_error());
};
};
defer ext.SDL_DestroyRenderer(renderer);
{
var previous_ticks = ext.SDL_GetTicks64();
// NOTE(review): this loop never sleeps or vsyncs, so it spins at full
// speed — confirm whether a frame delay is intended here.
while (true) {
{
var event = @as(ext.SDL_Event, undefined);
while (ext.SDL_PollEvent(&event) != 0) {
switch (event.type) {
ext.SDL_QUIT => return,
else => {},
}
}
}
{
// Based on https://fabiensanglard.net/timer_and_framerate/index.php.
// NOTE(review): this only advances `previous_ticks`; no fixed-step
// update is invoked per tick yet — presumably a placeholder.
const current_ticks = ext.SDL_GetTicks64();
const milliseconds_per_second = 1000.0;
const tick_frequency = @as(u64, @intFromFloat(milliseconds_per_second / manifest.tick_rate));
while (previous_ticks < current_ticks) {
previous_ticks += tick_frequency;
}
}
// Clear to black and present the (currently empty) frame.
_ = ext.SDL_SetRenderDrawColor(renderer, 0, 0, 0, 255);
_ = ext.SDL_RenderClear(renderer);
_ = ext.SDL_RenderPresent(renderer);
}
}
}

View File

@ -1,5 +0,0 @@
const ona = @import("ona");
/// Entry point: runs the Ona app with file access sandboxed to the current
/// working directory.
pub fn main() void {
ona.run_app(.{.sandboxed_path = &ona.file.Path.cwd});
}

View File

@ -1,3 +0,0 @@
const coral = @import("coral");
const ona = @import("ona");

97
src/coral/World.zig Normal file
View File

@ -0,0 +1,97 @@
const builtin = @import("builtin");
const heap = @import("./heap.zig");
const map = @import("./map.zig");
const resource = @import("./resource.zig");
const std = @import("std");
const stack = @import("./stack.zig");
const system = @import("./system.zig");
thread_pool: ?*std.Thread.Pool = null,
thread_restricted_resources: [std.enums.values(ThreadRestriction).len]resource.Table,
event_systems: stack.Sequential(system.Schedule),
pub const Event = enum (usize) { _ };
const Self = @This();
pub const ThreadRestriction = enum {
none,
main,
};
/// Creates a new labelled event and returns its handle. The handle is the
/// index of the event's system schedule within `event_systems`.
pub fn create_event(self: *Self, label: []const u8) std.mem.Allocator.Error!Event {
    var schedule = try system.Schedule.init(label);

    errdefer schedule.deinit();

    const event_index = self.event_systems.len();

    try self.event_systems.push(schedule);

    return @enumFromInt(event_index);
}
/// Tears down the world: all resource tables, every event schedule, and the
/// thread pool (if one was created by `init`).
pub fn deinit(self: *Self) void {
for (&self.thread_restricted_resources) |*resources| {
resources.deinit();
}
for (self.event_systems.values) |*schedule| {
schedule.deinit();
}
// The pool itself was allocated in `init`, so destroy it after shutdown.
if (self.thread_pool) |thread_pool| {
thread_pool.deinit();
heap.allocator.destroy(thread_pool);
}
self.event_systems.deinit();
self.* = undefined;
}
/// Looks up the stored resource of type `Resource` under the given thread
/// restriction. Returns `null` when no such resource has been set.
pub fn get_resource(self: Self, thread_restriction: ThreadRestriction, comptime Resource: type) ?*Resource {
return @ptrCast(@alignCast(self.thread_restricted_resources[@intFromEnum(thread_restriction)].get(Resource)));
}
/// Stores `value` as a resource under the given thread restriction and
/// returns a pointer to the stored copy.
pub fn set_get_resource(self: *Self, thread_restriction: ThreadRestriction, value: anytype) std.mem.Allocator.Error!*@TypeOf(value) {
return self.thread_restricted_resources[@intFromEnum(thread_restriction)].set_get(value);
}
/// Creates a world with empty resource tables and, when `thread_count` is
/// nonzero on a multi-threaded build, a worker thread pool of that size.
pub fn init(thread_count: u32) std.Thread.SpawnError!Self {
var world = Self{
.thread_restricted_resources = .{resource.Table.init(), resource.Table.init()},
.event_systems = .{.allocator = heap.allocator},
};
// Single-threaded builds never spawn a pool, regardless of `thread_count`.
if (thread_count != 0 and !builtin.single_threaded) {
const thread_pool = try heap.allocator.create(std.Thread.Pool);
try thread_pool.init(.{
.allocator = heap.allocator,
.n_jobs = thread_count,
});
world.thread_pool = thread_pool;
}
return world;
}
/// Registers `action` to run when `event` fires, with the given ordering
/// constraint within the event's schedule.
pub fn on_event(self: *Self, event: Event, action: *const system.Info, order: system.Order) std.mem.Allocator.Error!void {
try self.event_systems.values[@intFromEnum(event)].then(self, action, order);
}
/// Runs every system registered on `event`, propagating any system error.
pub fn run_event(self: *Self, event: Event) anyerror!void {
try self.event_systems.values[@intFromEnum(event)].run(self);
}
/// Stores `value` as a resource under the given thread restriction,
/// discarding the pointer to the stored copy.
pub fn set_resource(self: *Self, thread_restriction: ThreadRestriction, value: anytype) std.mem.Allocator.Error!void {
try self.thread_restricted_resources[@intFromEnum(thread_restriction)].set(value);
}

195
src/coral/ascii.zig Normal file
View File

@ -0,0 +1,195 @@
const coral = @import("./coral.zig");
const io = @import("./io.zig");
const scalars = @import("./scalars.zig");
const std = @import("std");
/// Base-10 parsing and printing with configurable delimiter handling and
/// positive-sign prefixing.
pub const DecimalFormat = struct {
    delimiter: []const coral.Byte,
    positive_prefix: enum {none, plus, space},

    pub const default = DecimalFormat{
        .delimiter = "",
        .positive_prefix = .none,
    };

    /// Parses `utf8` as a base-10 value of integer or float type `Decimal`.
    /// Returns `null` for empty, malformed, or out-of-range input.
    pub fn parse(self: DecimalFormat, utf8: []const u8, comptime Decimal: type) ?Decimal {
        if (utf8.len == 0) {
            return null;
        }

        switch (@typeInfo(Decimal)) {
            .Int => |int| {
                const has_sign = switch (utf8[0]) {
                    '-', '+', ' ' => true,
                    else => false,
                };

                // Fixed: a lone sign ("-", "+", " ") previously parsed as 0;
                // reject it here like the float path already did.
                if (has_sign and utf8.len == 1) {
                    return null;
                }

                var result = @as(Decimal, 0);

                for (@intFromBool(has_sign) .. utf8.len) |index| {
                    const radix = 10;
                    const code = utf8[index];

                    switch (code) {
                        '0', '1', '2', '3', '4', '5', '6', '7', '8', '9' => {
                            // Overflow-checked accumulate; any overflow rejects the input.
                            const offset_code = scalars.sub(code, '0') orelse return null;

                            result = scalars.mul(result, radix) orelse return null;
                            result = scalars.add(result, offset_code) orelse return null;
                        },

                        else => {
                            // NOTE(review): this compares the delimiter against the
                            // entire remaining text rather than a prefix of it —
                            // confirm that is the intended delimiter semantics.
                            if (self.delimiter.len == 0 or !coral.are_equal(self.delimiter, utf8[index ..])) {
                                return null;
                            }
                        },
                    }
                }

                switch (int.signedness) {
                    .signed => {
                        return result * @as(Decimal, if (has_sign and utf8[0] == '-') -1 else 1);
                    },

                    .unsigned => {
                        if (has_sign and utf8[0] == '-') {
                            return null;
                        }

                        return result;
                    },
                }
            },

            .Float => {
                const has_sign = switch (utf8[0]) {
                    '-', '+', ' ' => true,
                    else => false,
                };

                // "-"
                if (has_sign and utf8.len == 1) {
                    return null;
                }

                const sign_offset = @intFromBool(has_sign);
                var has_decimal = utf8[sign_offset] == '.';

                // "-."
                if (has_decimal and (utf8.len == 2)) {
                    return null;
                }

                var result = @as(Decimal, 0);
                var factor = @as(Decimal, if (has_sign and utf8[0] == '-') -1 else 1);

                for (utf8[sign_offset + @intFromBool(has_decimal) .. utf8.len]) |code| {
                    switch (code) {
                        '.' => {
                            if (has_decimal) {
                                return null;
                            }

                            has_decimal = true;
                        },

                        '0', '1', '2', '3', '4', '5', '6', '7', '8', '9' => {
                            // Digits after the point scale down by powers of ten.
                            if (has_decimal) {
                                factor /= 10.0;
                            }

                            result = ((result * 10.0) + @as(Decimal, @floatFromInt(code - '0')));
                        },

                        else => return null,
                    }
                }

                return result * factor;
            },

            else => @compileError("`" ++ @typeName(Decimal) ++ "` cannot be parsed from a decimal string"),
        }
    }

    /// Prints `value` (integer or float) in base 10 to `writer`, honouring
    /// `positive_prefix`. Floats are printed with two fractional digits.
    pub fn print(self: DecimalFormat, writer: io.Writer, value: anytype) io.PrintError!void {
        if (value == 0) {
            return io.print(writer, switch (self.positive_prefix) {
                .none => "0",
                .plus => "+0",
                .space => " 0",
            });
        }

        const Value = @TypeOf(value);

        switch (@typeInfo(Value)) {
            .Int => |int| {
                const radix = 10;
                var buffer = [_]u8{0} ** (1 + @max(int.bits, 1));
                var buffer_start = buffer.len - 1;

                {
                    var decomposable_value = value;

                    // Emit digits least-significant first. `@rem` keeps the
                    // sign of the value, so negative digits are flipped back
                    // into '0'..'9' (the original used `@mod`/`@divTrunc`,
                    // which produced wrong digits for negative values).
                    while (decomposable_value != 0) : (buffer_start -= 1) {
                        const digit = @rem(decomposable_value, radix);

                        buffer[buffer_start] = if (digit < 0) @intCast('0' - digit) else @intCast(digit + '0');
                        decomposable_value = @divTrunc(decomposable_value, radix);
                    }
                }

                // Fixed: the original tested `.unsigned` here, so negative
                // signed values never received their '-' sign.
                if (int.signedness == .signed and value < 0) {
                    buffer[buffer_start] = '-';
                } else {
                    switch (self.positive_prefix) {
                        .none => buffer_start += 1,
                        .plus => buffer[buffer_start] = '+',
                        .space => buffer[buffer_start] = ' ',
                    }
                }

                return io.print(writer, buffer[buffer_start ..]);
            },

            .Float => |float| {
                if (value < 0) {
                    try io.print(writer, "-");
                }

                const Float = @TypeOf(value);
                const Int = std.meta.Int(.unsigned, float.bits);

                // Fixed: work with the magnitude — `Int` is unsigned, so the
                // original's direct `@intFromFloat` of a negative value was
                // illegal behaviour.
                const magnitude = if (value < 0) -value else value;
                const integer = @as(Int, @intFromFloat(magnitude));

                try self.print(writer, integer);
                try io.print(writer, ".");
                try self.print(writer, @as(Int, @intFromFloat((magnitude - @as(Float, @floatFromInt(integer))) * 100)));
            },

            else => @compileError("`" ++ @typeName(Value) ++ "` cannot be formatted to a decimal string"),
        }
    }
};
/// Base-16 formatting options. Printing is not implemented yet.
pub const HexadecimalFormat = struct {
delimiter: []const u8 = "",
positive_prefix: enum {none, plus, space} = .none,
casing: enum {lower, upper} = .lower,
const default = HexadecimalFormat{
.delimiter = "",
.positive_prefix = .none,
.casing = .lower,
};
// NOTE(review): unimplemented stub — always hits `unreachable`. The
// `?usize` return type also differs from `DecimalFormat.print`'s
// `io.PrintError!void`; confirm the intended signature before implementing.
pub fn print(self: HexadecimalFormat, writer: io.Writer, value: anytype) ?usize {
// TODO: Implement.
_ = self;
_ = writer;
_ = value;
unreachable;
}
};

311
src/coral/coral.zig Normal file
View File

@ -0,0 +1,311 @@
pub const ascii = @import("./ascii.zig");
pub const dag = @import("./dag.zig");
pub const debug = @import("./debug.zig");
pub const hash = @import("./hash.zig");
pub const heap = @import("./heap.zig");
pub const io = @import("./io.zig");
pub const map = @import("./map.zig");
pub const scalars = @import("./scalars.zig");
pub const slices = @import("./slices.zig");
pub const slots = @import("./slots.zig");
pub const stack = @import("./stack.zig");
pub const system = @import("./system.zig");
pub const utf8 = @import("./utf8.zig");
pub const vectors = @import("./vectors.zig");
pub const World = @import("./World.zig");
const std = @import("std");
/// Configuration for a [Resource] system-parameter type.
pub const ResourceOptions = struct {
    /// Which thread, if any, the owning system is pinned to while accessing the resource.
    thread_restriction: World.ThreadRestriction,
    /// When true the resource is exposed through a const pointer.
    read_only: bool = false,
};
/// Shorthand for an unrestricted, read-only [Resource] over `Value`.
pub fn Read(comptime Value: type) type {
    const options = ResourceOptions{
        .thread_restriction = .none,
        .read_only = true,
    };

    return Resource(Value, options);
}
/// Shorthand for a main-thread-restricted, read-only [Resource] over `Value`.
pub fn ReadBlocking(comptime Value: type) type {
    const options = ResourceOptions{
        .thread_restriction = .main,
        .read_only = true,
    };

    return Resource(Value, options);
}
/// Builds a system-parameter type granting access to the world resource
/// `Value` according to `options`.
///
/// Optional `Value`s are surfaced as a nullable pointer; all other types must
/// already be present when the system runs, otherwise `bind` panics.
pub fn Resource(comptime Value: type, comptime options: ResourceOptions) type {
    const value_info = @typeInfo(Value);

    // Pointer mutability follows the requested access mode. The original
    // built these pointer types with `@Type` payloads whose `Optional.child`
    // was a nested struct literal — but `child` must itself be a `type`, so
    // that form does not compile. `*const Value` / `*Value` are equivalent.
    const Pointer = if (options.read_only) *const Value else *Value;

    const Qualified = switch (value_info) {
        .Optional => ?Pointer,
        else => Pointer,
    };

    return struct {
        res: Qualified,

        const Self = @This();

        pub const State = struct {
            res: Qualified,
        };

        /// Registers this access with `context` and captures the resource
        /// pointer; panics when a non-optional resource was never set.
        pub fn bind(context: system.BindContext) std.mem.Allocator.Error!State {
            const thread_restriction_name = switch (thread_restriction) {
                .main => "main thread-restricted ",
                .none => ""
            };

            const res = switch (options.read_only) {
                true => (try context.register_read_only_resource_access(thread_restriction, Value)),
                false => (try context.register_read_write_resource_access(thread_restriction, Value)),
            };

            return .{
                .res = switch (value_info) {
                    .Optional => res,

                    else => res orelse {
                        @panic(std.fmt.comptimePrint("attempt to use {s}{s} {s} that has not yet been set", .{
                            thread_restriction_name,
                            if (options.read_only) "read-only" else "read-write",
                            @typeName(Value),
                        }));
                    },
                },
            };
        }

        /// Produces the argument value handed to the system call.
        pub fn init(state: *State) Self {
            return .{
                .res = state.res,
            };
        }

        pub const thread_restriction = options.thread_restriction;
    };
}
/// Shorthand for an unrestricted, read-write [Resource] over `Value`.
pub fn Write(comptime Value: type) type {
    const options = ResourceOptions{
        .thread_restriction = .none,
    };

    return Resource(Value, options);
}
/// Shorthand for a main-thread-restricted, read-write [Resource] over `Value`.
pub fn WriteBlocking(comptime Value: type) type {
    const options = ResourceOptions{
        .thread_restriction = .main,
    };

    return Resource(Value, options);
}
// Builds the comptime [system.Info.Parameter] descriptor for the system
// parameter type `Value`, validating its `bind`/`init`/`unbind` contract.
// (Diagnostic typos fixed: "requied" -> "required", "it's" -> "its",
// "statelss" -> "stateless".)
fn parameter_type(comptime Value: type) *const system.Info.Parameter {
    const has_state = @hasDecl(Value, "State");

    if (@sizeOf(Value) == 0) {
        @compileError("System parameters must have a non-zero size");
    }

    const parameters = struct {
        // Allocates and fills the parameter's per-system state, if it has any.
        fn bind(allocator: std.mem.Allocator, context: system.BindContext) std.mem.Allocator.Error!?*anyopaque {
            if (has_state) {
                const value_name = @typeName(Value);

                if (!@hasDecl(Value, "bind")) {
                    @compileError(
                        "a `bind` declaration on " ++
                        value_name ++
                        " is required for parameter types with a `State` declaration");
                }

                const bind_type = @typeInfo(@TypeOf(Value.bind));

                if (bind_type != .Fn) {
                    @compileError("`bind` declaration on " ++ value_name ++ " must be a fn");
                }

                if (bind_type.Fn.params.len != 1 or bind_type.Fn.params[0].type.? != system.BindContext) {
                    @compileError(
                        "`bind` fn on " ++
                        value_name ++
                        " must accept " ++
                        @typeName(system.BindContext) ++
                        " as its one and only argument");
                }

                const state = try allocator.create(Value.State);

                // `bind` may be fallible or infallible; both are accepted.
                state.* = switch (bind_type.Fn.return_type.?) {
                    Value.State => Value.bind(context),
                    std.mem.Allocator.Error!Value.State => try Value.bind(context),

                    else => @compileError(
                        "`bind` fn on " ++
                        @typeName(Value) ++
                        " must return " ++
                        @typeName(Value.State) ++
                        " or " ++
                        @typeName(std.mem.Allocator.Error!Value.State)),
                };

                return @ptrCast(state);
            } else {
                return null;
            }
        }

        // Writes an initialised `Value` into the type-erased argument slot.
        fn init(argument: *anyopaque, state: ?*anyopaque) void {
            const value_name = @typeName(Value);

            if (!@hasDecl(Value, "init")) {
                @compileError("an `init` declaration on " ++ value_name ++ " is required for parameter types");
            }

            const init_type = @typeInfo(@TypeOf(Value.init));

            if (init_type != .Fn) {
                @compileError("`init` declaration on " ++ value_name ++ " must be a fn");
            }

            if (init_type.Fn.return_type.? != Value) {
                @compileError("`init` fn on " ++ value_name ++ " must return a " ++ value_name);
            }

            const concrete_argument = @as(*Value, @ptrCast(@alignCast(argument)));

            if (has_state) {
                if (init_type.Fn.params.len != 1 or init_type.Fn.params[0].type.? != *Value.State) {
                    @compileError("`init` fn on stateful " ++ value_name ++ " must accept a " ++ @typeName(*Value.State));
                }

                concrete_argument.* = Value.init(@ptrCast(@alignCast(state.?)));
            } else {
                if (init_type.Fn.params.len != 0) {
                    @compileError("`init` fn on stateless " ++ value_name ++ " cannot use parameters");
                }

                concrete_argument.* = Value.init();
            }
        }

        // Tears down and frees the state that `bind` created, if any.
        fn unbind(allocator: std.mem.Allocator, state: ?*anyopaque) void {
            if (@hasDecl(Value, "unbind")) {
                if (has_state) {
                    const typed_state = @as(*Value.State, @ptrCast(@alignCast(state.?)));

                    Value.unbind(typed_state);
                    allocator.destroy(typed_state);
                } else {
                    Value.unbind();
                }
            }
        }
    };

    return comptime &.{
        .thread_restriction = if (@hasDecl(Value, "thread_restriction")) Value.thread_restriction else .none,
        .init = parameters.init,
        .bind = parameters.bind,
        .unbind = parameters.unbind,
    };
}
/// Builds a [system.Info] describing the function `call`, inferring parameter
/// bindings and thread restrictions from its argument types.
pub fn system_fn(comptime call: anytype) *const system.Info {
    const Call = @TypeOf(call);

    const system_info = comptime generate: {
        switch (@typeInfo(Call)) {
            .Fn => |call_fn| {
                if (call_fn.params.len > system.max_parameters) {
                    // Derive the limit from the constant instead of
                    // hard-coding "16" in the diagnostic.
                    @compileError(std.fmt.comptimePrint(
                        "number of parameters to `call` cannot be more than {d}",
                        .{system.max_parameters}));
                }

                const systems = struct {
                    fn run(parameters: []const *const system.Info.Parameter, states: *const [system.max_parameters]?*anyopaque) anyerror!void {
                        var call_args = @as(std.meta.ArgsTuple(Call), undefined);

                        inline for (parameters, &call_args, states[0 .. parameters.len]) |parameter, *call_arg, state| {
                            parameter.init(call_arg, state);
                        }

                        switch (@typeInfo(call_fn.return_type.?)) {
                            .Void => @call(.auto, call, call_args),
                            .ErrorUnion => try @call(.auto, call, call_args),

                            // The original diagnostic was a copy-paste of the
                            // arity message.
                            else => @compileError("`call` must return void or !void"),
                        }
                    }
                };

                var parameters = @as([system.max_parameters]*const system.Info.Parameter, undefined);
                var thread_restriction = World.ThreadRestriction.none;

                for (0 .. call_fn.params.len) |index| {
                    const CallParam = call_fn.params[index].type.?;
                    const parameter = parameter_type(CallParam);

                    if (parameter.thread_restriction != .none) {
                        if (thread_restriction != .none and thread_restriction != parameter.thread_restriction) {
                            @compileError("a system may not have conflicting thread restrictions");
                        }

                        thread_restriction = parameter.thread_restriction;
                    }

                    parameters[index] = parameter;
                }

                break: generate &.{
                    .parameters = parameters,
                    .parameter_count = call_fn.params.len,
                    .execute = systems.run,
                    .thread_restriction = thread_restriction,
                };
            },

            else => @compileError("parameter `call` must be a function"),
        }
    };

    return system_info;
}

158
src/coral/dag.zig Normal file
View File

@ -0,0 +1,158 @@
const stack = @import("./stack.zig");
const slices = @import("./slices.zig");
const std = @import("std");
/// Directed graph storing one `Payload` per node plus per-node edge lists,
/// with visitation flags for traversal algorithms.
pub fn Graph(comptime Payload: type) type {
    return struct {
        // Count of live nodes; removed slots stay allocated but unoccupied.
        node_count: usize = 0,
        table: NodeTables,

        const NodeTables = stack.Parallel(struct {
            payload: Payload,
            edges: stack.Sequential(Node),
            is_occupied: bool = true,
            is_visited: bool = false,
        });

        const Self = @This();

        /// Appends a node holding `payload`, returning its handle.
        pub fn append(self: *Self, payload: Payload) std.mem.Allocator.Error!Node {
            const node = @as(Node, @enumFromInt(self.table.len()));

            try self.table.push(.{
                .payload = payload,
                .edges = .{.allocator = self.table.allocator},
            });

            self.node_count += 1;

            return node;
        }

        /// Removes every edge from every node, keeping the nodes themselves.
        pub fn clear_edges(self: *Self) void {
            for (self.table.values.slice(.edges)) |*edges| {
                edges.clear();
            }
        }

        /// Frees all node and edge storage; the graph must not be used after.
        pub fn deinit(self: *Self) void {
            for (self.table.values.slice(.edges)) |*edges| {
                edges.deinit();
            }

            self.table.deinit();

            self.* = undefined;
        }

        /// Returns the edges of `node`, or null when it does not exist.
        pub fn edge_nodes(self: Self, node: Node) ?[]const Node {
            if (!self.exists(node)) {
                return null;
            }

            return self.table.values.get_ptr(.edges, @intFromEnum(node)).?.values;
        }

        /// Returns true when `node` refers to a live node.
        pub fn exists(self: Self, node: Node) bool {
            return self.table.values.get(.is_occupied, @intFromEnum(node)) orelse false;
        }

        /// Returns a pointer to the payload of `node`, or null when absent.
        pub fn get_ptr(self: Self, node: Node) ?*Payload {
            if (!self.exists(node)) {
                return null;
            }

            return self.table.values.get_ptr(.payload, @intFromEnum(node)).?;
        }

        /// Creates an empty graph backed by `allocator`.
        pub fn init(allocator: std.mem.Allocator) Self {
            return .{
                .table = .{.allocator = allocator},
            };
        }

        /// Adds `edge_node` to the edge list of `dependant_node`
        /// (de-duplicated); returns false when either node is invalid.
        pub fn insert_edge(self: *Self, dependant_node: Node, edge_node: Node) std.mem.Allocator.Error!bool {
            if (!self.exists(edge_node)) {
                return false;
            }

            const edges = self.table.values.get_ptr(.edges, @intFromEnum(dependant_node)) orelse {
                return false;
            };

            if (slices.index_of(edges.values, 0, edge_node) == null) {
                try edges.push(edge_node);
            }

            return true;
        }

        /// Returns true when the graph holds no live nodes.
        pub fn is_empty(self: Self) bool {
            // Original compared with `!=`, inverting the result.
            return self.node_count == 0;
        }

        /// Returns an iterator over all occupied nodes.
        pub fn nodes(self: *const Self) Nodes {
            return .{
                .occupied_table = self.table.values.slice(.is_occupied),
            };
        }

        /// Flags `node` as visited; returns false when it does not exist.
        pub fn mark_visited(self: *Self, node: Node) bool {
            if (!self.exists(node)) {
                return false;
            }

            std.debug.assert(self.table.values.set(.is_visited, @intFromEnum(node), true));

            return true;
        }

        /// Unlinks `node`, returning its payload, or null when absent.
        pub fn remove_node(self: *Self, node: Node) ?Payload {
            if (!self.exists(node)) {
                return null;
            }

            const node_index = @intFromEnum(node);

            self.table.values.get_ptr(.is_occupied, node_index).?.* = false;

            self.node_count -= 1;

            return self.table.values.get(.payload, node_index).?;
        }

        /// Clears the visited flag on every node.
        pub fn reset_visited(self: *Self) void {
            @memset(self.table.values.slice(.is_visited), false);
        }

        /// Returns the visited flag of `node`, or null when it does not exist.
        pub fn visited(self: Self, node: Node) ?bool {
            if (!self.exists(node)) {
                return null;
            }

            return self.table.values.get(.is_visited, @intFromEnum(node)).?;
        }
    };
}
/// Opaque handle identifying one node within a [Graph].
pub const Node = enum (usize) { _ };
/// Iterator over the occupied node handles of a [Graph].
pub const Nodes = struct {
    occupied_table: []const bool,
    iterations: usize = 0,

    /// Returns the next occupied node, or null once the table is exhausted.
    pub fn next(self: *Nodes) ?Node {
        std.debug.assert(self.iterations <= self.occupied_table.len);

        while (self.iterations != self.occupied_table.len) {
            const index = self.iterations;

            self.iterations += 1;

            if (self.occupied_table[index]) {
                return @enumFromInt(index);
            }
        }

        return null;
    }
};

5
src/coral/debug.zig Normal file
View File

@ -0,0 +1,5 @@
const std = @import("std");
/// Unwraps `error_union`, treating any error as an unreachable programmer
/// error (undefined behavior in release-fast builds).
pub fn assert_ok(error_union: anytype) @typeInfo(@TypeOf(error_union)).ErrorUnion.payload {
    if (error_union) |value| {
        return value;
    } else |_| {
        unreachable;
    }
}

27
src/coral/hash.zig Normal file
View File

@ -0,0 +1,27 @@
const std = @import("std");
/// DJB2 string hash over `bytes`, computed in the integer type described by
/// `int` with wrapping arithmetic.
pub fn djb2(comptime int: std.builtin.Type.Int, bytes: []const u8) std.meta.Int(int.signedness, int.bits) {
    const Hash = std.meta.Int(int.signedness, int.bits);

    var state: Hash = 5381;

    for (bytes) |octet| {
        // `state * 33 + octet`, identical to `((state << 5) + state) + octet`.
        state = (state *% 33) +% octet;
    }

    return state;
}
/// Jenkins one-at-a-time hash over `bytes`, computed in the integer type
/// described by `int` with wrapping arithmetic.
pub fn jenkins(comptime int: std.builtin.Type.Int, bytes: []const u8) std.meta.Int(int.signedness, int.bits) {
    const Hash = std.meta.Int(int.signedness, int.bits);

    var state: Hash = 0;

    for (bytes) |octet| {
        state +%= octet;
        state +%= (state << 10);
        state ^= (state >> 6);
    }

    // Final avalanche.
    state +%= (state << 3);
    state ^= (state >> 11);
    state +%= (state << 15);

    return state;
}

11
src/coral/heap.zig Normal file
View File

@ -0,0 +1,11 @@
const std = @import("std");
/// Process-wide, thread-safe general-purpose allocator.
pub const allocator = gpa.allocator();

// Backing state for `allocator`; marked thread-safe so any thread may allocate.
var gpa = std.heap.GeneralPurposeAllocator(.{
    .thread_safe = true,
}){};

/// Reports any allocations that were never freed (output goes to stderr).
pub fn trace_leaks() void {
    _ = gpa.detectLeaks();
}

277
src/coral/io.zig Normal file
View File

@ -0,0 +1,277 @@
const builtin = @import("builtin");
const coral = @import("./coral.zig");
const slices = @import("./slices.zig");
const std = @import("std");
/// Smallest addressable unit of data.
pub const Byte = u8;

/// Functor invoked with successive chunks of bytes to decode.
pub const Decoder = coral.io.Functor(coral.io.Error!void, &.{[]coral.Byte});

/// Errors common to all I/O operations.
pub const Error = error {
    UnavailableResource,
};
/// Type-erased, immutable closure taking `input_types` and returning `Output`.
pub fn Functor(comptime Output: type, comptime input_types: []const type) type {
    const InputTuple = std.meta.Tuple(input_types);

    return struct {
        // Type-erased pointer to the bound state (const: functors never mutate it).
        context: *const anyopaque,
        // Trampoline that downcasts `context` and forwards `inputs`.
        apply_with_context: *const fn (*const anyopaque, InputTuple) Output,

        const Self = @This();

        /// Invokes the functor with `inputs`.
        pub fn apply(self: *const Self, inputs: InputTuple) Output {
            return self.apply_with_context(self.context, inputs);
        }

        /// Binds `invoke` over the borrowed `state`, erasing its type.
        /// `state` must outlive the returned functor.
        pub fn bind(comptime State: type, state: *const State, comptime invoke: anytype) Self {
            const is_zero_aligned = @alignOf(State) == 0;

            return .{
                .context = if (is_zero_aligned) state else @ptrCast(state),

                .apply_with_context = struct {
                    fn invoke_concrete(context: *const anyopaque, inputs: InputTuple) Output {
                        // Zero-sized state cannot (and need not) be align-cast.
                        if (is_zero_aligned) {
                            return @call(.auto, invoke, .{@as(*const State, @ptrCast(context))} ++ inputs);
                        }

                        return switch (@typeInfo(@typeInfo(@TypeOf(invoke)).Fn.return_type.?)) {
                            .ErrorUnion => try @call(.auto, invoke, .{@as(*const State, @ptrCast(@alignCast(context)))} ++ inputs),
                            else => @call(.auto, invoke, .{@as(*const State, @ptrCast(@alignCast(context)))} ++ inputs),
                        };
                    }
                }.invoke_concrete,
            };
        }

        /// Binds a stateless `invoke` function; the context goes unused.
        pub fn bind_fn(comptime invoke: anytype) Self {
            return .{
                .context = undefined,

                .apply_with_context = struct {
                    fn invoke_concrete(_: *const anyopaque, inputs: InputTuple) Output {
                        return @call(.auto, invoke, inputs);
                    }
                }.invoke_concrete,
            };
        }
    };
}
/// Type-erased, mutable closure taking `input_types` and returning `Output`;
/// like [Functor] but the bound state may be mutated by each yield.
pub fn Generator(comptime Output: type, comptime input_types: []const type) type {
    const InputTuple = std.meta.Tuple(input_types);

    return struct {
        // Type-erased pointer to the bound (mutable) state.
        context: *anyopaque,
        // Trampoline that downcasts `context` and forwards `inputs`.
        yield_with_context: *const fn (*anyopaque, InputTuple) Output,

        const Self = @This();

        /// Binds `invoke` over the borrowed `state`, erasing its type.
        /// `state` must outlive the returned generator.
        pub fn bind(comptime State: type, state: *State, comptime invoke: anytype) Self {
            const is_zero_aligned = @alignOf(State) == 0;

            return .{
                .context = if (is_zero_aligned) state else @ptrCast(state),

                .yield_with_context = struct {
                    fn invoke_concrete(context: *anyopaque, inputs: InputTuple) Output {
                        // Zero-sized state cannot (and need not) be align-cast.
                        if (is_zero_aligned) {
                            return @call(.auto, invoke, .{@as(*State, @ptrCast(context))} ++ inputs);
                        }

                        return switch (@typeInfo(@typeInfo(@TypeOf(invoke)).Fn.return_type.?)) {
                            .ErrorUnion => try @call(.auto, invoke, .{@as(*State, @ptrCast(@alignCast(context)))} ++ inputs),
                            else => @call(.auto, invoke, .{@as(*State, @ptrCast(@alignCast(context)))} ++ inputs),
                        };
                    }
                }.invoke_concrete,
            };
        }

        /// Binds a stateless `invoke` function; the context goes unused.
        pub fn bind_fn(comptime invoke: anytype) Self {
            return .{
                .context = undefined,

                .yield_with_context = struct {
                    // The original declared this parameter as `*const
                    // anyopaque`, which does not match the `*anyopaque`
                    // signature of `yield_with_context`.
                    fn invoke_concrete(_: *anyopaque, inputs: InputTuple) Output {
                        return @call(.auto, invoke, inputs);
                    }
                }.invoke_concrete,
            };
        }

        /// Invokes the generator with `inputs`.
        pub fn yield(self: *const Self, inputs: InputTuple) Output {
            return self.yield_with_context(self.context, inputs);
        }
    };
}
/// Writer sink that discards all bytes while counting how many were "written".
pub const NullWritable = struct {
    written: usize = 0,

    /// Discards `buffer`, recording its length, and reports it fully written.
    pub fn write(self: *NullWritable, buffer: []const u8) !usize {
        const count = buffer.len;

        self.written += count;

        return count;
    }

    /// Returns a [Writer] bound to this sink.
    pub fn writer(self: *NullWritable) Writer {
        return Writer.bind(NullWritable, self, write);
    }
};
/// Errors that may occur while printing formatted text.
pub const PrintError = Error || error {
    IncompleteWrite,
};

/// Generator that fills the given buffer, yielding the number of bytes read.
pub const Reader = Generator(Error!usize, &.{[]coral.Byte});

/// Generator that consumes the given buffer, yielding the number of bytes written.
pub const Writer = Generator(Error!usize, &.{[]const coral.Byte});

// Byte order of the compile target.
const native_endian = builtin.cpu.arch.endian();
/// Reads everything from `input` into a buffer allocated from `allocator`.
/// Caller owns the returned slice.
///
/// Fixes from review: the buffer must be `var` (it is grown while streaming);
/// `input` is already a [Reader], so the nonexistent `input.reader()` call is
/// dropped; `coral.Stack` does not exist (`coral.stack.Sequential` does); and
/// the return type needs an error union since the body uses `try`.
pub fn alloc_read(input: coral.io.Reader, allocator: std.mem.Allocator) ![]coral.Byte {
    var buffer = coral.stack.Sequential(coral.Byte){.allocator = allocator};

    errdefer buffer.deinit();

    // NOTE(review): assumes `stack.Sequential` exposes `writer` and
    // `to_allocation` — confirm against the stack module.
    const streamed = try stream_all(input, buffer.writer());

    return buffer.to_allocation(streamed);
}
/// Returns true when `a` and `b` have identical lengths and contents.
pub fn are_equal(a: []const Byte, b: []const Byte) bool {
    if (a.len != b.len) {
        return false;
    }

    for (a, b) |byte_a, byte_b| {
        if (byte_a != byte_b) {
            return false;
        }
    }

    return true;
}
/// Number of bits that make up one [Byte].
pub const bits_per_byte = 8;
/// Returns the raw bytes backing `value`, which must be a single-element
/// pointer or a slice.
pub fn bytes_of(value: anytype) []const Byte {
    const pointer_info = @typeInfo(@TypeOf(value)).Pointer;

    return switch (pointer_info.size) {
        .One => @as([*]const Byte, @ptrCast(value))[0 .. @sizeOf(pointer_info.child)],
        .Slice => @as([*]const Byte, @ptrCast(value.ptr))[0 .. @sizeOf(pointer_info.child) * value.len],
        else => @compileError("`value` must be single-element pointer or slice type"),
    };
}
/// Returns true when `haystack` terminates with the bytes of `needle`.
pub fn ends_with(haystack: []const Byte, needle: []const Byte) bool {
    if (haystack.len < needle.len) {
        return false;
    }

    const tail = haystack[haystack.len - needle.len ..];

    return are_equal(tail, needle);
}
/// Writes the entirety of `utf8` to `writer`, failing on a short write.
pub fn print(writer: Writer, utf8: []const u8) PrintError!void {
    const written = try writer.yield(.{utf8});

    if (written != utf8.len) {
        return error.IncompleteWrite;
    }
}
/// Discards the next `distance` bytes produced by `input`.
pub fn skip_n(input: Reader, distance: u64) Error!void {
    var scratch = @as([512]coral.Byte, undefined);
    var remaining = distance;

    while (remaining != 0) {
        const wanted = @min(remaining, scratch.len);
        const read = try input.yield(.{scratch[0 .. wanted]});

        if (read == 0) {
            return error.UnavailableResource;
        }

        remaining -= read;
    }
}
/// Reads a `Type` stored in the non-native byte order, swapping it to native.
pub fn read_foreign(input: Reader, comptime Type: type) Error!Type {
    // Mutable: the struct path swaps fields in place. The original switched
    // on `@typeInfo(input)` (a runtime value, not a type) and discarded the
    // in-place swap's (void) result, so it could not compile.
    var decoded = try read_native(input, Type);

    switch (@typeInfo(Type)) {
        .Struct => std.mem.byteSwapAllFields(Type, &decoded),
        else => decoded = @byteSwap(decoded),
    }

    return decoded;
}
/// Reads a `Type` stored in the native byte order.
pub fn read_native(input: Reader, comptime Type: type) Error!Type {
    var bytes = @as([@sizeOf(Type)]coral.Byte, undefined);
    const read = try input.yield(.{&bytes});

    if (read != bytes.len) {
        return error.UnavailableResource;
    }

    return @as(*align(1) const Type, @ptrCast(&bytes)).*;
}
/// Reads a little-endian value, swapping only when the target is big-endian.
pub const read_little = switch (native_endian) {
    .little => read_native,
    .big => read_foreign,
};

/// Reads a big-endian value, swapping only when the target is little-endian.
pub const read_big = switch (native_endian) {
    .little => read_foreign,
    .big => read_native,
};
/// Returns the span of `ptr` up to (and excluding) the sentinel `sen`.
pub fn slice_sentineled(comptime sen: anytype, ptr: [*:sen]const @TypeOf(sen)) [:sen]const @TypeOf(sen) {
    var len = @as(usize, 0);

    while (ptr[len] != sen) : (len += 1) {}

    return ptr[0 .. len:sen];
}
/// Copies everything from `input` into `output`, returning the number of
/// bytes moved. Fails when the writer accepts a short write.
pub fn stream_all(input: Reader, output: Writer) Error!usize {
    var buffer = @as([512]coral.Byte, undefined);
    var copied = @as(usize, 0);

    while (true) {
        // Readers and writers are [Generator]s driven through `yield`; the
        // original called the nonexistent `apply`.
        const read = try input.yield(.{&buffer});

        if (read == 0) {
            return copied;
        }

        if (try output.yield(.{buffer[0 .. read]}) != read) {
            return error.UnavailableResource;
        }

        copied += read;
    }
}
/// Copies up to `limit` bytes from `input` into `output`, returning the
/// number of bytes actually moved (less than `limit` when `input` runs dry).
pub fn stream_n(input: Reader, output: Writer, limit: usize) Error!usize {
    var buffer = @as([512]coral.Byte, undefined);
    var remaining = limit;

    // Stop once the limit is met; the original looped forever on `while
    // (true)` and relied on a final zero-length read returning 0 to exit.
    while (remaining != 0) {
        const read = try input.yield(.{buffer[0 .. @min(remaining, buffer.len)]});

        if (read == 0) {
            break;
        }

        if (try output.yield(.{buffer[0 .. read]}) != read) {
            return error.UnavailableResource;
        }

        remaining -= read;
    }

    return limit - remaining;
}

289
src/coral/map.zig Normal file
View File

@ -0,0 +1,289 @@
const coral = @import("./coral.zig");
const hash = @import("./hash.zig");
const io = @import("./io.zig");
const std = @import("std");
/// Open-addressed (linear-probing) hash map from `Key` to `Value`, using the
/// supplied `traits` for hashing and key equality.
pub fn Hashed(comptime Key: type, comptime Value: type, comptime traits: Traits(Key)) type {
    // Rehash once the map is three-quarters full.
    const load_max = 0.75;
    const max_int = std.math.maxInt(usize);

    return struct {
        allocator: std.mem.Allocator,
        entry_map: []?Entry = &.{},
        len: usize = 0,

        pub const Entry = struct {
            key: Key,
            value: Value,

            // Linear-probes `table` for a free slot; returns false when the
            // key already exists. `table` must have spare capacity.
            fn write_into(self: Entry, table: *Self) bool {
                const hash_max = @min(max_int, table.entry_map.len);
                var hashed_key = traits.hash(self.key) % hash_max;
                var iterations = @as(usize, 0);

                while (true) : (iterations += 1) {
                    std.debug.assert(iterations < table.entry_map.len);

                    const table_entry = &(table.entry_map[hashed_key] orelse {
                        table.entry_map[hashed_key] = .{
                            .key = self.key,
                            .value = self.value,
                        };

                        table.len += 1;

                        return true;
                    });

                    if (traits.are_equal(table_entry.key, self.key)) {
                        return false;
                    }

                    hashed_key = (hashed_key +% 1) % hash_max;
                }
            }
        };

        /// Iterator over all live entries, in table order.
        pub const Entries = struct {
            table: *const Self,
            iterations: usize,

            pub fn next(self: *Entries) ?*Entry {
                while (self.iterations < self.table.entry_map.len) {
                    defer self.iterations += 1;

                    if (self.table.entry_map[self.iterations]) |*entry| {
                        return entry;
                    }
                }

                return null;
            }
        };

        const Self = @This();

        /// Returns an iterator over all live entries.
        pub fn entries(self: *const Self) Entries {
            return .{
                .table = self,
                .iterations = 0,
            };
        }

        /// Removes `key`, returning the removed entry or null when absent.
        pub fn remove(self: *Self, key: Key) ?Entry {
            // The original probed with the nonexistent `key.hash()` /
            // `self.keys_equal`, spun forever on empty slots (`orelse
            // continue`), divided by zero on an empty map, and never
            // decremented `len`.
            if (self.len == 0) {
                return null;
            }

            const hash_max = @min(max_int, self.entry_map.len);
            var hashed_key = traits.hash(key) % hash_max;
            var iterations = @as(usize, 0);

            while (iterations < self.entry_map.len) : (iterations += 1) {
                if (self.entry_map[hashed_key]) |entry| {
                    if (traits.are_equal(entry.key, key)) {
                        self.entry_map[hashed_key] = null;

                        self.len -= 1;

                        // NOTE(review): clearing a slot without tombstones can
                        // break probe chains for colliding keys — confirm the
                        // intended removal strategy.
                        return entry;
                    }
                } else {
                    // Empty slot terminates the probe chain: key is absent.
                    return null;
                }

                hashed_key = (hashed_key +% 1) % hash_max;
            }

            return null;
        }

        /// Inserts or overwrites `key`, returning the previous entry if any.
        pub fn replace(self: *Self, key: Key, value: Value) std.mem.Allocator.Error!?Entry {
            try self.rehash(load_max);

            std.debug.assert(self.entry_map.len > self.len);

            {
                const hash_max = @min(max_int, self.entry_map.len);
                var hashed_key = traits.hash(key) % hash_max;

                while (true) {
                    const entry = &(self.entry_map[hashed_key] orelse {
                        self.entry_map[hashed_key] = .{
                            .key = key,
                            .value = value,
                        };

                        self.len += 1;

                        return null;
                    });

                    if (traits.are_equal(key, entry.key)) {
                        const original_entry = entry.*;

                        entry.* = .{
                            .key = key,
                            .value = value,
                        };

                        return original_entry;
                    }

                    hashed_key = (hashed_key +% 1) % hash_max;
                }
            }
        }

        /// Removes every entry while keeping the allocated capacity.
        pub fn clear(self: *Self) void {
            for (self.entry_map) |*entry| {
                entry.* = null;
            }

            self.len = 0;
        }

        /// Frees the table; the map must not be used afterwards.
        pub fn deinit(self: *Self) void {
            if (self.entry_map.len == 0) {
                return;
            }

            self.allocator.free(self.entry_map);

            self.* = undefined;
        }

        /// Returns a pointer to the value stored under `key`, or null.
        pub fn get_ptr(self: Self, key: Key) ?*Value {
            if (self.len == 0) {
                return null;
            }

            const hash_max = @min(max_int, self.entry_map.len);
            var hashed_key = traits.hash(key) % hash_max;
            var iterations = @as(usize, 0);

            while (iterations < self.len) : (iterations += 1) {
                const entry = &(self.entry_map[hashed_key] orelse return null);

                if (traits.are_equal(entry.key, key)) {
                    return &entry.value;
                }

                hashed_key = (hashed_key +% 1) % hash_max;
            }

            return null;
        }

        /// Returns a copy of the value stored under `key`, or null.
        pub fn get(self: Self, key: Key) ?Value {
            if (self.get_ptr(key)) |value| {
                return value.*;
            }

            return null;
        }

        /// Inserts `key` -> `value`; returns false when the key already exists.
        pub fn emplace(self: *Self, key: Key, value: Value) std.mem.Allocator.Error!bool {
            try self.rehash(load_max);

            std.debug.assert(self.entry_map.len > self.len);

            const entry = Entry{
                .key = key,
                .value = value,
            };

            return entry.write_into(self);
        }

        /// Ratio of live entries to table capacity (1 for an empty table).
        pub fn load_factor(self: Self) f32 {
            return if (self.entry_map.len == 0) 1 else @as(f32, @floatFromInt(self.len)) / @as(f32, @floatFromInt(self.entry_map.len));
        }

        /// Doubles the table when the load factor exceeds `max_load`.
        pub fn rehash(self: *Self, max_load: f32) std.mem.Allocator.Error!void {
            if (self.load_factor() <= max_load) {
                return;
            }

            var table = Self{
                .allocator = self.allocator,
            };

            errdefer table.deinit();

            table.entry_map = allocate: {
                const min_len = @max(1, self.len);
                const table_size = min_len * 2;
                const zeroed_entry_map = try self.allocator.alloc(?Entry, table_size);

                errdefer self.allocator.free(zeroed_entry_map);

                for (zeroed_entry_map) |*entry| {
                    entry.* = null;
                }

                break: allocate zeroed_entry_map;
            };

            for (self.entry_map) |maybe_entry| {
                if (maybe_entry) |entry| {
                    std.debug.assert(entry.write_into(&table));
                }
            }

            self.deinit();

            self.* = table;
        }
    };
}
}
/// Operations a [Hashed] map needs in order to use `Key` as a key type.
pub fn Traits(comptime Key: type) type {
    return struct {
        // Returns true when two keys are equivalent.
        are_equal: fn (Key, Key) bool,
        // Maps a key to a bucket-selection hash.
        hash: fn (Key) usize,
    };
}
/// Returns [Traits] for any enum type, hashing by its integer tag value.
pub fn enum_traits(comptime Enum: type) Traits(Enum) {
    const implementation = struct {
        fn are_equal(a: Enum, b: Enum) bool {
            return a == b;
        }

        fn hash(value: Enum) usize {
            return @intFromEnum(value) % std.math.maxInt(usize);
        }
    };

    return .{
        .are_equal = implementation.are_equal,
        .hash = implementation.hash,
    };
}
/// [Traits] for `[]const u8` keys: compares contents, hashes with djb2.
pub const string_traits = init: {
    const djb2 = hash.djb2;

    const strings = struct {
        fn hash(value: []const u8) usize {
            return djb2(@typeInfo(usize).Int, value);
        }
    };

    break: init Traits([]const u8){
        .are_equal = coral.io.are_equal,
        .hash = strings.hash,
    };
};
/// [Traits] for `usize` keys: identity hash, direct comparison.
pub const usize_traits = init: {
    const usizes = struct {
        fn are_equal(a: usize, b: usize) bool {
            return a == b;
        }

        fn hash(value: usize) usize {
            return value;
        }
    };

    break: init Traits(usize){
        .are_equal = usizes.are_equal,
        .hash = usizes.hash,
    };
};

76
src/coral/resource.zig Normal file
View File

@ -0,0 +1,76 @@
const std = @import("std");
const heap = @import("./heap.zig");
const map = @import("./map.zig");
/// Type-indexed storage for singleton values; instances live in an arena and
/// are looked up by [TypeID].
pub const Table = struct {
    arena: std.heap.ArenaAllocator,
    table: map.Hashed(TypeID, Entry, map.enum_traits(TypeID)),

    const Entry = struct {
        ptr: *anyopaque,
    };

    /// Releases the lookup table and every stored value.
    pub fn deinit(self: *Table) void {
        self.table.deinit();
        self.arena.deinit();

        self.* = undefined;
    }

    /// Returns the stored `Resource` instance, or null when it was never set.
    pub fn get(self: Table, comptime Resource: type) ?*Resource {
        const entry = self.table.get_ptr(type_id(Resource)) orelse {
            return null;
        };

        return @ptrCast(@alignCast(entry.ptr));
    }

    /// Creates an empty table backed by the global heap.
    pub fn init() Table {
        return .{
            .arena = std.heap.ArenaAllocator.init(heap.allocator),
            .table = .{.allocator = heap.allocator},
        };
    }

    /// Stores `value` and returns a pointer to the stored copy.
    pub fn set_get(self: *Table, value: anytype) std.mem.Allocator.Error!*@TypeOf(value) {
        try self.set(value);

        return self.get(@TypeOf(value)).?;
    }

    /// Stores `value`, overwriting any existing instance of its type.
    pub fn set(self: *Table, value: anytype) std.mem.Allocator.Error!void {
        const Value = @TypeOf(value);
        const value_id = type_id(Value);

        if (self.table.get_ptr(value_id)) |existing| {
            @as(*Value, @ptrCast(@alignCast(existing.ptr))).* = value;

            return;
        }

        const resource_allocator = self.arena.allocator();
        const allocated_resource = try resource_allocator.create(Value);

        errdefer resource_allocator.destroy(allocated_resource);

        std.debug.assert(try self.table.emplace(value_id, .{
            .ptr = allocated_resource,
        }));

        allocated_resource.* = value;
    }
};
/// Process-unique identifier for a compile-time type.
pub const TypeID = enum (usize) { _ };

/// Returns a stable, process-unique [TypeID] for `T`.
///
/// Works by instantiating one distinct global variable per `T` and using that
/// variable's address as the identity.
pub fn type_id(comptime T: type) TypeID {
    const TypeHandle = struct {
        comptime {
            _ = T;
        }

        // One global byte per instantiation; its address is the ID.
        var byte: u8 = 0;
    };

    return @enumFromInt(@intFromPtr(&TypeHandle.byte));
}

39
src/coral/scalars.zig Normal file
View File

@ -0,0 +1,39 @@
const std = @import("std");
/// Returns `a + b`, or null when the sum would overflow.
pub fn add(a: anytype, b: anytype) ?@TypeOf(a + b) {
    const sum = @addWithOverflow(a, b);

    if (sum[1] != 0) {
        return null;
    }

    return sum[0];
}
/// Returns `value` scaled by `fraction`, or null when `fraction` lies outside
/// [0, 1]. `fraction` must be a float type.
pub fn fractional(value: anytype, fraction: anytype) ?@TypeOf(value) {
    const Fraction = @TypeOf(fraction);

    if (@typeInfo(Fraction) != .Float) {
        @compileError("`fraction` expected float type, not " ++ @typeName(Fraction));
    }

    if (fraction < 0 or fraction > 1) {
        return null;
    }

    return @as(@TypeOf(value), @intFromFloat(@as(Fraction, @floatFromInt(value)) * fraction));
}
/// Interpolates between two angles (radians) by `weight`, travelling along
/// the shortest arc.
pub fn lerp_angle(origin_angle: anytype, target_angle: anytype, weight: anytype) @TypeOf(origin_angle, target_angle, weight) {
    const difference = @mod(target_angle - origin_angle, std.math.tau);
    const shortest_distance = @mod(2.0 * difference, std.math.tau) - difference;

    return origin_angle + shortest_distance * weight;
}
/// Returns `a * b`, or null when the product would overflow.
pub fn mul(a: anytype, b: anytype) ?@TypeOf(a * b) {
    const product = @mulWithOverflow(a, b);

    if (product[1] != 0) {
        return null;
    }

    return product[0];
}
/// Returns `a - b`, or null when the difference would overflow.
pub fn sub(a: anytype, b: anytype) ?@TypeOf(a - b) {
    const difference = @subWithOverflow(a, b);

    if (difference[1] != 0) {
        return null;
    }

    return difference[0];
}

170
src/coral/slices.zig Normal file
View File

@ -0,0 +1,170 @@
const io = @import("./io.zig");
const std = @import("std");
/// Returns the single-element pointer type corresponding to slice type
/// `Slice`, preserving its const/volatile qualifiers and alignment.
pub fn ElementPtr(comptime Slice: type) type {
    const pointer_info = @typeInfo(Slice).Pointer;

    return @Type(.{
        .Pointer = .{
            .size = .One,
            .is_const = pointer_info.is_const,
            .is_volatile = pointer_info.is_volatile,
            .alignment = pointer_info.alignment,
            .address_space = pointer_info.address_space,
            .child = pointer_info.child,
            .is_allowzero = false,
            .sentinel = null,
        },
    });
}
/// Struct-of-arrays view over the fields of `Type`: one column pointer per
/// field, all sharing the same length. Does not own its storage.
pub fn Parallel(comptime Type: type) type {
    const fields = @typeInfo(Type).Struct.fields;
    const alignment = @alignOf(Type);

    return struct {
        len: usize = 0,
        ptrs: [fields.len][*]align (alignment) io.Byte = undefined,

        /// Element type of the column selected by `field`.
        pub fn Element(comptime field: Field) type {
            return fields[@intFromEnum(field)].type;
        }

        pub const Field = std.meta.FieldEnum(Type);

        const Self = @This();

        const all_fields = std.enums.values(Field);

        /// Typed base pointer of `field`'s column.
        pub fn ptr(self: Self, comptime field: Field) [*]align (alignment) Element(field) {
            return @as([*]align (alignment) Element(field), @ptrCast(self.ptrs[@intFromEnum(field)]));
        }

        /// Typed slice over `field`'s column.
        pub fn slice(self: Self, comptime field: Field) []align (alignment) Element(field) {
            return self.ptr(field)[0 .. self.len];
        }

        /// Returns a window of `len` rows starting at row `off`, or null when
        /// the window would run past the stored length.
        pub fn slice_all(self: Self, off: usize, len: usize) ?Self {
            // Original checked `len > self.len or off > len`, which both
            // allowed `off + len` to overrun the end and wrongly rejected
            // valid windows near the end.
            if (off > self.len or len > self.len - off) {
                return null;
            }

            var sliced = Self{.len = len};

            inline for (0 .. fields.len) |i| {
                sliced.ptrs[i] = @ptrFromInt(@intFromPtr(self.ptrs[i]) + (@sizeOf(Element(all_fields[i])) * off));
            }

            return sliced;
        }

        /// Copy of row `index` in `field`'s column, or null when out of range.
        pub fn get(self: Self, comptime field: Field, index: usize) ?Element(field) {
            if (index >= self.len) {
                return null;
            }

            return self.ptr(field)[index];
        }

        /// Pointer to row `index` in `field`'s column, or null when out of range.
        pub fn get_ptr(self: Self, comptime field: Field, index: usize) ?*Element(field) {
            if (index >= self.len) {
                return null;
            }

            return &self.ptr(field)[index];
        }

        /// Writes `value` into row `index` of `field`'s column; returns false
        /// when out of range.
        pub fn set(self: Self, comptime field: Field, index: usize, value: Element(field)) bool {
            if (index >= self.len) {
                return false;
            }

            self.slice(field)[index] = value;

            return true;
        }

        /// Writes every field of `value` into row `index`; returns false when
        /// out of range.
        pub fn set_all(self: Self, index: usize, value: Type) bool {
            if (index >= self.len) {
                return false;
            }

            inline for (0 .. fields.len) |i| {
                self.slice(all_fields[i])[index] = @field(value, fields[i].name);
            }

            return true;
        }
    };
}
/// Returns a copy of `slice[index]`, or null when `index` is out of bounds.
pub fn get(slice: anytype, index: usize) ?@typeInfo(@TypeOf(slice)).Pointer.child {
    if (index < slice.len) {
        return slice[index];
    }

    return null;
}
/// Returns a pointer to `slice[index]`, or null when `index` is out of bounds.
pub fn get_ptr(slice: anytype, index: usize) ?ElementPtr(@TypeOf(slice)) {
    if (index < slice.len) {
        return &slice[index];
    }

    return null;
}
/// Returns the first index at or after `offset` where `needle` occurs in
/// `haystack`, or null when it never does.
pub fn index_of(haystack: anytype, offset: usize, needle: std.meta.Child(@TypeOf(haystack))) ?usize {
    var index = offset;

    while (index < haystack.len) : (index += 1) {
        if (haystack[index] == needle) {
            return index;
        }
    }

    return null;
}
/// Returns the first index at or after `offset` where any element of `needle`
/// occurs in `haystack`, or null.
pub fn index_of_any(haystack: anytype, offset: usize, needle: []const std.meta.Child(@TypeOf(haystack))) ?usize {
    return std.mem.indexOfAnyPos(std.meta.Child(@TypeOf(haystack)), haystack, offset, needle);
}
/// Returns the first index at or after `offset` where the sequence `needle`
/// occurs in `haystack`, or null.
pub fn index_of_seq(haystack: anytype, offset: usize, needle: []const std.meta.Child(@TypeOf(haystack))) ?usize {
    return std.mem.indexOfPos(std.meta.Child(@TypeOf(haystack)), haystack, offset, needle);
}
/// Allocates `n` rows of column storage for every field of `Element`,
/// returning the populated [Parallel] view. Free with [parallel_free].
pub fn parallel_alloc(comptime Element: type, allocator: std.mem.Allocator, n: usize) std.mem.Allocator.Error!Parallel(Element) {
    const alignment = @alignOf(Element);
    const Slices = Parallel(Element);

    var buffers = @as([std.enums.values(Slices.Field).len][]align (alignment) io.Byte, undefined);
    var buffers_allocated = @as(usize, 0);
    var allocated = Slices{.len = n};

    // Roll back any columns already allocated if a later one fails.
    errdefer {
        for (0 .. buffers_allocated) |i| {
            allocator.free(buffers[i]);
        }
    }

    const fields = @typeInfo(Element).Struct.fields;

    inline for (0 .. fields.len) |i| {
        buffers[i] = try allocator.alignedAlloc(io.Byte, alignment, @sizeOf(fields[i].type) * n);
        buffers_allocated += 1;
        allocated.ptrs[i] = buffers[i].ptr;
    }

    return allocated;
}
/// Copies every column of `origin` into `target`; lengths must match.
pub fn parallel_copy(comptime Element: type, target: Parallel(Element), origin: Parallel(Element)) void {
    inline for (comptime std.enums.values(Parallel(Element).Field)) |field| {
        @memcpy(target.slice(field), origin.slice(field));
    }
}
/// Frees column storage previously created by [parallel_alloc].
pub fn parallel_free(comptime Element: type, allocator: std.mem.Allocator, buffers: Parallel(Element)) void {
    inline for (comptime std.enums.values(Parallel(Element).Field)) |field| {
        allocator.free(buffers.slice(field));
    }
}

23
src/coral/slots.zig Normal file
View File

@ -0,0 +1,23 @@
const slices = @import("./slices.zig");
const std = @import("std");
/// Allocator-owning wrapper around a [slices.Parallel] column store for `Value`.
pub fn Parallel(comptime Value: type) type {
    const Slices = slices.Parallel(Value);
    const alignment = @alignOf(Value);

    return struct {
        allocator: std.mem.Allocator,
        slices: slices.Parallel(Value) = .{},

        const Self = @This();

        /// Number of rows currently stored.
        pub fn len(self: Self) usize {
            return self.slices.len;
        }

        /// Mutable column of `field` values.
        pub fn values(self: *Self, comptime field: Slices.Field) []align (alignment) Slices.Element(field) {
            return self.slices.slice(field);
        }
    };
}

252
src/coral/stack.zig Normal file
View File

@ -0,0 +1,252 @@
const io = @import("./io.zig");
const scalars = @import("./scalars.zig");
const slices = @import("./slices.zig");
const std = @import("std");
pub fn Sequential(comptime Value: type) type {
return struct {
allocator: std.mem.Allocator,
values: []Value = &.{},
cap: usize = 0,
const Self = @This();
/// Removes all values while keeping the allocated capacity.
pub fn clear(self: *Self) void {
    self.values = self.values[0 .. 0];
}
/// Frees the backing buffer; the stack must not be used afterwards.
pub fn deinit(self: *Self) void {
    // Nothing was ever allocated.
    if (self.cap == 0) {
        return;
    }

    self.allocator.free(self.values.ptr[0 .. self.cap]);

    self.* = undefined;
}
/// Grows the capacity by `additional` elements, reallocating and copying the
/// existing values.
pub fn grow(self: *Self, additional: usize) std.mem.Allocator.Error!void {
    const grown_capacity = self.cap + additional;
    const buffer = try self.allocator.alloc(Value, grown_capacity);

    // `std.mem.Allocator` has no `deallocate`; the original errdefer named a
    // method that does not exist.
    errdefer self.allocator.free(buffer);

    if (self.cap != 0) {
        @memcpy(buffer[0 .. self.values.len], self.values);
        self.allocator.free(self.values.ptr[0 .. self.cap]);
    }

    // Keep the logical length; only the capacity changes.
    self.values = buffer[0 .. self.values.len];
    self.cap = grown_capacity;
}
pub fn is_empty(self: Self) bool {
return self.values.len == 0;
}
pub fn get(self: Self) ?Value {
if (self.get_ptr()) |value| {
return value.*;
}
return null;
}
pub fn get_ptr(self: Self) ?*Value {
if (self.values.len == 0) {
return null;
}
return &self.values[self.values.len - 1];
}
pub fn len(self: Self) usize {
return self.values.len;
}
pub fn pop(self: *Self) bool {
if (self.values.len == 0) {
return false;
}
self.values = self.values[0 .. self.values.len - 1];
return true;
}
pub fn pop_many(self: *Self, n: usize) bool {
const new_length = scalars.sub(self.values.len, n) orelse {
return false;
};
self.values = self.values[0 .. new_length];
return true;
}
pub fn push(self: *Self, value: Value) std.mem.Allocator.Error!void {
if (self.values.len == self.cap) {
try self.grow(@max(1, self.cap));
}
const offset_index = self.values.len;
self.values = self.values.ptr[0 .. self.values.len + 1];
self.values[offset_index] = value;
}
pub fn push_all(self: *Self, values: []const Value) std.mem.Allocator.Error!void {
const new_length = self.values.len + values.len;
if (new_length > self.cap) {
try self.grow(new_length);
}
const offset_index = self.values.len;
self.values = self.values.ptr[0 .. new_length];
for (0 .. values.len) |index| {
self.values[offset_index + index] = values[index];
}
}
pub fn push_many(self: *Self, n: usize, value: Value) std.mem.Allocator.Error!void {
const new_length = self.values.len + n;
if (new_length > self.cap) {
try self.grow(new_length);
}
const offset_index = self.values.len;
self.values = self.values.ptr[0 .. new_length];
for (0 .. n) |index| {
self.values[offset_index + index] = value;
}
}
pub fn resize(self: *Self, size: usize, default_value: Value) std.mem.Allocator.Error!void {
if (self.cap == size) {
return;
}
const values = try self.allocator.alloc(Value, size);
for (0 .. @min(values.len, self.values.len)) |i| {
values[i] = self.values[i];
}
if (values.len > self.values.len) {
for (self.values.len .. values.len) |i| {
values[i] = default_value;
}
}
self.values = values[0 .. values.len];
self.cap = values.len;
}
pub fn to_allocation(self: *Self, size: usize, default_value: Value) std.mem.Allocator.Error![]Value {
defer {
self.values = &.{};
self.cap = 0;
}
const allocation = try self.allocator.realloc(self.values.ptr[0 .. self.cap], size);
for (allocation[@min(self.values.len, size) .. size]) |*value| {
value.* = default_value;
}
return allocation;
}
pub const writer = switch (Value) {
io.Byte => struct {
fn writer(self: *Self) io.Writer {
return io.Writer.bind(Self, self, write);
}
fn write(self: *Self, buffer: []const io.Byte) io.Error!usize {
self.push_all(buffer) catch return error.UnavailableResource;
return buffer.len;
}
}.writer,
else => @compileError("only `Stack(Byte)` has a `reader()` method"),
};
};
}
/// A growable structure-of-arrays stack of `Value`, storing each struct field
/// in its own parallel buffer.
pub fn Parallel(comptime Value: type) type {
    const Slices = slices.Parallel(Value);
    const alignment = @alignOf(Value);

    return struct {
        allocator: std.mem.Allocator,
        values: Slices = .{},
        cap: usize = 0,

        pub const Field = std.meta.FieldEnum(Value);

        const Self = @This();

        /// Resets the length to zero; capacity is retained.
        pub fn clear(self: *Self) void {
            // FIX: `slice_all` returns an optional (it is unwrapped with `.?`
            // everywhere else in this file); a 0..0 re-slice of the live view
            // cannot fail.
            self.values = self.values.slice_all(0, 0).?;
        }

        /// Frees all field buffers, if any, and invalidates the stack.
        pub fn deinit(self: *Self) void {
            if (self.cap != 0) {
                var capacity_slice = self.values;

                // Widen the live view to the full capacity so the whole
                // allocation is released, not just the used prefix.
                capacity_slice.len = self.cap;
                slices.parallel_free(Value, self.allocator, capacity_slice);
            }

            self.* = undefined;
        }

        /// Returns a pointer to the top element's `field` value, or null when
        /// the stack is empty.
        pub fn get_ptr(self: Self, comptime field: Slices.Field) ?*align (alignment) Slices.Element(field) {
            if (self.len() == 0) {
                return null;
            }

            // FIX: was `self.slices.field_slice(field)` — this struct has no
            // `slices` field and the parallel slice API exposes `slice`, not
            // `field_slice`.
            return &self.values.slice(field)[self.len() - 1];
        }

        /// Increases capacity by `additional` elements, preserving contents.
        pub fn grow(self: *Self, additional: usize) std.mem.Allocator.Error!void {
            const grown_capacity = self.cap + additional;
            const buffer = try slices.parallel_alloc(Value, self.allocator, grown_capacity);

            if (self.cap != 0) {
                slices.parallel_copy(Value, buffer.slice_all(0, self.values.len).?, self.values);
                slices.parallel_free(Value, self.allocator, self.values.slice_all(0, self.cap).?);
            }

            self.cap = grown_capacity;
            self.values = buffer.slice_all(0, self.values.len).?;
        }

        pub fn len(self: Self) usize {
            return self.values.len;
        }

        /// Appends `value` across all field buffers, doubling capacity when
        /// full.
        pub fn push(self: *Self, value: Value) std.mem.Allocator.Error!void {
            if (self.len() == self.cap) {
                try self.grow(@max(1, self.cap));
            }

            const tail_index = self.values.len;

            self.values.len += 1;

            std.debug.assert(self.values.set_all(tail_index, value));
        }
    };
}

497
src/coral/system.zig Normal file
View File

@ -0,0 +1,497 @@
const dag = @import("./dag.zig");
const heap = @import("./heap.zig");
const map = @import("./map.zig");
const resource = @import("./resource.zig");
const slices = @import("./slices.zig");
const stack = @import("./stack.zig");
const std = @import("std");
const World = @import("./World.zig");
/// Per-system context handed to parameter `bind` hooks while a system is being
/// added to a schedule. It records which resources the system touches so the
/// schedule can later derive safe execution ordering.
pub const BindContext = struct {
    node: dag.Node,
    systems: *Schedule,
    world: *World,

    pub const ResourceAccess = std.meta.Tag(Schedule.ResourceAccess);

    /// Returns whether this system's graph node already records an access of
    /// kind `access` for the resource identified by `id`.
    pub fn accesses_resource(self: BindContext, access: ResourceAccess, id: resource.TypeID) bool {
        const resource_accesses = &self.systems.graph.get_ptr(self.node).?.resource_accesses;

        for (resource_accesses.values) |resource_access| {
            switch (resource_access) {
                .read_only => |resource_id| {
                    if (access == .read_only and resource_id == id) {
                        return true;
                    }
                },

                .read_write => |resource_id| {
                    if (access == .read_write and resource_id == id) {
                        return true;
                    }
                },
            }
        }

        return false;
    }

    /// Fetches `Resource` from the world, returning null when it is absent.
    /// On success, records a read-write access on this system's node (once)
    /// and registers the node in the schedule's read-write resource index
    /// (once).
    pub fn register_read_write_resource_access(self: BindContext, thread_restriction: World.ThreadRestriction, comptime Resource: type) std.mem.Allocator.Error!?*Resource {
        const value = self.world.get_resource(thread_restriction, Resource) orelse {
            return null;
        };

        const id = resource.type_id(Resource);

        if (!self.accesses_resource(.read_write, id)) {
            try self.systems.graph.get_ptr(self.node).?.resource_accesses.push(.{.read_write = id});
        }

        // Lazily create the per-resource node list on first access.
        const read_write_resource_nodes = lazily_create: {
            break: lazily_create self.systems.read_write_resource_id_nodes.get_ptr(id) orelse insert: {
                std.debug.assert(try self.systems.read_write_resource_id_nodes.emplace(id, .{
                    .allocator = heap.allocator,
                }));

                break: insert self.systems.read_write_resource_id_nodes.get_ptr(id).?;
            };
        };

        if (slices.index_of(read_write_resource_nodes.values, 0, self.node) == null) {
            try read_write_resource_nodes.push(self.node);
        }

        return value;
    }

    /// Read-only counterpart of `register_read_write_resource_access`.
    // NOTE(review): this mirrors the read-write variant line for line; a
    // shared helper would keep the two paths from drifting apart.
    pub fn register_read_only_resource_access(self: BindContext, thread_restriction: World.ThreadRestriction, comptime Resource: type) std.mem.Allocator.Error!?*const Resource {
        const value = self.world.get_resource(thread_restriction, Resource) orelse {
            return null;
        };

        const id = resource.type_id(Resource);

        if (!self.accesses_resource(.read_only, id)) {
            try self.systems.graph.get_ptr(self.node).?.resource_accesses.push(.{.read_only = id});
        }

        // Lazily create the per-resource node list on first access.
        const read_only_resource_nodes = lazily_create: {
            break: lazily_create self.systems.read_only_resource_id_nodes.get_ptr(id) orelse insert: {
                std.debug.assert(try self.systems.read_only_resource_id_nodes.emplace(id, .{
                    .allocator = heap.allocator,
                }));

                break: insert self.systems.read_only_resource_id_nodes.get_ptr(id).?;
            };
        };

        if (slices.index_of(read_only_resource_nodes.values, 0, self.node) == null) {
            try read_only_resource_nodes.push(self.node);
        }

        return value;
    }
};
/// Static description of a system: its entry point, its parameter
/// descriptors, and the thread it is restricted to.
pub const Info = struct {
    /// Invoked with the used parameter descriptors and their bound states.
    execute: *const fn ([]const *const Parameter, *const [max_parameters]?*anyopaque) anyerror!void,
    parameters: [max_parameters]*const Parameter = undefined,

    /// Number of valid entries in `parameters`.
    ///
    /// FIX: was `u4`, whose maximum value is 15 — one short of
    /// `max_parameters` (16), making a fully-populated parameter list
    /// unrepresentable. `u5` covers the inclusive count range 0–16.
    parameter_count: u5 = 0,

    thread_restriction: World.ThreadRestriction = .none,

    /// Lifetime hooks for one system parameter's bound state.
    pub const Parameter = struct {
        thread_restriction: World.ThreadRestriction,
        init: *const fn (*anyopaque, ?*anyopaque) void,
        bind: *const fn (std.mem.Allocator, BindContext) std.mem.Allocator.Error!?*anyopaque,
        unbind: *const fn (std.mem.Allocator, ?*anyopaque) void,
    };

    /// Returns the populated prefix of `parameters`.
    pub fn used_parameters(self: *const Info) []const *const Parameter {
        return self.parameters[0 .. self.parameter_count];
    }
};
/// Options for adding a system to a schedule: a debug label plus explicit
/// ordering constraints against other systems, identified by their `Info`.
pub const Order = struct {
    label: []const u8 = "",
    run_after: []const *const Info = &.{},
    run_before: []const *const Info = &.{},
};
/// A named collection of systems plus the cached execution plan derived from
/// their explicit orderings and recorded resource accesses. The plan (parallel
/// bundles plus main-thread-only work) is rebuilt lazily on the next `run`
/// after any mutation — see `invalidate_work`.
pub const Schedule = struct {
    label: [:0]const u8,
    graph: Graph,
    arena: std.heap.ArenaAllocator,
    system_id_nodes: map.Hashed(usize, NodeBundle, map.usize_traits),
    read_write_resource_id_nodes: ResourceNodeBundle,
    read_only_resource_id_nodes: ResourceNodeBundle,
    parallel_work_bundles: ParallelNodeBundles,
    blocking_work: NodeBundle,

    /// One explicit ordering constraint; `id` is the address of the other
    /// system's `Info` (see `then`).
    const Dependency = struct {
        kind: Kind,
        id: usize,

        const Kind = enum {
            after,
            before,
        };
    };

    const Graph = dag.Graph(struct {
        info: *const Info,
        label: [:0]u8,
        dependencies: []Dependency,
        parameter_states: [max_parameters]?*anyopaque = [_]?*anyopaque{null} ** max_parameters,
        resource_accesses: stack.Sequential(ResourceAccess),
    });

    const NodeBundle = stack.Sequential(dag.Node);

    const ParallelNodeBundles = stack.Sequential(NodeBundle);

    const ResourceAccess = union (enum) {
        read_only: resource.TypeID,
        read_write: resource.TypeID,
    };

    const ResourceNodeBundle = map.Hashed(resource.TypeID, NodeBundle, map.enum_traits(resource.TypeID));

    /// Releases every node-index bundle, unbinds all system parameter states,
    /// and frees per-system allocations before tearing down the graph and the
    /// arena.
    pub fn deinit(self: *Schedule) void {
        {
            var nodes = self.system_id_nodes.entries();

            while (nodes.next()) |node| {
                node.value.deinit();
            }
        }

        {
            var nodes = self.read_write_resource_id_nodes.entries();

            while (nodes.next()) |node| {
                node.value.deinit();
            }
        }

        {
            var nodes = self.read_only_resource_id_nodes.entries();

            while (nodes.next()) |node| {
                node.value.deinit();
            }
        }

        var nodes = self.graph.nodes();

        while (nodes.next()) |node| {
            const system = self.graph.get_ptr(node).?;

            for (system.info.used_parameters(), system.parameter_states[0 .. system.info.parameter_count]) |parameter, state| {
                parameter.unbind(self.arena.allocator(), state);
            }

            system.resource_accesses.deinit();

            // These were duped with `heap.allocator` in `then`.
            heap.allocator.free(system.dependencies);
            heap.allocator.free(system.label);
        }

        for (self.parallel_work_bundles.values) |*bundle| {
            bundle.deinit();
        }

        self.parallel_work_bundles.deinit();
        self.blocking_work.deinit();
        self.graph.deinit();
        self.system_id_nodes.deinit();
        self.read_write_resource_id_nodes.deinit();
        self.read_only_resource_id_nodes.deinit();
        self.arena.deinit();
    }

    /// Executes all scheduled systems against `world`, first regenerating the
    /// dependency graph and sorted work bundles when the plan is invalidated.
    pub fn run(self: *Schedule, world: *World) anyerror!void {
        if (self.is_invalidated()) {
            const work = struct {
                /// Rebuilds graph edges from explicit dependencies and from
                /// implicit read/write resource-access conflicts.
                fn regenerate_graph(schedule: *Schedule) !void {
                    schedule.graph.clear_edges();

                    var nodes = schedule.graph.nodes();

                    while (nodes.next()) |node| {
                        const system = schedule.graph.get_ptr(node).?;

                        for (system.dependencies) |order| {
                            // FIX: this previously looked up
                            // `@intFromPtr(system.info)` — the system's own id
                            // — so every explicit dependency resolved back to
                            // the declaring system and `order.id` was never
                            // read. Resolve the depended-upon system instead.
                            const dependencies = schedule.system_id_nodes.get(order.id) orelse {
                                @panic("unable to resolve missing explicit system dependency");
                            };

                            if (dependencies.is_empty()) {
                                @panic("unable to resolve missing explicit system dependency");
                            }

                            switch (order.kind) {
                                .after => {
                                    for (dependencies.values) |dependency_node| {
                                        std.debug.assert(try schedule.graph.insert_edge(node, dependency_node));
                                    }
                                },

                                .before => {
                                    for (dependencies.values) |dependency_node| {
                                        std.debug.assert(try schedule.graph.insert_edge(dependency_node, node));
                                    }
                                },
                            }
                        }

                        for (system.resource_accesses.values) |resource_access| {
                            switch (resource_access) {
                                .read_write => |resource_id| {
                                    // A writer must be ordered against all
                                    // other writers and all readers.
                                    const read_write_dependencies = schedule.read_write_resource_id_nodes.get(resource_id) orelse {
                                        @panic("unable to resolve missing implicit read-write parameter dependency");
                                    };

                                    for (read_write_dependencies.values) |dependency_node| {
                                        std.debug.assert(try schedule.graph.insert_edge(node, dependency_node));
                                    }

                                    if (schedule.read_only_resource_id_nodes.get(resource_id)) |dependencies| {
                                        for (dependencies.values) |dependency_node| {
                                            std.debug.assert(try schedule.graph.insert_edge(node, dependency_node));
                                        }
                                    }
                                },

                                .read_only => |resource_id| {
                                    // Readers only need ordering against other
                                    // readers' recorded nodes here; writer
                                    // edges were added by the writer side.
                                    if (schedule.read_only_resource_id_nodes.get(resource_id)) |dependencies| {
                                        for (dependencies.values) |dependency_node| {
                                            std.debug.assert(try schedule.graph.insert_edge(node, dependency_node));
                                        }
                                    }
                                },
                            }
                        }
                    }
                }

                /// Depth-first post-order walk: dependencies land in the
                /// bundle before their dependents.
                fn populate_bundle(bundle: *NodeBundle, graph: *Graph, node: dag.Node) !void {
                    std.debug.assert(graph.mark_visited(node));

                    for (graph.edge_nodes(node).?) |edge| {
                        if (graph.visited(edge).?) {
                            continue;
                        }

                        try populate_bundle(bundle, graph, edge);
                    }

                    try bundle.push(node);
                }

                /// Partitions the graph into parallel-safe bundles, then moves
                /// any main-thread-restricted suffix of each bundle (from the
                /// first restricted node onward) into the blocking work list.
                fn sort(schedule: *Schedule) !void {
                    defer schedule.graph.reset_visited();

                    var nodes = schedule.graph.nodes();

                    while (nodes.next()) |node| {
                        if (schedule.graph.visited(node).?) {
                            continue;
                        }

                        try schedule.parallel_work_bundles.push(.{.allocator = heap.allocator});

                        const bundle = schedule.parallel_work_bundles.get_ptr().?;

                        errdefer {
                            bundle.deinit();
                            std.debug.assert(schedule.parallel_work_bundles.pop());
                        }

                        try populate_bundle(bundle, &schedule.graph, node);
                    }

                    for (schedule.parallel_work_bundles.values) |*work| {
                        var index = @as(usize, 0);

                        while (index < work.len()) : (index += 1) {
                            const node = work.values[index];

                            switch (schedule.graph.get_ptr(node).?.info.thread_restriction) {
                                .none => continue,

                                .main => {
                                    const extracted_work = work.values[index ..];

                                    try schedule.blocking_work.push_all(extracted_work);
                                    std.debug.assert(work.pop_many(extracted_work.len));
                                },
                            }
                        }
                    }
                }
            };

            try work.regenerate_graph(self);
            try work.sort(self);
        }

        // TODO: Refactor so the thread pool is a global resource rather than owned per-world.
        if (world.thread_pool) |thread_pool| {
            const parallel = struct {
                fn run(work_group: *std.Thread.WaitGroup, graph: Graph, bundle: NodeBundle) void {
                    defer work_group.finish();

                    for (bundle.values) |node| {
                        const system = graph.get_ptr(node).?;

                        // TODO: std lib thread pool sucks for many reasons and this is one of them.
                        system.info.execute(system.info.used_parameters(), &system.parameter_states) catch unreachable;
                    }
                }
            };

            var work_group = std.Thread.WaitGroup{};

            // NOTE(review): if `spawn` fails mid-loop, `work_group` has an
            // unmatched `start` and already-spawned tasks are abandoned —
            // confirm the caller treats this error as fatal.
            for (self.parallel_work_bundles.values) |bundle| {
                work_group.start();

                try thread_pool.spawn(parallel.run, .{&work_group, self.graph, bundle});
            }

            thread_pool.waitAndWork(&work_group);
        } else {
            // No pool: run every bundle inline on the calling thread.
            for (self.parallel_work_bundles.values) |bundle| {
                for (bundle.values) |node| {
                    const system = self.graph.get_ptr(node).?;

                    try system.info.execute(system.info.used_parameters(), &system.parameter_states);
                }
            }
        }

        // Main-thread-restricted work always runs here, after parallel work.
        for (self.blocking_work.values) |node| {
            const system = self.graph.get_ptr(node).?;

            try system.info.execute(system.info.used_parameters(), &system.parameter_states);
        }
    }

    /// Creates an empty schedule named `label`; the label is copied into the
    /// schedule's arena.
    pub fn init(label: []const u8) std.mem.Allocator.Error!Schedule {
        var arena = std.heap.ArenaAllocator.init(heap.allocator);

        errdefer arena.deinit();

        const duped_label = try arena.allocator().dupeZ(u8, label);

        return .{
            .graph = Graph.init(heap.allocator),
            .label = duped_label,
            .arena = arena,
            .system_id_nodes = .{.allocator = heap.allocator},
            .read_write_resource_id_nodes = .{.allocator = heap.allocator},
            .read_only_resource_id_nodes = .{.allocator = heap.allocator},
            .parallel_work_bundles = .{.allocator = heap.allocator},
            .blocking_work = .{.allocator = heap.allocator},
        };
    }

    /// Discards the cached work plan so the next `run` regenerates it.
    pub fn invalidate_work(self: *Schedule) void {
        self.blocking_work.clear();

        for (self.parallel_work_bundles.values) |*bundle| {
            bundle.deinit();
        }

        self.parallel_work_bundles.clear();
    }

    /// Returns whether the cached work plan needs regeneration.
    pub fn is_invalidated(self: Schedule) bool {
        return self.parallel_work_bundles.is_empty() and self.blocking_work.is_empty();
    }

    /// Appends the system described by `info` to this schedule with the given
    /// ordering `order`, binding its parameters against `world` and
    /// invalidating the cached work plan.
    pub fn then(self: *Schedule, world: *World, info: *const Info, order: Order) std.mem.Allocator.Error!void {
        const nodes = lazily_create: {
            const system_id = @intFromPtr(info);

            break: lazily_create self.system_id_nodes.get_ptr(system_id) orelse insert: {
                std.debug.assert(try self.system_id_nodes.emplace(system_id, .{
                    .allocator = self.system_id_nodes.allocator,
                }));

                break: insert self.system_id_nodes.get_ptr(system_id).?;
            };
        };

        const dependencies = init: {
            const total_run_orders = order.run_after.len + order.run_before.len;
            const dependencies = try heap.allocator.alloc(Dependency, total_run_orders);
            var dependencies_written = @as(usize, 0);

            for (order.run_after) |after_system| {
                dependencies[dependencies_written] = .{
                    .id = @intFromPtr(after_system),
                    .kind = .after,
                };

                dependencies_written += 1;
            }

            for (order.run_before) |before_system| {
                dependencies[dependencies_written] = .{
                    .id = @intFromPtr(before_system),
                    .kind = .before,
                };

                dependencies_written += 1;
            }

            break: init dependencies;
        };

        errdefer heap.allocator.free(dependencies);

        const label = try heap.allocator.dupeZ(u8, if (order.label.len == 0) "anonymous system" else order.label);

        errdefer heap.allocator.free(label);

        const node = try self.graph.append(.{
            .info = info,
            .label = label,
            .dependencies = dependencies,
            .resource_accesses = .{.allocator = heap.allocator},
        });

        const system = self.graph.get_ptr(node).?;

        errdefer {
            // Unbind whatever parameters were bound before the failure;
            // unbound slots are still null from the node's defaults.
            for (info.used_parameters(), system.parameter_states[0 .. info.parameter_count]) |parameter, state| {
                if (state) |initialized_state| {
                    parameter.unbind(self.arena.allocator(), initialized_state);
                }
            }

            std.debug.assert(self.graph.remove_node(node) != null);
        }

        for (system.parameter_states[0 .. info.parameter_count], info.used_parameters()) |*state, parameter| {
            state.* = try parameter.bind(self.arena.allocator(), .{
                .world = world,
                .node = node,
                .systems = self,
            });
        }

        try nodes.push(node);

        self.invalidate_work();
    }
};
pub const max_parameters = 16;

127
src/coral/utf8.zig Normal file
View File

@ -0,0 +1,127 @@
const ascii = @import("./ascii.zig");
const coral = @import("./coral.zig");
const debug = @import("./debug.zig");
const io = @import("./io.zig");
const std = @import("std");
/// Formats `args` into `format` and returns the result as a freshly allocated
/// byte buffer owned by the caller.
pub fn alloc_formatted(allocator: std.mem.Allocator, comptime format: []const u8, args: anytype) std.mem.Allocator.Error![]coral.Byte {
    var buffer = coral.Stack(coral.Byte){.allocator = allocator};

    // Dry-run the formatter against a counting writer to pre-size the buffer.
    const formatted_len = count_formatted(format, args);

    try buffer.grow(formatted_len);

    errdefer buffer.deinit();

    // The buffer was grown to the exact formatted length, so printing into it
    // is not expected to fail.
    debug.assert_try(print_formatted(buffer.writer(), format, args));

    return buffer.to_allocation(formatted_len, 0);
}
/// Returns the number of bytes formatting `args` into `format` will produce,
/// by printing to a counting writer that discards the output.
fn count_formatted(comptime format: []const u8, args: anytype) usize {
    var count = io.defaultWritable{};

    debug.assert_try(print_formatted(count.writer(), format, args));

    return count.written;
}
/// Prints `format` to `writer`, substituting `{}` with sequential tuple
/// elements of `args`, `{name}` with `args.name`, and `{{` with a literal `{`.
///
/// FIXES over the previous implementation:
/// - `{}` substitution never flushed the pending literal text before it, so
///   everything since the last substitution was dropped.
/// - `{{` consumed both braces without emitting the literal `{`.
/// - the loop's `: (tail += 1)` continuation also ran after every branch had
///   already advanced `tail`, skipping the character following each
///   substitution (e.g. `"{}{}"` printed the second specifier literally).
/// - the `tail > format.len` guard was unreachable inside the loop; a
///   trailing `{` now reports a proper compile error.
pub fn print_formatted(writer: io.Writer, comptime format: []const u8, args: anytype) io.PrintError!void {
    switch (@typeInfo(@TypeOf(args))) {
        .Struct => |arguments_struct| {
            comptime var arg_index = 0;
            comptime var head = 0;
            comptime var tail = 0;

            inline while (tail < format.len) {
                if (format[tail] != '{') {
                    tail += 1;

                    continue;
                }

                if (tail + 1 >= format.len) {
                    @compileError("expected an identifier after opening `{`");
                }

                switch (format[tail + 1]) {
                    '{' => {
                        // Escaped brace: flush up to and including the first
                        // `{`, then skip the second.
                        try io.print(writer, format[head .. tail + 1]);

                        tail += 2;
                        head = tail;
                    },

                    '}' => {
                        if (!arguments_struct.is_tuple) {
                            @compileError("all format specifiers must be named when using a named struct");
                        }

                        try io.print(writer, format[head .. tail]);
                        try io.print(writer, args[arg_index]);

                        arg_index += 1;
                        tail += 2;
                        head = tail;
                    },

                    else => {
                        if (arguments_struct.is_tuple) {
                            @compileError("format specifiers cannot be named when using a tuple struct");
                        }

                        try io.print(writer, format[head .. tail]);

                        // Scan the specifier name up to the closing brace.
                        comptime var name_end = tail + 1;

                        inline while (name_end < format.len and format[name_end] != '}') {
                            name_end += 1;
                        }

                        if (name_end >= format.len) {
                            @compileError("expected closing `}` after format specifier");
                        }

                        try print_formatted_value(writer, @field(args, format[tail + 1 .. name_end]));

                        tail = name_end + 1;
                        head = tail;
                    },
                }
            }

            // Flush the trailing literal text.
            try io.print(writer, format[head .. ]);
        },

        else => @compileError("`arguments` must be a struct type"),
    }
}
/// Prints a single format-specifier value: integers and floats in decimal,
/// enums by tag name, u8 slices verbatim, and other pointers as hexadecimal
/// addresses. Unsupported types are rejected at compile time.
noinline fn print_formatted_value(writer: io.Writer, value: anytype) io.PrintError!void {
    const Value = @TypeOf(value);

    return switch (@typeInfo(Value)) {
        .Int => ascii.DecimalFormat.default.print(writer, value),
        .Float => ascii.DecimalFormat.default.print(writer, value),
        .Enum => io.print(writer, @tagName(value)),

        .Pointer => |pointer| switch (pointer.size) {
            .Many, .C => ascii.HexadecimalFormat.default.print(writer, @intFromPtr(value)),
            // FIX: `*value` is not valid Zig for dereference; it is `value.*`.
            .One => if (pointer.child == []const u8) io.print(writer, value.*) else ascii.HexadecimalFormat.default.print(writer, @intFromPtr(value)),
            .Slice => if (pointer.child == u8) io.print(writer, value) else @compileError(unformattableMessage(Value)),
        },

        else => @compileError(unformattableMessage(Value)),
    };
}
const root = @This();
/// Builds the compile-error text used when a value of type `Value` cannot be
/// handled by this formatter.
fn unformattableMessage(comptime Value: type) []const u8 {
    return std.fmt.comptimePrint("type `{s}` is not formattable with this formatter", .{@typeName(Value)});
}

50
src/coral/vectors.zig Normal file
View File

@ -0,0 +1,50 @@
const std = @import("std");
/// Returns the scalar ("perp-dot" / wedge) cross product of two 2D vectors:
/// `v1[0]*v2[1] - v1[1]*v2[0]`.
///
/// FIX: the previous implementation computed `v1[0]*v2[0] - v1[1]*v2[1] - ...`
/// (component-wise products folded with subtraction), which is not a cross
/// product for any dimension. The scalar cross is only defined in 2D, so other
/// lengths are rejected at compile time.
pub fn cross(v1: anytype, v2: anytype) @typeInfo(@TypeOf(v1, v2)).Vector.child {
    const vector_info = @typeInfo(@TypeOf(v1, v2)).Vector;

    if (vector_info.len != 2) {
        @compileError("`cross` is only defined for 2-dimensional vectors");
    }

    return (v1[0] * v2[1]) - (v1[1] * v2[0]);
}
/// Returns the Euclidean distance between `v1` and `v2`.
pub fn distance(v1: anytype, v2: anytype) @typeInfo(@TypeOf(v1, v2)).Vector.child {
    return length(v1 - v2);
}
/// Returns the dot (inner) product of `v1` and `v2`, summing the
/// component-wise products sequentially from the first component.
pub fn dot(v1: anytype, v2: anytype) @typeInfo(@TypeOf(v1, v2)).Vector.child {
    const products = v1 * v2;
    const component_count = @typeInfo(@TypeOf(v1)).Vector.len;
    var accumulator = products[0];

    inline for (1 .. component_count) |component| {
        accumulator += products[component];
    }

    return accumulator;
}
/// Returns the Euclidean length (magnitude) of `v`.
pub fn length(v: anytype) @typeInfo(@TypeOf(v)).Vector.child {
    return @sqrt(length_squared(v));
}
/// Returns the squared Euclidean length of `v` (avoids the square root).
pub fn length_squared(v: anytype) @typeInfo(@TypeOf(v)).Vector.child {
    return dot(v, v);
}
/// Returns `v` scaled to unit length, or `v` unchanged when its squared
/// length is within machine epsilon of zero.
pub fn normal(v: anytype) @TypeOf(v) {
    const Vector = @TypeOf(v);
    const squared_magnitude = length_squared(v);
    const epsilon = std.math.floatEps(@typeInfo(Vector).Vector.child);

    if (squared_magnitude <= epsilon) {
        return v;
    }

    return v / @as(Vector, @splat(@sqrt(squared_magnitude)));
}

56
src/main.zig Normal file
View File

@ -0,0 +1,56 @@
const coral = @import("coral");
const std = @import("std");
const ona = @import("ona");
/// Game state resource: the renderable 2D instance list plus the input axes
/// (A/D horizontal, W/S vertical) used to move the first instance.
const Actors = struct {
    instances: coral.stack.Sequential(ona.gfx.Queue.Instance2D) = .{.allocator = coral.heap.allocator},
    move_x: ona.act.Axis = .{.keys = .{.a, .d}},
    move_y: ona.act.Axis = .{.keys = .{.w, .s}},
};
/// Entry point: starts the Ona app at 60 ticks per second with `setup`
/// registering this game's resources and event systems.
pub fn main() !void {
    try ona.start_app(setup, .{
        .tick_rate = 60,
        // Fraction of available threads given to system execution — TODO
        // confirm semantics against `ona.start_app`.
        .execution = .{.thread_share = 0.1},
    });
}
/// Load-event system: resizes the window and seeds 800 red quad instances.
// NOTE(review): all 800 instances share the same transform, so they render
// stacked on top of each other — confirm this is intentional test data.
fn load(display: coral.ReadBlocking(ona.gfx.Display), actors: coral.Write(Actors)) !void {
    display.res.resize(1280, 720);

    try actors.res.instances.push_many(800, .{
        .origin = .{75, 75},
        .xbasis = .{100, 0},
        .ybasis = .{0, 100},
        .color = ona.gfx.color.compress(ona.gfx.color.rgb(1, 0, 0)),
    });
}
/// Exit-event system: releases the instance buffer owned by `Actors`.
fn exit(actors: coral.Write(Actors)) void {
    actors.res.instances.deinit();
}
/// Render-event system: submits all actor instances as one instanced draw of
/// the built-in quad mesh.
fn render(gfx: ona.gfx.Queue, actors: coral.Write(Actors)) !void {
    try gfx.buffer.draw_2d(.{
        .mesh_2d = gfx.primitives.quad_mesh,
        .instances = actors.res.instances.values,
    });
}
/// Update-event system: moves the first actor instance by the current
/// strength of the two mapped input axes.
fn update(actors: coral.Write(Actors), mapping: coral.Read(ona.act.Mapping)) !void {
    const origin = &actors.res.instances.values[0].origin;

    origin.* += .{
        mapping.res.axis_strength(actors.res.move_x),
        mapping.res.axis_strength(actors.res.move_y),
    };
}
/// App setup: publishes the `Actors` resource and wires this game's systems
/// onto the app lifecycle events.
fn setup(world: *coral.World, events: ona.App.Events) !void {
    try world.set_resource(.none, Actors{});

    try world.on_event(events.load, coral.system_fn(load), .{.label = "load"});
    try world.on_event(events.update, coral.system_fn(update), .{.label = "update"});
    try world.on_event(events.exit, coral.system_fn(exit), .{.label = "exit"});
    try world.on_event(events.render, coral.system_fn(render), .{.label = "render actors"});
}

22
src/ona/App.zig Normal file
View File

@ -0,0 +1,22 @@
const coral = @import("coral");
/// Lifecycle event table shared with systems so they can subscribe to stages.
events: *const Events,

/// Target duration of one frame — presumably seconds derived from the
/// configured tick rate; TODO confirm units against the runner.
target_frame_time: f64,

/// Cleared by `quit` to request shutdown of the running loop.
is_running: bool,
/// World events corresponding to the stages of the app lifecycle, in the
/// order they are declared here.
pub const Events = struct {
    load: coral.World.Event,
    pre_update: coral.World.Event,
    update: coral.World.Event,
    post_update: coral.World.Event,
    render: coral.World.Event,
    finish: coral.World.Event,
    exit: coral.World.Event,
};
const Self = @This();
/// Requests that the app's running loop stop after the current frame by
/// clearing `is_running`.
pub fn quit(self: *Self) void {
    self.is_running = false;
}

61
src/ona/act.zig Normal file
View File

@ -0,0 +1,61 @@
const App = @import("./App.zig");
const coral = @import("coral");
const gfx = @import("./gfx.zig");
const msg = @import("./msg.zig");
const std = @import("std");
/// A one-dimensional input axis optionally bound to a pair of keys;
/// `keys[0]` drives the negative direction and `keys[1]` the positive one
/// (see `Mapping.axis_strength`).
pub const Axis = struct {
    keys: ?[2]gfx.Input.Key = null,
};
/// Input-state resource: per-scancode bitsets for keys pressed this frame and
/// keys currently held.
pub const Mapping = struct {
    keys_pressed: ScancodeSet = ScancodeSet.initEmpty(),
    keys_held: ScancodeSet = ScancodeSet.initEmpty(),

    const ScancodeSet = std.bit_set.StaticBitSet(512);

    /// Returns the signed strength of `axis`: +1 when only its second key is
    /// held, -1 when only its first key is held, and 0 when neither is held,
    /// both are held, or the axis is unbound.
    pub fn axis_strength(self: Mapping, axis: Axis) f32 {
        const keys = axis.keys orelse {
            return 0;
        };

        const negative_held = self.keys_held.isSet(keys[0].scancode());
        const positive_held = self.keys_held.isSet(keys[1].scancode());

        if (!negative_held and !positive_held) {
            return 0;
        }

        return @as(f32, @floatFromInt(@intFromBool(positive_held))) -
            @as(f32, @floatFromInt(@intFromBool(negative_held)));
    }
};
/// Installs the input `Mapping` resource and schedules `update` on the
/// pre-update event so key state is fresh before game systems run.
pub fn setup(world: *coral.World, events: App.Events) std.mem.Allocator.Error!void {
    try world.set_resource(.none, Mapping{});

    try world.on_event(events.pre_update, coral.system_fn(update), .{
        .label = "update act",
    });
}
/// Drains pending input messages into the key-state bitsets: `keys_pressed`
/// is a one-frame set rebuilt every update, while `keys_held` persists until
/// the matching key_up arrives.
// NOTE(review): key-repeat events are not filtered here, so a held key may
// re-enter `keys_pressed` on repeated key_down messages — confirm repeat
// handling upstream in the event poller.
pub fn update(inputs: msg.Receive(gfx.Input), mapping: coral.Write(Mapping)) void {
    mapping.res.keys_pressed = Mapping.ScancodeSet.initEmpty();

    for (inputs.messages()) |message| {
        switch (message) {
            .key_down => |key| {
                mapping.res.keys_pressed.set(key.scancode());
                mapping.res.keys_held.set(key.scancode());
            },

            .key_up => |key| {
                mapping.res.keys_held.unset(key.scancode());
            },
        }
    }
}

View File

@ -1,3 +1,4 @@
pub usingnamespace @cImport({ pub usingnamespace @cImport({
@cInclude("SDL2/SDL.h"); @cInclude("SDL2/SDL.h");
}); });

116
src/ona/gfx.zig Normal file
View File

@ -0,0 +1,116 @@
const App = @import("./App.zig");
const Device = @import("./gfx/Device.zig");
pub const Queue = @import("./gfx/Queue.zig");
pub const color = @import("./gfx/color.zig");
const coral = @import("coral");
const ext = @import("./ext.zig");
const msg = @import("./msg.zig");
const std = @import("std");
/// Main-thread display resource: the SDL window, its clear color, and the
/// rendering device bound to it.
pub const Display = struct {
    sdl_window: *ext.SDL_Window,
    clear_color: color.Value = color.black,
    device: Device,

    /// Resizes the OS window to `width` x `height`.
    pub fn resize(self: Display, width: u16, height: u16) void {
        ext.SDL_SetWindowSize(self.sdl_window, width, height);
    }

    /// Sets the window title, truncating `title` to 255 bytes; the buffer is
    /// zero-initialized so the result is always null-terminated.
    pub fn retitle(self: Display, title: []const u8) void {
        var sentineled_title = [_:0]u8{0} ** 255;
        const truncated_len = @min(title.len, sentineled_title.len);

        // FIX: the source slice was previously the whole of `title`, so any
        // title longer than 255 bytes tripped `@memcpy`'s equal-length safety
        // check; both operands are now `truncated_len` bytes.
        @memcpy(sentineled_title[0 .. truncated_len], title[0 .. truncated_len]);
        ext.SDL_SetWindowTitle(self.sdl_window, &sentineled_title);
    }

    /// Enables or disables user resizing of the OS window.
    pub fn set_resizable(self: Display, resizable: bool) void {
        ext.SDL_SetWindowResizable(self.sdl_window, @intFromBool(resizable));
    }
};
/// Graphics subsystem failures; `SDLError` covers any failing SDL call.
pub const Error = error {
    SDLError,
};
pub const Handle = Queue.Handle;
/// An input message produced by the event poller; keys are identified by
/// their SDL scancode.
pub const Input = union (enum) {
    key_up: Key,
    key_down: Key,

    /// Keys the engine currently recognizes, with SDL scancodes as values.
    pub const Key = enum (u32) {
        a = ext.SDL_SCANCODE_A,
        d = ext.SDL_SCANCODE_D,
        s = ext.SDL_SCANCODE_S,
        w = ext.SDL_SCANCODE_W,

        /// Returns the underlying SDL scancode.
        pub fn scancode(key: Key) u32 {
            return @intFromEnum(key);
        }
    };
};
/// Handles to built-in meshes made available to render systems.
pub const MeshPrimitives = struct {
    quad_mesh: Handle,
};
/// Pre-update system: drains the SDL event queue, forwarding recognized key
/// events as `Input` messages and requesting app shutdown on SDL_QUIT.
pub fn poll(app: coral.Write(App), inputs: msg.Send(Input)) !void {
    var event = @as(ext.SDL_Event, undefined);

    while (ext.SDL_PollEvent(&event) != 0) {
        switch (event.type) {
            ext.SDL_QUIT => app.res.quit(),

            // FIX: these previously used `@enumFromInt` directly, which is
            // checked illegal behaviour for any scancode without a declared
            // `Input.Key` member (only a/d/s/w exist) — pressing any other
            // key would trap. Unmapped keys are now ignored.
            ext.SDL_KEYUP => {
                const key = std.meta.intToEnum(Input.Key, event.key.keysym.scancode) catch continue;

                try inputs.push(.{.key_up = key});
            },

            ext.SDL_KEYDOWN => {
                const key = std.meta.intToEnum(Input.Key, event.key.keysym.scancode) catch continue;

                try inputs.push(.{.key_down = key});
            },

            else => {},
        }
    }
}
/// Initializes SDL video, creates the application window and its rendering
/// device, publishes the `Display` resource (restricted to the main thread),
/// and hooks the poll / stop / submit systems onto app lifecycle events.
pub fn setup(world: *coral.World, events: App.Events) (Error || std.Thread.SpawnError || std.mem.Allocator.Error)!void {
    if (ext.SDL_Init(ext.SDL_INIT_VIDEO) != 0) {
        return error.SDLError;
    }

    const sdl_window = create: {
        const position = ext.SDL_WINDOWPOS_CENTERED;
        const flags = ext.SDL_WINDOW_OPENGL;
        const width = 640;
        const height = 480;

        break: create ext.SDL_CreateWindow("Ona", position, position, width, height, flags) orelse {
            return error.SDLError;
        };
    };

    errdefer ext.SDL_DestroyWindow(sdl_window);

    var device = try Device.init(sdl_window);

    errdefer device.deinit();

    // Ownership of the window and device passes to the Display resource;
    // `stop` tears them down on the exit event.
    try world.set_resource(.main, Display{
        .device = device,
        .sdl_window = sdl_window,
    });

    try world.on_event(events.pre_update, coral.system_fn(poll), .{.label = "poll gfx"});
    try world.on_event(events.exit, coral.system_fn(stop), .{.label = "stop gfx"});
    try world.on_event(events.finish, coral.system_fn(submit), .{.label = "submit gfx"});
}
/// Exit-event system: shuts down the rendering device and destroys the SDL
/// window owned by the `Display` resource.
pub fn stop(display: coral.WriteBlocking(Display)) void {
    display.res.device.deinit();
    ext.SDL_DestroyWindow(display.res.sdl_window);
}
/// Finish-event system: hands the frame to the device for presentation with
/// the display's configured clear color.
pub fn submit(display: coral.WriteBlocking(Display)) void {
    display.res.device.submit(display.res.sdl_window, display.res.clear_color);
}

387
src/ona/gfx/Device.zig Normal file
View File

@ -0,0 +1,387 @@
const Queue = @import("./Queue.zig");
const color = @import("./color.zig");
const coral = @import("coral");
const draw_2d = @import("./shaders/draw_2d.glsl.zig");
const ext = @import("../ext.zig");
const sokol = @import("sokol");
const std = @import("std");
/// Render thread — presumably spawned in `Device.init`; TODO confirm, the
/// spawn site is outside this view.
thread: std.Thread,

/// State shared between the device owner and the render thread.
render_state: *RenderState,

const AtomicBool = std.atomic.Value(bool);
const RenderWork = struct {
pipeline_2d: sokol.gfx.Pipeline,
instance_2d_buffers: coral.stack.Sequential(sokol.gfx.Buffer),
resources: coral.stack.Sequential(Resource),
const Resource = union (enum) {
mesh_2d: struct {
index_count: u32,
vertex_buffer: sokol.gfx.Buffer,
index_buffer: sokol.gfx.Buffer,
},
};
const buffer_indices = .{
.mesh = 0,
.instance = 1,
};
/// Destroys every GPU object owned by this render work: the 2D pipeline, all
/// pooled instance buffers, and the vertex/index buffers of loaded resources.
fn deinit(self: *RenderWork) void {
    sokol.gfx.destroyPipeline(self.pipeline_2d);

    for (self.instance_2d_buffers.values) |buffer| {
        sokol.gfx.destroyBuffer(buffer);
    }

    self.instance_2d_buffers.deinit();

    for (self.resources.values) |resource| {
        switch (resource) {
            .mesh_2d => |mesh_2d| {
                sokol.gfx.destroyBuffer(mesh_2d.vertex_buffer);
                sokol.gfx.destroyBuffer(mesh_2d.index_buffer);
            },
        }
    }

    self.resources.deinit();

    self.* = undefined;
}
/// Builds the 2D drawing pipeline: per-vertex mesh positions from the mesh
/// buffer slot and per-instance basis/origin/color/depth attributes from the
/// instance buffer slot, indexed with 16-bit indices.
fn init(allocator: std.mem.Allocator) RenderWork {
    return .{
        .pipeline_2d = sokol.gfx.makePipeline(.{
            .label = "2D drawing pipeline",

            .layout = .{
                .attrs = get: {
                    var attrs = [_]sokol.gfx.VertexAttrState{.{}} ** 16;

                    attrs[draw_2d.ATTR_vs_mesh_xy] = .{
                        .format = .FLOAT2,
                        .buffer_index = buffer_indices.mesh,
                    };

                    attrs[draw_2d.ATTR_vs_instance_xbasis] = .{
                        .format = .FLOAT2,
                        .buffer_index = buffer_indices.instance,
                    };

                    attrs[draw_2d.ATTR_vs_instance_ybasis] = .{
                        .format = .FLOAT2,
                        .buffer_index = buffer_indices.instance,
                    };

                    attrs[draw_2d.ATTR_vs_instance_origin] = .{
                        .format = .FLOAT2,
                        .buffer_index = buffer_indices.instance,
                    };

                    attrs[draw_2d.ATTR_vs_instance_color] = .{
                        .format = .UBYTE4N,
                        .buffer_index = buffer_indices.instance,
                    };

                    attrs[draw_2d.ATTR_vs_instance_depth] = .{
                        .format = .FLOAT,
                        .buffer_index = buffer_indices.instance,
                    };

                    break: get attrs;
                },

                .buffers = get: {
                    var buffers = [_]sokol.gfx.VertexBufferLayoutState{.{}} ** 8;

                    // Advance the instance buffer once per instance rather
                    // than once per vertex.
                    buffers[buffer_indices.instance].step_func = .PER_INSTANCE;

                    break: get buffers;
                },
            },

            .shader = sokol.gfx.makeShader(draw_2d.draw2dShaderDesc(sokol.gfx.queryBackend())),
            .index_type = .UINT16,
        }),

        .instance_2d_buffers = .{.allocator = coral.heap.allocator},
        .resources = .{.allocator = allocator},
    };
}
/// Executes queued 2D draw commands against `target`, streaming instance
/// data through a pool of fixed-size GPU buffers (512 instances each) that
/// grows on demand and is reused across commands within one call.
fn process_draw_2d_commands(self: *RenderWork, commands: []const Queue.Buffer.Draw2DCommand, target: Queue.Target) std.mem.Allocator.Error!void {
    const max_instances = 512;
    var instance_2d_buffers_used = @as(usize, 0);

    sokol.gfx.applyPipeline(self.pipeline_2d);

    sokol.gfx.applyUniforms(.VS, draw_2d.SLOT_Screen, sokol.gfx.asRange(&draw_2d.Screen{
        .screen_size = .{target.width, target.height},
    }));

    for (commands) |command| {
        const mesh_2d = &self.resources.values[command.mesh_2d.index().?].mesh_2d;
        const instance_size = @sizeOf(Queue.Instance2D);
        const full_instance_buffer_count = command.instances.len / max_instances;

        // Full batches of exactly `max_instances` instances.
        for (0 .. full_instance_buffer_count) |i| {
            defer instance_2d_buffers_used += 1;

            // Grow the buffer pool when every pooled buffer is in use.
            if (instance_2d_buffers_used == self.instance_2d_buffers.len()) {
                const instance_2d_buffer = sokol.gfx.makeBuffer(.{
                    .size = @sizeOf(Queue.Instance2D) * max_instances,
                    .usage = .STREAM,
                    .label = "2D drawing instance buffer",
                });

                errdefer sokol.gfx.destroyBuffer(instance_2d_buffer);

                try self.instance_2d_buffers.push(instance_2d_buffer);
            }

            sokol.gfx.applyBindings(.{
                .vertex_buffers = get_buffers: {
                    var buffers = [_]sokol.gfx.Buffer{.{}} ** 8;

                    buffers[buffer_indices.instance] = self.instance_2d_buffers.values[instance_2d_buffers_used];
                    buffers[buffer_indices.mesh] = mesh_2d.vertex_buffer;

                    break: get_buffers buffers;
                },

                .index_buffer = mesh_2d.index_buffer,
            });

            sokol.gfx.updateBuffer(self.instance_2d_buffers.values[instance_2d_buffers_used], .{
                .ptr = command.instances.ptr + (max_instances * i),
                .size = instance_size * max_instances,
            });

            sokol.gfx.draw(0, mesh_2d.index_count, max_instances);
        }

        const remaining_instances = command.instances.len % max_instances;

        // FIX: the tail upload previously always ran, issuing a zero-byte
        // `updateBuffer` and a zero-instance draw when the instance count was
        // an exact multiple of `max_instances`.
        if (remaining_instances != 0) {
            defer instance_2d_buffers_used += 1;

            if (instance_2d_buffers_used == self.instance_2d_buffers.len()) {
                const instance_2d_buffer = sokol.gfx.makeBuffer(.{
                    .size = @sizeOf(Queue.Instance2D) * max_instances,
                    .usage = .STREAM,
                    .label = "2D drawing instance buffer",
                });

                errdefer sokol.gfx.destroyBuffer(instance_2d_buffer);

                try self.instance_2d_buffers.push(instance_2d_buffer);
            }

            sokol.gfx.applyBindings(.{
                .vertex_buffers = get_buffers: {
                    var buffers = [_]sokol.gfx.Buffer{.{}} ** 8;

                    buffers[buffer_indices.instance] = self.instance_2d_buffers.values[instance_2d_buffers_used];
                    buffers[buffer_indices.mesh] = mesh_2d.vertex_buffer;

                    break: get_buffers buffers;
                },

                .index_buffer = mesh_2d.index_buffer,
            });

            sokol.gfx.updateBuffer(self.instance_2d_buffers.values[instance_2d_buffers_used], .{
                // FIX: the source offset was `ptr + full_instance_buffer_count`
                // — an element count of *buffers*, not instances — so the tail
                // re-uploaded data from near the start of the instance array.
                .ptr = command.instances.ptr + (full_instance_buffer_count * max_instances),
                .size = instance_size * remaining_instances,
            });

            sokol.gfx.draw(0, mesh_2d.index_count, @intCast(remaining_instances));
        }
    }
}
/// Uploads the resources opened in `buffer` and then replays its queued 2D
/// draw commands against `target`.
fn process_queue(self: *RenderWork, buffer: *const Queue.Buffer, target: Queue.Target) std.mem.Allocator.Error!void {
    for (buffer.open_commands.values) |command| {
        switch (command.resource) {
            .texture => {
                // TODO(review): texture uploads are not implemented yet.
            },

            .mesh_2d => |mesh_2d| {
                // Validate before creating GPU buffers so the failure path
                // has nothing to destroy. `index_count` is stored as u32.
                if (mesh_2d.indices.len > std.math.maxInt(u32)) {
                    return error.OutOfMemory;
                }

                const index_buffer = sokol.gfx.makeBuffer(.{
                    .data = sokol.gfx.asRange(mesh_2d.indices),
                    .type = .INDEXBUFFER,
                });

                errdefer sokol.gfx.destroyBuffer(index_buffer);

                const vertex_buffer = sokol.gfx.makeBuffer(.{
                    .data = sokol.gfx.asRange(mesh_2d.vertices),
                    .type = .VERTEXBUFFER,
                });

                errdefer sokol.gfx.destroyBuffer(vertex_buffer);

                try self.resources.push(.{
                    .mesh_2d = .{
                        .index_buffer = index_buffer,
                        .vertex_buffer = vertex_buffer,
                        .index_count = @intCast(mesh_2d.indices.len),
                    },
                });
            },
        }
    }

    try self.process_draw_2d_commands(buffer.draw_2d_commands.values, target);
}
};
/// State shared between the main thread and the render thread.
const RenderState = struct {
    /// Posted by the render thread when it has finished consuming a frame.
    finished: std.Thread.Semaphore = .{},

    /// Cleared by `deinit` to ask the render loop to exit.
    is_running: AtomicBool = AtomicBool.init(true),

    /// Posted by the main thread once a new frame has been submitted.
    ready: std.Thread.Semaphore = .{},

    /// Color used to clear the swapchain at the start of each pass. Stored
    /// uncompressed: `run` bit-casts it to `sokol.gfx.Color` (4 x f32) and
    /// `submit` assigns a `color.Value` to it, so the default must be the
    /// uncompressed `color.black` — `color.compress(...)` yields a 4 x u8
    /// `color.Compressed`, which does not coerce to `color.Value`.
    clear_color: color.Value = color.black,

    /// Drawable size in pixels, written by `submit` before each frame.
    pixel_width: c_int = 0,
    pixel_height: c_int = 0,
};
// Shorthand for the enclosing graphics context type.
const Self = @This();
/// Shuts down the render thread and frees the shared render state. The order
/// matters: the running flag must be cleared before `ready` is posted so the
/// render loop wakes, observes the request, and exits, letting `join` return.
pub fn deinit(self: *Self) void {
    self.render_state.is_running.store(false, .monotonic);
    self.render_state.ready.post();
    self.thread.join();
    coral.heap.allocator.destroy(self.render_state);

    self.* = undefined;
}
/// Creates the graphics context for `sdl_window`, spawning the render thread
/// and performing an initial submit so the first frame gets presented.
pub fn init(sdl_window: *ext.SDL_Window) (std.mem.Allocator.Error || std.Thread.SpawnError)!Self {
    const render_state = try coral.heap.allocator.create(RenderState);

    errdefer coral.heap.allocator.destroy(render_state);

    render_state.* = .{};

    const render_thread = try std.Thread.spawn(.{}, run, .{sdl_window, render_state});

    // Failing to name the thread is purely cosmetic, so only warn about it.
    render_thread.setName("Ona Graphics") catch {
        std.log.warn("failed to name the graphics thread", .{});
    };

    const self = Self{
        .thread = render_thread,
        .render_state = render_state,
    };

    self.submit(sdl_window, .{0, 0, 0, 1});

    return self;
}
/// Render-thread entry point: configures and creates a GL context for
/// `sdl_window`, then loops consuming submitted frames until
/// `render_state.is_running` is cleared.
fn run(sdl_window: *ext.SDL_Window, render_state: *RenderState) !void {
    var result = @as(c_int, 0);

    // Request a forward-compatible, core-profile OpenGL 3.3 context with
    // double buffering before creating it.
    result |= ext.SDL_GL_SetAttribute(ext.SDL_GL_CONTEXT_FLAGS, ext.SDL_GL_CONTEXT_FORWARD_COMPATIBLE_FLAG);
    result |= ext.SDL_GL_SetAttribute(ext.SDL_GL_CONTEXT_PROFILE_MASK, ext.SDL_GL_CONTEXT_PROFILE_CORE);
    result |= ext.SDL_GL_SetAttribute(ext.SDL_GL_CONTEXT_MAJOR_VERSION, 3);
    result |= ext.SDL_GL_SetAttribute(ext.SDL_GL_CONTEXT_MINOR_VERSION, 3);
    result |= ext.SDL_GL_SetAttribute(ext.SDL_GL_DOUBLEBUFFER, 1);

    if (result != 0) {
        // NOTE(review): execution continues even when attribute setup fails;
        // confirm this best-effort behavior is intended.
        std.log.err("failed to set necessary OpenGL flags in graphics", .{});
    }

    const context = ext.SDL_GL_CreateContext(sdl_window);

    defer ext.SDL_GL_DeleteContext(context);

    // Unblock the first `submit` on the main thread, which waits on
    // `finished` before publishing frame data.
    render_state.finished.post();

    sokol.gfx.setup(.{
        .environment = .{
            .defaults = .{
                .color_format = .RGBA8,
                .depth_format = .DEPTH_STENCIL,
                .sample_count = 1,
            },
        },

        .logger = .{
            .func = sokol.log.func,
        },
    });

    defer sokol.gfx.shutdown();

    var render_work = RenderWork.init(coral.heap.allocator);

    defer render_work.deinit();

    while (render_state.is_running.load(.monotonic)) {
        // Wait for the main thread to publish a frame. The deferred post runs
        // on both normal and error exits of this iteration, so `submit` is
        // always unblocked for the frame that was consumed.
        render_state.ready.wait();

        defer render_state.finished.post();

        // Render directly into the default framebuffer of the swapchain.
        sokol.gfx.beginPass(.{
            .swapchain = .{
                .width = render_state.pixel_width,
                .height = render_state.pixel_height,
                .sample_count = 1,
                .color_format = .RGBA8,
                .depth_format = .DEPTH_STENCIL,
                .gl = .{.framebuffer = 0},
            },

            .action = .{
                .colors = get: {
                    var actions = [_]sokol.gfx.ColorAttachmentAction{.{}} ** 4;

                    // Only the first color attachment is used; clear it to the
                    // submitted clear color.
                    actions[0] = .{
                        .load_action = .CLEAR,
                        .clear_value = @as(sokol.gfx.Color, @bitCast(render_state.clear_color)),
                    };

                    break: get actions;
                },
            }
        });

        // NOTE(review): an error here ends the render thread while the main
        // thread may keep submitting — a subsequent `submit` would then block
        // on `finished` forever. Confirm this is acceptable.
        try Queue.consume_submitted(Queue.Consumer.bind(RenderWork, &render_work, RenderWork.process_queue), .{
            .width = @floatFromInt(render_state.pixel_width),
            .height = @floatFromInt(render_state.pixel_height),
        });

        sokol.gfx.endPass();
        sokol.gfx.commit();
        ext.SDL_GL_SwapWindow(sdl_window);
    }
}
/// Publishes a frame to the render thread: waits until the previous frame has
/// been consumed, records the drawable size and clear color, swaps the queue
/// buffers, then signals the render loop to start drawing.
pub fn submit(self: Self, sdl_window: *ext.SDL_Window, clear_color: color.Value) void {
    self.render_state.finished.wait();

    ext.SDL_GL_GetDrawableSize(sdl_window, &self.render_state.pixel_width, &self.render_state.pixel_height);

    std.debug.assert(self.render_state.pixel_width > 0 and self.render_state.pixel_height > 0);

    self.render_state.clear_color = clear_color;

    // Make every producer's pending command buffer visible to the consumer.
    Queue.swap();

    self.render_state.ready.post();
}

344
src/ona/gfx/Queue.zig Normal file
View File

@ -0,0 +1,344 @@
const color = @import("./color.zig");
const coral = @import("coral");
const std = @import("std");
// Pending command buffer this queue view writes into (flipped by `swap`).
buffer: *Buffer,

// Shared handles to lazily-created primitive resources (e.g. the quad mesh).
primitives: *const Primitives,

// Atomic counter type used for handle generation and node reference counts.
const AtomicCount = std.atomic.Value(usize);
/// Double-buffered command storage for one producer. All command payloads are
/// duplicated into an internal arena, so callers need not keep their inputs
/// alive after recording.
pub const Buffer = struct {
    arena: std.heap.ArenaAllocator,
    closed_handles: coral.stack.Sequential(usize),
    open_commands: coral.stack.Sequential(OpenCommand),
    draw_2d_commands: coral.stack.Sequential(Draw2DCommand),
    close_commands: coral.stack.Sequential(CloseCommand),

    /// Requests destruction of the resource identified by `handle`.
    pub const CloseCommand = struct {
        handle: Handle,
    };

    /// Draws every instance in `instances` using the mesh behind `mesh_2d`.
    pub const Draw2DCommand = struct {
        instances: []const Instance2D,
        mesh_2d: Handle,
    };

    /// Requests creation of a GPU resource. A `.none` handle asks the queue
    /// to reserve a fresh handle; any other value reuses that handle slot.
    pub const OpenCommand = struct {
        handle: Handle = .none,
        label: ?[]const u8 = null,

        resource: union (enum) {
            texture: Texture,
            mesh_2d: Mesh2D,
        },

        /// Vertex and index data describing a 2D mesh.
        pub const Mesh2D = struct {
            vertices: []const Vertex2D,
            indices: []const u16,
        };

        /// Dimensions, pixel format, and access pattern of a texture.
        pub const Texture = struct {
            width: u16,
            height: u16,
            format: Format,
            access: Access,

            pub const Access = enum {
                static,
            };

            pub const Format = enum {
                rgba8888,
                bgra8888,
                argb8888,
                rgb888,
                bgr888,

                /// Returns the number of bytes per pixel for this format.
                pub fn byte_size(self: Format) usize {
                    return switch (self) {
                        .rgba8888, .bgra8888, .argb8888 => 4,
                        .rgb888, .bgr888 => 3,
                    };
                }
            };
        };
    };

    fn deinit(self: *Buffer) void {
        self.arena.deinit();
        self.closed_handles.deinit();
        self.open_commands.deinit();
        self.draw_2d_commands.deinit();
        self.close_commands.deinit();
    }

    fn init(allocator: std.mem.Allocator) Buffer {
        return .{
            .arena = std.heap.ArenaAllocator.init(allocator),
            .closed_handles = .{.allocator = allocator},
            .open_commands = .{.allocator = allocator},
            .draw_2d_commands = .{.allocator = allocator},
            .close_commands = .{.allocator = allocator},
        };
    }

    /// Discards all recorded commands and recycles the arena's memory.
    pub fn clear(self: *Buffer) void {
        self.close_commands.clear();
        self.draw_2d_commands.clear();
        self.open_commands.clear();

        if (!self.arena.reset(.retain_capacity)) {
            std.log.warn("failed to reset the buffer of a gfx queue with retained capacity", .{});
        }
    }

    /// Records a 2D draw command, copying `command.instances` into the arena.
    pub fn draw_2d(self: *Buffer, command: Draw2DCommand) std.mem.Allocator.Error!void {
        try self.draw_2d_commands.push(.{
            .instances = try self.arena.allocator().dupe(Instance2D, command.instances),
            .mesh_2d = command.mesh_2d,
        });
    }

    /// Records a resource-creation command, returning the handle (recycled
    /// from `closed_handles` or freshly reserved) that identifies it.
    pub fn open(self: *Buffer, command: OpenCommand) std.mem.Allocator.Error!Handle {
        const reserved_handle = @as(Handle, switch (command.handle) {
            .none => @enumFromInt(reserve_handle: {
                // Prefer recycling a previously closed handle slot.
                if (self.closed_handles.get()) |handle| {
                    std.debug.assert(self.closed_handles.pop());

                    break: reserve_handle handle;
                }

                break: reserve_handle next_handle.fetchAdd(1, .monotonic);
            }),

            _ => |handle| handle,
        });

        std.debug.assert(reserved_handle != .none);

        const arena_allocator = self.arena.allocator();

        // NOTE(review): if a dupe/push below fails, `reserved_handle` is not
        // returned to `closed_handles` — confirm the leak is acceptable.
        try self.open_commands.push(.{
            .resource = switch (command.resource) {
                .texture => |texture| .{
                    .texture = .{
                        .width = texture.width,
                        .height = texture.height,
                        .format = texture.format,
                        .access = texture.access,
                    },
                },

                .mesh_2d => |mesh_2d| .{
                    .mesh_2d = .{
                        .indices = try arena_allocator.dupe(u16, mesh_2d.indices),
                        .vertices = try arena_allocator.dupe(Vertex2D, mesh_2d.vertices),
                    },
                },
            },

            .label = if (command.label) |label| try arena_allocator.dupe(coral.io.Byte, label) else null,
            .handle = reserved_handle,
        });

        return reserved_handle;
    }
};
/// Generator signature invoked with each submitted buffer during consumption.
pub const Consumer = coral.io.Generator(std.mem.Allocator.Error!void, &.{*const Buffer, Target});
/// Identifies a queue-managed resource. `.none` is the reserved null handle;
/// every other value maps to a zero-based resource slot via `index`.
pub const Handle = enum (usize) {
    none,
    _,

    /// Returns the zero-based resource slot for this handle, or `null` when
    /// the handle is `.none`.
    pub fn index(self: Handle) ?usize {
        if (self == .none) {
            return null;
        }

        return @intFromEnum(self) - 1;
    }
};
/// Per-instance data streamed to the 2D shader: a 2x2 basis plus origin
/// (an affine transform), a compressed tint color, and a depth value.
pub const Instance2D = extern struct {
    xbasis: Point2D = .{1, 0},
    ybasis: Point2D = .{0, 1},
    origin: Point2D = @splat(0),
    color: color.Compressed = color.compress(color.white),
    depth: f32 = 0,
};
/// Intrusive, doubly-linked, reference-counted list node owning one
/// producer's pair of command buffers.
const Node = struct {
    buffers: [2]Buffer,
    is_swapped: bool = false,
    ref_count: AtomicCount = AtomicCount.init(1),
    has_next: ?*Node = null,
    has_prev: ?*Node = null,

    fn acquire(self: *Node) void {
        self.ref_count.fetchAdd(1, .monotonic);
    }

    /// Buffer currently written to by the producer.
    fn pending(self: *Node) *Buffer {
        return &self.buffers[@intFromBool(self.is_swapped)];
    }

    /// Drops one reference. The final release unlinks the node from the
    /// global list (under `mutex`) and frees it along with both buffers.
    fn release(self: *Node) void {
        if (self.ref_count.fetchSub(1, .monotonic) == 1) {
            mutex.lock();

            defer mutex.unlock();

            if (self.has_prev) |prev| {
                prev.has_next = self.has_next;
            } else {
                has_head = self.has_next;
            }

            if (self.has_next) |next| {
                next.has_prev = self.has_prev;
            } else {
                has_tail = self.has_prev;
            }

            for (&self.buffers) |*buffer| {
                buffer.deinit();
            }

            coral.heap.allocator.destroy(self);
        }
    }

    /// Buffer currently readable by the consumer.
    fn submitted(self: *Node) *Buffer {
        return &self.buffers[@intFromBool(!self.is_swapped)];
    }
};
/// A 2D point/vector of 32-bit floats.
pub const Point2D = @Vector(2, f32);

/// Handles to primitive resources shared by every queue.
pub const Primitives = struct {
    quad_mesh: Handle,
};

const Self = @This();

/// Per-system state produced by `bind` and consumed by `init`/`unbind`.
pub const State = struct {
    node: *Node,
    primitives: *const Primitives,
};

/// Dimensions of the render target, in pixels (as floats).
pub const Target = struct {
    width: f32,
    height: f32,
};

/// A single 2D mesh vertex.
pub const Vertex2D = struct {
    xy: Point2D,
};
/// Allocates a queue node for a binding system, linking it into the global
/// node list, and resolves (creating on first use) the shared `Primitives`
/// resource.
pub fn bind(context: coral.system.BindContext) std.mem.Allocator.Error!State {
    const queue = try coral.heap.allocator.create(Node);

    errdefer coral.heap.allocator.destroy(queue);

    queue.* = .{
        .buffers = .{Buffer.init(coral.heap.allocator), Buffer.init(coral.heap.allocator)},
    };

    // Fix: release any arena/stack memory the buffers acquired if a later
    // step fails — `destroy(queue)` alone would leak it.
    errdefer {
        for (&queue.buffers) |*buffer| {
            buffer.deinit();
        }
    }

    // Fix: resolve (or lazily create) the primitives resource *before*
    // linking the node into the global list. Previously the node was linked
    // first, so a failure here ran the errdefers on a node still reachable
    // from the list, leaving dangling pointers behind.
    const primitives = (try context.register_read_only_resource_access(.none, Primitives)) orelse create: {
        const buffer = queue.pending();
        const half_extent = 0.5;

        try context.world.set_resource(.none, Primitives{
            .quad_mesh = try buffer.open(.{
                .label = "quad mesh primitive",

                .resource = .{
                    .mesh_2d = .{
                        .indices = &.{0, 1, 2, 0, 2, 3},

                        .vertices = &.{
                            .{.xy = .{-half_extent, half_extent}},// .uv = .{0, 1}},
                            .{.xy = .{half_extent, half_extent}},// .uv = .{1, 1}},
                            .{.xy = .{half_extent, -half_extent}},// .uv = .{1, 0}},
                            .{.xy = .{-half_extent, -half_extent}},// .uv = .{0, 0}},
                        },
                    },
                },
            }),
        });

        break: create (try context.register_read_only_resource_access(.none, Primitives)).?;
    };

    // Append the node to the global list; no fallible work happens after
    // this point, so the errdefers can never touch a linked node.
    mutex.lock();

    defer mutex.unlock();

    if (has_tail) |tail| {
        tail.has_next = queue;
        queue.has_prev = tail;
    } else {
        std.debug.assert(has_head == null);

        has_head = queue;
    }

    has_tail = queue;

    return .{
        .primitives = primitives,
        .node = queue,
    };
}
/// Drains the submitted buffer of every live queue node, yielding each one to
/// `consumer` together with `target`, then clears it for reuse. Holds the
/// global list mutex for the whole traversal.
pub fn consume_submitted(consumer: Consumer, target: Target) std.mem.Allocator.Error!void {
    mutex.lock();

    defer mutex.unlock();

    var has_node = has_head;

    while (has_node) |node| : (has_node = node.has_next) {
        // Use the existing `Node.submitted` helper instead of re-deriving the
        // buffer index inline; also drops a dead `iterations` counter that
        // was incremented but never read.
        const buffer = node.submitted();

        try consumer.yield(.{buffer, target});
        buffer.clear();
    }
}
// Head and tail of the global list of live queue nodes; guarded by `mutex`.
var has_head = @as(?*Node, null);
var has_tail = @as(?*Node, null);
/// Builds a queue view over the bound state's pending (writable) buffer.
pub fn init(state: *State) Self {
    return .{
        .buffer = state.node.pending(),
        .primitives = state.primitives,
    };
}
// Guards the global node list and buffer swapping across threads.
var mutex = std.Thread.Mutex{};

// Next handle value to reserve; starts at 1 because 0 encodes `Handle.none`.
var next_handle = AtomicCount.init(1);
/// Flips the pending/submitted buffer pair of every live queue node. Called
/// once per submitted frame while producers are quiescent.
pub fn swap() void {
    mutex.lock();

    defer mutex.unlock();

    var current_node = has_head;

    while (current_node) |node| : (current_node = node.has_next) {
        node.is_swapped = !node.is_swapped;
    }
}
/// Releases the system's reference to its queue node; the node is freed once
/// all references are gone.
pub fn unbind(state: *State) void {
    state.node.release();
}

15
src/ona/gfx/color.zig Normal file
View File

@ -0,0 +1,15 @@
/// A color packed as four 8-bit channels (RGBA).
pub const Compressed = @Vector(4, u8);

/// A normalized floating-point color (RGBA), channels expected in [0, 1].
pub const Value = @Vector(4, f32);

/// Opaque black.
pub const black = Value{0, 0, 0, 1};

/// Packs `color` into 8-bit channels. Each channel is clamped to [0, 1]
/// first so out-of-range inputs cannot trip the checked float-to-int
/// conversion (`@intFromFloat` of a value outside u8's range is illegal
/// behavior in safe builds).
pub fn compress(color: Value) Compressed {
    const zero: Value = @splat(0);
    const one: Value = @splat(1);

    return @intFromFloat(@min(@max(color, zero), one) * @as(Value, @splat(255)));
}

/// Returns an opaque color from the given red, green, and blue channels.
pub fn rgb(r: f32, g: f32, b: f32) Value {
    return .{r, g, b, 1};
}

/// Opaque white.
pub const white = Value{1, 1, 1, 1};

1
src/ona/gfx/shaders/.gitignore vendored Normal file
View File

@ -0,0 +1 @@
*.glsl.zig

View File

@ -0,0 +1,44 @@
@header const Vec2 = @Vector(2, f32)
@ctype vec2 Vec2

@vs vs
// Per-vertex mesh position.
in vec2 mesh_xy;

// Per-instance affine transform (2x2 basis plus origin), tint, and depth.
in vec2 instance_xbasis;
in vec2 instance_ybasis;
in vec2 instance_origin;
in vec4 instance_color;
in float instance_depth;

uniform Screen {
    vec2 screen_size;
};

out vec4 color;

void main() {
    // Calculate the world position of the vertex
    // NOTE(review): `const` locals with non-constant initializers are not
    // valid in all GLSL versions — confirm sokol-shdc accepts this.
    const vec2 world_position = instance_origin + mesh_xy.x * instance_xbasis + mesh_xy.y * instance_ybasis;

    // Convert world position to normalized device coordinates (NDC)
    // Assuming the screen coordinates range from (0, 0) to (screen_size.x, screen_size.y)
    // The y axis is flipped so +y in world space points down the screen.
    const vec2 ndc_position = (vec2(world_position.x, -world_position.y) / screen_size) * 2.0 - vec2(1.0, -1.0);

    // Set the position of the vertex in clip space
    gl_Position = vec4(ndc_position, instance_depth, 1.0);

    // Set the output color
    color = instance_color;
}
@end

@fs fs
in vec4 color;
out vec4 texel;

void main() {
    // Pass the interpolated per-instance tint straight through.
    texel = color;
}
@end

@program draw_2d vs fs

134
src/ona/msg.zig Normal file
View File

@ -0,0 +1,134 @@
const App = @import("./App.zig");
const coral = @import("coral");
const std = @import("std");
/// Builds a double-buffered message channel type for `Message`. Senders push
/// into one buffer while receivers read the other; `swap` flips them.
fn Channel(comptime Message: type) type {
    return struct {
        buffers: [2]coral.stack.Sequential(Message),
        swapped: bool = false,
        ticks: u1 = 0,

        const Self = @This();

        // System run on app exit to free the channel's buffers.
        fn cleanup(channel: coral.Write(Self)) void {
            channel.res.deinit();
        }

        pub fn deinit(self: *Self) void {
            for (&self.buffers) |*buffer| {
                buffer.deinit();
            }

            self.* = undefined;
        }

        // System run post-update. The u1 tick counter wraps on every second
        // call, so the buffers swap (and the new write buffer is cleared)
        // every other update — presumably so messages stay readable for two
        // updates; confirm this cadence is intentional.
        fn swap(channel: coral.Write(Self)) void {
            channel.res.ticks = coral.scalars.add(channel.res.ticks, 1) orelse 0;

            if (channel.res.ticks == 0) {
                channel.res.swapped = !channel.res.swapped;
                channel.res.buffers[@intFromBool(channel.res.swapped)].clear();
            }
        }

        fn init(allocator: std.mem.Allocator) Self {
            return .{
                .buffers = .{
                    .{.allocator = allocator},
                    .{.allocator = allocator},
                },
            };
        }

        // Read side: the buffer not currently being written.
        fn messages(self: Self) []const Message {
            return self.buffers[@intFromBool(!self.swapped)].values;
        }

        // Write side: the currently-active buffer.
        fn push(self: *Self, message: Message) std.mem.Allocator.Error!void {
            try self.buffers[@intFromBool(self.swapped)].push(message);
        }
    };
}
/// System parameter granting read-only access to the message channel for
/// `Message`. Binding lazily creates the channel resource on first use.
pub fn Receive(comptime Message: type) type {
    const TypedChannel = Channel(Message);

    return struct {
        channel: *const TypedChannel,

        const Self = @This();

        pub const State = struct {
            channel: *const TypedChannel,
        };

        pub fn bind(context: coral.system.BindContext) std.mem.Allocator.Error!State {
            return .{
                // Register read access, creating the channel resource first
                // if no sender or receiver has been bound yet.
                .channel = (try context.register_read_only_resource_access(thread_restriction, TypedChannel)) orelse set: {
                    try context.world.set_resource(thread_restriction, TypedChannel.init(coral.heap.allocator));

                    break: set (try context.register_read_only_resource_access(thread_restriction, TypedChannel)).?;
                },
            };
        }

        pub fn init(state: *State) Self {
            return .{
                .channel = state.channel,
            };
        }

        /// Messages visible this update (those pushed before the last swap).
        pub fn messages(self: Self) []const Message {
            return self.channel.messages();
        }
    };
}
/// System parameter granting write access to the message channel for
/// `Message`. The first binding also creates the channel resource and hooks
/// its swap/cleanup systems onto the app's post-update and exit events.
pub fn Send(comptime Message: type) type {
    const TypedChannel = Channel(Message);

    return struct {
        channel: *TypedChannel,

        const Self = @This();

        pub const State = struct {
            channel: *TypedChannel,
        };

        pub fn bind(context: coral.system.BindContext) std.mem.Allocator.Error!State {
            return .{
                .channel = (try context.register_read_write_resource_access(thread_restriction, TypedChannel)) orelse set: {
                    try context.world.set_resource(thread_restriction, TypedChannel.init(coral.heap.allocator));

                    const app = context.world.get_resource(.none, App).?;

                    // Flip the channel's buffers after every update cycle.
                    try context.world.on_event(app.events.post_update, coral.system_fn(TypedChannel.swap), .{
                        .label = "swap channel of " ++ @typeName(Message),
                    });

                    // Free the channel's buffers when the app exits.
                    try context.world.on_event(app.events.exit, coral.system_fn(TypedChannel.cleanup), .{
                        .label = "clean up channel of " ++ @typeName(Message),
                    });

                    break: set (try context.register_read_write_resource_access(thread_restriction, TypedChannel)).?;
                },
            };
        }

        pub fn init(state: *State) Self {
            return .{
                .channel = state.channel,
            };
        }

        /// Queues `message` for delivery after the next channel swap.
        pub fn push(self: Self, message: Message) std.mem.Allocator.Error!void {
            try self.channel.push(message);
        }
    };
}
// Channel resources are registered without any thread restriction.
const thread_restriction = coral.World.ThreadRestriction.none;

103
src/ona/ona.zig Normal file
View File

@ -0,0 +1,103 @@
pub const App = @import("./App.zig");
pub const act = @import("./act.zig");
const coral = @import("coral");
const ext = @import("./ext.zig");
pub const gfx = @import("./gfx.zig");
pub const msg = @import("./msg.zig");
const std = @import("std");
/// Configuration for `start_app`.
pub const Options = struct {
    /// Fixed updates per second driven by the main loop.
    tick_rate: u64,
    execution: Execution,

    /// Middleware setups run before the user's own setup function.
    middlewares: []const *const Setup = default_middlewares,

    /// Threading strategy for the world's system scheduler.
    pub const Execution = union (enum) {
        single_threaded,

        /// Fraction of available CPU cores to use for worker threads.
        thread_share: f32,
    };
};
/// Signature of a setup routine given the world and its app events.
pub const Setup = fn (*coral.World, App.Events) anyerror!void;

// Middleware installed by default for every app.
pub const default_middlewares = &.{
    gfx.setup,
    act.setup,
};
/// Bootstraps and runs an application: builds the world, registers the app
/// lifecycle events, runs every middleware and the user `setup`, then drives
/// a fixed-tick-rate main loop until the app stops running.
pub fn start_app(setup: Setup, options: Options) anyerror!void {
    defer {
        // Runs after everything else tears down: report leaks, then quit SDL.
        coral.heap.trace_leaks();
        ext.SDL_Quit();
    }

    var world = try switch (options.execution) {
        .single_threaded => coral.World.init(0),

        .thread_share => |thread_share| init: {
            // Worker count is a fraction of the detected CPU count; failure
            // to detect CPUs is treated as fatal.
            const cpu_count = @as(u32, @intCast(std.math.clamp(std.Thread.getCpuCount() catch |cpu_count_error| {
                @panic(switch (cpu_count_error) {
                    error.PermissionDenied => "permission denied retrieving CPU count",
                    error.SystemResources => "system resources are preventing retrieval of the CPU count",
                    error.Unexpected => "unexpected failure retrieving CPU count",
                });
            }, 0, std.math.maxInt(u32))));

            break: init coral.World.init(coral.scalars.fractional(cpu_count, thread_share) orelse 0);
        },
    };

    defer world.deinit();

    const events = App.Events{
        .load = try world.create_event("load"),
        .pre_update = try world.create_event("pre-update"),
        .update = try world.create_event("update"),
        .post_update = try world.create_event("post-update"),
        .render = try world.create_event("render"),
        .finish = try world.create_event("finish"),
        .exit = try world.create_event("exit"),
    };

    const app = try world.set_get_resource(.none, App{
        .events = &events,
        .target_frame_time = 1.0 / @as(f64, @floatFromInt(options.tick_rate)),
        .is_running = true,
    });

    // Middleware first, then the user's setup, so user systems can rely on
    // resources the middleware installed.
    for (options.middlewares) |setup_middleware| {
        try setup_middleware(&world, events);
    }

    try setup(&world, events);
    try world.run_event(events.load);

    var ticks_previous = std.time.milliTimestamp();
    var accumulated_time = @as(f64, 0);

    while (app.is_running) {
        const ticks_current = std.time.milliTimestamp();
        const milliseconds_per_second = 1000.0;
        const delta_time = @as(f64, @floatFromInt(ticks_current - ticks_previous)) / milliseconds_per_second;

        ticks_previous = ticks_current;
        accumulated_time += delta_time;

        try world.run_event(events.pre_update);

        // Fixed-timestep updates: run as many ticks as real time demands.
        // NOTE(review): accumulated_time is uncapped, so a long stall causes
        // a burst of catch-up updates — confirm this is acceptable.
        while (accumulated_time >= app.target_frame_time) : (accumulated_time -= app.target_frame_time) {
            try world.run_event(events.update);
        }

        try world.run_event(events.post_update);
        try world.run_event(events.render);
        try world.run_event(events.finish);
    }

    try world.run_event(events.exit);
}

BIN
tools/sokol-shdc (Stored with Git LFS) Executable file

Binary file not shown.

BIN
tools/sokol-shdc.exe (Stored with Git LFS) Normal file

Binary file not shown.