Array and Table Literal Expressions for Kym #11
@ -0,0 +1,102 @@
const debug = @import("./debug.zig");

const io = @import("./io.zig");

const list = @import("./list.zig");

const math = @import("./math.zig");

pub const Stacking = struct {
	base_allocator: io.Allocator,
	min_page_size: usize,
	allocations: list.Stack(usize) = .{},
	pages: list.Stack(Page) = .{},

	const Page = struct {
		buffer: []u8,
		used: usize,

		const Self = @This();

		fn available(self: Self) usize {
			return self.buffer.len - self.used;
		}
	};

	pub fn allocate(self: *Stacking, allocation_size: usize) io.AllocationError![]u8 {
		const alignment = @as(usize, 4);
		const aligned_allocation_size = (allocation_size + alignment - 1) & ~(alignment - 1);

		if (self.pages.values.len == 0) {
			const page = try self.allocate_page(math.max(self.min_page_size, aligned_allocation_size));

			page.used = allocation_size;

			return page.buffer[0 .. allocation_size];
		}

		var page = self.current_page() orelse unreachable;

		if (page.available() <= aligned_allocation_size) {
			page = try self.allocate_page(math.max(self.min_page_size, aligned_allocation_size));
		}

		debug.assert(page.available() >= allocation_size);

		defer page.used += aligned_allocation_size;

		return page.buffer[page.used .. (page.used + allocation_size)];
	}

	fn allocate_page(self: *Stacking, page_size: usize) io.AllocationError!*Page {
		var buffer = try io.allocate_many(u8, page_size, self.base_allocator);

		errdefer io.deallocate(self.base_allocator, buffer);

		try self.pages.push_one(self.base_allocator, .{
			.buffer = buffer,
			.used = 0,
		});

		return (self.current_page() orelse unreachable);
	}

	pub fn as_allocator(self: *Stacking) io.Allocator {
		return io.Allocator.bind(Stacking, self, struct {
			fn reallocate(stacking: *Stacking, options: io.AllocationOptions) ?[]u8 {
				const allocation = options.allocation orelse {
					return stacking.allocate(options.size) catch null;
				};

				if (allocation.len == 0) {
					return null;
				}

				const reallocation = stacking.allocate(allocation.len) catch {
					return null;
				};

				io.copy(reallocation, allocation);

				return reallocation;
			}
		}.reallocate);
	}

	pub fn clear_allocations(self: *Stacking) void {
		for (self.pages.values) |page| {
			io.deallocate(self.base_allocator, page.buffer);
		}

		self.pages.deinit(self.base_allocator);
		self.allocations.deinit(self.base_allocator);
	}

	fn current_page(self: Stacking) ?*Page {
		if (self.pages.values.len == 0) {
			return null;
		}

		return &self.pages.values[self.pages.values.len - 1];
	}
};
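
A quick usage sketch of the new arena for reviewers (a hypothetical call site, not part of this change; it assumes the `coral` module name and the `allocate_one` helper referenced elsewhere in this diff):

```zig
const coral = @import("coral");

fn example(base_allocator: coral.io.Allocator) coral.io.AllocationError!void {
	var arena = coral.arena.Stacking{
		.base_allocator = base_allocator,
		.min_page_size = 4096,
	};

	// Every page is released in one call, no matter how many allocations were made.
	defer arena.clear_allocations();

	// Raw allocation straight from the arena.
	const bytes = try arena.allocate(128);

	coral.io.zero(bytes);

	// Or decay into the generic allocator interface used across the codebase.
	const node = try coral.io.allocate_one(arena.as_allocator(), @as(u64, 0));

	_ = node;
}
```
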
@ -1,13 +1,13 @@
///
/// Arena-based memory allocation strategies.
///
pub const arena = @import("./arena.zig");

///
/// Debug build-only utilities and sanity-checkers.
///
pub const debug = @import("./debug.zig");

///
/// Heap memory allocation strategies.
///
pub const heap = @import("./heap.zig");

///
/// Platform-agnostic data input and output operations.
///

@ -1,96 +0,0 @@
|
|||
const debug = @import("./debug.zig");
|
||||
|
||||
const io = @import("./io.zig");
|
||||
|
||||
const math = @import("./math.zig");
|
||||
|
||||
const table = @import("./table.zig");
|
||||
|
||||
pub const Bucketed = struct {
|
||||
base_allocator: io.Allocator,
|
||||
slab_table: SlabTable,
|
||||
|
||||
const Slab = struct {
|
||||
count: usize = 0,
|
||||
buffer: []u8 = &.{},
|
||||
erased: []usize = &.{},
|
||||
|
||||
fn create(self: *Slab, allocator: io.Allocator) ?[]u8 {
|
||||
if (self.count == self.erased.len) {
|
||||
const buffer = io.allocate_many(allocator, u8, math.max(1, self.buffer.len * 2)) orelse return null;
|
||||
|
||||
errdefer io.deallocate(allocator, buffer);
|
||||
|
||||
const erased = io.allocate_many(allocator, usize, math.max(1, self.erased.len * 2)) orelse return null;
|
||||
|
||||
errdefer io.deallocate(allocator, erased);
|
||||
|
||||
self.buffer = buffer;
|
||||
self.erased = erased;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
fn destroy(self: *Slab) void {
|
||||
_ = self;
|
||||
}
|
||||
};
|
||||
|
||||
const SlabTable = table.Hashed(table.unsigned_key(@bitSizeOf(usize)), *Slab);
|
||||
|
||||
fn acquire_slab(self: *Bucketed, slab_element_size: usize) ?*Slab {
|
||||
if (slab_element_size == 0) return null;
|
||||
|
||||
return self.slab_table.lookup(slab_element_size) orelse create_slab: {
|
||||
const allocated_slab = io.allocate_one(self.base_allocator, Slab);
|
||||
|
||||
errdefer io.deallocate(self.base_allocator, allocated_slab);
|
||||
|
||||
allocated_slab.* = .{.size = slab_element_size};
|
||||
|
||||
debug.assert(self.size_buckets.insert(slab_element_size, allocated_slab) catch return null);
|
||||
|
||||
break: create_slab allocated_slab;
|
||||
};
|
||||
}
|
||||
|
||||
pub fn as_allocator(self: *Bucketed) io.Allocator {
|
||||
return io.Allocator.bind(self, reallocate);
|
||||
}
|
||||
|
||||
pub fn deinit(self: *Bucketed) void {
|
||||
var slab_iterator = SlabTable.Iterator{.table = self.slab_table};
|
||||
|
||||
while (slab_iterator.next()) |slab| {
|
||||
slab.free(self.base_allocator);
|
||||
}
|
||||
|
||||
self.size_buckets.free(self.base_allocator);
|
||||
}
|
||||
|
||||
pub fn init(base_allocator: io.Allocator) io.AllocationError!Bucketed {
|
||||
return Bucketed{
|
||||
.base_allocator = base_allocator,
|
||||
.size_buckets = &.{},
|
||||
};
|
||||
}
|
||||
|
||||
pub fn owns(self: Bucketed, memory: []const u8) bool {
|
||||
return io.overlaps(memory.ptr, (self.slab_table.lookup(memory.len) orelse return false).buffer);
|
||||
}
|
||||
|
||||
pub fn reallocate(self: *Bucketed, options: io.AllocationOptions) ?[]u8 {
|
||||
const origin_slab = self.acquire_slab(options.size) orelse return null;
|
||||
const existing_allocation = options.allocation orelse return origin_slab.create(self.base_allocator);
|
||||
|
||||
defer origin_slab.destroy(existing_allocation);
|
||||
|
||||
const target_slab = self.acquire_slab(existing_allocation.len) orelse return null;
|
||||
const updated_allocation = target_slab.create(existing_allocation.len);
|
||||
|
||||
io.copy(updated_allocation, existing_allocation);
|
||||
|
||||
return updated_allocation;
|
||||
}
|
||||
};
|
|
@ -11,12 +11,48 @@ pub const AllocationOptions = struct {
	size: usize,
};

pub const Allocator = Functor(?[]u8, AllocationOptions);
pub const Allocator = Generator(?[]u8, AllocationOptions);

///
/// Function pointer coupled with a state context for providing dynamic dispatch over a given `Input` and `Output`.
/// Function pointer coupled with an immutable state context for providing dynamic dispatch over a given `Input` and
/// `Output`.
///
pub fn Functor(comptime Output: type, comptime Input: type) type {
	return struct {
		context: *const anyopaque,
		invoker: *const fn (capture: *const anyopaque, input: Input) Output,

		const Self = @This();

		pub fn bind(comptime State: type, state: *const State, comptime invoker: fn (capture: *const State, input: Input) Output) Self {
			return .{
				.context = state,

				.invoker = struct {
					fn invoke_opaque(context: *const anyopaque, input: Input) Output {
						const state_alignment = @alignOf(State);

						if (state_alignment == 0) {
							return invoker(@ptrCast(*const State, context), input);
						}

						return invoker(@ptrCast(*const State, @alignCast(state_alignment, context)), input);
					}
				}.invoke_opaque,
			};
		}

		pub fn invoke(self: Self, input: Input) Output {
			return self.invoker(self.context, input);
		}
	};
}

///
/// Function pointer coupled with a mutable state context for providing dynamic dispatch over a given `Input` and
/// `Output`.
///
pub fn Generator(comptime Output: type, comptime Input: type) type {
	return struct {
		context: *anyopaque,
		invoker: *const fn (capture: *anyopaque, input: Input) Output,

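
For anyone reading `Functor` and `Generator` for the first time, a minimal dispatch sketch (the `Adder` type and `example` function are hypothetical, only for illustration):

```zig
const debug = @import("./debug.zig");
const io = @import("./io.zig");

const Adder = struct {
	offset: i32,
};

fn add(adder: *const Adder, input: i32) i32 {
	return adder.offset + input;
}

fn example() void {
	const adder = Adder{.offset = 40};

	// bind() type-erases the state pointer; invoke() dispatches through it.
	const functor = io.Functor(i32, i32).bind(Adder, &adder, add);

	debug.assert(functor.invoke(2) == 42);
}
```

`Generator` is the mutable counterpart (its context is `*anyopaque` rather than `*const anyopaque`), which is presumably why the callback-driven aliases in this file (`Allocator`, `Reader`, `Writer`, `Appender`) are being switched over to it.
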
@ -47,7 +83,7 @@ pub fn Functor(comptime Output: type, comptime Input: type) type {
	};
}

pub const Reader = Functor(?usize, []u8);
pub const Reader = Generator(?usize, []u8);

pub const StreamError = error {
	ReadFailure,

@ -66,7 +102,7 @@ pub const FixedBuffer = struct {
	slice: []u8,

	pub fn as_writer(self: *FixedBuffer) Writer {
		return Writer.bind(self, struct {
		return Writer.bind(FixedBuffer, self, struct {
			fn write(writable_memory: *FixedBuffer, data: []const u8) ?usize {
				return writable_memory.write(data);
			}

@ -104,7 +140,7 @@ pub const GrowingBuffer = struct {
		bytes: []const u8,
	};

	const Appender = Functor(AllocationError!void, AppendOptions);
	const Appender = Generator(AllocationError!void, AppendOptions);

	pub fn as_writer(self: *GrowingBuffer) Writer {
		return Writer.bind(GrowingBuffer, self, struct {

@ -136,7 +172,7 @@ pub const GrowingBuffer = struct {
	}
};

pub const Writer = Functor(?usize, []const u8);
pub const Writer = Generator(?usize, []const u8);

pub fn allocate_many(comptime Type: type, amount: usize, allocator: Allocator) AllocationError![]Type {
	if (@sizeOf(Type) == 0) {

@ -187,46 +223,32 @@ pub fn compare(this: []const u8, that: []const u8) isize {
	return @intCast(isize, this.len) - @intCast(isize, that.len);
}

pub fn deallocate(allocator: Allocator, allocation: anytype) void {
	const Element = @TypeOf(allocation);

	switch (@typeInfo(Element).Pointer.size) {
		.One => {
			debug.assert(allocator.invoke(.{
				.allocation = @ptrCast([*]u8, allocation)[0 .. @sizeOf(Element)],
				.size = 0
			}) == null);
		},

		.Slice => {
			debug.assert(allocator.invoke(.{
				.allocation = @ptrCast([*]u8, allocation.ptr)[0 .. (@sizeOf(Element) * allocation.len)],
				.size = 0
			}) == null);
		},

		.Many, .C => @compileError("length of allocation must be known to deallocate"),
	}
}

pub fn bytes_to(comptime Type: type, source_bytes: []const u8) ?Type {
	const type_size = @sizeOf(Type);

	if (source_bytes.len != type_size) return null;

	var target_bytes = @as([type_size]u8, undefined);

	copy(&target_bytes, source_bytes);

	return @bitCast(Type, target_bytes);
}

pub fn copy(target: []u8, source: []const u8) void {
	var index: usize = 0;

	while (index < source.len) : (index += 1) target[index] = source[index];
}

pub fn deallocate(allocator: Allocator, allocation: anytype) void {
	const Allocation = @TypeOf(allocation);

	switch (@typeInfo(Allocation)) {
		.Pointer => |allocation_pointer| {
			_ = allocator.invoke(.{
				// Byte length is derived from the pointee type, not from the pointer type itself.
				.allocation = switch (allocation_pointer.size) {
					.One => @ptrCast([*]u8, allocation)[0 .. @sizeOf(allocation_pointer.child)],
					.Slice => @ptrCast([*]u8, allocation.ptr)[0 .. (@sizeOf(allocation_pointer.child) * allocation.len)],
					.Many, .C => @compileError("length of allocation must be known to deallocate"),
				},

				.size = 0,
			});
		},

		else => @compileError("cannot deallocate " ++ @typeName(Allocation)),
	}
}

pub fn ends_with(target: []const u8, match: []const u8) bool {
	if (target.len < match.len) return false;

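
A short sketch of the pointer shapes the reworked `deallocate` accepts (the wrapper function below is hypothetical):

```zig
fn example(allocator: Allocator) AllocationError!void {
	const buffer = try allocate_many(u64, 32, allocator);

	// Slices are sized through the .Slice branch; single-item pointers (for
	// example the result of an allocate_one-style helper) take the .One branch.
	defer deallocate(allocator, buffer);

	for (buffer) |*element| element.* = 0;
}
```
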
@ -270,10 +292,6 @@ pub const null_writer = Writer.bind(&null_context, struct {
|
|||
}
|
||||
}.write);
|
||||
|
||||
pub fn overlaps(pointer: [*]u8, memory_range: []u8) bool {
|
||||
return (pointer >= memory_range.ptr) and (pointer < (memory_range.ptr + memory_range.len));
|
||||
}
|
||||
|
||||
pub fn reallocate(allocator: Allocator, allocation: anytype, amount: usize) AllocationError![]@typeInfo(@TypeOf(allocation)).Pointer.child {
|
||||
const pointer_info = @typeInfo(@TypeOf(allocation)).Pointer;
|
||||
const Element = pointer_info.child;
|
||||
|
@ -315,6 +333,10 @@ pub fn swap(comptime Element: type, this: *Element, that: *Element) void {
|
|||
that.* = temp;
|
||||
}
|
||||
|
||||
pub fn tag_of(comptime value: anytype) Tag(@TypeOf(value)) {
|
||||
return @as(Tag(@TypeOf(value)), value);
|
||||
}
|
||||
|
||||
pub fn zero(target: []u8) void {
|
||||
for (target) |*t| t.* = 0;
|
||||
}
|
||||
|
|
|
@ -9,8 +9,8 @@ const math = @import("./math.zig");
|
|||
///
|
||||
pub fn Stack(comptime Value: type) type {
|
||||
return struct {
|
||||
capacity: usize,
|
||||
values: []Value,
|
||||
capacity: usize = 0,
|
||||
values: []Value = &.{},
|
||||
|
||||
///
|
||||
/// Stack type.
|
||||
|
@ -23,8 +23,8 @@ pub fn Stack(comptime Value: type) type {
|
|||
/// The returned buffer may be used to write to the stack without needing to explicitly pass an allocator
|
||||
/// context, as well as decay further into a generic [io.Writer] type.
|
||||
///
|
||||
/// *Note* `allocator` must reference the same allocation strategy as the one originally used to initialize
|
||||
/// `self`.
|
||||
/// *Note* if `capacity` is a non-zero value, `allocator` must reference the same allocation strategy as the one
|
||||
/// originally used to allocate the current internal buffer.
|
||||
///
|
||||
pub fn as_buffer(self: *Self, allocator: io.Allocator) io.GrowingBuffer {
|
||||
return io.GrowingBuffer.bind(Self, allocator, self, push_all);
|
||||
|
@ -44,12 +44,17 @@ pub fn Stack(comptime Value: type) type {
|
|||
///
|
||||
/// To clear all items from the stack while preserving the current internal buffer, see [clear] instead.
|
||||
///
|
||||
/// *Note* `allocator` must reference the same allocation strategy as the one originally used to initialize
|
||||
/// `self`.
|
||||
/// *Note* if the `capacity` field of `self` is a non-zero value, `allocator` must reference the same allocation
|
||||
/// strategy as the one originally used to allocate the current internal buffer.
|
||||
///
|
||||
pub fn deinit(self: *Self, allocator: io.Allocator) void {
|
||||
if (self.capacity == 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
io.deallocate(allocator, self.values);
|
||||
|
||||
self.values = &.{};
|
||||
self.capacity = 0;
|
||||
}
|
||||
|
||||
|
@ -77,8 +82,8 @@ pub fn Stack(comptime Value: type) type {
|
|||
/// Growing ahead of pushing operations is useful when the upper bound of pushes is well-understood, as it can
|
||||
/// reduce the number of allocations required per push.
|
||||
///
|
||||
/// *Note* `allocator` must reference the same allocation strategy as the one originally used to initialize
|
||||
/// `self`.
|
||||
/// *Note* if the `capacity` field of `self` is a non-zero value, `allocator` must reference the same allocation
|
||||
/// strategy as the one originally used to allocate the current internal buffer.
|
||||
///
|
||||
pub fn grow(self: *Self, allocator: io.Allocator, growth_amount: usize) io.AllocationError!void {
|
||||
const grown_capacity = self.capacity + growth_amount;
|
||||
|
@ -86,34 +91,18 @@ pub fn Stack(comptime Value: type) type {
|
|||
|
||||
errdefer io.deallocate(allocator, values);
|
||||
|
||||
if (self.capacity != 0) {
|
||||
for (0 .. self.values.len) |index| {
|
||||
values[index] = self.values[index];
|
||||
}
|
||||
|
||||
io.deallocate(allocator, self.values);
|
||||
}
|
||||
|
||||
self.values = values;
|
||||
self.capacity = grown_capacity;
|
||||
}
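
Since several of these notes concern allocator consistency, here is a hedged sketch of the intended pattern (a hypothetical helper): default-initialize the stack, grow it once for a known upper bound, and pass the same allocator to every later call, including `deinit`.

```zig
fn collect(allocator: io.Allocator, inputs: []const u64) io.AllocationError!Stack(u64) {
	var stack = Stack(u64){};

	errdefer stack.deinit(allocator);

	// One allocation up front instead of one per push.
	try stack.grow(allocator, inputs.len);

	for (inputs) |input| {
		try stack.push_one(allocator, input);
	}

	return stack;
}
```
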
|
||||
|
||||
///
|
||||
/// Attempts to allocate and return an empty stack with an internal buffer of `initial_capacity` size using
|
||||
/// `allocator` as the memory allocation strategy.
|
||||
///
|
||||
/// The function returns [io.AllocationError] if `allocator` could not commit the memory required for an
|
||||
/// internal buffer of `initial_capacity` size.
|
||||
///
|
||||
pub fn init(allocator: io.Allocator, initial_capacity: usize) !Self {
|
||||
const values = try io.allocate_many(Value, initial_capacity, allocator);
|
||||
|
||||
errdefer io.deallocate(values);
|
||||
|
||||
return Self{
|
||||
.capacity = initial_capacity,
|
||||
.values = values[0 .. 0],
|
||||
};
|
||||
}
|
||||
|
||||
///
|
||||
/// Attempts to remove the last element of `self` that was inserted, if one exists, returning it or `null` if
|
||||
/// `self` is empty.
|
||||
|
@ -137,8 +126,8 @@ pub fn Stack(comptime Value: type) type {
|
|||
/// The function returns [io.AllocationError] if `allocator` could not commit the memory required to grow the
|
||||
/// internal buffer of `self` when necessary.
|
||||
///
|
||||
/// *Note* `allocator` must reference the same allocation strategy as the one originally used to initialize
|
||||
/// `self`.
|
||||
/// *Note* if the `capacity` field of `self` is a non-zero value, `allocator` must reference the same allocation
|
||||
/// strategy as the one originally used to allocate the current internal buffer.
|
||||
///
|
||||
pub fn push_all(self: *Self, allocator: io.Allocator, values: []const Value) io.AllocationError!void {
|
||||
const new_length = self.values.len + values.len;
|
||||
|
@ -163,8 +152,8 @@ pub fn Stack(comptime Value: type) type {
|
|||
/// The function returns [io.AllocationError] if `allocator` could not commit the memory required to grow the
|
||||
/// internal buffer of `self` when necessary.
|
||||
///
|
||||
/// *Note* `allocator` must reference the same allocation strategy as the one originally used to initialize
|
||||
/// `self`.
|
||||
/// *Note* if the `capacity` field of `self` is a non-zero value, `allocator` must reference the same allocation
|
||||
/// strategy as the one originally used to allocate the current internal buffer.
|
||||
///
|
||||
pub fn push_many(self: *Self, allocator: io.Allocator, value: Value, amount: usize) io.AllocationError!void {
|
||||
const new_length = self.values.len + amount;
|
||||
|
@ -189,8 +178,8 @@ pub fn Stack(comptime Value: type) type {
|
|||
/// The function returns [io.AllocationError] if `allocator` could not commit the memory required to grow the
|
||||
/// internal buffer of `self` when necessary.
|
||||
///
|
||||
/// *Note* `allocator` must reference the same allocation strategy as the one originally used to initialize
|
||||
/// `self`.
|
||||
/// *Note* if the `capacity` field of `self` is a non-zero value, `allocator` must reference the same allocation
|
||||
/// strategy as the one originally used to allocate the current internal buffer.
|
||||
///
|
||||
pub fn push_one(self: *Self, allocator: io.Allocator, value: Value) io.AllocationError!void {
|
||||
if (self.values.len == self.capacity) {
|
||||
|
|
|
@ -6,8 +6,8 @@ const io = @import("./io.zig");
|
|||
|
||||
pub fn Map(comptime Index: type, comptime Element: type) type {
|
||||
return struct {
|
||||
free_index: Index,
|
||||
entries: []Entry,
|
||||
free_index: Index = 0,
|
||||
entries: []Entry = &.{},
|
||||
|
||||
const Entry = union (enum) {
|
||||
free_index: usize,
|
||||
|
@ -28,17 +28,6 @@ pub fn Map(comptime Index: type, comptime Element: type) type {
|
|||
io.deallocate(allocator, self.entries);
|
||||
}
|
||||
|
||||
pub fn init(allocator: io.Allocator) io.AllocationError!Self {
|
||||
const entries = try io.allocate_many(Entry, 4, allocator);
|
||||
|
||||
errdefer io.deallocate(allocator, entries);
|
||||
|
||||
return Self{
|
||||
.free_index = 0,
|
||||
.entries = entries,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn insert(self: *Self, allocator: io.Allocator, value: Element) io.AllocationError!Index {
|
||||
_ = self;
|
||||
_ = allocator;
|
||||
|
|
|
@ -22,8 +22,8 @@ pub fn Hashed(comptime Key: type, comptime Value: type, comptime keyer: Keyer(Ke
|
|||
const growth_factor = 0.6;
|
||||
|
||||
return struct {
|
||||
count: usize,
|
||||
table: []?Entry,
|
||||
count: usize = 0,
|
||||
table: []?Entry = &.{},
|
||||
|
||||
///
|
||||
/// Key-value pair bundling.
|
||||
|
@ -82,7 +82,7 @@ pub fn Hashed(comptime Key: type, comptime Value: type, comptime keyer: Keyer(Ke
|
|||
///
|
||||
pub fn assign(self: *Self, allocator: io.Allocator, key: Key, value: Value) io.AllocationError!?Entry {
|
||||
if (self.calculate_load_factor() >= load_max) {
|
||||
const growth_size = @intToFloat(f64, self.table.len) * growth_factor;
|
||||
const growth_size = @intToFloat(f64, math.max(1, self.table.len)) * growth_factor;
|
||||
|
||||
if (growth_size > math.max_int(@typeInfo(usize).Int)) {
|
||||
return error.OutOfMemory;
|
||||
|
@ -129,7 +129,7 @@ pub fn Hashed(comptime Key: type, comptime Value: type, comptime keyer: Keyer(Ke
|
|||
/// Returns the calculated load factor of `self` at the moment.
|
||||
///
|
||||
pub fn calculate_load_factor(self: Self) f32 {
|
||||
return @intToFloat(f32, self.count) / @intToFloat(f32, self.table.len);
|
||||
return if (self.table.len == 0) 1 else @intToFloat(f32, self.count) / @intToFloat(f32, self.table.len);
|
||||
}
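
A worked example of the zero-length guard: an empty table now reports a load factor of 1, which is at or above any `load_max` of 1 or less (the constant itself is outside this hunk), so the first `insert` or `assign` rehashes before probing; with, say, 5 entries across 8 slots the factor is 5 / 8 = 0.625.
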
|
||||
|
||||
///
|
||||
|
@ -160,28 +160,6 @@ pub fn Hashed(comptime Key: type, comptime Value: type, comptime keyer: Keyer(Ke
|
|||
self.count = 0;
|
||||
}
|
||||
|
||||
///
|
||||
/// Attempts to allocate and return an empty table with an implementation-defined initial capacity using
|
||||
/// `allocator` as the memory allocation strategy.
|
||||
///
|
||||
/// The function returns [AllocationError] instead if `allocator` cannot commit the memory required for the
|
||||
/// table capcity size.
|
||||
///
|
||||
pub fn init(allocator: io.Allocator) io.AllocationError!Self {
|
||||
const table = try io.allocate_many(?Entry, 4, allocator);
|
||||
|
||||
errdefer io.deallocate(allocator, table);
|
||||
|
||||
for (table) |*entry| {
|
||||
entry.* = null;
|
||||
}
|
||||
|
||||
return Self{
|
||||
.table = table,
|
||||
.count = 0,
|
||||
};
|
||||
}
|
||||
|
||||
///
|
||||
/// Attempts to write the `key`-`value` pair into `self`, using `allocator` as the memory allocation strategy,
|
||||
/// if no value already exists with a matching `key`, returning `true` if it was inserted, otherwise `false`.
|
||||
|
@ -194,17 +172,16 @@ pub fn Hashed(comptime Key: type, comptime Value: type, comptime keyer: Keyer(Ke
|
|||
///
|
||||
pub fn insert(self: *Self, allocator: io.Allocator, key: Key, value: Value) io.AllocationError!bool {
|
||||
if (self.calculate_load_factor() >= load_max) {
|
||||
const growth_size = @intToFloat(f64, self.table.len) * growth_factor;
|
||||
const growth_amount = @intToFloat(f64, self.table.len) * growth_factor;
|
||||
const min_size = 1;
|
||||
|
||||
if (growth_size > math.max_int(@typeInfo(usize).Int)) {
|
||||
return error.OutOfMemory;
|
||||
}
|
||||
|
||||
try self.rehash(allocator, @floatToInt(usize, growth_size));
|
||||
try self.rehash(allocator, self.table.len + math.max(min_size, @floatToInt(usize, growth_amount)));
|
||||
}
|
||||
|
||||
debug.assert(self.table.len > self.count);
|
||||
|
||||
defer self.count += 1;
|
||||
|
||||
return (Entry{
|
||||
.key = key,
|
||||
.value = value,
|
||||
|
@ -222,8 +199,9 @@ pub fn Hashed(comptime Key: type, comptime Value: type, comptime keyer: Keyer(Ke
|
|||
{
|
||||
const hash_max = math.min(math.max_int(hash_info), self.table.len);
|
||||
var hashed_key = math.wrap(keyer.hasher(key), math.min_int(hash_info), hash_max);
|
||||
var iterations = @as(usize, 0);
|
||||
|
||||
while (true) {
|
||||
while (iterations < self.count) : (iterations += 1) {
|
||||
const entry = &(self.table[hashed_key] orelse return null);
|
||||
|
||||
if (keyer.comparer(entry.key, key) == 0) {
|
||||
|
@ -260,6 +238,8 @@ pub fn Hashed(comptime Key: type, comptime Value: type, comptime keyer: Keyer(Ke
|
|||
entry.* = null;
|
||||
}
|
||||
|
||||
if (old_table.len != 0)
|
||||
{
|
||||
for (old_table) |maybe_entry| {
|
||||
if (maybe_entry) |entry| {
|
||||
debug.assert(entry.write_into(self.table));
|
||||
|
@ -268,6 +248,7 @@ pub fn Hashed(comptime Key: type, comptime Value: type, comptime keyer: Keyer(Ke
|
|||
|
||||
io.deallocate(allocator, old_table);
|
||||
}
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
|
|
|
@ -1,9 +1,24 @@
const std = @import("std");
const debug = @import("./debug.zig");

const io = @import("./io.zig");

const math = @import("./math.zig");

const std = @import("std");

///
///
///
pub const ArgsContext = struct {
	writer: io.Writer,
	index: usize,
};

///
///
///
pub const ArgsFormatter = io.Functor(PrintError!void, ArgsContext);

///
/// Errors that may occur during utf8-encoded int parsing.
///

@ -31,6 +46,17 @@ pub const PrintError = error {
	PrintIncomplete,
};

pub fn format_args(args: anytype) ArgsFormatter {
	const Args = @TypeOf(args);

	return ArgsFormatter.bind(Args, &args, struct {
		fn get(typed_args: *const Args, context: ArgsContext) PrintError!void {
			_ = typed_args;
			_ = context;
		}
	}.get);
}

///
/// Attempts to parse a float value of type described by `float` from `utf8`.
///

@ -146,6 +172,91 @@ pub fn print(writer: io.Writer, utf8: []const u8) PrintError!void {
	}
}

///
///
///
pub fn print_float(comptime float: std.builtin.Type.Float, writer: io.Writer, value: @Type(float)) PrintError!void {
	_ = writer;
	_ = value;
}

///
/// Prints the format string `format` with all `{...}` placeholders substituted with a value in `args` corresponding
/// to the ordinality of each substitution.
///
/// Specifiers may be placed inside `{}` to augment the behavior of the corresponding [FormatArg]. For example, to print
/// a float formatted with an integer component padding of `3`, a decimal padding of `2`, and a prefix for positives as
/// well as negatives, you may specify it as `{+3.2}`.
///
/// To prevent braces from being interpreted as format placeholders, simply double-brace them (`"{{"`) and the format
/// string will substitute them as a single brace.
///
/// *Note* the function assumes that, for every specifier in `format`, there is a corresponding [FormatArg] in `args`.
/// Further, any instance of `{` is assumed to be a placeholder and must be terminated with a corresponding `}` before
/// the end of the string. Failure to meet any of these requirements will result in safety-checked runtime behavior.
///
pub fn print_formatted(writer: io.Writer, format: []const u8, args_formatter: ArgsFormatter) PrintError!void {
	const usize_int = @typeInfo(usize).Int;
	var head = @as(usize, 0);
	var tail = @as(usize, 0);
	var arg_index = @as(usize, 0);

	while (tail < format.len) : (tail += 1) {
		if (format[tail] == '{') {
			debug.assert(tail < format.len);

			tail += 1;

			switch (format[tail]) {
				'{' => {
					try print(writer, format[head .. tail]);

					tail += 1;
					head = tail;
				},

				'}' => {
					try print(writer, format[head .. tail]);

					try args_formatter.invoke(.{
						.index = arg_index,
						.writer = writer,
					});

					arg_index += 1;
					tail += 1;
					head = tail;
				},

				else => {
					try print(writer, format[head .. tail]);

					tail += 1;
					head = tail;

					debug.assert(tail < format.len);

					while (format[tail] != '}') {
						tail += 1;

						debug.assert(tail < format.len);
					}

					const arg_index_name = format[head .. tail];

					try args_formatter.invoke(.{
						.index = parse_int(usize_int, arg_index_name, .{}) catch @panic("invalid arg index value"),
						.writer = writer,
					});

					tail += 1;
					head = tail;
				},
			}
		}
	}
}
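
A usage sketch of the placeholder grammar described above (hypothetical call site; note that `format_args` is still a stub in this change, so the bound values are not actually rendered yet):

```zig
fn report(writer: io.Writer) PrintError!void {
	// "{}" consumes arguments in order, "{0}" addresses one by index, and a
	// doubled "{{" prints a literal opening brace.
	try print_formatted(writer, "score: {} of {0}", format_args(.{
		@as(u32, 7),
		@as(u32, 10),
	}));
}
```
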

///
/// Attempts to print the int `value` described by `int` to `writer`.
///

@ -0,0 +1,341 @@
|
|||
const coral = @import("coral");
|
||||
|
||||
const tokens = @import("./tokens.zig");
|
||||
|
||||
const types = @import("./types.zig");
|
||||
|
||||
allocator: coral.io.Allocator,
|
||||
arena: coral.arena.Stacking,
|
||||
statements: StatementList,
|
||||
error_message: []const u8,
|
||||
|
||||
pub const Expression = union (enum) {
|
||||
nil_literal,
|
||||
true_literal,
|
||||
false_literal,
|
||||
integer_literal: types.Integer,
|
||||
float_literal: types.Float,
|
||||
string_literal: []const u8,
|
||||
array_literal: coral.list.Stack(Expression),
|
||||
|
||||
table_literal: coral.list.Stack(struct {
|
||||
identifier: []const u8,
|
||||
expression: Expression,
|
||||
}),
|
||||
|
||||
grouped_expression: *Expression,
|
||||
|
||||
binary_operation: struct {
|
||||
operator: tokens.Token,
|
||||
lhs_expression: *Expression,
|
||||
rhs_expression: *Expression,
|
||||
},
|
||||
|
||||
unary_operation: struct {
|
||||
operator: tokens.Token,
|
||||
expression: *Expression,
|
||||
},
|
||||
};
|
||||
|
||||
const ExpressionParser = fn (self: *Self, tokenizer: *tokens.Tokenizer) types.ParseError!Expression;
|
||||
|
||||
const Self = @This();
|
||||
|
||||
pub const Statement = union (enum) {
|
||||
return_expression: Expression,
|
||||
return_nothing,
|
||||
};
|
||||
|
||||
const StatementList = coral.list.Stack(Statement);
|
||||
|
||||
const UnaryOperation = enum {
|
||||
boolean_negation,
|
||||
numeric_negation,
|
||||
};
|
||||
|
||||
fn binary_operation_parser(comptime parse_next: ExpressionParser, comptime operators: []const tokens.Token) ExpressionParser {
|
||||
return struct {
|
||||
fn parse(self: *Self, tokenizer: *tokens.Tokenizer) types.ParseError!Expression {
|
||||
var expression = try parse_next(self, tokenizer);
|
||||
|
||||
{
|
||||
const allocator = self.arena.as_allocator();
|
||||
|
||||
inline for (operators) |operator| {
|
||||
if (tokenizer.current_token == coral.io.tag_of(operator)) {
|
||||
try self.check_syntax(
|
||||
tokenizer.step(.{.include_newlines = true}),
|
||||
"expected right-hand side of expression after `" ++ comptime operator.text() ++ "`");
|
||||
|
||||
expression = .{
|
||||
.binary_operation = .{
|
||||
.operator = operator,
|
||||
.lhs_expression = try coral.io.allocate_one(allocator, expression),
|
||||
.rhs_expression = try coral.io.allocate_one(allocator, try parse_next(self, tokenizer)),
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return expression;
|
||||
}
|
||||
}.parse;
|
||||
}
|
||||
|
||||
fn check_syntax(self: *Self, condition: bool, error_message: []const u8) types.ParseError!void {
|
||||
if (condition) {
|
||||
return;
|
||||
}
|
||||
|
||||
return self.fail_syntax(error_message);
|
||||
}
|
||||
|
||||
pub fn deinit(self: *Self) void {
|
||||
self.arena.clear_allocations();
|
||||
self.statements.deinit(self.allocator);
|
||||
}
|
||||
|
||||
fn fail_syntax(self: *Self, error_message: []const u8) types.ParseError {
|
||||
self.error_message = error_message;
|
||||
|
||||
return error.BadSyntax;
|
||||
}
|
||||
|
||||
pub fn init(allocator: coral.io.Allocator) coral.io.AllocationError!Self {
|
||||
return Self{
|
||||
.arena = .{
|
||||
.base_allocator = allocator,
|
||||
.min_page_size = 4096,
|
||||
},
|
||||
|
||||
.allocator = allocator,
|
||||
.statements = .{},
|
||||
.error_message = "",
|
||||
};
|
||||
}
|
||||
|
||||
pub fn list_statements(self: Self) []const Statement {
|
||||
return self.statements.values;
|
||||
}
|
||||
|
||||
pub fn parse(self: *Self, tokenizer: *tokens.Tokenizer) types.ParseError!void {
|
||||
self.reset();
|
||||
|
||||
errdefer self.reset();
|
||||
|
||||
var has_not_returned_yet = true;
|
||||
|
||||
while (tokenizer.step(.{.include_newlines = false})) {
|
||||
switch (tokenizer.current_token) {
|
||||
.keyword_return => {
|
||||
try self.check_syntax(has_not_returned_yet, "cannot return more than once per function scope");
|
||||
|
||||
try self.statements.push_one(self.allocator, get_statement: {
|
||||
if (tokenizer.step(.{.include_newlines = true})) {
|
||||
if (tokenizer.current_token != .newline) {
|
||||
break: get_statement .{.return_expression = try self.parse_expression(tokenizer)};
|
||||
}
|
||||
|
||||
if (tokenizer.step(.{.include_newlines = true})) {
|
||||
try self.check_syntax(
|
||||
tokenizer.current_token == .newline,
|
||||
"expected end of declaration after return expression");
|
||||
}
|
||||
}
|
||||
|
||||
break: get_statement .return_nothing;
|
||||
});
|
||||
|
||||
has_not_returned_yet = false;
|
||||
},
|
||||
|
||||
else => return self.fail_syntax("invalid statement"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const parse_comparison = binary_operation_parser(parse_term, &.{
|
||||
.symbol_greater_than,
|
||||
.symbol_greater_equals,
|
||||
.symbol_less_than,
|
||||
.symbol_less_equals
|
||||
});
|
||||
|
||||
const parse_equality = binary_operation_parser(parse_comparison, &.{
|
||||
.symbol_double_equals,
|
||||
});
|
||||
|
||||
const parse_expression = binary_operation_parser(parse_equality, &.{
|
||||
.symbol_plus,
|
||||
.symbol_minus,
|
||||
});
|
||||
|
||||
fn parse_factor(self: *Self, tokenizer: *tokens.Tokenizer) types.ParseError!Expression {
|
||||
switch (tokenizer.current_token) {
|
||||
.symbol_paren_left => {
|
||||
try self.check_syntax(tokenizer.step(.{.include_newlines = false}), "expected an expression after `(`");
|
||||
|
||||
const expression = try self.parse_expression(tokenizer);
|
||||
|
||||
try self.check_syntax(
|
||||
tokenizer.step(.{.include_newlines = false}) and tokenizer.current_token == .symbol_paren_right,
|
||||
"expected a closing `)` after expression");
|
||||
|
||||
return Expression{.grouped_expression = try coral.io.allocate_one(self.arena.as_allocator(), expression)};
|
||||
},
|
||||
|
||||
.integer => |value| {
|
||||
_ = tokenizer.step(.{.include_newlines = false});
|
||||
|
||||
return Expression{
|
||||
.integer_literal = coral.utf8.parse_int(@typeInfo(types.Integer).Int, value, .{}) catch |parse_error| {
|
||||
return self.fail_syntax(switch (parse_error) {
|
||||
error.BadSyntax => "invalid integer literal",
|
||||
error.IntOverflow => "integer literal is too big",
|
||||
});
|
||||
},
|
||||
};
|
||||
},
|
||||
|
||||
.real => |value| {
|
||||
_ = tokenizer.step(.{.include_newlines = false});
|
||||
|
||||
return Expression{
|
||||
.float_literal = coral.utf8.parse_float(@typeInfo(types.Float).Float, value) catch |parse_error| {
|
||||
return self.fail_syntax(switch (parse_error) {
|
||||
error.BadSyntax => "invalid float literal",
|
||||
});
|
||||
},
|
||||
};
|
||||
},
|
||||
|
||||
.string => |value| {
|
||||
_ = tokenizer.step(.{.include_newlines = false});
|
||||
|
||||
return Expression{.string_literal = value};
|
||||
},
|
||||
|
||||
.symbol_bracket_left => {
|
||||
try self.check_syntax(tokenizer.step(.{.include_newlines = false}), "unexpected end of array literal");
|
||||
|
||||
var expression = Expression{.array_literal = .{}};
|
||||
|
||||
coral.debug.assert(expression == .array_literal);
|
||||
|
||||
const allocator = self.arena.as_allocator();
|
||||
const array_average_maximum = 32;
|
||||
|
||||
try expression.array_literal.grow(allocator, array_average_maximum);
|
||||
|
||||
while (true) {
|
||||
switch (tokenizer.current_token) {
|
||||
.symbol_bracket_right => {
|
||||
_ = tokenizer.step(.{.include_newlines = false});
|
||||
|
||||
return expression;
|
||||
},
|
||||
|
||||
else => {
|
||||
try self.check_syntax(
|
||||
tokenizer.step(.{.include_newlines = false}),
|
||||
"expected `]` or expression after `[`");
|
||||
|
||||
try expression.array_literal.push_one(allocator, try self.parse_expression(tokenizer));
|
||||
},
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
.symbol_brace_left => {
|
||||
try self.check_syntax(tokenizer.step(.{.include_newlines = false}), "unexpected end of table literal");
|
||||
|
||||
var expression = Expression{.table_literal = .{}};
|
||||
|
||||
coral.debug.assert(expression == .table_literal);
|
||||
|
||||
const allocator = self.arena.as_allocator();
|
||||
|
||||
while (true) {
|
||||
switch (tokenizer.current_token) {
|
||||
.symbol_brace_right => {
|
||||
_ = tokenizer.step(.{.include_newlines = false});
|
||||
|
||||
return expression;
|
||||
},
|
||||
|
||||
.local => |identifier| {
|
||||
try self.check_syntax(
|
||||
tokenizer.step(.{.include_newlines = false}) and tokenizer.current_token == .symbol_equals,
|
||||
"expected `=` after identifier");
|
||||
|
||||
try self.check_syntax(tokenizer.step(.{.include_newlines = false}), "unexpected end after `=`");
|
||||
|
||||
try expression.table_literal.push_one(allocator, .{
|
||||
.identifier = identifier,
|
||||
.expression = try self.parse_expression(tokenizer),
|
||||
});
|
||||
|
||||
switch (tokenizer.current_token) {
|
||||
.symbol_comma => _ = tokenizer.step(.{.include_newlines = false}),
|
||||
|
||||
.symbol_brace_right => {
|
||||
_ = tokenizer.step(.{.include_newlines = false});
|
||||
|
||||
return expression;
|
||||
},
|
||||
|
||||
else => return self.fail_syntax("expected `,` or `}` after expression"),
|
||||
}
|
||||
},
|
||||
|
||||
else => return self.fail_syntax("expected `}` or fields in table literal"),
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
.symbol_minus => {
|
||||
try self.check_syntax(
|
||||
tokenizer.step(.{.include_newlines = false}),
|
||||
"expected expression after numeric negation (`-`)");
|
||||
|
||||
return Expression{
|
||||
.unary_operation = .{
|
||||
.expression = try coral.io.allocate_one(
|
||||
self.arena.as_allocator(),
|
||||
try self.parse_factor(tokenizer)),
|
||||
|
||||
.operator = .symbol_minus,
|
||||
},
|
||||
};
|
||||
},
|
||||
|
||||
.symbol_bang => {
|
||||
try self.check_syntax(
|
||||
tokenizer.step(.{.include_newlines = false}),
|
||||
"expected expression after numeric negation (`!`)");
|
||||
|
||||
return Expression{
|
||||
.unary_operation = .{
|
||||
.expression = try coral.io.allocate_one(
|
||||
self.arena.as_allocator(),
|
||||
try self.parse_factor(tokenizer)),
|
||||
|
||||
.operator = .symbol_bang,
|
||||
},
|
||||
};
|
||||
},
|
||||
|
||||
else => return self.fail_syntax("unexpected token in expression"),
|
||||
}
|
||||
}
|
||||
|
||||
const parse_term = binary_operation_parser(parse_factor, &.{
|
||||
.symbol_asterisk,
|
||||
.symbol_forward_slash,
|
||||
});
|
||||
|
||||
pub fn reset(self: *Self) void {
|
||||
self.statements.clear();
|
||||
self.arena.clear_allocations();
|
||||
}
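
To make the recursive-descent chain above easier to follow, a hedged driver sketch (hypothetical, mirroring how Compiler.zig uses this file): `parse_expression` delegates through `parse_equality`, `parse_comparison`, and `parse_term` down to `parse_factor`, so `*` and `/` bind tighter than `+` and `-`.

```zig
const coral = @import("coral");

const tokens = @import("./tokens.zig");

const Ast = @import("./Ast.zig");

fn dump(allocator: coral.io.Allocator) !void {
	var ast = try Ast.init(allocator);

	defer ast.deinit();

	var tokenizer = tokens.Tokenizer{.source = "return 1 + 2 * 3"};

	// parse() resets any previous state, so an Ast may be reused across chunks.
	try ast.parse(&tokenizer);

	for (ast.list_statements()) |statement| {
		switch (statement) {
			// "1 + 2 * 3" arrives as a .binary_operation with the "*" nested
			// inside the right-hand side of the "+".
			.return_expression => |expression| coral.debug.assert(expression == .binary_operation),
			.return_nothing => {},
		}
	}
}
```
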
|
|
@ -1,6 +1,6 @@
|
|||
const Environment = @import("./Environment.zig");
|
||||
const Ast = @import("./Ast.zig");
|
||||
|
||||
const ast = @import("./ast.zig");
|
||||
const Environment = @import("./Environment.zig");
|
||||
|
||||
const coral = @import("coral");
|
||||
|
||||
|
@ -51,29 +51,15 @@ fn clear_error_details(self: *Self) void {
|
|||
}
|
||||
|
||||
pub fn compile(self: *Self, data: []const u8) types.RuntimeError!void {
|
||||
var ast = try Ast.init(self.env.allocator);
|
||||
|
||||
errdefer ast.deinit();
|
||||
|
||||
{
|
||||
var tokenizer = tokens.Tokenizer{.source = data};
|
||||
var parsed_statements = try ast.ParsedStatements.init(self.env.allocator, &tokenizer);
|
||||
|
||||
switch (parsed_statements) {
|
||||
.valid => |*statements| {
|
||||
defer statements.deinit(self.env.allocator);
|
||||
|
||||
for (statements.list.values) |statement| {
|
||||
switch (statement) {
|
||||
.return_expression => |return_expression| {
|
||||
try self.compile_expression(return_expression);
|
||||
try self.emit_opcode(.ret);
|
||||
},
|
||||
|
||||
.return_nothing => {
|
||||
try self.emit_opcode(.push_nil);
|
||||
try self.emit_opcode(.ret);
|
||||
},
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
.invalid => |invalid| {
|
||||
ast.parse(&tokenizer) catch |init_error| {
|
||||
if (init_error == error.OutOfMemory) {
|
||||
self.clear_error_details();
|
||||
|
||||
try self.message_data.push_all(self.env.allocator, "@(");
|
||||
|
@ -89,16 +75,31 @@ pub fn compile(self: *Self, data: []const u8) types.RuntimeError!void {
|
|||
return error.OutOfMemory;
|
||||
};
|
||||
|
||||
coral.utf8.print(message_writer, invalid) catch {
|
||||
coral.utf8.print(message_writer, ast.error_message) catch {
|
||||
return error.OutOfMemory;
|
||||
};
|
||||
}
|
||||
|
||||
return error.BadSyntax;
|
||||
return init_error;
|
||||
};
|
||||
}
|
||||
|
||||
for (ast.list_statements()) |statement| {
|
||||
switch (statement) {
|
||||
.return_expression => |return_expression| {
|
||||
try self.compile_expression(return_expression);
|
||||
try self.emit_opcode(.ret);
|
||||
},
|
||||
|
||||
.return_nothing => {
|
||||
try self.emit_opcode(.push_nil);
|
||||
try self.emit_opcode(.ret);
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn compile_expression(self: *Self, expression: ast.Expression) types.RuntimeError!void {
|
||||
pub fn compile_expression(self: *Self, expression: Ast.Expression) types.RuntimeError!void {
|
||||
switch (expression) {
|
||||
.nil_literal => try self.emit_opcode(.push_nil),
|
||||
.true_literal => try self.emit_opcode(.push_true),
|
||||
|
@ -159,25 +160,27 @@ pub fn compile_expression(self: *Self, expression: ast.Expression) types.Runtime
|
|||
try self.compile_expression(operation.lhs_expression.*);
|
||||
try self.compile_expression(operation.rhs_expression.*);
|
||||
|
||||
try self.emit_opcode(switch (operation.kind) {
|
||||
.addition => .add,
|
||||
.subtraction => .sub,
|
||||
.multiplication => .mul,
|
||||
.division => .div,
|
||||
.equality_comparison => .compare_eq,
|
||||
.greater_than_comparison => .compare_gt,
|
||||
.greater_equals_comparison => .compare_ge,
|
||||
.less_than_comparison => .compare_lt,
|
||||
.less_equals_comparison => .compare_le,
|
||||
try self.emit_opcode(switch (operation.operator) {
|
||||
.symbol_plus => .add,
|
||||
.symbol_minus => .sub,
|
||||
.symbol_asterisk => .mul,
|
||||
.symbol_forward_slash => .div,
|
||||
.symbol_double_equals => .compare_eq,
|
||||
.symbol_greater_than => .compare_gt,
|
||||
.symbol_greater_equals => .compare_ge,
|
||||
.symbol_less_than => .compare_lt,
|
||||
.symbol_less_equals => .compare_le,
|
||||
else => unreachable,
|
||||
});
|
||||
},
|
||||
|
||||
.unary_operation => |operation| {
|
||||
try self.compile_expression(operation.expression.*);
|
||||
|
||||
try self.emit_opcode(switch (operation.kind) {
|
||||
.boolean_negation => .not,
|
||||
.numeric_negation => .neg,
|
||||
try self.emit_opcode(switch (operation.operator) {
|
||||
.symbol_bang => .not,
|
||||
.symbol_minus => .neg,
|
||||
else => unreachable,
|
||||
});
|
||||
},
|
||||
|
||||
|
@ -217,20 +220,16 @@ pub fn error_details(self: Self) []const u8 {
|
|||
}
|
||||
|
||||
pub fn init(env: *Environment, chunk_name: []const u8) coral.io.AllocationError!Self {
|
||||
var bytecode_buffer = try Buffer.init(env.allocator, 0);
|
||||
var message_data = Buffer{};
|
||||
|
||||
errdefer bytecode_buffer.deinit(env.allocator);
|
||||
|
||||
var message_data = try Buffer.init(env.allocator, chunk_name.len);
|
||||
try message_data.push_all(env.allocator, chunk_name);
|
||||
|
||||
errdefer message_data.deinit(env.allocator);
|
||||
|
||||
message_data.push_all(env.allocator, chunk_name) catch unreachable;
|
||||
|
||||
return Self{
|
||||
.env = env,
|
||||
.message_data = message_data,
|
||||
.bytecode_buffer = bytecode_buffer,
|
||||
.bytecode_buffer = .{},
|
||||
.message_name_len = chunk_name.len,
|
||||
};
|
||||
}
|
||||
|
|
|
@ -235,8 +235,10 @@ pub fn execute_file(self: *Self, fs: file.System, file_path: file.Path) ExecuteF
|
|||
|
||||
defer readable_file.close();
|
||||
|
||||
var file_source = coral.list.Stack(u8){};
|
||||
const file_size = (try fs.query_info(file_path)).size;
|
||||
var file_source = try coral.list.Stack(u8).init(self.allocator, file_size);
|
||||
|
||||
try file_source.grow(self.allocator, file_size);
|
||||
|
||||
defer file_source.deinit(self.allocator);
|
||||
|
||||
|
@ -287,41 +289,35 @@ pub fn get_object(self: *Self, indexable: types.Ref, index: types.Ref) types.Run
|
|||
}
|
||||
|
||||
pub fn init(allocator: coral.io.Allocator, options: InitOptions) !Self {
|
||||
var values = try ValueStack.init(allocator, options.values_max * options.calls_max);
|
||||
|
||||
errdefer values.deinit(allocator);
|
||||
|
||||
var calls = try CallStack.init(allocator, options.calls_max);
|
||||
|
||||
errdefer calls.deinit(allocator);
|
||||
|
||||
var interned = try InternTable.init(allocator);
|
||||
|
||||
errdefer interned.deinit(allocator);
|
||||
|
||||
var heap = try ObjectSlab.init(allocator);
|
||||
|
||||
errdefer heap.deinit(allocator);
|
||||
|
||||
var environment = Self{
|
||||
var env = Self{
|
||||
.global_object = 0,
|
||||
.allocator = allocator,
|
||||
.reporter = options.reporter,
|
||||
.interned = interned,
|
||||
.values = values,
|
||||
.calls = calls,
|
||||
.heap = heap,
|
||||
.interned = .{},
|
||||
.values = .{},
|
||||
.calls = .{},
|
||||
.heap = .{},
|
||||
};
|
||||
|
||||
const globals = try environment.new_object(&.{}, .{
|
||||
errdefer {
|
||||
env.values.deinit(allocator);
|
||||
env.calls.deinit(allocator);
|
||||
}
|
||||
|
||||
try env.values.grow(allocator, options.values_max * options.calls_max);
|
||||
try env.calls.grow(allocator, options.calls_max);
|
||||
|
||||
{
|
||||
const globals = try env.new_object(&.{}, .{
|
||||
.identity = "KYM GLOBAL OBJECT OC DO NOT STEAL",
|
||||
});
|
||||
|
||||
coral.debug.assert(globals == .object);
|
||||
|
||||
environment.global_object = globals.object;
|
||||
env.global_object = globals.object;
|
||||
}
|
||||
|
||||
return environment;
|
||||
return env;
|
||||
}
|
||||
|
||||
pub fn intern(self: *Self, string: []const u8) coral.io.AllocationError!types.Ref {
|
||||
|
@ -360,17 +356,13 @@ pub fn new_object(self: *Self, userdata: []const u8, info: ObjectInfo) coral.io.
|
|||
|
||||
coral.io.copy(allocation, userdata);
|
||||
|
||||
var fields = try Object.Fields.init(self.allocator);
|
||||
|
||||
errdefer fields.deinit(self.allocator);
|
||||
|
||||
return .{.object = try self.heap.insert(self.allocator, .{
|
||||
.ref_count = 1,
|
||||
|
||||
.state = .{
|
||||
.info = info,
|
||||
.userdata = allocation,
|
||||
.fields = fields,
|
||||
.fields = .{},
|
||||
},
|
||||
})};
|
||||
}
|
||||
|
|
|
@ -1,755 +0,0 @@
|
|||
const coral = @import("coral");
|
||||
|
||||
const tokens = @import("./tokens.zig");
|
||||
|
||||
const types = @import("./types.zig");
|
||||
|
||||
const ArrayElements = coral.list.Stack(Expression);
|
||||
|
||||
pub const BinaryOperation = enum {
|
||||
addition,
|
||||
subtraction,
|
||||
multiplication,
|
||||
division,
|
||||
equality_comparison,
|
||||
greater_than_comparison,
|
||||
greater_equals_comparison,
|
||||
less_than_comparison,
|
||||
less_equals_comparison,
|
||||
};
|
||||
|
||||
pub const ParsedExpression = union (enum) {
|
||||
valid: Expression,
|
||||
invalid: []const u8,
|
||||
|
||||
pub fn init(allocator: coral.io.Allocator, tokenizer: *tokens.Tokenizer) coral.io.AllocationError!ParsedExpression {
|
||||
var parsed_lhs_expression = try init_equality(allocator, tokenizer);
|
||||
|
||||
switch (parsed_lhs_expression) {
|
||||
.valid => |*lhs_expression| {
|
||||
var expression = lhs_expression.*;
|
||||
var is_invalid = true;
|
||||
|
||||
defer if (is_invalid) {
|
||||
expression.deinit(allocator);
|
||||
};
|
||||
|
||||
if (tokenizer.current_token == .symbol_plus) {
|
||||
if (!tokenizer.step(.ignore_newlines)) {
|
||||
return ParsedExpression{.invalid = "expected right-hand side of expression after `+`"};
|
||||
}
|
||||
|
||||
var parsed_rhs_expression = try init_equality(allocator, tokenizer);
|
||||
|
||||
switch (parsed_rhs_expression) {
|
||||
.valid => |*rhs_expression| {
|
||||
errdefer rhs_expression.deinit(allocator);
|
||||
|
||||
expression = try Expression.init_binary_operation(
|
||||
allocator,
|
||||
.addition,
|
||||
lhs_expression,
|
||||
rhs_expression);
|
||||
},
|
||||
|
||||
.invalid => |details| return ParsedExpression{.invalid = details},
|
||||
}
|
||||
}
|
||||
|
||||
if (tokenizer.current_token == .symbol_minus) {
|
||||
if (!tokenizer.step(.ignore_newlines)) {
|
||||
return ParsedExpression{.invalid = "expected right-hand side of expression after `-`"};
|
||||
}
|
||||
|
||||
var parsed_rhs_expression = try init_equality(allocator, tokenizer);
|
||||
|
||||
switch (parsed_rhs_expression) {
|
||||
.valid => |*rhs_expression| {
|
||||
errdefer rhs_expression.deinit(allocator);
|
||||
|
||||
expression = try Expression.init_binary_operation(
|
||||
allocator,
|
||||
.subtraction,
|
||||
lhs_expression,
|
||||
rhs_expression);
|
||||
},
|
||||
|
||||
.invalid => |details| return ParsedExpression{.invalid = details},
|
||||
}
|
||||
}
|
||||
|
||||
is_invalid = false;
|
||||
|
||||
return ParsedExpression{.valid = expression};
|
||||
},
|
||||
|
||||
.invalid => |details| return ParsedExpression{.invalid = details},
|
||||
}
|
||||
}
|
||||
|
||||
pub fn init_comparison(
|
||||
allocator: coral.io.Allocator,
|
||||
tokenizer: *tokens.Tokenizer) coral.io.AllocationError!ParsedExpression {
|
||||
|
||||
var parsed_lhs_expression = try init_term(allocator, tokenizer);
|
||||
|
||||
switch (parsed_lhs_expression) {
|
||||
.valid => |*lhs_expression| {
|
||||
var expression = lhs_expression.*;
|
||||
var is_invalid = true;
|
||||
|
||||
defer if (is_invalid) {
|
||||
expression.deinit(allocator);
|
||||
};
|
||||
|
||||
if (tokenizer.current_token == .symbol_greater_than) {
|
||||
if (!tokenizer.step(.ignore_newlines)) {
|
||||
return ParsedExpression{.invalid = "expected right-hand side of expression after `>`"};
|
||||
}
|
||||
|
||||
var parsed_rhs_expression = try init_term(allocator, tokenizer);
|
||||
|
||||
switch (parsed_rhs_expression) {
|
||||
.valid => |*rhs_expression| {
|
||||
errdefer rhs_expression.deinit(allocator);
|
||||
|
||||
expression = try Expression.init_binary_operation(
|
||||
allocator,
|
||||
.greater_than_comparison,
|
||||
lhs_expression,
|
||||
rhs_expression);
|
||||
},
|
||||
|
||||
.invalid => |details| return ParsedExpression{.invalid = details},
|
||||
}
|
||||
}
|
||||
|
||||
if (tokenizer.current_token == .symbol_greater_equals) {
|
||||
if (!tokenizer.step(.ignore_newlines)) {
|
||||
return ParsedExpression{.invalid = "expected right-hand side of expression after `>=`"};
|
||||
}
|
||||
|
||||
var parsed_rhs_expression = try init_term(allocator, tokenizer);
|
||||
|
||||
switch (parsed_rhs_expression) {
|
||||
.valid => |*rhs_expression| {
|
||||
errdefer rhs_expression.deinit(allocator);
|
||||
|
||||
expression = try Expression.init_binary_operation(
|
||||
allocator,
|
||||
.greater_equals_comparison,
|
||||
lhs_expression,
|
||||
rhs_expression);
|
||||
},
|
||||
|
||||
.invalid => |details| return ParsedExpression{.invalid = details},
|
||||
}
|
||||
}
|
||||
|
||||
if (tokenizer.current_token == .symbol_less_than) {
|
||||
if (!tokenizer.step(.ignore_newlines)) {
|
||||
return ParsedExpression{.invalid = "expected right-hand side of expression after `<`"};
|
||||
}
|
||||
|
||||
var parsed_rhs_expression = try init_term(allocator, tokenizer);
|
||||
|
||||
switch (parsed_rhs_expression) {
|
||||
.valid => |*rhs_expression| {
|
||||
errdefer rhs_expression.deinit(allocator);
|
||||
|
||||
expression = try Expression.init_binary_operation(
|
||||
allocator,
|
||||
.less_than_comparison,
|
||||
lhs_expression,
|
||||
rhs_expression);
|
||||
},
|
||||
|
||||
.invalid => |details| return ParsedExpression{.invalid = details},
|
||||
}
|
||||
}
|
||||
|
||||
if (tokenizer.current_token == .symbol_less_equals) {
|
||||
if (!tokenizer.step(.ignore_newlines)) {
|
||||
return ParsedExpression{.invalid = "expected right-hand side of expression after `<=`"};
|
||||
}
|
||||
|
||||
var parsed_rhs_expression = try init_term(allocator, tokenizer);
|
||||
|
||||
switch (parsed_rhs_expression) {
|
||||
.valid => |*rhs_expression| {
|
||||
errdefer rhs_expression.deinit(allocator);
|
||||
|
||||
expression = try Expression.init_binary_operation(
|
||||
allocator,
|
||||
.less_equals_comparison,
|
||||
lhs_expression,
|
||||
rhs_expression);
|
||||
},
|
||||
|
||||
.invalid => |details| return ParsedExpression{.invalid = details},
|
||||
}
|
||||
}
|
||||
|
||||
is_invalid = false;
|
||||
|
||||
return ParsedExpression{.valid = expression};
|
||||
},
|
||||
|
||||
.invalid => |details| return ParsedExpression{.invalid = details},
|
||||
}
|
||||
}
|
||||
|
||||
fn init_equality(
|
||||
allocator: coral.io.Allocator,
|
||||
tokenizer: *tokens.Tokenizer) coral.io.AllocationError!ParsedExpression {
|
||||
|
||||
var parsed_lhs_expression = try init_comparison(allocator, tokenizer);
|
||||
|
||||
switch (parsed_lhs_expression) {
|
||||
.valid => |*lhs_expression| {
|
||||
var expression = lhs_expression.*;
|
||||
var is_invalid = true;
|
||||
|
||||
defer if (is_invalid) {
|
||||
expression.deinit(allocator);
|
||||
};
|
||||
|
||||
if (tokenizer.current_token == .symbol_double_equals) {
|
||||
if (!tokenizer.step(.ignore_newlines)) {
|
||||
return ParsedExpression{.invalid = "expected right-hand side of expression after `==`"};
|
||||
}
|
||||
|
||||
var parsed_rhs_expression = try init_comparison(allocator, tokenizer);
|
||||
|
||||
switch (parsed_rhs_expression) {
|
||||
.valid => |*rhs_expression| {
|
||||
errdefer rhs_expression.deinit(allocator);
|
||||
|
||||
expression = try Expression.init_binary_operation(
|
||||
allocator,
|
||||
.equality_comparison,
|
||||
lhs_expression,
|
||||
rhs_expression);
|
||||
},
|
||||
|
||||
.invalid => |details| return ParsedExpression{.invalid = details},
|
||||
}
|
||||
}
|
||||
|
||||
is_invalid = false;
|
||||
|
||||
return ParsedExpression{.valid = expression};
|
||||
},
|
||||
|
||||
.invalid => |details| return ParsedExpression{.invalid = details},
|
||||
}
|
||||
}
|
||||
|
||||
fn init_factor(
|
||||
allocator: coral.io.Allocator,
|
||||
tokenizer: *tokens.Tokenizer) coral.io.AllocationError!ParsedExpression {
|
||||
|
||||
switch (tokenizer.current_token) {
|
||||
.symbol_paren_left => {
|
||||
if (!tokenizer.step(.ignore_newlines)) {
|
||||
return ParsedExpression{.invalid = "expected an expression after `(`"};
|
||||
}
|
||||
|
||||
var parsed_expression = try ParsedExpression.init(allocator, tokenizer);
|
||||
|
||||
switch (parsed_expression) {
|
||||
.valid => |*expression| {
|
||||
var is_invalid = true;
|
||||
|
||||
defer if (is_invalid) {
|
||||
expression.deinit(allocator);
|
||||
};
|
||||
|
||||
if ((!tokenizer.step(.ignore_newlines)) or (tokenizer.current_token != .symbol_paren_right)) {
|
||||
return ParsedExpression{.invalid = "expected a closing `)` after expression"};
|
||||
}
|
||||
|
||||
is_invalid = false;
|
||||
|
||||
return ParsedExpression{
|
||||
.valid = .{.grouped_expression = try coral.io.allocate_one(allocator, expression.*)},
|
||||
};
|
||||
},
|
||||
|
||||
.invalid => |details| return ParsedExpression{.invalid = details},
|
||||
}
|
||||
},
|
||||
|
||||
.integer => |value| return ParsedExpression{
|
||||
.valid = .{
|
||||
.integer_literal = coral.utf8.parse_int(
|
||||
@typeInfo(types.Integer).Int,
|
||||
value, .{}) catch |parse_error| {
|
||||
|
||||
return ParsedExpression{
|
||||
.invalid = switch (parse_error) {
|
||||
error.BadSyntax => "invalid integer literal",
|
||||
error.IntOverflow => "integer literal is too big",
|
||||
}
|
||||
};
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
.real => |value| return ParsedExpression{
|
||||
.valid = .{
|
||||
.float_literal = coral.utf8.parse_float(
|
||||
@typeInfo(types.Float).Float,
|
||||
value) catch |parse_error| {
|
||||
|
||||
return ParsedExpression{
|
||||
.invalid = switch (parse_error) {
|
||||
error.BadSyntax => "invalid float literal",
|
||||
},
|
||||
};
|
||||
},
|
||||
},
|
||||
},
|
||||
|
||||
.string => |value| return ParsedExpression{
|
||||
.valid = .{.string_literal = value},
|
||||
},
|
||||
|
||||
			.symbol_bracket_left => {
				if (!tokenizer.step(.ignore_newlines)) {
					return ParsedExpression{.invalid = "unexpected end of array literal"};
				}

				var is_invalid = true;
				var array_elements = try ArrayElements.init(allocator, 0);

				defer if (is_invalid) {
					array_elements.deinit(allocator);
				};

				while (true) {
					switch (tokenizer.current_token) {
						.symbol_bracket_right => {
							_ = tokenizer.step(.ignore_newlines);
							is_invalid = false;

							return ParsedExpression{
								.valid = .{.array_literal = array_elements},
							};
						},

						else => {
							if (!tokenizer.step(.ignore_newlines)) {
								return ParsedExpression{.invalid = "expected `]` or expression after `[`"};
							}

							var parsed_expression = try ParsedExpression.init(allocator, tokenizer);

							switch (parsed_expression) {
								.valid => |*expression| {
									errdefer expression.deinit(allocator);

									try array_elements.push_one(allocator, expression.*);
								},

								.invalid => |detail| return ParsedExpression{.invalid = detail},
							}
						},
					}
				}
			},

			.symbol_brace_left => {
				if (!tokenizer.step(.ignore_newlines)) {
					return ParsedExpression{.invalid = "unexpected end of table literal"};
				}

				var is_invalid = true;
				var table_fields = try TableFields.init(allocator, 0);

				defer if (is_invalid) {
					table_fields.deinit(allocator);
				};

				while (true) {
					switch (tokenizer.current_token) {
						.symbol_brace_right => {
							_ = tokenizer.step(.ignore_newlines);
							is_invalid = false;

							return ParsedExpression{
								.valid = .{.table_literal = table_fields},
							};
						},

						.local => |identifier| {
							const key = identifier;

							if (!tokenizer.step(.ignore_newlines) or tokenizer.current_token != .symbol_equals) {
								return ParsedExpression{.invalid = "expected `=` after identifier"};
							}

							if (!tokenizer.step(.ignore_newlines)) {
								return ParsedExpression{.invalid = "unexpected end after `=`"};
							}

							var parsed_expression = try init(allocator, tokenizer);

							switch (parsed_expression) {
								.valid => |*expression| {
									errdefer expression.deinit(allocator);

									try table_fields.push_one(allocator, .{
										.identifier = key,
										.expression = expression.*,
									});
								},

								.invalid => |details| return ParsedExpression{.invalid = details},
							}

							if (!tokenizer.step(.ignore_newlines)) {
								return ParsedExpression{.invalid = "unexpected end after expression"};
							}

							switch (tokenizer.current_token) {
								.symbol_comma => _ = tokenizer.step(.ignore_newlines),

								.symbol_brace_right => {
									_ = tokenizer.step(.ignore_newlines);
									is_invalid = false;

									return ParsedExpression{
										.valid = .{.table_literal = table_fields},
									};
								},

								else => return ParsedExpression{.invalid = "expected `,` or `}` after expression"},
							}
						},

						else => return ParsedExpression{.invalid = "expected `}` or fields in table literal"},
					}
				}
			},

			.symbol_minus => {
				if (!tokenizer.step(.ignore_newlines)) {
					return ParsedExpression{.invalid = "expected expression after numeric negation (`-`)"};
				}

				var parsed_factor_expression = try init_factor(allocator, tokenizer);

				switch (parsed_factor_expression) {
					.valid => |*factor_expression| {
						errdefer factor_expression.deinit(allocator);

						return ParsedExpression{
							.valid = try Expression.init_unary_operation(
								allocator,
								.numeric_negation,
								factor_expression),
						};
					},

					.invalid => |details| return ParsedExpression{.invalid = details},
				}
			},

			.symbol_bang => {
				if (!tokenizer.step(.ignore_newlines)) {
					return ParsedExpression{.invalid = "expected expression after boolean negation (`!`)"};
				}

				var parsed_factor_expression = try init_factor(allocator, tokenizer);

				switch (parsed_factor_expression) {
					.valid => |*factor_expression| {
						errdefer factor_expression.deinit(allocator);

						return ParsedExpression{
							.valid = try Expression.init_unary_operation(
								allocator,
								.boolean_negation,
								factor_expression),
						};
					},

					.invalid => |details| return ParsedExpression{.invalid = details},
				}
			},

			else => return ParsedExpression{.invalid = "unexpected token in expression"},
		}
	}

	pub fn init_term(allocator: coral.io.Allocator, tokenizer: *tokens.Tokenizer) coral.io.AllocationError!ParsedExpression {
		var parsed_lhs_expression = try init_factor(allocator, tokenizer);

		switch (parsed_lhs_expression) {
			.valid => |*lhs_expression| {
				var expression = lhs_expression.*;
				var is_invalid = true;

				defer if (is_invalid) {
					expression.deinit(allocator);
				};

				if (tokenizer.current_token == .symbol_asterisk) {
					if (!tokenizer.step(.ignore_newlines)) {
						return ParsedExpression{.invalid = "expected right-hand side of expression after `*`"};
					}

					var parsed_rhs_expression = try init_factor(allocator, tokenizer);

					switch (parsed_rhs_expression) {
						.valid => |*rhs_expression| {
							errdefer rhs_expression.deinit(allocator);

							expression = try Expression.init_binary_operation(
								allocator,
								.multiplication,
								lhs_expression,
								rhs_expression);
						},

						.invalid => |details| return ParsedExpression{.invalid = details},
					}
				}

				if (tokenizer.current_token == .symbol_forward_slash) {
					if (!tokenizer.step(.ignore_newlines)) {
						return ParsedExpression{.invalid = "expected right-hand side of expression after `/`"};
					}

					var parsed_rhs_expression = try init_factor(allocator, tokenizer);

					switch (parsed_rhs_expression) {
						.valid => |*rhs_expression| {
							errdefer rhs_expression.deinit(allocator);

							expression = try Expression.init_binary_operation(
								allocator,
								.division,
								lhs_expression,
								rhs_expression);
						},

						.invalid => |details| return ParsedExpression{.invalid = details},
					}
				}

				is_invalid = false;

				return ParsedExpression{.valid = expression};
			},

			.invalid => |details| return ParsedExpression{.invalid = details},
		}
	}
};

pub const ParsedStatements = union (enum) {
	valid: Statements,
	invalid: []const u8,

	pub fn init(allocator: coral.io.Allocator, tokenizer: *tokens.Tokenizer) coral.io.AllocationError!ParsedStatements {
		var statements_list = try Statements.List.init(allocator, 0);
		var has_returned = false;
		var is_invalid = true;

		defer if (is_invalid) {
			for (statements_list.values) |*statement| {
				statement.deinit(allocator);
			}

			statements_list.deinit(allocator);
		};

		while (tokenizer.step(.ignore_newlines)) {
			switch (tokenizer.current_token) {
				.keyword_return => {
					if (has_returned) {
						return ParsedStatements{.invalid = "cannot return more than once per function scope"};
					}

					if (tokenizer.step(.include_newlines) and (tokenizer.current_token != .newline)) {
						var parsed_expression = try ParsedExpression.init(allocator, tokenizer);

						switch (parsed_expression) {
							.valid => |*expression| {
								errdefer expression.deinit(allocator);

								try statements_list.push_one(allocator, .{
									.return_expression = expression.*,
								});
							},

							.invalid => |details| {
								return ParsedStatements{.invalid = details};
							},
						}
					} else {
						try statements_list.push_one(allocator, .return_nothing);
					}

					if (tokenizer.step(.ignore_newlines) and tokenizer.current_token != .newline) {
						return ParsedStatements{.invalid = "expected newline"};
					}

					has_returned = true;
				},

				else => return ParsedStatements{.invalid = "invalid statement"},
			}
		}

		is_invalid = false;

		return ParsedStatements{
			.valid = .{
				.list = statements_list,
			},
		};
	}
};

pub const Expression = union (enum) {
	nil_literal,
	true_literal,
	false_literal,
	integer_literal: types.Integer,
	float_literal: types.Float,
	string_literal: []const u8,
	array_literal: ArrayElements,
	table_literal: TableFields,
	grouped_expression: *Expression,

	binary_operation: struct {
		kind: BinaryOperation,
		lhs_expression: *Expression,
		rhs_expression: *Expression,
	},

	unary_operation: struct {
		kind: UnaryOperation,
		expression: *Expression,
	},

	fn deinit(self: *Expression, allocator: coral.io.Allocator) void {
		switch (self.*) {
			.nil_literal, .true_literal, .false_literal, .integer_literal, .float_literal, .string_literal => {},

			.array_literal => |*elements| {
				for (elements.values) |*element_expression| {
					element_expression.deinit(allocator);
				}

				elements.deinit(allocator);
			},

			.table_literal => |*fields| {
				for (fields.values) |*field| {
					field.expression.deinit(allocator);
				}

				fields.deinit(allocator);
			},

			.grouped_expression => |expression| {
				expression.deinit(allocator);
			},

			.binary_operation => |operation| {
				operation.lhs_expression.deinit(allocator);
				coral.io.deallocate(allocator, operation.lhs_expression);
				operation.rhs_expression.deinit(allocator);
				coral.io.deallocate(allocator, operation.rhs_expression);
			},

			.unary_operation => |operation| {
				operation.expression.deinit(allocator);
				coral.io.deallocate(allocator, operation.expression);
			}
		}
	}

	fn init_binary_operation(
		allocator: coral.io.Allocator,
		kind: BinaryOperation,
		lhs_expression: *const Expression,
		rhs_operation: *const Expression) coral.io.AllocationError!Expression {

		const allocated_lhs_expression = try coral.io.allocate_one(allocator, lhs_expression.*);

		errdefer coral.io.deallocate(allocator, allocated_lhs_expression);

		const allocated_rhs_expression = try coral.io.allocate_one(allocator, rhs_operation.*);

		errdefer coral.io.deallocate(allocator, allocated_rhs_expression);

		return Expression{
			.binary_operation = .{
				.kind = kind,
				.lhs_expression = allocated_lhs_expression,
				.rhs_expression = allocated_rhs_expression,
			},
		};
	}

	fn init_unary_operation(
		allocator: coral.io.Allocator,
		kind: UnaryOperation,
		expression: *const Expression) coral.io.AllocationError!Expression {

		const allocated_expression = try coral.io.allocate_one(allocator, expression.*);

		errdefer coral.io.deallocate(allocator, allocated_expression);

		return Expression{
			.unary_operation = .{
				.kind = kind,
				.expression = allocated_expression,
			},
		};
	}
};

pub const Statements = struct {
	list: List,

	const List = coral.list.Stack(union (enum) {
		return_expression: Expression,
		return_nothing,

		const Self = @This();

		fn deinit(self: *Self, allocator: coral.io.Allocator) void {
			switch (self.*) {
				.return_expression => |*expression| {
					expression.deinit(allocator);
				},

				.return_nothing => {},
			}
		}
	});

	pub fn deinit(self: *Statements, allocator: coral.io.Allocator) void {
		for (self.list.values) |*statement| {
			statement.deinit(allocator);
		}

		self.list.deinit(allocator);
	}
};

const TableFields = coral.list.Stack(struct {
	identifier: []const u8,
	expression: Expression,
});

pub const UnaryOperation = enum {
	boolean_negation,
	numeric_negation,
};

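How the pieces above fit together, as a rough sketch: a `Tokenizer` is built over the script source and handed to `ParsedStatements.init`, which walks statements and delegates expression parsing down through `init_equality`, `init_comparison`, `init_term`, and `init_factor`. The wiring below is only an illustration; the import paths and the allocator choice are assumptions and are not part of this diff.

const coral = @import("coral");

// Assumed module paths; only Tokenizer, ParsedStatements, and Statements come from the diff.
const ast = @import("./ast.zig");
const tokens = @import("./tokens.zig");

fn parse_script(allocator: coral.io.Allocator, source: []const u8) coral.io.AllocationError!void {
	var tokenizer = tokens.Tokenizer{.source = source};
	var parsed_statements = try ast.ParsedStatements.init(allocator, &tokenizer);

	switch (parsed_statements) {
		// On success the caller owns the statement list and must release it.
		.valid => |*statements| statements.deinit(allocator),

		// On failure, `details` is one of the static messages above,
		// e.g. "expected `,` or `}` after expression".
		.invalid => |details| {
			_ = details;
		},
	}
}
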
@ -7,7 +7,6 @@ pub const Token = union(enum) {
	global: []const u8,
	local: []const u8,

	symbol_assign,
	symbol_plus,
	symbol_minus,
	symbol_asterisk,

@ -43,25 +42,32 @@ pub const Token = union(enum) {
	pub fn text(self: Token) []const u8 {
		return switch (self) {
			.unknown => |unknown| @ptrCast([*]const u8, &unknown)[0 .. 1],
			.identifier_global => |identifier| identifier,
			.identifier_local => |identifier| identifier,
			.newline => "newline",

			.assign => "=",
			.plus => "+",
			.minus => "-",
			.asterisk => "*",
			.forward_slash => "/",
			.paren_left => "(",
			.paren_right => ")",
			.bang => "!",
			.comma => ",",
			.at => "@",
			.brace_left => "{",
			.brace_right => "}",
			.bracket_left => "[",
			.bracket_right => "]",
			.period => ".",
			.arrow => "=>",
			.global => |identifier| identifier,
			.local => |identifier| identifier,

			.symbol_plus => "+",
			.symbol_minus => "-",
			.symbol_asterisk => "*",
			.symbol_forward_slash => "/",
			.symbol_paren_left => "(",
			.symbol_paren_right => ")",
			.symbol_bang => "!",
			.symbol_comma => ",",
			.symbol_at => "@",
			.symbol_brace_left => "{",
			.symbol_brace_right => "}",
			.symbol_bracket_left => "[",
			.symbol_bracket_right => "]",
			.symbol_period => ".",
			.symbol_lambda => "=>",
			.symbol_less_than => "<",
			.symbol_less_equals => "<=",
			.symbol_greater_than => ">",
			.symbol_greater_equals => ">=",
			.symbol_equals => "=",
			.symbol_double_equals => "==",

			.integer => |literal| literal,
			.real => |literal| literal,

@ -79,17 +85,23 @@ pub const Token = union(enum) {
pub const Tokenizer = struct {
	source: []const u8,
	lines_stepped: usize = 1,
	previous_token: Token = .newline,
	current_token: Token = .newline,
	current_token: Token = .{.unknown = 0},

	pub fn step(self: *Tokenizer, newline_rules: enum { ignore_newlines, include_newlines }) bool {
		self.previous_token = self.current_token;
	const StepOptions = struct {
		include_newlines: bool,
	};

	const default_token = Token{.unknown = 0};

	pub fn step(self: *Tokenizer, options: StepOptions) bool {
		var cursor = @as(usize, 0);

		defer self.source = self.source[cursor ..];

		while (cursor < self.source.len) switch (self.source[cursor]) {
		defer @import("std").debug.print("{s}\n", .{self.current_token.text()});

		while (cursor < self.source.len) {
			switch (self.source[cursor]) {
				'#' => {
					cursor += 1;

@ -103,15 +115,11 @@ pub const Tokenizer = struct {
				'\n' => {
					cursor += 1;

					switch (newline_rules) {
						.include_newlines => {
					if (options.include_newlines) {
						self.lines_stepped += 1;
						self.current_token = .newline;

						return true;
						},

						else => {},
					}
				},

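The two `step` signatures shown in this hunk take different argument shapes. For reference, a sketch of how each would be invoked; the import path and the helper itself are hypothetical, and which of the two forms survives the change is not recoverable from this rendering.

const tokens = @import("./tokens.zig"); // assumed path

// Sketch only: advances one token, skipping newlines.
fn advance(tokenizer: *tokens.Tokenizer) bool {
	// Call form for the enum-parameter signature shown above:
	return tokenizer.step(.ignore_newlines);

	// Call form for the StepOptions signature shown above:
	// return tokenizer.step(.{.include_newlines = false});
}
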
@ -401,7 +409,8 @@ pub const Tokenizer = struct {

					return true;
				},
			};
			}
		}

		self.current_token = .newline;

@ -28,10 +28,13 @@ pub const Ref = union (Primitive) {
	object: Object,
};

pub const RuntimeError = coral.io.AllocationError || CheckError || error {
pub const ParseError = error {
	OutOfMemory,
	BadSyntax,
};

pub const RuntimeError = CheckError || ParseError;

pub const Val = union (Primitive) {
	nil,
	false,

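The practical effect of this hunk is that allocation failure and syntax failure now surface through the named `ParseError` set rather than an inline error list on `RuntimeError`. A hedged sketch of caller-side matching follows; the `kym` import path and the helper are assumptions, and only the error names come from the diff.

const kym = @import("./kym.zig"); // assumed path

// Maps a RuntimeError to a short human-readable description.
fn describe(runtime_error: kym.RuntimeError) []const u8 {
	return switch (runtime_error) {
		error.OutOfMemory => "out of memory while parsing or executing",
		error.BadSyntax => "script failed to parse",

		// Remaining members come from CheckError, which this diff does not change.
		else => "runtime check failed",
	};
}
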
@ -59,17 +59,15 @@ pub fn run_app(base_file_system: file.System) void {
	const Logger = struct {
		const Self = @This();

		fn log(_: *Self, message: []const u8) void {
		fn log(_: *const Self, message: []const u8) void {
			ext.SDL_LogError(ext.SDL_LOG_CATEGORY_APPLICATION, "%.*s", message.len, message.ptr);
		}
	};

	var logger = Logger{};

	var script_environment = kym.Environment.init(heap.allocator, .{
		.values_max = 512,
		.calls_max = 512,
		.reporter = kym.Environment.Reporter.bind(Logger, &logger, Logger.log),
		.reporter = kym.Environment.Reporter.bind(Logger, &.{}, Logger.log),
	}) catch {
		return ext.SDL_LogError(ext.SDL_LOG_CATEGORY_APPLICATION, "failed to initialize Kym vm\n");
	};

@ -1,5 +1,12 @@
const coral = @import("coral");

const ona = @import("ona");

pub fn main() !void {
	var data = [_]u8{0} ** 32;
	var fixed_buffer = coral.io.FixedBuffer{.slice = &data};

	try coral.utf8.print_formatted(fixed_buffer.as_writer(), "hello, {}", coral.utf8.format_args(.{"world"}));

	ona.run_app(.{.sandboxed_path = &ona.file.Path.cwd});
}