Add AST generation stage to Kym parser
continuous-integration/drone/push: Build is failing
parent bb8cb43843
commit 5642f399b9
@@ -47,11 +47,7 @@ pub fn Functor(comptime Output: type, comptime Input: type) type {
 	};
 }
 
-pub const ReadError = error {
-	IoUnavailable,
-};
-
-pub const Reader = Functor(ReadError!usize, []u8);
+pub const Reader = Functor(?usize, []u8);
 
 pub fn Tag(comptime Element: type) type {
 	return switch (@typeInfo(Element)) {
@@ -66,7 +62,7 @@ pub const WritableMemory = struct {
 
 	pub fn as_writer(self: *WritableMemory) Writer {
 		return Writer.bind(self, struct {
-			fn write(writable_memory: *WritableMemory, data: []const u8) WriteError!usize {
+			fn write(writable_memory: *WritableMemory, data: []const u8) ?usize {
 				return writable_memory.write(data);
 			}
 		}.write);
@@ -94,11 +90,7 @@ pub const WritableMemory = struct {
 	}
 };
 
-pub const WriteError = error{
-	IoUnavailable,
-};
-
-pub const Writer = Functor(WriteError!usize, []const u8);
+pub const Writer = Functor(?usize, []const u8);
 
 pub fn allocate_many(comptime Type: type, amount: usize, allocator: Allocator) AllocationError![]Type {
 	if (@sizeOf(Type) == 0) {
@@ -114,14 +106,20 @@ pub fn allocate_many(comptime Type: type, amount: usize, allocator: Allocator) A
 	}))[0 .. amount];
 }
 
-pub fn allocate_one(comptime Type: type, allocator: Allocator) AllocationError!*Type {
+pub fn allocate_one(allocator: Allocator, value: anytype) AllocationError!*@TypeOf(value) {
+	const Type = @TypeOf(value);
+
 	if (@sizeOf(Type) == 0) {
 		@compileError("Cannot allocate memory for 0-byte type " ++ @typeName(Type));
 	}
 
-	return @ptrCast(*Type, @alignCast(@alignOf(Type), allocator.invoke(.{.size = @sizeOf(Type)}) orelse {
+	const allocation = @ptrCast(*Type, @alignCast(@alignOf(Type), allocator.invoke(.{.size = @sizeOf(Type)}) orelse {
 		return error.OutOfMemory;
 	}));
+
+	allocation.* = value;
+
+	return allocation;
 }
 
 pub fn bytes_of(value: anytype) []const u8 {
@@ -256,13 +254,13 @@ pub fn sentinel_index(comptime element: type, comptime sentinel: element, sequen
 	return index;
 }
 
-pub fn stream(output: Writer, input: Reader, buffer: []u8) (ReadError || WriteError)!u64 {
+pub fn stream(output: Writer, input: Reader, buffer: []u8) ?u64 {
 	var total_written: u64 = 0;
-	var read = try input.invoke(buffer);
+	var read = input.invoke(buffer) orelse return null;
 
 	while (read != 0) {
-		total_written += try output.invoke(buffer[0..read]);
-		read = try input.invoke(buffer);
+		total_written += output.invoke(buffer[0..read]) orelse return null;
+		read = input.invoke(buffer) orelse return null;
 	}
 
 	return total_written;
@@ -5,13 +5,16 @@ const io = @import("./io.zig");
 const math = @import("./math.zig");
 
 ///
-/// Returns a dynamically sized stack capable of holding `Element`.
+/// Returns a dynamically sized stack capable of holding `Value`.
 ///
-pub fn Stack(comptime Element: type) type {
+pub fn Stack(comptime Value: type) type {
 	return struct {
 		capacity: usize,
-		values: []Element,
+		values: []Value,
 
+		///
+		/// Stack type.
+		///
 		const Self = @This();
 
 		///
@@ -38,11 +41,13 @@ pub fn Stack(comptime Element: type) type {
 		}
 
 		///
-		/// Attempts to remove `amount` number of `Element`s from the stack, returning `bool` if it was successful,
+		/// Attempts to remove `amount` number of `Value`s from the stack, returning `bool` if it was successful,
 		/// otherwise `false` if the stack contains fewer elements than `amount`.
 		///
 		pub fn drop(self: *Self, amount: usize) bool {
-			if (amount > self.values.len) return false;
+			if (amount > self.values.len) {
+				return false;
+			}
 
 			self.values = self.values[0 .. self.values.len - amount];
 
@@ -52,7 +57,7 @@ pub fn Stack(comptime Element: type) type {
 		///
 		/// Attempts to grow the internal buffer of `self` by `growth_amount` using `allocator`.
 		///
-		/// The function returns [AllocatorError] instead if `allocator` cannot commit the memory required to grow the
+		/// The function returns [io.AllocatorError] if `allocator` could not commit the memory required to grow the
 		/// internal buffer by `growth_amount`, leaving `self` in the same state that it was in prior to starting the
 		/// grow.
 		///
@@ -64,17 +69,13 @@ pub fn Stack(comptime Element: type) type {
 		///
 		pub fn grow(self: *Self, allocator: io.Allocator, growth_amount: usize) io.AllocationError!void {
 			const grown_capacity = self.capacity + growth_amount;
-			const values = (try io.allocate_many(Element, grown_capacity, allocator))[0 .. self.values.len];
+			const values = (try io.allocate_many(Value, grown_capacity, allocator))[0 .. self.values.len];
 
 			errdefer io.deallocate(allocator, values);
 
-			{
-				var index: usize = 0;
-
-				while (index < self.values.len) {
-					values[index] = self.values[index];
-				}
-			}
+			for (0 .. self.values.len) |index| {
+				values[index] = self.values[index];
+			}
 
 			io.deallocate(allocator, self.values);
 
@@ -86,11 +87,11 @@ pub fn Stack(comptime Element: type) type {
 		/// Attempts to allocate and return an empty stack with an internal buffer of `initial_capacity` size using
 		/// `allocator` as the memory allocation strategy.
 		///
-		/// The function returns [AllocationError] instead if `allocator` cannot commit the memory required for an
+		/// The function returns [io.AllocationError] if `allocator` could not commit the memory required for an
 		/// internal buffer of `initial_capacity` size.
 		///
 		pub fn init(allocator: io.Allocator, initial_capacity: usize) !Self {
-			const values = try io.allocate_many(Element, initial_capacity, allocator);
+			const values = try io.allocate_many(Value, initial_capacity, allocator);
 
 			errdefer io.deallocate(values);
 
@@ -101,16 +102,32 @@ pub fn Stack(comptime Element: type) type {
 		}
 
 		///
-		/// Attempts to push every `Element` in `values` to `self` using `allocator` to grow the internal buffer as
+		/// Attempts to remove the last element of `self` that was inserted, if one exists, returning it or `null` if
+		/// `self` is empty.
+		///
+		pub fn pop(self: *Self) ?Value {
+			if (self.values.len == 0) {
+				return null;
+			}
+
+			const last_index = self.values.len - 1;
+
+			defer self.values = self.values[0 .. last_index];
+
+			return self.values[last_index];
+		}
+
+		///
+		/// Attempts to push every `Value` in `values` to `self` using `allocator` to grow the internal buffer as
 		/// necessary.
 		///
-		/// The function returns [AllocationError] instead if `allocator` cannot commit the memory required to grow the
+		/// The function returns [io.AllocationError] if `allocator` could not commit the memory required to grow the
 		/// internal buffer of `self` when necessary.
 		///
 		/// *Note* `allocator` must reference the same allocation strategy as the one originally used to initialize
 		/// `self`.
 		///
-		pub fn push_all(self: *Self, allocator: io.Allocator, values: []const Element) io.AllocationError!void {
+		pub fn push_all(self: *Self, allocator: io.Allocator, values: []const Value) io.AllocationError!void {
 			const new_length = self.values.len + values.len;
 
 			if (new_length >= self.capacity) {
@@ -121,24 +138,22 @@ pub fn Stack(comptime Element: type) type {
 
 			self.values = self.values.ptr[0 .. new_length];
 
-			{
-				var index: usize = 0;
-
-				while (index < values.len) : (index += 1) self.values[offset_index + index] = values[index];
+			for (0 .. values.len) |index| {
+				self.values[offset_index + index] = values[index];
 			}
 		}
 
 		///
-		/// Attempts to push the `Element` in `value` to `self` by `amount` number of times using `allocator` to grow
+		/// Attempts to push the `Value` in `value` to `self` by `amount` number of times using `allocator` to grow
 		/// the internal buffer as necessary.
 		///
-		/// The function returns [AllocationError] instead if `allocator` cannot commit the memory required to grow the
+		/// The function returns [io.AllocationError] if `allocator` could not commit the memory required to grow the
 		/// internal buffer of `self` when necessary.
 		///
 		/// *Note* `allocator` must reference the same allocation strategy as the one originally used to initialize
 		/// `self`.
 		///
-		pub fn push_many(self: *Self, allocator: io.Allocator, value: Element, amount: usize) io.AllocationError!void {
+		pub fn push_many(self: *Self, allocator: io.Allocator, value: Value, amount: usize) io.AllocationError!void {
 			const new_length = self.values.len + amount;
 
 			if (new_length >= self.capacity) {
@@ -149,24 +164,22 @@ pub fn Stack(comptime Element: type) type {
 
 			self.values = self.values.ptr[0 .. new_length];
 
-			{
-				var index: usize = 0;
-
-				while (index < amount) : (index += 1) self.values[offset_index + index] = value;
+			for (0 .. amount) |index| {
+				self.values[offset_index + index] = value;
 			}
 		}
 
 		///
-		/// Attempts to push the `Element` in `value` to `self` using `allocator` to grow the internal buffer as
+		/// Attempts to push the `Value` in `value` to `self` using `allocator` to grow the internal buffer as
 		/// necessary.
 		///
-		/// The function returns [AllocationError] instead if `allocator` cannot commit the memory required to grow the
+		/// The function returns [io.AllocationError] if `allocator` could not commit the memory required to grow the
 		/// internal buffer of `self` when necessary.
 		///
 		/// *Note* `allocator` must reference the same allocation strategy as the one originally used to initialize
 		/// `self`.
 		///
-		pub fn push_one(self: *Self, allocator: io.Allocator, value: Element) io.AllocationError!void {
+		pub fn push_one(self: *Self, allocator: io.Allocator, value: Value) io.AllocationError!void {
 			if (self.values.len == self.capacity) {
 				try self.grow(allocator, math.max(1, self.capacity));
 			}
@@ -181,45 +194,71 @@ pub fn Stack(comptime Element: type) type {
 	}
 }
 
 ///
-/// Binds `stack` to a [io.Allocator], returning it.
+/// Generic, byte-writable interface for all list types supported by the module.
 ///
-pub fn stack_as_allocator(stack: *Stack(u8)) io.Allocator {
-	return io.Allocator.bind(stack, struct {
-		pub fn reallocate(writable_stack: *Stack(u8), existing_allocation: ?[*]u8, allocation_size: usize) ?[*]u8 {
-			if (allocation_size == 0) return null;
-
-			writable_stack.push_all(io.bytes_of(&allocation_size)) catch return null;
-
-			const usize_size = @sizeOf(usize);
-
-			errdefer debug.assert(writable_stack.drop(usize_size));
-
-			const allocation_index = writable_stack.values.len;
-
-			if (existing_allocation) |allocation| {
-				const existing_allocation_size = @intToPtr(*const usize, @ptrToInt(allocation) - usize_size).*;
-
-				writable_stack.push_all(allocation[0 .. existing_allocation_size]) catch return null;
-			} else {
-				writable_stack.push_many(0, allocation_size) catch return null;
-			}
-
-			return @ptrCast([*]u8, &writable_stack.values[allocation_index]);
-		}
-	});
-}
+/// As the type is only a thin wrapper around other resources, it does not manage any memory nor is it permitted to
+/// outlive the resources it references.
+///
+pub const Writable = struct {
+	allocator: io.Allocator,
+
+	list: union (enum) {
+		stack: *ByteStack,
+	},
 
 	///
-	/// Binds `stack` to a [io.Writer], returning it.
+	/// Stack of bytes.
 	///
-	pub fn stack_as_writer(stack: *Stack(u8)) io.Writer {
-		return io.Writer.bind(stack, struct {
-			pub fn write(writable_stack: *Stack(u8), buffer: []const u8) io.WriteError!usize {
-				writable_stack.push_all(buffer) catch |grow_error| switch (grow_error) {
-					error.OutOfMemory => return error.IoUnavailable,
+	const ByteStack = Stack(u8);
+
+	///
+	/// Binds and returns `self` as a [io.Writer].
+	///
+	pub fn as_writer(self: *Writable) io.Writer {
+		return io.Writer.bind(Writable, self, struct {
+			fn write(writable: *Writable, buffer: []const u8) ?usize {
+				writable.write(buffer) catch |allocation_error| switch (allocation_error) {
+					error.OutOfMemory => return null,
 				};
 
 				return buffer.len;
 			}
-		});
+		}.write);
 	}
+
+	///
+	/// Returns a new [Writable] from wrapping `stack` and `allocator`.
+	///
+	/// *Note* `allocator` must reference the same allocation strategy as the one originally used to initialize `stack`.
+	///
+	pub fn from_stack(allocator: io.Allocator, stack: *ByteStack) Writable {
+		return .{
+			.allocator = allocator,
+			.list = .{.stack = stack},
+		};
+	}
+
+	///
+	/// Attempts to write the singular `byte` to the list referenced by `self`.
+	///
+	/// The function returns [io.AllocationError] if `allocator` could not commit the memory required by the internal
+	/// list.
+	///
+	pub fn put(self: *Writable, byte: u8) io.AllocationError!void {
+		try switch (self.list) {
+			.stack => |stack| stack.push_one(self.allocator, byte),
+		};
+	}
+
+	///
+	/// Attempst to write all of `bytes` to the list referenced by `self`.
+	///
+	/// The function returns [io.AllocationError] if `allocator` could not commit the memory required by the internal
+	/// list.
+	///
+	pub fn write(self: *Writable, bytes: []const u8) io.AllocationError!void {
+		try switch (self.list) {
+			.stack => |stack| stack.push_all(self.allocator, bytes),
+		};
+	}
+};
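The `pop` method and the `Writable` wrapper introduced above are only documented through this diff; the following is a minimal usage sketch under that assumption, with hypothetical variable names and an `allocator: io.Allocator` assumed to be supplied by the caller:

// Sketch only: exercises Stack(u8).pop and Writable.as_writer as added in this commit.
var bytes = try Stack(u8).init(allocator, 16);

defer bytes.deinit(allocator);

try bytes.push_all(allocator, "hi");

const last = bytes.pop(); // `?u8`; becomes `null` once the stack is empty.
var writable = Writable.from_stack(allocator, &bytes);
const writer = writable.as_writer(); // io.Writer that appends to `bytes` through `allocator`.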
@@ -7,22 +7,18 @@ pub const CheckedArithmeticError = error {
 	IntOverflow,
 };
 
-pub fn Float(comptime bits: comptime_int) type {
-	return @Type(.{.Float = .{.bits = bits}});
+///
+/// Returns the float type described by `float`.
+///
+pub fn Float(comptime float: std.builtin.Type.Float) type {
+	return @Type(.{.Float = float});
 }
 
-pub fn Signed(comptime bits: comptime_int) type {
-	return @Type(.{.Int = .{
-		.signedness = .signed,
-		.bits = bits,
-	}});
-}
-
-pub fn Unsigned(comptime bits: comptime_int) type {
-	return @Type(.{.Int = .{
-		.signedness = .unsigned,
-		.bits = bits,
-	}});
+///
+/// Returns the integer type described by `int`.
+///
+pub fn Int(comptime int: std.builtin.Type.Int) type {
+	return @Type(.{.Int = int});
 }
 
 ///
@@ -56,24 +52,18 @@ pub fn checked_add(a: anytype, b: anytype) CheckedArithmeticError!@TypeOf(a + b)
 }
 
 ///
-/// Attempts to perform a checked integer cast to `Int` on `value`, returning the result or [CheckedArithmeticError] if
-/// the operation tried to invoke safety-checked behavior.
+/// Attempts to perform a checked integer cast to the type expressed by `int` on `value`, returning the result or
+/// [CheckedArithmeticError] if the operation tried to invoke safety-checked behavior.
 ///
 /// `checked_cast` can be seen as an alternative to the language-native `@intCast` builtin that exposes the safety-
 /// checked behavior in the form of an error type that may be caught or tried on.
 ///
-pub fn checked_cast(comptime Int: type, value: anytype) CheckedArithmeticError!Int {
-	const int_type_info = @typeInfo(Int);
-
-	if (int_type_info != .Int) {
-		@compileError("`Int` must be of type int");
-	}
-
-	if ((value < min_int(int_type_info.Int)) or (value > max_int(int_type_info.Int))) {
+pub fn checked_cast(comptime int: std.builtin.Type.Int, value: anytype) CheckedArithmeticError!Int(int) {
+	if ((value < min_int(int)) or (value > max_int(int))) {
 		return error.IntOverflow;
 	}
 
-	return @intCast(Int, value);
+	return @intCast(Int(int), value);
 }
 
 ///
@@ -1,29 +1,63 @@
+const std = @import("std");
+
 const io = @import("./io.zig");
 
 const math = @import("./math.zig");
 
+///
+/// Errors that may occur during utf8-encoded int parsing.
+///
 pub const IntParseError = math.CheckedArithmeticError || ParseError;
 
+///
+/// Optional rules for int parsing logic to consider during parsing.
+///
+pub const IntParseOptions = struct {
+	delimiter: []const u8 = "",
+};
+
+///
+/// Errors that may occur during any kind of utf8-encoded parsing.
+///
 pub const ParseError = error {
 	BadSyntax,
 };
 
-pub fn parse_float(comptime bits: comptime_int, utf8: []const u8) ParseError!math.Float(bits) {
+///
+/// Errors that may occur during any kind of utf8-encoded printing.
+///
+pub const PrintError = error {
+	PrintFailed,
+	PrintIncomplete,
+};
+
+///
+/// Attempts to parse a float value of type described by `float` from `utf8`.
+///
+/// The function returns a [ParseError] if `utf8` does not conform to the syntax of a float.
+///
+pub fn parse_float(comptime float: std.builtin.Type.Float, utf8: []const u8) ParseError!math.Float(float) {
 	// ""
-	if (utf8.len == 0) return error.BadSyntax;
+	if (utf8.len == 0) {
+		return error.BadSyntax;
+	}
 
 	const is_negative = utf8[0] == '-';
 
 	// "-"
-	if (is_negative and (utf8.len == 1)) return error.BadSyntax;
+	if (is_negative and (utf8.len == 1)) {
+		return error.BadSyntax;
+	}
 
 	const negative_offset = @boolToInt(is_negative);
 	var has_decimal = utf8[negative_offset] == '.';
 
 	// "-."
-	if (has_decimal and (utf8.len == 2)) return error.BadSyntax;
+	if (has_decimal and (utf8.len == 2)) {
+		return error.BadSyntax;
+	}
 
-	const Float = math.Float(bits);
+	const Float = math.Float(float);
 	var result: Float = 0;
 	var factor: Float = 1;
 
@@ -46,69 +80,92 @@ pub fn parse_float(comptime bits: comptime_int, utf8: []const u8) ParseError!mat
 	return result * factor;
 }
 
-pub fn parse_signed(comptime bits: comptime_int, utf8: []const u8) IntParseError!math.Signed(bits) {
-	// ""
-	if (utf8.len == 0) return error.BadSyntax;
-
-	const is_negative = utf8[0] == '-';
-
-	// "-"
-	if (is_negative and (utf8.len == 1)) return error.BadSyntax;
-
-	var result: math.Signed(bits) = 0;
-
-	{
-		var index: usize = 0;
-
-		while (index < utf8.len) : (index += 1) {
-			const code = utf8[index];
-
-			switch (code) {
-				'0', '1', '2', '3', '4', '5', '6', '7', '8', '9' =>
-					result = try math.checked_add(try math.checked_mul(result, 10), try math.checked_sub(code, '0')),
-
-				else => return error.BadSyntax,
-			}
-		}
-	}
-
-	return result;
-}
-
-pub fn parse_unsigned(comptime bits: comptime_int, utf8: []const u8) IntParseError!math.Unsigned(bits) {
-	// ""
-	if (utf8.len == 0) return error.BadSyntax;
-
-	// "-..."
-	if (utf8[0] == '-') return error.BadSyntax;
-
-	var result: math.Unsigned(bits) = 0;
-
-	{
-		var index: usize = 0;
-
-		while (index < utf8.len) : (index += 1) {
-			const code = utf8[index];
-
-			switch (code) {
-				'0', '1', '2', '3', '4', '5', '6', '7', '8', '9' =>
-					result = try math.checked_add(try math.checked_mul(result, 10), try math.checked_sub(code, '0')),
-
-				else => return error.BadSyntax,
-			}
-		}
-	}
-
-	return result;
-}
-
-pub fn print_unsigned(comptime bit_size: comptime_int, writer: io.Writer, value: math.Unsigned(bit_size)) io.WriteError!usize {
-	if (value == 0) return writer.invoke("0");
-
-	var buffer = [_]u8{0} ** 39;
+///
+/// Attempts to parse an int value of type described by `int` from `utf8`, with `options` as additional rules for the
+/// parsing logic to consider.
+///
+/// The function returns a [IntParseError] if `utf8` does not conform to the syntax of a float, does not match the rules
+/// specified in `option`, or exceeds the maximum size of the int described by `int`.
+///
+pub fn parse_int(
+	comptime int: std.builtin.Type.Int,
+	utf8: []const u8,
+	options: IntParseOptions) IntParseError!math.Int(int) {
+
+	if (utf8.len == 0) {
+		return error.BadSyntax;
+	}
+
+	{
+		const is_negative = utf8[0] == '-';
+
+		switch (int.signedness) {
+			.signed => {
+				if (is_negative and utf8.len == 1) {
+					return error.BadSyntax;
+				}
+			},
+
+			.unsigned => {
+				if (is_negative) {
+					return error.BadSyntax;
+				}
+			},
+		}
+	}
+
+	var result = @as(math.Int(int), 0);
+
+	for (0 .. utf8.len) |index| {
+		const code = utf8[index];
+
+		switch (code) {
+			'0', '1', '2', '3', '4', '5', '6', '7', '8', '9' => {
+				result = try math.checked_add(try math.checked_mul(result, 10), try math.checked_sub(code, '0'));
+			},
+
+			else => {
+				if (options.delimiter.len == 0 or !io.equals(options.delimiter, utf8[index ..])) {
+					return error.BadSyntax;
+				}
+			},
+		}
+	}
+
+	return result;
+}
+
+///
+/// Attempts to print `utf8` to `writer`.
+///
+/// The function returns [PrintError] if the write failed to complete partially or entirely.
+///
+pub fn print(writer: io.Writer, utf8: []const u8) PrintError!void {
+	if ((writer.invoke(utf8) orelse return error.PrintFailed) != utf8.len) {
+		return error.PrintIncomplete;
+	}
+}
+
+///
+/// Attempts to print the int `value` described by `int` to `writer`.
+///
+/// The function returns [PrintError] if the write failed to complete partially or entirely.
+///
+pub fn print_int(comptime int: std.builtin.Type.Int, writer: io.Writer, value: math.Int(int)) PrintError!void {
+	if (value == 0) {
+		return try print(writer, "0");
+	}
+
+	// TODO: Don't make this buffer arbitrarily size cause big int types WILL overflow.
+	var buffer = [_]u8{0} ** 40;
 	var buffer_count: usize = 0;
 	var split_value = value;
 
+	if ((int.signedness == .unsigned) and (value < 0)) {
+		buffer[0] = '-';
+		buffer_count += 1;
+	}
+
 	while (split_value != 0) : (buffer_count += 1) {
 		const radix = 10;
 
@@ -125,5 +182,5 @@ pub fn print_unsigned(comptime bit_size: comptime_int, writer: io.Writer, value:
 		}
 	}
 
-	return writer.invoke(buffer[0 .. buffer_count]);
+	try print(writer, buffer[0 .. buffer_count]);
 }
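The reworked utf8 API takes `std.builtin.Type.Int` descriptors instead of bit counts and reports printing failures through `PrintError`; a rough usage sketch under that assumption (the writer and delimiter here are illustrative, not taken from the commit):

// Sketch only: parse a decimal that may be terminated by a ")" delimiter, then print it back out.
const parsed = try parse_int(@typeInfo(u32).Int, "123)", .{.delimiter = ")"});

try print_int(@typeInfo(u32).Int, some_writer, parsed); // `some_writer` is an assumed io.Writer.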
@@ -106,13 +106,15 @@ pub const Path = extern struct {
 	}
 };
 
-pub const Readable = opaque {
-	pub const ReadError = error {
-		FileUnavailable,
-	};
-
-	pub fn close(self: *Readable) bool {
-		return ext.SDL_RWclose(rw_ops_cast(self)) == 0;
+pub const ReadError = error {
+	FileUnavailable,
+};
+
+pub const Readable = opaque {
+	pub fn close(self: *Readable) void {
+		if (ext.SDL_RWclose(rw_ops_cast(self)) != 0) {
+			@panic("Failed to close file");
+		}
 	}
 
 	pub fn read(self: *Readable, buffer: []u8) ReadError!usize {
@@ -162,21 +164,47 @@
 pub const System = union (enum) {
 	sandboxed_path: *const Path,
 
+	pub const FileInfo = struct {
+		size: u64,
+	};
+
 	pub const OpenError = Path.ValidationError || error {
 		FileNotFound,
 	};
 
+	pub const QueryError = OpenError || ReadError;
+
 	pub fn open_readable(self: System, path: Path) OpenError!*Readable {
 		switch (self) {
 			.sandboxed_path => |sandboxed_path| {
-				const absolute_path = sandboxed_path.joined(path);
-
-				return @ptrCast(*Readable, ext.SDL_RWFromFile(try absolute_path.to_string(), "rb") orelse {
+				return @ptrCast(*Readable, ext.SDL_RWFromFile(try sandboxed_path.joined(path).to_string(), "rb") orelse {
 					return error.FileNotFound;
 				});
 			},
 		}
 	}
 
+	pub fn query_info(self: System, path: Path) QueryError!FileInfo {
+		switch (self) {
+			.sandboxed_path => |sandboxed_path| {
+				const file = ext.SDL_RWFromFile(try sandboxed_path.joined(path).to_string(), "rb") orelse {
+					return error.FileNotFound;
+				};
+
+				defer coral.debug.assert(ext.SDL_RWclose(file) == 0);
+
+				const file_size = ext.SDL_RWseek(file, 0, ext.RW_SEEK_END);
+
+				if (file_size < 0) {
+					return error.FileUnavailable;
+				}
+
+				return FileInfo{
+					.size = @intCast(u64, file_size),
+				};
+			}
+		}
+	}
 };
 
 fn rw_ops_cast(ptr: *anyopaque) *ext.SDL_RWops {
@@ -0,0 +1,225 @@
+const Environment = @import("./Environment.zig");
+
+const ast = @import("./ast.zig");
+
+const coral = @import("coral");
+
+const types = @import("./types.zig");
+
+const tokens = @import("./tokens.zig");
+
+env: *Environment,
+message_name_len: usize,
+message_buffer: Buffer,
+bytecode_buffer: Buffer,
+
+const Buffer = coral.list.Stack(u8);
+
+const Opcode = enum (u8) {
+	ret,
+
+	push_nil,
+	push_true,
+	push_false,
+	push_zero,
+	push_integer,
+	push_float,
+	push_object,
+	push_table,
+
+	not,
+	neg,
+
+	add,
+	sub,
+	mul,
+	div,
+};
+
+const Self = @This();
+
+fn clear_error_details(self: *Self) void {
+	coral.debug.assert(self.message_buffer.values.len >= self.message_name_len);
+	coral.debug.assert(self.message_buffer.drop(self.message_buffer.values.len - self.message_name_len));
+}
+
+pub fn compile(self: *Self, data: []const u8) types.RuntimeError!void {
+	var tokenizer = tokens.Tokenizer{.source = data};
+	var parsed_statements = try ast.ParsedStatements.init(self.env.allocator, &tokenizer);
+
+	switch (parsed_statements) {
+		.valid => |*statements| {
+			defer statements.deinit(self.env.allocator);
+
+			for (statements.list.values) |statement| {
+				switch (statement) {
+					.return_expression => |return_expression| {
+						try self.compile_expression(return_expression);
+						try self.emit_opcode(.ret);
+					},
+
+					.return_nothing => {
+						try self.emit_opcode(.push_nil);
+						try self.emit_opcode(.ret);
+					},
+				}
+			}
+		},
+
+		.invalid => |invalid| {
+			self.clear_error_details();
+
+			try self.message_buffer.push_all(self.env.allocator, "@(");
+
+			var writable_message = coral.list.Writable.from_stack(self.env.allocator, &self.message_buffer);
+			const message_writer = writable_message.as_writer();
+
+			coral.utf8.print_int(@typeInfo(usize).Int, message_writer, tokenizer.lines_stepped) catch {
+				return error.OutOfMemory;
+			};
+
+			coral.utf8.print(message_writer, "): ") catch {
+				return error.OutOfMemory;
+			};
+
+			coral.utf8.print(message_writer, invalid) catch {
+				return error.OutOfMemory;
+			};
+
+			return error.BadSyntax;
+		},
+	}
+}
+
+pub fn compile_expression(self: *Self, expression: ast.Expression) types.RuntimeError!void {
+	switch (expression) {
+		.nil_literal => try self.emit_opcode(.push_nil),
+		.true_literal => try self.emit_opcode(.push_true),
+		.false_literal => try self.emit_opcode(.push_false),
+
+		.integer_literal => |literal| {
+			if (literal == 0) {
+				try self.emit_opcode(.push_zero);
+			} else {
+				try self.emit_opcode(.push_integer);
+				try self.emit_float(0);
+			}
+		},
+
+		.float_literal => |literal| {
+			if (literal == 0) {
+				try self.emit_opcode(.push_zero);
+			} else {
+				try self.emit_opcode(.push_float);
+				try self.emit_float(literal);
+			}
+		},
+
+		.string_literal => |literal| {
+			try self.emit_opcode(.push_object);
+			try self.emit_object(try self.intern(literal));
+		},
+
+		.table_literal => |literal| {
+			if (literal.values.len > coral.math.max_int(@typeInfo(types.Integer).Int)) {
+				return error.OutOfMemory;
+			}
+
+			for (literal.values) |field| {
+				try self.compile_expression(field.expression.*);
+				try self.emit_opcode(.push_object);
+				try self.emit_object(try self.intern(field.identifier));
+			}
+
+			try self.emit_opcode(.push_table);
+			try self.emit_integer(@intCast(types.Integer, literal.values.len));
+		},
+
+		.binary_operation => |operation| {
+			try self.compile_expression(operation.lhs_expression.*);
+			try self.compile_expression(operation.rhs_expression.*);
+
+			try self.emit_opcode(switch (operation.kind) {
+				.addition => .add,
+				.subtraction => .sub,
+				.multiplication => .mul,
+				.division => .div,
+			});
+		},
+
+		.unary_operation => |operation| {
+			try self.compile_expression(operation.expression.*);
+
+			try self.emit_opcode(switch (operation.kind) {
+				.boolean_negation => .not,
+				.numeric_negation => .neg,
+			});
+		},
+
+		.grouped_expression => |grouped_expression| {
+			try self.compile_expression(grouped_expression.*);
+		}
+	}
+}
+
+pub fn deinit(self: *Self) void {
+	self.bytecode_buffer.deinit(self.env.allocator);
+	self.message_buffer.deinit(self.env.allocator);
+
+	self.message_name_len = 0;
+}
+
+pub fn emit_float(self: *Self, float: types.Float) coral.io.AllocationError!void {
+	try self.bytecode_buffer.push_all(self.env.allocator, coral.io.bytes_of(&float));
+}
+
+pub fn emit_integer(self: *Self, integer: types.Integer) coral.io.AllocationError!void {
+	try self.bytecode_buffer.push_all(self.env.allocator, coral.io.bytes_of(&integer));
+}
+
+pub fn emit_object(self: *Self, object: types.Object) coral.io.AllocationError!void {
+	try self.bytecode_buffer.push_all(self.env.allocator, coral.io.bytes_of(&object));
+}
+
+pub fn emit_opcode(self: *Self, opcode: Opcode) coral.io.AllocationError!void {
+	try self.bytecode_buffer.push_one(self.env.allocator, @enumToInt(opcode));
+}
+
+pub fn error_details(self: Self) []const u8 {
+	coral.debug.assert(self.message_buffer.values.len >= self.message_name_len);
+
+	return self.message_buffer.values[self.message_name_len .. ];
+}
+
+pub fn init(env: *Environment, chunk_name: []const u8) coral.io.AllocationError!Self {
+	var bytecode_buffer = try Buffer.init(env.allocator, 0);
+
+	errdefer bytecode_buffer.deinit(env.allocator);
+
+	var message_buffer = try Buffer.init(env.allocator, chunk_name.len);
+
+	errdefer message_buffer.deinit(env.allocator);
+
+	message_buffer.push_all(env.allocator, chunk_name) catch unreachable;
+
+	return Self{
+		.env = env,
+		.message_buffer = message_buffer,
+		.bytecode_buffer = bytecode_buffer,
+		.message_name_len = chunk_name.len,
+	};
+}
+
+pub fn intern(self: *Self, string: []const u8) coral.io.AllocationError!types.Object {
+	const interned_string = try self.env.intern(string);
+
+	coral.debug.assert(interned_string == .object);
+
+	return interned_string.object;
+}
+
+pub fn name(self: Self) []const u8 {
+	coral.debug.assert(self.message_buffer.values.len >= self.message_name_len);
+
+	return self.message_buffer.values[0 .. self.message_name_len];
+}
@@ -1,13 +1,13 @@
-const bytecode = @import("./bytecode.zig");
+const Chunk = @import("./Chunk.zig");
 
 const coral = @import("coral");
 
 const file = @import("../file.zig");
 
-const tokens = @import("./tokens.zig");
-
 const types = @import("./types.zig");
 
+const tokens = @import("./tokens.zig");
+
 allocator: coral.io.Allocator,
 heap: ObjectSlab,
 global_object: types.Object,
|
||||||
slots: []types.Val,
|
slots: []types.Val,
|
||||||
});
|
});
|
||||||
|
|
||||||
pub const ExecuteFileError = file.System.OpenError || file.Readable.ReadError || types.CompileError;
|
pub const DataSource = struct {
|
||||||
|
name: []const u8,
|
||||||
|
data: []const u8,
|
||||||
|
};
|
||||||
|
|
||||||
|
pub const ExecuteFileError = file.System.OpenError || file.ReadError || types.CompileError;
|
||||||
|
|
||||||
pub const InitOptions = struct {
|
pub const InitOptions = struct {
|
||||||
values_max: u32,
|
values_max: u32,
|
||||||
|
@ -196,46 +201,51 @@ pub fn discard(self: *Self, val: types.Val) void {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn execute_file(self: *Self, allocator: coral.io.Allocator, fs: file.System, file_path: file.Path) ExecuteFileError!types.Val {
|
pub fn execute_data(self: *Self, source: DataSource) ExecuteFileError!types.Val {
|
||||||
const typeid = "<chunk>";
|
const typeid = "<chunk>";
|
||||||
|
|
||||||
const Behaviors = struct {
|
const Behaviors = struct {
|
||||||
fn deinitialize(context: ObjectInfo.DeinitializeContext) void {
|
fn deinitialize(context: ObjectInfo.DeinitializeContext) void {
|
||||||
(context.env.native_cast(context.obj, typeid, bytecode.Chunk) catch unreachable).deinit();
|
(context.env.native_cast(context.obj, typeid, Chunk) catch unreachable).deinit();
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
var chunk = try bytecode.Chunk.init(allocator, self, try file_path.to_string());
|
var chunk = try Chunk.init(self, source.name);
|
||||||
|
|
||||||
errdefer chunk.deinit();
|
errdefer chunk.deinit();
|
||||||
|
|
||||||
{
|
chunk.compile(source.data) catch |compile_error| {
|
||||||
const readable_file = try fs.open_readable(file_path);
|
self.reporter.invoke(chunk.error_details());
|
||||||
|
|
||||||
defer if (!readable_file.close()) {
|
return compile_error;
|
||||||
@panic("Failed to close script file");
|
|
||||||
};
|
};
|
||||||
|
|
||||||
var file_contents = try file.Contents.init(allocator, readable_file);
|
|
||||||
|
|
||||||
defer file_contents.deinit();
|
|
||||||
|
|
||||||
var tokenizer = tokens.Tokenizer{.source = file_contents.data};
|
|
||||||
|
|
||||||
try chunk.parse(self, &tokenizer);
|
|
||||||
}
|
|
||||||
|
|
||||||
const script = try self.new_object(coral.io.bytes_of(&chunk), .{
|
const script = try self.new_object(coral.io.bytes_of(&chunk), .{
|
||||||
.identity = typeid,
|
.identity = typeid,
|
||||||
.deinitializer = Behaviors.deinitialize,
|
.deinitializer = Behaviors.deinitialize,
|
||||||
});
|
});
|
||||||
|
|
||||||
self.discard(script);
|
defer self.discard(script);
|
||||||
|
|
||||||
return try self.call(script.as_ref(), null, &.{});
|
return try self.call(script.as_ref(), null, &.{});
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn fail(self: *Self, failure_message: []const u8) error { CheckFailed } {
|
pub fn execute_file(self: *Self, fs: file.System, file_path: file.Path) ExecuteFileError!types.Val {
|
||||||
|
const readable_file = try fs.open_readable(file_path);
|
||||||
|
|
||||||
|
defer readable_file.close();
|
||||||
|
|
||||||
|
var file_source = try coral.list.Stack(u8).init(self.allocator, (try fs.query_info(file_path)).size);
|
||||||
|
|
||||||
|
defer file_source.deinit(self.allocator);
|
||||||
|
|
||||||
|
return try self.execute_data(.{
|
||||||
|
.name = try file_path.to_string(),
|
||||||
|
.data = file_source.values,
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn fail(self: *Self, failure_message: []const u8) types.CheckError {
|
||||||
self.reporter.invoke(failure_message);
|
self.reporter.invoke(failure_message);
|
||||||
|
|
||||||
return error.CheckFailed;
|
return error.CheckFailed;
|
||||||
|
|
|
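With `execute_data` split out of `execute_file`, scripts can now be compiled from an in-memory buffer as well as from disk; a minimal sketch assuming an initialized `env: Environment` (the chunk name and source text are illustrative only):

// Sketch only: run a source string through the new execute_data entry point.
const result = try env.execute_data(.{
	.name = "inline-chunk",
	.data = "return 1 + 2\n",
});

env.discard(result);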
@ -0,0 +1,430 @@
|
||||||
|
const coral = @import("coral");
|
||||||
|
|
||||||
|
const tokens = @import("./tokens.zig");
|
||||||
|
|
||||||
|
const types = @import("./types.zig");
|
||||||
|
|
||||||
|
pub const BinaryOperation = enum {
|
||||||
|
addition,
|
||||||
|
subtraction,
|
||||||
|
multiplication,
|
||||||
|
division
|
||||||
|
};
|
||||||
|
|
||||||
|
pub const ParsedExpression = union (enum) {
|
||||||
|
valid: Expression,
|
||||||
|
invalid: []const u8,
|
||||||
|
|
||||||
|
pub fn init(allocator: coral.io.Allocator, tokenizer: *tokens.Tokenizer) coral.io.AllocationError!ParsedExpression {
|
||||||
|
var parsed_term_expression = try init_term(allocator, tokenizer);
|
||||||
|
|
||||||
|
switch (parsed_term_expression) {
|
||||||
|
.valid => |*term_expression| {
|
||||||
|
var expression = term_expression.*;
|
||||||
|
var is_invalid = true;
|
||||||
|
|
||||||
|
defer if (is_invalid) {
|
||||||
|
expression.deinit(allocator);
|
||||||
|
};
|
||||||
|
|
||||||
|
if (tokenizer.current_token == .symbol_plus) {
|
||||||
|
if (!tokenizer.step()) {
|
||||||
|
return ParsedExpression{.invalid = "expected right-hand side of expression after `+`"};
|
||||||
|
}
|
||||||
|
|
||||||
|
var parsed_binary_expression = try init_binary(allocator, tokenizer, &expression, .addition);
|
||||||
|
|
||||||
|
expression = switch (parsed_binary_expression) {
|
||||||
|
.valid => |binary_expression| binary_expression,
|
||||||
|
.invalid => |details| return ParsedExpression{.invalid = details},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
if (tokenizer.current_token == .symbol_minus) {
|
||||||
|
if (!tokenizer.step()) {
|
||||||
|
return ParsedExpression{.invalid = "expected right-hand side of expression after `-`"};
|
||||||
|
}
|
||||||
|
|
||||||
|
var parsed_binary_expression = try init_binary(allocator, tokenizer, &expression, .subtraction);
|
||||||
|
|
||||||
|
expression = switch (parsed_binary_expression) {
|
||||||
|
.valid => |binary_expression| binary_expression,
|
||||||
|
.invalid => |details| return ParsedExpression{.invalid = details},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
is_invalid = false;
|
||||||
|
|
||||||
|
return ParsedExpression{.valid = expression};
|
||||||
|
},
|
||||||
|
|
||||||
|
.invalid => |details| {
|
||||||
|
return ParsedExpression{.invalid = details};
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn init_binary(allocator: coral.io.Allocator, tokenizer: *tokens.Tokenizer, lhs_expression: *const Expression, operation: BinaryOperation) coral.io.AllocationError!ParsedExpression {
|
||||||
|
var parsed_expression = try init_term(allocator, tokenizer);
|
||||||
|
|
||||||
|
switch (parsed_expression) {
|
||||||
|
.valid => |*expression| {
|
||||||
|
errdefer expression.deinit(allocator);
|
||||||
|
|
||||||
|
const rhs_expression = try coral.io.allocate_one(allocator, expression.*);
|
||||||
|
|
||||||
|
errdefer coral.io.deallocate(allocator, rhs_expression);
|
||||||
|
|
||||||
|
return ParsedExpression{
|
||||||
|
.valid = .{
|
||||||
|
.binary_operation = .{
|
||||||
|
.kind = operation,
|
||||||
|
.lhs_expression = try coral.io.allocate_one(allocator, lhs_expression.*),
|
||||||
|
.rhs_expression = rhs_expression,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
};
|
||||||
|
},
|
||||||
|
|
||||||
|
.invalid => |details| {
|
||||||
|
return ParsedExpression{.invalid = details};
|
||||||
|
},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn init_factor(allocator: coral.io.Allocator, tokenizer: *tokens.Tokenizer) coral.io.AllocationError!ParsedExpression {
|
||||||
|
switch (tokenizer.current_token) {
|
||||||
|
.symbol_paren_left => {
|
||||||
|
if (!tokenizer.step()) {
|
||||||
|
return ParsedExpression{.invalid = "expected an expression after `(`"};
|
||||||
|
}
|
||||||
|
|
||||||
|
var parsed_expression = try ParsedExpression.init(allocator, tokenizer);
|
||||||
|
|
||||||
|
switch (parsed_expression) {
|
||||||
|
.valid => |*expression| {
|
||||||
|
var is_invalid = true;
|
||||||
|
|
||||||
|
defer if (is_invalid) {
|
||||||
|
expression.deinit(allocator);
|
||||||
|
};
|
||||||
|
|
||||||
|
if ((!tokenizer.step()) or (tokenizer.current_token != .symbol_paren_right)) {
|
||||||
|
return ParsedExpression{.invalid = "expected a closing `)` after expression"};
|
||||||
|
}
|
||||||
|
|
||||||
|
is_invalid = false;
|
||||||
|
|
||||||
|
return ParsedExpression{
|
||||||
|
.valid = .{.grouped_expression = try coral.io.allocate_one(allocator, expression.*)},
|
||||||
|
};
|
||||||
|
},
|
||||||
|
|
||||||
|
.invalid => |details| {
|
||||||
|
return ParsedExpression{.invalid = details};
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
.integer => |value| {
|
||||||
|
defer _ = tokenizer.step();
|
||||||
|
|
||||||
|
return ParsedExpression{
|
||||||
|
.valid = .{
|
||||||
|
.integer_literal = coral.utf8.parse_int(
|
||||||
|
@typeInfo(types.Integer).Int,
|
||||||
|
value, .{}) catch |parse_error| {
|
||||||
|
|
||||||
|
return ParsedExpression{
|
||||||
|
.invalid = switch (parse_error) {
|
||||||
|
error.BadSyntax => "invalid integer literal",
|
||||||
|
error.IntOverflow => "integer literal is too big",
|
||||||
|
}
|
||||||
|
};
|
||||||
|
},
|
||||||
|
},
|
||||||
|
};
|
||||||
|
},
|
||||||
|
|
||||||
|
.real => |value| {
|
||||||
|
defer _ = tokenizer.step();
|
||||||
|
|
||||||
|
return ParsedExpression{
|
||||||
|
.valid = .{
|
||||||
|
.float_literal = coral.utf8.parse_float(
|
||||||
|
@typeInfo(types.Float).Float,
|
||||||
|
value) catch |parse_error| {
|
||||||
|
|
||||||
|
return ParsedExpression{
|
||||||
|
.invalid = switch (parse_error) {
|
||||||
|
error.BadSyntax => "invalid float literal",
|
||||||
|
},
|
||||||
|
};
|
||||||
|
},
|
||||||
|
},
|
||||||
|
};
|
||||||
|
},
|
||||||
|
|
||||||
|
.string => |value| {
|
||||||
|
defer _ = tokenizer.step();
|
||||||
|
|
||||||
|
return ParsedExpression{
|
||||||
|
.valid = .{
|
||||||
|
.string_literal = value,
|
||||||
|
},
|
||||||
|
};
|
||||||
|
},
|
||||||
|
|
||||||
|
.symbol_minus => {
|
||||||
|
if (!tokenizer.step()) {
|
||||||
|
return ParsedExpression{.invalid = "expected expression after numeric negation (`-`)"};
|
||||||
|
}
|
||||||
|
|
||||||
|
return try init_unary(allocator, tokenizer, .numeric_negation);
|
||||||
|
},
|
||||||
|
|
||||||
|
.symbol_bang => {
|
||||||
|
if (!tokenizer.step()) {
|
||||||
|
return ParsedExpression{.invalid = "expected expression after boolean negation (`!`)"};
|
||||||
|
}
|
||||||
|
|
||||||
|
return try init_unary(allocator, tokenizer, .boolean_negation);
|
||||||
|
},
|
||||||
|
|
||||||
|
else => return ParsedExpression{.invalid = "unexpected token in expression"},
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn init_term(allocator: coral.io.Allocator, tokenizer: *tokens.Tokenizer) coral.io.AllocationError!ParsedExpression {
|
||||||
|
var parsed_factor_expression = try init_factor(allocator, tokenizer);
|
||||||
|
|
||||||
|
switch (parsed_factor_expression) {
|
||||||
|
.valid => |*factor_expression| {
|
||||||
|
var expression = factor_expression.*;
|
||||||
|
var is_invalid = true;
|
||||||
|
|
||||||
|
defer if (is_invalid) {
|
||||||
|
expression.deinit(allocator);
|
||||||
|
};
|
||||||
|
|
||||||
|
if (tokenizer.current_token == .symbol_asterisk) {
|
||||||
|
if (!tokenizer.step()) {
|
||||||
|
return ParsedExpression{.invalid = "expected right-hand side of expression after `*`"};
|
||||||
|
}
|
||||||
|
|
||||||
|
var parsed_binary_expression = try init_binary(allocator, tokenizer, &expression, .multiplication);
|
||||||
|
|
||||||
|
expression = switch (parsed_binary_expression) {
|
||||||
|
.valid => |binary_expression| binary_expression,
|
||||||
|
.invalid => |details| return ParsedExpression{.invalid = details},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
if (tokenizer.current_token == .symbol_forward_slash) {
|
||||||
|
if (!tokenizer.step()) {
|
||||||
|
return ParsedExpression{.invalid = "expected right-hand side of expression after `/`"};
|
||||||
|
}
|
||||||
|
|
||||||
|
var parsed_binary_expression = try init_binary(allocator, tokenizer, &expression, .division);
|
||||||
|
|
||||||
|
expression = switch (parsed_binary_expression) {
|
||||||
|
.valid => |binary_expression| binary_expression,
|
||||||
|
.invalid => |details| return ParsedExpression{.invalid = details},
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
is_invalid = false;
|
||||||
|
|
||||||
|
return ParsedExpression{.valid = expression};
|
||||||
|
},
|
||||||
|
|
||||||
|
.invalid => |details| {
|
||||||
|
				return ParsedExpression{.invalid = details};
			},
		}
	}

	fn init_unary(allocator: coral.io.Allocator, tokenizer: *tokens.Tokenizer, operation: UnaryOperation) coral.io.AllocationError!ParsedExpression {
		var parsed_factor_expression = try init_factor(allocator, tokenizer);

		switch (parsed_factor_expression) {
			.valid => |*factor_expression| {
				errdefer factor_expression.deinit(allocator);

				return ParsedExpression{
					.valid = .{
						.unary_operation = .{
							.kind = operation,
							.expression = try coral.io.allocate_one(allocator, factor_expression.*),
						},
					},
				};
			},

			.invalid => |details| {
				return ParsedExpression{.invalid = details};
			},
		}
	}
};

pub const ParsedStatements = union (enum) {
	valid: Statements,
	invalid: []const u8,

	pub fn init(allocator: coral.io.Allocator, tokenizer: *tokens.Tokenizer) coral.io.AllocationError!ParsedStatements {
		var statements_list = try Statements.List.init(allocator, 0);
		var has_returned = false;
		var is_invalid = true;

		defer if (is_invalid) {
			for (statements_list.values) |*statement| {
				statement.deinit(allocator);
			}

			statements_list.deinit(allocator);
		};

		while (tokenizer.step()) {
			switch (tokenizer.current_token) {
				.newline => {},

				.keyword_return => {
					if (has_returned) {
						return ParsedStatements{.invalid = "cannot return more than once per function scope"};
					}

					if (tokenizer.step() and (tokenizer.current_token != .newline)) {
						var parsed_expression = try ParsedExpression.init(allocator, tokenizer);

						switch (parsed_expression) {
							.valid => |*expression| {
								errdefer expression.deinit(allocator);

								try statements_list.push_one(allocator, .{
									.return_expression = expression.*,
								});
							},

							.invalid => |details| {
								return ParsedStatements{.invalid = details};
							},
						}
					} else {
						try statements_list.push_one(allocator, .return_nothing);
					}

					if (tokenizer.step() and tokenizer.current_token != .newline) {
						return ParsedStatements{.invalid = "expected newline after expression"};
					}

					has_returned = true;
				},

				else => {
					return ParsedStatements{.invalid = "invalid statement"};
				},
			}
		}

		is_invalid = false;

		return ParsedStatements{
			.valid = .{
				.list = statements_list,
			},
		};
	}
};

pub const Expression = union (enum) {
	nil_literal,
	true_literal,
	false_literal,
	integer_literal: types.Integer,
	float_literal: types.Float,
	string_literal: []const u8,
	table_literal: TableLiteral,
	grouped_expression: *Expression,

	binary_operation: struct {
		kind: BinaryOperation,
		lhs_expression: *Expression,
		rhs_expression: *Expression,
	},

	unary_operation: struct {
		kind: UnaryOperation,
		expression: *Expression,
	},

	const TableLiteral = coral.list.Stack(struct {
		identifier: []const u8,
		expression: *Expression,
	});

	fn deinit(self: *Expression, allocator: coral.io.Allocator) void {
		switch (self.*) {
			.nil_literal, .true_literal, .false_literal, .integer_literal, .float_literal, .string_literal => {},

			.table_literal => |*literal| {
				for (literal.values) |field| {
					field.expression.deinit(allocator);
					coral.io.deallocate(allocator, field.expression);
				}

				literal.deinit(allocator);
			},

			.grouped_expression => |expression| {
				expression.deinit(allocator);
			},

			.binary_operation => |operation| {
				operation.lhs_expression.deinit(allocator);
				coral.io.deallocate(allocator, operation.lhs_expression);
				operation.rhs_expression.deinit(allocator);
				coral.io.deallocate(allocator, operation.rhs_expression);
			},

			.unary_operation => |operation| {
				operation.expression.deinit(allocator);
				coral.io.deallocate(allocator, operation.expression);
			}
		}
	}
};

pub const Statements = struct {
	list: List,

	const List = coral.list.Stack(union (enum) {
		return_expression: Expression,
		return_nothing,

		const Self = @This();

		fn deinit(self: *Self, allocator: coral.io.Allocator) void {
			switch (self.*) {
				.return_expression => |*expression| {
					expression.deinit(allocator);
				},

				.return_nothing => {},
			}
		}
	});

	pub fn deinit(self: *Statements, allocator: coral.io.Allocator) void {
		for (self.list.values) |*statement| {
			statement.deinit(allocator);
		}

		self.list.deinit(allocator);
	}
};

pub const UnaryOperation = enum {
	boolean_negation,
	numeric_negation,
};

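The new AST stage hands every child node to its parent as a heap pointer (via coral.io.allocate_one) and tears the whole subtree down again through the recursive deinit above. A minimal standalone sketch of that ownership pattern, using only std.mem.Allocator rather than the project's coral.io interfaces (the names below are illustrative, not part of the repository):

const std = @import("std");

// Hypothetical stand-in for the parser's Expression union: each operand is a
// separately allocated node, and deinit recursively frees the subtree.
const Expr = union(enum) {
	integer_literal: i64,
	unary_negation: *Expr,

	fn deinit(self: *Expr, allocator: std.mem.Allocator) void {
		switch (self.*) {
			.integer_literal => {},

			.unary_negation => |child| {
				child.deinit(allocator);
				allocator.destroy(child);
			},
		}
	}
};

test "unary node owns its operand" {
	const allocator = std.testing.allocator;

	// Equivalent of allocate_one(allocator, factor_expression.*): copy the
	// parsed operand into heap storage owned by the parent node.
	const operand = try allocator.create(Expr);
	operand.* = .{.integer_literal = 42};

	var negation = Expr{.unary_negation = operand};
	defer negation.deinit(allocator);

	try std.testing.expect(negation.unary_negation.integer_literal == 42);
}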
@@ -1,779 +0,0 @@
|
||||||
const Environment = @import("./Environment.zig");
|
|
||||||
|
|
||||||
const coral = @import("coral");
|
|
||||||
|
|
||||||
const tokens = @import("./tokens.zig");
|
|
||||||
|
|
||||||
const types = @import("./types.zig");
|
|
||||||
|
|
||||||
pub const Chunk = struct {
|
|
||||||
env: *Environment,
|
|
||||||
allocator: coral.io.Allocator,
|
|
||||||
arity: usize,
|
|
||||||
name: []u8,
|
|
||||||
bytecodes: Buffer,
|
|
||||||
objects: Objects,
|
|
||||||
|
|
||||||
const Buffer = coral.list.Stack(u8);
|
|
||||||
|
|
||||||
const Objects = coral.list.Stack(types.Object);
|
|
||||||
|
|
||||||
pub fn deinit(self: *Chunk) void {
|
|
||||||
for (self.objects.values) |object| {
|
|
||||||
if (!self.env.heap.fetch(object).release(self.env)) {
|
|
||||||
self.env.heap.remove(object);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
coral.io.deallocate(self.allocator, self.name);
|
|
||||||
self.bytecodes.deinit(self.allocator);
|
|
||||||
self.objects.deinit(self.allocator);
|
|
||||||
}
|
|
||||||
|
|
||||||
fn emit_byte(self: *Chunk, byte: u8) coral.io.AllocationError!void {
|
|
||||||
return self.bytecodes.push_one(self.allocator, byte);
|
|
||||||
}
|
|
||||||
|
|
||||||
fn emit_closure(self: *Chunk, chunk: Chunk) coral.io.AllocationError!void {
|
|
||||||
const value = try self.env.new_object(coral.io.bytes_of(&chunk), .{
|
|
||||||
|
|
||||||
});
|
|
||||||
|
|
||||||
coral.debug.assert(value == .object);
|
|
||||||
try self.objects.push_one(self.allocator, value.object);
|
|
||||||
}
|
|
||||||
|
|
||||||
fn emit_float(self: *Chunk, value: types.Float) coral.io.AllocationError!void {
|
|
||||||
return self.bytecodes.push_all(self.allocator, coral.io.bytes_of(&value));
|
|
||||||
}
|
|
||||||
|
|
||||||
fn emit_integer(self: *Chunk, value: types.Integer) coral.io.AllocationError!void {
|
|
||||||
return self.bytecodes.push_all(self.allocator, coral.io.bytes_of(&value));
|
|
||||||
}
|
|
||||||
|
|
||||||
fn emit_opcode(self: *Chunk, opcode: Opcode) coral.io.AllocationError!void {
|
|
||||||
return self.bytecodes.push_one(self.allocator, @enumToInt(opcode));
|
|
||||||
}
|
|
||||||
|
|
||||||
fn emit_string(self: *Chunk, string: []const u8) coral.io.AllocationError!void {
|
|
||||||
const interned_string = try self.env.intern(string);
|
|
||||||
|
|
||||||
coral.debug.assert(interned_string == .object);
|
|
||||||
|
|
||||||
return try self.bytecodes.push_all(self.allocator, coral.io.bytes_of(&interned_string));
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn init(allocator: coral.io.Allocator, env: *Environment, name: []const u8) !Chunk {
|
|
||||||
const assumed_average_bytecode_size = 1024;
|
|
||||||
var bytecodes = try Buffer.init(allocator, assumed_average_bytecode_size);
|
|
||||||
|
|
||||||
errdefer bytecodes.deinit(allocator);
|
|
||||||
|
|
||||||
var objects = try Objects.init(allocator, 1);
|
|
||||||
|
|
||||||
errdefer objects.deinit(allocator);
|
|
||||||
|
|
||||||
const name_copy = try coral.io.allocate_many(u8, name.len, allocator);
|
|
||||||
|
|
||||||
errdefer coral.io.deallocate(name_copy);
|
|
||||||
|
|
||||||
coral.io.copy(name_copy, name);
|
|
||||||
|
|
||||||
return Chunk{
|
|
||||||
.allocator = allocator,
|
|
||||||
.env = env,
|
|
||||||
.name = name_copy,
|
|
||||||
.bytecodes = bytecodes,
|
|
||||||
.objects = objects,
|
|
||||||
.arity = 0,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn parse(self: *Chunk, env: *Environment, script_tokenizer: *tokens.Tokenizer) types.CompileError!void {
|
|
||||||
errdefer self.reset();
|
|
||||||
|
|
||||||
self.reset();
|
|
||||||
|
|
||||||
var parser = Parser{
|
|
||||||
.env = env,
|
|
||||||
.tokenizer = script_tokenizer
|
|
||||||
};
|
|
||||||
|
|
||||||
while (true) {
|
|
||||||
parser.step() catch |step_error| switch (step_error) {
|
|
||||||
error.UnexpectedEnd => return,
|
|
||||||
};
|
|
||||||
|
|
||||||
if (!(try parser.parse_statement(self))) break;
|
|
||||||
}
|
|
||||||
|
|
||||||
while (true) {
|
|
||||||
parser.step() catch |step_error| switch (step_error) {
|
|
||||||
error.UnexpectedEnd => return,
|
|
||||||
};
|
|
||||||
|
|
||||||
try parser.current_token.expect(.newline);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn reset(self: *Chunk) void {
|
|
||||||
self.bytecodes.clear();
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
pub const Opcode = enum(u8) {
|
|
||||||
pop,
|
|
||||||
push_nil,
|
|
||||||
push_true,
|
|
||||||
push_false,
|
|
||||||
push_zero,
|
|
||||||
push_integer,
|
|
||||||
push_float,
|
|
||||||
push_object,
|
|
||||||
push_table,
|
|
||||||
|
|
||||||
not,
|
|
||||||
neg,
|
|
||||||
add,
|
|
||||||
sub,
|
|
||||||
div,
|
|
||||||
mul,
|
|
||||||
|
|
||||||
call,
|
|
||||||
get_index,
|
|
||||||
set_index,
|
|
||||||
get_global,
|
|
||||||
set_global,
|
|
||||||
get_local,
|
|
||||||
set_local,
|
|
||||||
};
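The chunk's emit_* helpers above flatten a program into a byte buffer: one byte per Opcode (@enumToInt) plus integer and float operands appended as their raw bytes through coral.io.bytes_of. A rough standalone sketch of that encoding, written against the same pre-0.11 builtins the diff itself uses; the opcode set and function names here are illustrative:

const std = @import("std");

// Illustrative opcode set; the real Opcode enum in the chunk is larger.
const Opcode = enum(u8) {
	push_integer,
	add,
	ret,
};

// Opcodes become single bytes in the stream.
fn emit_opcode(code: []u8, cursor: *usize, opcode: Opcode) void {
	code[cursor.*] = @enumToInt(opcode);
	cursor.* += 1;
}

// Integer operands are appended as their raw in-memory bytes, the same idea
// as push_all(allocator, bytes_of(&value)).
fn emit_integer(code: []u8, cursor: *usize, value: i64) void {
	for (std.mem.asBytes(&value)) |byte| {
		code[cursor.*] = byte;
		cursor.* += 1;
	}
}

pub fn main() void {
	var code = [_]u8{0} ** 32;
	var cursor: usize = 0;

	emit_opcode(&code, &cursor, .push_integer);
	emit_integer(&code, &cursor, 42);
	emit_opcode(&code, &cursor, .add);

	std.debug.print("emitted {} bytes\n", .{cursor});
}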
|
|
||||||
|
|
||||||
const Parser = struct {
|
|
||||||
env: *Environment,
|
|
||||||
tokenizer: *tokens.Tokenizer,
|
|
||||||
current_token: tokens.Token = .newline,
|
|
||||||
previous_token: tokens.Token = .newline,
|
|
||||||
locals: SmallStack(Local, .{.name = "", .depth = 0}) = .{},
|
|
||||||
|
|
||||||
const Local = struct {
|
|
||||||
name: []const u8,
|
|
||||||
depth: u16,
|
|
||||||
|
|
||||||
const empty = Local{ .name = "", .depth = 0 };
|
|
||||||
};
|
|
||||||
|
|
||||||
const Operator = enum {
|
|
||||||
not,
|
|
||||||
negate,
|
|
||||||
add,
|
|
||||||
subtract,
|
|
||||||
divide,
|
|
||||||
multiply,
|
|
||||||
|
|
||||||
const Self = @This();
|
|
||||||
|
|
||||||
fn opcode(self: Self) Opcode {
|
|
||||||
return switch (self) {
|
|
||||||
.not => .not,
|
|
||||||
.negate => .neg,
|
|
||||||
.add => .add,
|
|
||||||
.subtract => .sub,
|
|
||||||
.multiply => .mul,
|
|
||||||
.divide => .div,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
fn precedence(self: Self) isize {
|
|
||||||
return switch (self) {
|
|
||||||
.not => 13,
|
|
||||||
.negate => 13,
|
|
||||||
.add => 11,
|
|
||||||
.subtract => 11,
|
|
||||||
.divide => 12,
|
|
||||||
.multiply => 12,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
const OperatorStack = SmallStack(Operator, .not);
|
|
||||||
|
|
||||||
const StepError = error {
|
|
||||||
UnexpectedEnd,
|
|
||||||
};
|
|
||||||
|
|
||||||
fn declare_local(self: *Parser, name: []const u8) !void {
|
|
||||||
return self.locals.push(.{
|
|
||||||
.name = name,
|
|
||||||
.depth = 0,
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
const operator_tokens = &.{.symbol_assign, .symbol_plus,
|
|
||||||
.symbol_dash, .symbol_asterisk, .symbol_forward_slash, .symbol_paren_left, .symbol_comma};
|
|
||||||
|
|
||||||
fn parse_closure(self: *Parser, parent_chunk: *Chunk) types.CompileError!void {
|
|
||||||
var closure_chunk = try Chunk.init(self.env.allocator, self.env, switch (self.previous_token) {
|
|
||||||
.local_identifier => |identifier| identifier,
|
|
||||||
.symbol_assign, .symbol_paren_left, .symbol_comma => "<closure>",
|
|
||||||
else => return error.UnexpectedToken,
|
|
||||||
});
|
|
||||||
|
|
||||||
errdefer closure_chunk.deinit();
|
|
||||||
|
|
||||||
try self.step();
|
|
||||||
try self.current_token.expect(.symbol_paren_left);
|
|
||||||
|
|
||||||
while (true) {
|
|
||||||
try self.step();
|
|
||||||
|
|
||||||
switch (self.current_token) {
|
|
||||||
.symbol_paren_right => break,
|
|
||||||
|
|
||||||
.local_identifier => {
|
|
||||||
try self.declare_local(self.current_token.local_identifier);
|
|
||||||
try self.step();
|
|
||||||
},
|
|
||||||
|
|
||||||
else => return error.UnexpectedToken,
|
|
||||||
}
|
|
||||||
|
|
||||||
closure_chunk.arity += 1;
|
|
||||||
|
|
||||||
switch (self.current_token) {
|
|
||||||
.symbol_paren_right => break,
|
|
||||||
.symbol_comma => continue,
|
|
||||||
else => return error.UnexpectedToken,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
try self.step();
|
|
||||||
try self.current_token.expect(.symbol_brace_left);
|
|
||||||
|
|
||||||
// TODO: Create new callframe.
|
|
||||||
|
|
||||||
while (true) {
|
|
||||||
try self.step();
|
|
||||||
|
|
||||||
switch (self.current_token) {
|
|
||||||
.symbol_brace_right => break,
|
|
||||||
else => if (try self.parse_statement(&closure_chunk)) continue,
|
|
||||||
}
|
|
||||||
|
|
||||||
while (true) {
|
|
||||||
self.step() catch |step_error| switch (step_error) {
|
|
||||||
error.UnexpectedEnd => return,
|
|
||||||
};
|
|
||||||
|
|
||||||
switch (self.current_token) {
|
|
||||||
.newline => continue,
|
|
||||||
.symbol_brace_right => break,
|
|
||||||
else => return error.UnexpectedToken,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
try parent_chunk.emit_opcode(.push_object);
|
|
||||||
try parent_chunk.emit_closure(closure_chunk);
|
|
||||||
}
|
|
||||||
|
|
||||||
fn parse_expression(self: *Parser, chunk: *Chunk) types.CompileError!void {
|
|
||||||
var operators = OperatorStack{};
|
|
||||||
var local_depth = @as(usize, 0);
|
|
||||||
|
|
||||||
while (true) {
|
|
||||||
switch (self.current_token) {
|
|
||||||
.keyword_nil => {
|
|
||||||
try self.previous_token.expect_any(operator_tokens);
|
|
||||||
try chunk.emit_opcode(.push_nil);
|
|
||||||
|
|
||||||
self.step() catch |step_error| switch (step_error) {
|
|
||||||
error.UnexpectedEnd => return,
|
|
||||||
};
|
|
||||||
},
|
|
||||||
|
|
||||||
.keyword_true => {
|
|
||||||
try self.previous_token.expect_any(operator_tokens);
|
|
||||||
try chunk.emit_opcode(.push_true);
|
|
||||||
|
|
||||||
self.step() catch |step_error| switch (step_error) {
|
|
||||||
error.UnexpectedEnd => return,
|
|
||||||
};
|
|
||||||
},
|
|
||||||
|
|
||||||
.keyword_false => {
|
|
||||||
try self.previous_token.expect_any(operator_tokens);
|
|
||||||
try chunk.emit_opcode(.push_false);
|
|
||||||
|
|
||||||
self.step() catch |step_error| switch (step_error) {
|
|
||||||
error.UnexpectedEnd => return,
|
|
||||||
};
|
|
||||||
},
|
|
||||||
|
|
||||||
.integer_literal => |literal| {
|
|
||||||
try self.previous_token.expect_any(operator_tokens);
|
|
||||||
|
|
||||||
const value = coral.utf8.parse_signed(@bitSizeOf(types.Integer), literal)
|
|
||||||
catch |parse_error| switch (parse_error) {
|
|
||||||
error.BadSyntax => unreachable,
|
|
||||||
error.IntOverflow => return error.IntOverflow,
|
|
||||||
};
|
|
||||||
|
|
||||||
if (value == 0) {
|
|
||||||
try chunk.emit_opcode(.push_zero);
|
|
||||||
} else {
|
|
||||||
try chunk.emit_opcode(.push_integer);
|
|
||||||
try chunk.emit_integer(value);
|
|
||||||
}
|
|
||||||
|
|
||||||
try self.step();
|
|
||||||
},
|
|
||||||
|
|
||||||
.real_literal => |literal| {
|
|
||||||
try self.previous_token.expect_any(operator_tokens);
|
|
||||||
|
|
||||||
try chunk.emit_float(coral.utf8.parse_float(@bitSizeOf(types.Float), literal) catch |parse_error| {
|
|
||||||
switch (parse_error) {
|
|
||||||
// Already validated to be a real by the tokenizer so this cannot fail, as real syntax is a
|
|
||||||
// subset of float syntax.
|
|
||||||
error.BadSyntax => unreachable,
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
try self.step();
|
|
||||||
},
|
|
||||||
|
|
||||||
.string_literal => |literal| {
|
|
||||||
try self.previous_token.expect_any(operator_tokens);
|
|
||||||
try chunk.emit_opcode(.push_object);
|
|
||||||
try chunk.emit_string(literal);
|
|
||||||
try self.step();
|
|
||||||
},
|
|
||||||
|
|
||||||
.global_identifier, .local_identifier => {
|
|
||||||
try self.previous_token.expect_any(&.{.symbol_assign, .symbol_plus,
|
|
||||||
.symbol_dash, .symbol_asterisk, .symbol_forward_slash, .symbol_period});
|
|
||||||
|
|
||||||
try self.step();
|
|
||||||
},
|
|
||||||
|
|
||||||
.symbol_bang => {
|
|
||||||
try self.previous_token.expect_any(operator_tokens);
|
|
||||||
try operators.push(.not);
|
|
||||||
try self.step();
|
|
||||||
|
|
||||||
local_depth = 0;
|
|
||||||
},
|
|
||||||
|
|
||||||
.symbol_plus => {
|
|
||||||
try self.parse_operator(chunk, &operators, .add);
|
|
||||||
|
|
||||||
local_depth = 0;
|
|
||||||
},
|
|
||||||
|
|
||||||
.symbol_dash => {
|
|
||||||
try self.parse_operator(chunk, &operators, .subtract);
|
|
||||||
|
|
||||||
local_depth = 0;
|
|
||||||
},
|
|
||||||
|
|
||||||
.symbol_asterisk => {
|
|
||||||
try self.parse_operator(chunk, &operators, .multiply);
|
|
||||||
|
|
||||||
local_depth = 0;
|
|
||||||
},
|
|
||||||
|
|
||||||
.symbol_forward_slash => {
|
|
||||||
try self.parse_operator(chunk, &operators, .divide);
|
|
||||||
|
|
||||||
local_depth = 0;
|
|
||||||
},
|
|
||||||
|
|
||||||
.symbol_arrow => {
|
|
||||||
try self.parse_closure(chunk);
|
|
||||||
|
|
||||||
local_depth = 0;
|
|
||||||
},
|
|
||||||
|
|
||||||
.symbol_period => {
|
|
||||||
switch (self.previous_token) {
|
|
||||||
.global_identifier => |identifier| {
|
|
||||||
try chunk.emit_opcode(.get_global);
|
|
||||||
try chunk.emit_string(identifier);
|
|
||||||
},
|
|
||||||
|
|
||||||
.local_identifier => |identifier| {
|
|
||||||
if (local_depth == 0) {
|
|
||||||
try chunk.emit_byte(self.resolve_local(identifier) orelse {
|
|
||||||
return error.UndefinedLocal;
|
|
||||||
});
|
|
||||||
} else {
|
|
||||||
try chunk.emit_opcode(.get_index);
|
|
||||||
try chunk.emit_string(identifier);
|
|
||||||
}
|
|
||||||
},
|
|
||||||
|
|
||||||
else => return error.UnexpectedToken,
|
|
||||||
}
|
|
||||||
|
|
||||||
try self.step();
|
|
||||||
|
|
||||||
local_depth += 1;
|
|
||||||
},
|
|
||||||
|
|
||||||
.symbol_paren_left => {
|
|
||||||
switch (self.previous_token) {
|
|
||||||
.local_identifier => |identifier| {
|
|
||||||
if (local_depth == 0) {
|
|
||||||
try chunk.emit_byte(self.resolve_local(identifier) orelse {
|
|
||||||
return error.UndefinedLocal;
|
|
||||||
});
|
|
||||||
} else {
|
|
||||||
try chunk.emit_opcode(.get_index);
|
|
||||||
try chunk.emit_string(identifier);
|
|
||||||
}
|
|
||||||
},
|
|
||||||
|
|
||||||
.global_identifier => |identifier| {
|
|
||||||
try chunk.emit_opcode(.get_global);
|
|
||||||
try chunk.emit_string(identifier);
|
|
||||||
},
|
|
||||||
|
|
||||||
else => {
|
|
||||||
try self.parse_expression(chunk);
|
|
||||||
try self.previous_token.expect(.symbol_paren_right);
|
|
||||||
try self.step();
|
|
||||||
|
|
||||||
local_depth = 0;
|
|
||||||
|
|
||||||
continue;
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
local_depth += 1;
|
|
||||||
|
|
||||||
var argument_count = @as(types.Integer, 0);
|
|
||||||
|
|
||||||
while (true) {
|
|
||||||
try self.step();
|
|
||||||
|
|
||||||
try switch (self.current_token) {
|
|
||||||
.symbol_paren_right => break,
|
|
||||||
else => self.parse_expression(chunk),
|
|
||||||
};
|
|
||||||
|
|
||||||
argument_count += 1;
|
|
||||||
|
|
||||||
switch (self.current_token) {
|
|
||||||
.symbol_paren_right => break,
|
|
||||||
.symbol_comma => continue,
|
|
||||||
else => return error.UnexpectedToken,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
try chunk.emit_opcode(.call);
|
|
||||||
try chunk.emit_integer(argument_count);
|
|
||||||
try self.step();
|
|
||||||
|
|
||||||
local_depth = 0;
|
|
||||||
},
|
|
||||||
|
|
||||||
.symbol_brace_left => {
|
|
||||||
const is_call_argument = switch (self.previous_token) {
|
|
||||||
.local_identifier, .global_identifier => true,
|
|
||||||
else => false,
|
|
||||||
};
|
|
||||||
|
|
||||||
var field_count = @as(types.Integer, 0);
|
|
||||||
|
|
||||||
while (true) {
|
|
||||||
try self.step();
|
|
||||||
|
|
||||||
switch (self.current_token) {
|
|
||||||
.newline => continue,
|
|
||||||
|
|
||||||
.local_identifier => {
|
|
||||||
// Create local copy of identifier because step() will overwrite captures.
|
|
||||||
const identifier = self.current_token.local_identifier;
|
|
||||||
|
|
||||||
try chunk.emit_opcode(.push_object);
|
|
||||||
try chunk.emit_string(identifier);
|
|
||||||
try self.step();
|
|
||||||
|
|
||||||
switch (self.current_token) {
|
|
||||||
.symbol_assign => {
|
|
||||||
try self.step();
|
|
||||||
try self.parse_expression(chunk);
|
|
||||||
|
|
||||||
field_count += 1;
|
|
||||||
},
|
|
||||||
|
|
||||||
.symbol_brace_right => {
|
|
||||||
try chunk.emit_opcode(.push_object);
|
|
||||||
try chunk.emit_string(identifier);
|
|
||||||
|
|
||||||
field_count += 1;
|
|
||||||
|
|
||||||
break;
|
|
||||||
},
|
|
||||||
|
|
||||||
.symbol_comma => {
|
|
||||||
try chunk.emit_opcode(.push_object);
|
|
||||||
try chunk.emit_string(identifier);
|
|
||||||
|
|
||||||
field_count += 1;
|
|
||||||
},
|
|
||||||
|
|
||||||
else => return error.UnexpectedToken,
|
|
||||||
}
|
|
||||||
},
|
|
||||||
|
|
||||||
.symbol_brace_right => break,
|
|
||||||
else => return error.UnexpectedToken,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
try chunk.emit_opcode(.push_table);
|
|
||||||
try chunk.emit_integer(field_count);
|
|
||||||
|
|
||||||
if (is_call_argument) {
|
|
||||||
try chunk.emit_opcode(.call);
|
|
||||||
try chunk.emit_integer(1);
|
|
||||||
}
|
|
||||||
|
|
||||||
self.step() catch |step_error| switch (step_error) {
|
|
||||||
error.UnexpectedEnd => return,
|
|
||||||
};
|
|
||||||
},
|
|
||||||
|
|
||||||
else => {
|
|
||||||
try self.previous_token.expect_any(&.{.keyword_nil, .keyword_true, .keyword_false, .integer_literal,
|
|
||||||
.real_literal, .string_literal, .global_identifier, .local_identifier, .symbol_brace_right,
|
|
||||||
.symbol_paren_right});
|
|
||||||
|
|
||||||
while (operators.pop()) |operator| {
|
|
||||||
try chunk.emit_opcode(operator.opcode());
|
|
||||||
}
|
|
||||||
|
|
||||||
return;
|
|
||||||
},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn parse_operator(self: *Parser, chunk: *Chunk, operators: *OperatorStack, rhs_operator: Operator) types.CompileError!void {
|
|
||||||
try self.previous_token.expect_any(operator_tokens);
|
|
||||||
|
|
||||||
while (operators.pop()) |lhs_operator| {
|
|
||||||
if (rhs_operator.precedence() < lhs_operator.precedence()) break try operators.push(lhs_operator);
|
|
||||||
|
|
||||||
try chunk.emit_opcode(lhs_operator.opcode());
|
|
||||||
}
|
|
||||||
|
|
||||||
try operators.push(rhs_operator);
|
|
||||||
try self.step();
|
|
||||||
}
|
|
||||||
|
|
||||||
fn parse_statement(self: *Parser, chunk: *Chunk) types.CompileError!bool {
|
|
||||||
var local_depth = @as(usize, 0);
|
|
||||||
|
|
||||||
while (true) {
|
|
||||||
switch (self.current_token) {
|
|
||||||
.newline => self.step() catch |step_error| switch (step_error) {
|
|
||||||
error.UnexpectedEnd => return true,
|
|
||||||
},
|
|
||||||
|
|
||||||
.keyword_return => {
|
|
||||||
try self.previous_token.expect(.newline);
|
|
||||||
|
|
||||||
self.step() catch |step_error| switch (step_error) {
|
|
||||||
error.UnexpectedEnd => return true,
|
|
||||||
};
|
|
||||||
|
|
||||||
try self.parse_expression(chunk);
|
|
||||||
|
|
||||||
return false;
|
|
||||||
},
|
|
||||||
|
|
||||||
.local_identifier => {
|
|
||||||
try self.previous_token.expect_any(&.{.newline, .symbol_period});
|
|
||||||
try self.step();
|
|
||||||
},
|
|
||||||
|
|
||||||
.global_identifier => {
|
|
||||||
try self.previous_token.expect(.newline);
|
|
||||||
try self.step();
|
|
||||||
},
|
|
||||||
|
|
||||||
.symbol_period => switch (self.previous_token) {
|
|
||||||
.global_identifier => {
|
|
||||||
// Create local copy of identifier because step() will overwrite captures.
|
|
||||||
const identifier = self.previous_token.global_identifier;
|
|
||||||
|
|
||||||
try self.step();
|
|
||||||
try self.current_token.expect(.local_identifier);
|
|
||||||
try chunk.emit_opcode(.get_global);
|
|
||||||
try chunk.emit_string(identifier);
|
|
||||||
|
|
||||||
local_depth += 1;
|
|
||||||
},
|
|
||||||
|
|
||||||
.local_identifier => {
|
|
||||||
// Create local copy of identifier because step() will overwrite captures.
|
|
||||||
const identifier = self.previous_token.local_identifier;
|
|
||||||
|
|
||||||
try self.step();
|
|
||||||
try self.current_token.expect(.local_identifier);
|
|
||||||
|
|
||||||
if (local_depth == 0) {
|
|
||||||
try chunk.emit_byte(self.resolve_local(identifier) orelse return error.UndefinedLocal);
|
|
||||||
} else {
|
|
||||||
try chunk.emit_opcode(.get_index);
|
|
||||||
try chunk.emit_string(identifier);
|
|
||||||
}
|
|
||||||
|
|
||||||
local_depth += 1;
|
|
||||||
},
|
|
||||||
|
|
||||||
else => return error.UnexpectedToken,
|
|
||||||
},
|
|
||||||
|
|
||||||
.symbol_assign => {
|
|
||||||
try self.previous_token.expect(.local_identifier);
|
|
||||||
|
|
||||||
const identifier = self.previous_token.local_identifier;
|
|
||||||
|
|
||||||
if (local_depth == 0) {
|
|
||||||
if (self.resolve_local(identifier)) |local_slot| {
|
|
||||||
try chunk.emit_opcode(.set_local);
|
|
||||||
try chunk.emit_byte(local_slot);
|
|
||||||
} else {
|
|
||||||
try self.declare_local(identifier);
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
try chunk.emit_opcode(.set_index);
|
|
||||||
try chunk.emit_string(identifier);
|
|
||||||
}
|
|
||||||
|
|
||||||
try self.step();
|
|
||||||
try self.parse_expression(chunk);
|
|
||||||
|
|
||||||
local_depth = 0;
|
|
||||||
},
|
|
||||||
|
|
||||||
.symbol_arrow => {
|
|
||||||
try self.parse_closure(chunk);
|
|
||||||
|
|
||||||
local_depth = 0;
|
|
||||||
},
|
|
||||||
|
|
||||||
.symbol_paren_left => {
|
|
||||||
switch (self.previous_token) {
|
|
||||||
.local_identifier => |identifier| {
|
|
||||||
if (local_depth == 0) {
|
|
||||||
try chunk.emit_byte(self.resolve_local(identifier) orelse {
|
|
||||||
return error.UndefinedLocal;
|
|
||||||
});
|
|
||||||
} else {
|
|
||||||
try chunk.emit_opcode(.get_index);
|
|
||||||
try chunk.emit_string(identifier);
|
|
||||||
}
|
|
||||||
},
|
|
||||||
|
|
||||||
.global_identifier => |identifier| {
|
|
||||||
try chunk.emit_opcode(.get_global);
|
|
||||||
try chunk.emit_string(identifier);
|
|
||||||
},
|
|
||||||
|
|
||||||
else => return error.UnexpectedToken,
|
|
||||||
}
|
|
||||||
|
|
||||||
var argument_count = @as(types.Integer, 0);
|
|
||||||
|
|
||||||
while (true) {
|
|
||||||
try self.step();
|
|
||||||
|
|
||||||
try switch (self.current_token) {
|
|
||||||
.symbol_paren_right => break,
|
|
||||||
else => self.parse_expression(chunk),
|
|
||||||
};
|
|
||||||
|
|
||||||
argument_count += 1;
|
|
||||||
|
|
||||||
switch (self.current_token) {
|
|
||||||
.symbol_paren_right => break,
|
|
||||||
.symbol_comma => continue,
|
|
||||||
else => return error.UnexpectedToken,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
try chunk.emit_opcode(.call);
|
|
||||||
try chunk.emit_integer(argument_count);
|
|
||||||
try chunk.emit_opcode(.pop);
|
|
||||||
|
|
||||||
self.step() catch |step_error| switch (step_error) {
|
|
||||||
error.UnexpectedEnd => return true,
|
|
||||||
};
|
|
||||||
|
|
||||||
local_depth = 0;
|
|
||||||
},
|
|
||||||
|
|
||||||
else => return error.UnexpectedToken,
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
fn resolve_local(self: *Parser, name: []const u8) ?u8 {
|
|
||||||
var count = @as(u8, self.locals.buffer.len);
|
|
||||||
|
|
||||||
while (count != 0) {
|
|
||||||
const index = count - 1;
|
|
||||||
|
|
||||||
if (coral.io.equals(name, self.locals.buffer[index].name)) return index;
|
|
||||||
|
|
||||||
count = index;
|
|
||||||
}
|
|
||||||
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
fn step(self: *Parser) StepError!void {
|
|
||||||
self.previous_token = self.current_token;
|
|
||||||
self.current_token = self.tokenizer.next() orelse return error.UnexpectedEnd;
|
|
||||||
|
|
||||||
@import("std").debug.print("{s}\n", .{self.current_token.text()});
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
fn SmallStack(comptime Element: type, comptime default: Element) type {
|
|
||||||
const maximum = 255;
|
|
||||||
|
|
||||||
return struct {
|
|
||||||
buffer: [maximum]Element = [_]Element{default} ** maximum,
|
|
||||||
count: u8 = 0,
|
|
||||||
|
|
||||||
const Self = @This();
|
|
||||||
|
|
||||||
fn peek(self: Self) ?Element {
|
|
||||||
if (self.count == 0) return null;
|
|
||||||
|
|
||||||
return self.buffer[self.count - 1];
|
|
||||||
}
|
|
||||||
|
|
||||||
fn pop(self: *Self) ?Element {
|
|
||||||
if (self.count == 0) return null;
|
|
||||||
|
|
||||||
self.count -= 1;
|
|
||||||
|
|
||||||
return self.buffer[self.count];
|
|
||||||
}
|
|
||||||
|
|
||||||
fn push(self: *Self, element: Element) !void {
|
|
||||||
if (self.count == maximum) return error.OutOfMemory;
|
|
||||||
|
|
||||||
self.buffer[self.count] = element;
|
|
||||||
self.count += 1;
|
|
||||||
}
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
const SymbolTable = coral.table.Hashed(coral.table.string_key, usize);
|
|
|
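The removed single-pass parser drove binary operators through a small operator stack keyed on precedence(), emitting an opcode whenever a stacked operator could no longer be deferred. The sketch below shows the conventional form of that operator-stack rule (pop and emit while the stacked operator binds at least as tightly, then push the incoming one); it is a standalone illustration of the technique, not the repository's exact comparison:

const std = @import("std");

// Toy version of the Operator/precedence table from the removed parser.
const Operator = enum {
	add,
	multiply,

	fn precedence(self: Operator) u8 {
		return switch (self) {
			.add => 11,
			.multiply => 12,
		};
	}
};

// Before pushing a new operator, pop and "emit" every stacked operator that
// binds at least as tightly, so tighter operations are emitted first
// (left-associative rule).
fn push_operator(stack: []Operator, count: *usize, rhs: Operator) void {
	while (count.* != 0) {
		const lhs = stack[count.* - 1];

		if (lhs.precedence() < rhs.precedence()) break;

		count.* -= 1;

		std.debug.print("emit {s}\n", .{@tagName(lhs)});
	}

	stack[count.*] = rhs;
	count.* += 1;
}

pub fn main() void {
	var stack = [_]Operator{.add} ** 8;
	var count: usize = 0;

	// Operators of "1 * 2 + 3": the multiply is emitted as soon as the
	// lower-precedence plus arrives, then the drain emits the rest.
	push_operator(&stack, &count, .multiply);
	push_operator(&stack, &count, .add);

	while (count != 0) {
		count -= 1;

		std.debug.print("emit {s}\n", .{@tagName(stack[count])});
	}
}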
@@ -4,12 +4,12 @@ pub const Token = union(enum) {
|
||||||
unknown: u8,
|
unknown: u8,
|
||||||
newline,
|
newline,
|
||||||
|
|
||||||
global_identifier: []const u8,
|
global: []const u8,
|
||||||
local_identifier: []const u8,
|
local: []const u8,
|
||||||
|
|
||||||
symbol_assign,
|
symbol_assign,
|
||||||
symbol_plus,
|
symbol_plus,
|
||||||
symbol_dash,
|
symbol_minus,
|
||||||
symbol_asterisk,
|
symbol_asterisk,
|
||||||
symbol_forward_slash,
|
symbol_forward_slash,
|
||||||
symbol_paren_left,
|
symbol_paren_left,
|
||||||
|
@@ -24,9 +24,9 @@ pub const Token = union(enum) {
|
||||||
symbol_period,
|
symbol_period,
|
||||||
symbol_arrow,
|
symbol_arrow,
|
||||||
|
|
||||||
integer_literal: []const u8,
|
integer: []const u8,
|
||||||
real_literal: []const u8,
|
real: []const u8,
|
||||||
string_literal: []const u8,
|
string: []const u8,
|
||||||
|
|
||||||
keyword_nil,
|
keyword_nil,
|
||||||
keyword_false,
|
keyword_false,
|
||||||
|
@@ -34,49 +34,33 @@ pub const Token = union(enum) {
|
||||||
keyword_return,
|
keyword_return,
|
||||||
keyword_self,
|
keyword_self,
|
||||||
|
|
||||||
pub const ExpectError = error {
|
|
||||||
UnexpectedToken,
|
|
||||||
};
|
|
||||||
|
|
||||||
pub fn expect(self: Token, tag: coral.io.Tag(Token)) ExpectError!void {
|
|
||||||
if (self != tag) return error.UnexpectedToken;
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn expect_any(self: Token, tags: []const coral.io.Tag(Token)) ExpectError!void {
|
|
||||||
for (tags) |tag| {
|
|
||||||
if (self == tag) return;
|
|
||||||
}
|
|
||||||
|
|
||||||
return error.UnexpectedToken;
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn text(self: Token) []const u8 {
|
pub fn text(self: Token) []const u8 {
|
||||||
return switch (self) {
|
return switch (self) {
|
||||||
.unknown => |unknown| @ptrCast([*]const u8, &unknown)[0 .. 1],
|
.unknown => |unknown| @ptrCast([*]const u8, &unknown)[0 .. 1],
|
||||||
.newline => "newline",
|
.newline => "newline",
|
||||||
.global_identifier => |identifier| identifier,
|
.identifier_global => |identifier| identifier,
|
||||||
.local_identifier => |identifier| identifier,
|
.identifier_local => |identifier| identifier,
|
||||||
|
|
||||||
.symbol_assign => "=",
|
.assign => "=",
|
||||||
.symbol_plus => "+",
|
.plus => "+",
|
||||||
.symbol_dash => "-",
|
.minus => "-",
|
||||||
.symbol_asterisk => "*",
|
.asterisk => "*",
|
||||||
.symbol_forward_slash => "/",
|
.forward_slash => "/",
|
||||||
.symbol_paren_left => "(",
|
.paren_left => "(",
|
||||||
.symbol_paren_right => ")",
|
.paren_right => ")",
|
||||||
.symbol_bang => "!",
|
.bang => "!",
|
||||||
.symbol_comma => ",",
|
.comma => ",",
|
||||||
.symbol_at => "@",
|
.at => "@",
|
||||||
.symbol_brace_left => "{",
|
.brace_left => "{",
|
||||||
.symbol_brace_right => "}",
|
.brace_right => "}",
|
||||||
.symbol_bracket_left => "[",
|
.bracket_left => "[",
|
||||||
.symbol_bracket_right => "]",
|
.bracket_right => "]",
|
||||||
.symbol_period => ".",
|
.period => ".",
|
||||||
.symbol_arrow => "=>",
|
.arrow => "=>",
|
||||||
|
|
||||||
.integer_literal => |literal| literal,
|
.integer => |literal| literal,
|
||||||
.real_literal => |literal| literal,
|
.real => |literal| literal,
|
||||||
.string_literal => |literal| literal,
|
.string => |literal| literal,
|
||||||
|
|
||||||
.keyword_nil => "nil",
|
.keyword_nil => "nil",
|
||||||
.keyword_false => "false",
|
.keyword_false => "false",
|
||||||
|
@@ -89,209 +73,267 @@ pub const Token = union(enum) {
|
||||||
|
|
||||||
pub const Tokenizer = struct {
|
pub const Tokenizer = struct {
|
||||||
source: []const u8,
|
source: []const u8,
|
||||||
cursor: usize = 0,
|
lines_stepped: usize = 1,
|
||||||
|
previous_token: Token = .newline,
|
||||||
|
current_token: Token = .newline,
|
||||||
|
|
||||||
pub fn has_next(self: Tokenizer) bool {
|
pub fn has_next(self: Tokenizer) bool {
|
||||||
return self.cursor < self.source.len;
|
return self.source.len != 0;
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn next(self: *Tokenizer) ?Token {
|
pub fn step(self: *Tokenizer) bool {
|
||||||
while (self.has_next()) switch (self.source[self.cursor]) {
|
self.previous_token = self.current_token;
|
||||||
' ', '\t' => self.cursor += 1,
|
|
||||||
|
var cursor = @as(usize, 0);
|
||||||
|
|
||||||
|
defer self.source = self.source[cursor ..];
|
||||||
|
|
||||||
|
while (self.has_next()) switch (self.source[cursor]) {
|
||||||
|
'#' => {
|
||||||
|
cursor += 1;
|
||||||
|
|
||||||
|
while (self.has_next() and (self.source[cursor] == '\n')) {
|
||||||
|
cursor += 1;
|
||||||
|
}
|
||||||
|
},
|
||||||
|
|
||||||
|
' ', '\t' => cursor += 1,
|
||||||
|
|
||||||
'\n' => {
|
'\n' => {
|
||||||
self.cursor += 1;
|
cursor += 1;
|
||||||
|
self.lines_stepped += 1;
|
||||||
|
self.current_token = .newline;
|
||||||
|
|
||||||
return .newline;
|
return true;
|
||||||
},
|
},
|
||||||
|
|
||||||
'0' ... '9' => {
|
'0' ... '9' => {
|
||||||
const begin = self.cursor;
|
const begin = cursor;
|
||||||
|
|
||||||
self.cursor += 1;
|
cursor += 1;
|
||||||
|
|
||||||
while (self.has_next()) switch (self.source[self.cursor]) {
|
while (self.has_next()) switch (self.source[cursor]) {
|
||||||
'0' ... '9' => self.cursor += 1,
|
'0' ... '9' => cursor += 1,
|
||||||
|
|
||||||
'.' => {
|
'.' => {
|
||||||
self.cursor += 1;
|
cursor += 1;
|
||||||
|
|
||||||
while (self.has_next()) switch (self.source[self.cursor]) {
|
while (self.has_next()) switch (self.source[cursor]) {
|
||||||
'0' ... '9' => self.cursor += 1,
|
'0' ... '9' => cursor += 1,
|
||||||
else => break,
|
else => break,
|
||||||
};
|
};
|
||||||
|
|
||||||
return Token{.real_literal = self.source[begin .. self.cursor]};
|
self.current_token = .{.real = self.source[begin .. cursor]};
|
||||||
|
|
||||||
|
return true;
|
||||||
},
|
},
|
||||||
|
|
||||||
else => break,
|
else => break,
|
||||||
};
|
};
|
||||||
|
|
||||||
return Token{.integer_literal = self.source[begin .. self.cursor]};
|
self.current_token = .{.integer = self.source[begin .. cursor]};
|
||||||
|
|
||||||
|
return true;
|
||||||
},
|
},
|
||||||
|
|
||||||
'A' ... 'Z', 'a' ... 'z', '_' => {
|
'A' ... 'Z', 'a' ... 'z', '_' => {
|
||||||
const begin = self.cursor;
|
const begin = cursor;
|
||||||
|
|
||||||
self.cursor += 1;
|
cursor += 1;
|
||||||
|
|
||||||
while (self.cursor < self.source.len) switch (self.source[self.cursor]) {
|
while (cursor < self.source.len) switch (self.source[cursor]) {
|
||||||
'0'...'9', 'A'...'Z', 'a'...'z', '_' => self.cursor += 1,
|
'0'...'9', 'A'...'Z', 'a'...'z', '_' => cursor += 1,
|
||||||
else => break,
|
else => break,
|
||||||
};
|
};
|
||||||
|
|
||||||
const identifier = self.source[begin..self.cursor];
|
const identifier = self.source[begin .. cursor];
|
||||||
|
|
||||||
coral.debug.assert(identifier.len != 0);
|
coral.debug.assert(identifier.len != 0);
|
||||||
|
|
||||||
switch (identifier[0]) {
|
switch (identifier[0]) {
|
||||||
'n' => if (coral.io.ends_with(identifier, "il")) return .keyword_nil,
|
'n' => if (coral.io.ends_with(identifier, "il")) {
|
||||||
'f' => if (coral.io.ends_with(identifier, "alse")) return .keyword_false,
|
self.current_token = .keyword_nil;
|
||||||
't' => if (coral.io.ends_with(identifier, "rue")) return .keyword_true,
|
},
|
||||||
'r' => if (coral.io.ends_with(identifier, "eturn")) return .keyword_return,
|
|
||||||
's' => if (coral.io.ends_with(identifier, "elf")) return .keyword_self,
|
'f' => if (coral.io.ends_with(identifier, "alse")) {
|
||||||
else => {},
|
self.current_token = .keyword_false;
|
||||||
|
},
|
||||||
|
|
||||||
|
't' => if (coral.io.ends_with(identifier, "rue")) {
|
||||||
|
self.current_token = .keyword_true;
|
||||||
|
},
|
||||||
|
|
||||||
|
'r' => if (coral.io.ends_with(identifier, "eturn")) {
|
||||||
|
self.current_token = .keyword_return;
|
||||||
|
},
|
||||||
|
|
||||||
|
's' => if (coral.io.ends_with(identifier, "elf")) {
|
||||||
|
self.current_token = .keyword_self;
|
||||||
|
},
|
||||||
|
|
||||||
|
else => self.current_token = .{.local = identifier},
|
||||||
}
|
}
|
||||||
|
|
||||||
return Token{.local_identifier = identifier};
|
return true;
|
||||||
},
|
},
|
||||||
|
|
||||||
'@' => {
|
'@' => {
|
||||||
self.cursor += 1;
|
cursor += 1;
|
||||||
|
|
||||||
if (self.has_next()) switch (self.source[self.cursor]) {
|
if (self.has_next()) switch (self.source[cursor]) {
|
||||||
'A'...'Z', 'a'...'z', '_' => {
|
'A'...'Z', 'a'...'z', '_' => {
|
||||||
const begin = self.cursor;
|
const begin = cursor;
|
||||||
|
|
||||||
self.cursor += 1;
|
cursor += 1;
|
||||||
|
|
||||||
while (self.has_next()) switch (self.source[self.cursor]) {
|
while (self.has_next()) switch (self.source[cursor]) {
|
||||||
'0'...'9', 'A'...'Z', 'a'...'z', '_' => self.cursor += 1,
|
'0'...'9', 'A'...'Z', 'a'...'z', '_' => cursor += 1,
|
||||||
else => break,
|
else => break,
|
||||||
};
|
};
|
||||||
|
|
||||||
return Token{.global_identifier = self.source[begin..self.cursor]};
|
self.current_token = .{.global = self.source[begin .. cursor]};
|
||||||
|
|
||||||
|
return true;
|
||||||
},
|
},
|
||||||
|
|
||||||
'"' => {
|
'"' => {
|
||||||
self.cursor += 1;
|
cursor += 1;
|
||||||
|
|
||||||
const begin = self.cursor;
|
const begin = cursor;
|
||||||
|
|
||||||
self.cursor += 1;
|
cursor += 1;
|
||||||
|
|
||||||
while (self.has_next()) switch (self.source[self.cursor]) {
|
while (self.has_next()) switch (self.source[cursor]) {
|
||||||
'"' => break,
|
'"' => break,
|
||||||
else => self.cursor += 1,
|
else => cursor += 1,
|
||||||
};
|
};
|
||||||
|
|
||||||
defer self.cursor += 1;
|
self.current_token = .{.global = self.source[begin .. cursor]};
|
||||||
|
cursor += 1;
|
||||||
|
|
||||||
return Token{.global_identifier = self.source[begin..self.cursor]};
|
return true;
|
||||||
},
|
},
|
||||||
|
|
||||||
else => {},
|
else => {},
|
||||||
};
|
};
|
||||||
|
|
||||||
return .symbol_at;
|
self.current_token = .symbol_at;
|
||||||
|
|
||||||
|
return true;
|
||||||
},
|
},
|
||||||
|
|
||||||
'"' => {
|
'"' => {
|
||||||
self.cursor += 1;
|
cursor += 1;
|
||||||
|
|
||||||
const begin = self.cursor;
|
const begin = cursor;
|
||||||
|
|
||||||
self.cursor += 1;
|
cursor += 1;
|
||||||
|
|
||||||
while (self.has_next()) switch (self.source[self.cursor]) {
|
while (self.has_next()) switch (self.source[cursor]) {
|
||||||
'"' => break,
|
'"' => break,
|
||||||
else => self.cursor += 1,
|
else => cursor += 1,
|
||||||
};
|
};
|
||||||
|
|
||||||
defer self.cursor += 1;
|
self.current_token = .{.string = self.source[begin .. cursor]};
|
||||||
|
cursor += 1;
|
||||||
|
|
||||||
return Token{.string_literal = self.source[begin..self.cursor]};
|
return true;
|
||||||
},
|
},
|
||||||
|
|
||||||
'{' => {
|
'{' => {
|
||||||
self.cursor += 1;
|
self.current_token = .symbol_brace_left;
|
||||||
|
cursor += 1;
|
||||||
|
|
||||||
return .symbol_brace_left;
|
return true;
|
||||||
},
|
},
|
||||||
|
|
||||||
'}' => {
|
'}' => {
|
||||||
self.cursor += 1;
|
self.current_token = .symbol_brace_right;
|
||||||
|
cursor += 1;
|
||||||
|
|
||||||
return .symbol_brace_right;
|
return true;
|
||||||
},
|
},
|
||||||
|
|
||||||
',' => {
|
',' => {
|
||||||
self.cursor += 1;
|
self.current_token = .symbol_comma;
|
||||||
|
cursor += 1;
|
||||||
|
|
||||||
return .symbol_comma;
|
return true;
|
||||||
},
|
},
|
||||||
|
|
||||||
'!' => {
|
'!' => {
|
||||||
self.cursor += 1;
|
self.current_token = .symbol_bang;
|
||||||
|
cursor += 1;
|
||||||
|
|
||||||
return .symbol_bang;
|
return true;
|
||||||
},
|
},
|
||||||
|
|
||||||
')' => {
|
')' => {
|
||||||
self.cursor += 1;
|
self.current_token = .symbol_paren_right;
|
||||||
|
cursor += 1;
|
||||||
|
|
||||||
return .symbol_paren_right;
|
return true;
|
||||||
},
|
},
|
||||||
|
|
||||||
'(' => {
|
'(' => {
|
||||||
self.cursor += 1;
|
self.current_token = .symbol_paren_left;
|
||||||
|
cursor += 1;
|
||||||
|
|
||||||
return .symbol_paren_left;
|
return true;
|
||||||
},
|
},
|
||||||
|
|
||||||
'/' => {
|
'/' => {
|
||||||
self.cursor += 1;
|
self.current_token = .symbol_forward_slash;
|
||||||
|
cursor += 1;
|
||||||
|
|
||||||
return .symbol_forward_slash;
|
return true;
|
||||||
},
|
},
|
||||||
|
|
||||||
'*' => {
|
'*' => {
|
||||||
self.cursor += 1;
|
self.current_token = .symbol_asterisk;
|
||||||
|
cursor += 1;
|
||||||
|
|
||||||
return .symbol_asterisk;
|
return true;
|
||||||
},
|
},
|
||||||
|
|
||||||
'-' => {
|
'-' => {
|
||||||
self.cursor += 1;
|
self.current_token = .symbol_minus;
|
||||||
|
cursor += 1;
|
||||||
|
|
||||||
return .symbol_dash;
|
return true;
|
||||||
},
|
},
|
||||||
|
|
||||||
'+' => {
|
'+' => {
|
||||||
self.cursor += 1;
|
self.current_token = .symbol_plus;
|
||||||
|
cursor += 1;
|
||||||
|
|
||||||
return .symbol_plus;
|
return true;
|
||||||
},
|
},
|
||||||
|
|
||||||
'=' => {
|
'=' => {
|
||||||
self.cursor += 1;
|
self.current_token = .symbol_assign;
|
||||||
|
cursor += 1;
|
||||||
|
|
||||||
return .symbol_assign;
|
return true;
|
||||||
},
|
},
|
||||||
|
|
||||||
'.' => {
|
'.' => {
|
||||||
self.cursor += 1;
|
self.current_token = .symbol_period;
|
||||||
|
cursor += 1;
|
||||||
|
|
||||||
return .symbol_period;
|
return true;
|
||||||
},
|
},
|
||||||
|
|
||||||
else => {
|
else => {
|
||||||
defer self.cursor += 1;
|
self.current_token = .{.unknown = self.source[cursor]};
|
||||||
|
cursor += 1;
|
||||||
|
|
||||||
return Token{.unknown = self.source[self.cursor]};
|
return true;
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
|
|
||||||
return null;
|
self.current_token = .newline;
|
||||||
|
|
||||||
|
return false;
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
|
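The reworked tokenizer above no longer walks a cursor and returns ?Token; step() records current_token and previous_token, consumes the matched bytes by shrinking source, and reports whether a token was produced. A stripped-down standalone sketch of that step-style interface (the token set and names here are illustrative, not the repository's):

const std = @import("std");

const Token = union(enum) {
	newline,
	integer: []const u8,
	plus,
	unknown: u8,
};

const Tokenizer = struct {
	source: []const u8,
	previous_token: Token = .newline,
	current_token: Token = .newline,

	fn step(self: *Tokenizer) bool {
		self.previous_token = self.current_token;

		var cursor: usize = 0;

		// Whatever was scanned is consumed from the front of source.
		defer self.source = self.source[cursor ..];

		while (cursor < self.source.len) switch (self.source[cursor]) {
			' ', '\t' => cursor += 1,

			'\n' => {
				self.current_token = .newline;
				cursor += 1;

				return true;
			},

			'+' => {
				self.current_token = .plus;
				cursor += 1;

				return true;
			},

			'0' ... '9' => {
				const begin = cursor;

				while (cursor < self.source.len) switch (self.source[cursor]) {
					'0' ... '9' => cursor += 1,
					else => break,
				};

				self.current_token = .{.integer = self.source[begin .. cursor]};

				return true;
			},

			else => {
				self.current_token = .{.unknown = self.source[cursor]};
				cursor += 1;

				return true;
			},
		};

		return false;
	}
};

pub fn main() void {
	var tokenizer = Tokenizer{.source = "12 + 7\n"};

	while (tokenizer.step()) switch (tokenizer.current_token) {
		.newline => std.debug.print("newline\n", .{}),
		.integer => |digits| std.debug.print("integer {s}\n", .{digits}),
		.plus => std.debug.print("plus\n", .{}),
		.unknown => |byte| std.debug.print("unknown {c}\n", .{byte}),
	};
}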
@@ -1,5 +1,9 @@
 const coral = @import("coral");
 
+pub const CheckError = error {
+	CheckFailed
+};
+
 pub const CompileError = coral.io.AllocationError || RuntimeError || error {
 	UnexpectedEnd,
 	UnexpectedToken,
@@ -30,11 +34,42 @@ pub const Ref = union (Primitive) {
 	object: Object,
 };
 
-pub const RuntimeError = coral.io.AllocationError || error {
-	IntOverflow,
-	CheckFailed,
+pub const RuntimeError = coral.io.AllocationError || CheckError || error {
+	BadSyntax,
 };
 
+pub fn SmallStack(comptime Element: type, comptime default: Element) type {
+	const maximum = 255;
+
+	return struct {
+		buffer: [maximum]Element = [_]Element{default} ** maximum,
+		count: u8 = 0,
+
+		const Self = @This();
+
+		fn peek(self: Self) ?Element {
+			if (self.count == 0) return null;
+
+			return self.buffer[self.count - 1];
+		}
+
+		fn pop(self: *Self) ?Element {
+			if (self.count == 0) return null;
+
+			self.count -= 1;
+
+			return self.buffer[self.count];
+		}
+
+		fn push(self: *Self, element: Element) !void {
+			if (self.count == maximum) return error.OutOfMemory;
+
+			self.buffer[self.count] = element;
+			self.count += 1;
+		}
+	};
+}
+
 pub const Val = union (Primitive) {
 	nil,
 	false,
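SmallStack, now shared through types.zig, gives the compiler a fixed-capacity, allocation-free stack: a plain array plus a count, with push failing once the array is full. A standalone equivalent with a small usage check (the generic name and test below are illustrative, not the project's API):

const std = @import("std");

// Minimal fixed-capacity stack in the spirit of types.SmallStack: storage is
// a plain array, count tracks the live prefix, and push reports OutOfMemory
// instead of allocating when the array is full.
fn BoundedStack(comptime Element: type, comptime maximum: usize) type {
	return struct {
		buffer: [maximum]Element = undefined,
		count: usize = 0,

		const Self = @This();

		fn push(self: *Self, element: Element) error{OutOfMemory}!void {
			if (self.count == maximum) return error.OutOfMemory;

			self.buffer[self.count] = element;
			self.count += 1;
		}

		fn pop(self: *Self) ?Element {
			if (self.count == 0) return null;

			self.count -= 1;

			return self.buffer[self.count];
		}
	};
}

test "push then pop in reverse order" {
	var stack = BoundedStack(u8, 2){};

	try stack.push(1);
	try stack.push(2);

	try std.testing.expectError(error.OutOfMemory, stack.push(3));
	try std.testing.expect(stack.pop().? == 2);
	try std.testing.expect(stack.pop().? == 1);
	try std.testing.expect(stack.pop() == null);
}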
@@ -18,7 +18,7 @@ const AppManifest = struct {
 	height: u16 = 480,
 
 	pub fn load_script(self: *AppManifest, env: *kym.Environment, fs: file.System, file_path: []const u8) !void {
-		const manifest = try env.execute_file(heap.allocator, fs, file.Path.from(&.{file_path}));
+		const manifest = try env.execute_file(fs, file.Path.from(&.{file_path}));
 
 		defer env.discard(manifest);
 
@@ -35,12 +35,14 @@ const AppManifest = struct {
 			coral.io.copy(&self.title, title_string);
 		}
 
+		const u16_int = @typeInfo(u16).Int;
+
 		{
 			const width = try env.get_field(manifest_ref, try env.intern("width"));
 
 			errdefer env.discard(width);
 
-			self.width = try coral.math.checked_cast(u16, try env.to_integer(width.as_ref()));
+			self.width = try coral.math.checked_cast(u16_int, try env.to_integer(width.as_ref()));
 		}
 
 		{
@@ -48,7 +50,7 @@ const AppManifest = struct {
 
 			errdefer env.discard(height);
 
-			self.width = try coral.math.checked_cast(u16, try env.to_integer(height.as_ref()));
+			self.width = try coral.math.checked_cast(u16_int, try env.to_integer(height.as_ref()));
 		}
 	}
 };
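load_script now narrows the script's integer results through coral.math.checked_cast parameterized on @typeInfo(u16).Int, and the check failure maps onto the new CheckError set. The gist of such a checked narrowing cast, sketched standalone with the diff's pre-0.11 two-argument @intCast form (the helper name is hypothetical):

const std = @import("std");

// Hypothetical helper showing the core of a checked narrowing cast like
// coral.math.checked_cast: reject out-of-range values instead of truncating.
fn checked_cast_u16(value: i64) error{CheckFailed}!u16 {
	if (value < 0 or value > std.math.maxInt(u16)) return error.CheckFailed;

	return @intCast(u16, value);
}

test "in-range values narrow, out-of-range values fail the check" {
	try std.testing.expect((try checked_cast_u16(640)) == 640);
	try std.testing.expectError(error.CheckFailed, checked_cast_u16(70_000));
}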