Compare commits

2 Commits: 55fb1ce062 ... 96e10f0668

Author | SHA1 | Date
---|---|---
kayomn | 96e10f0668 |
kayomn | 8fe734f9b7 |
@@ -4,7 +4,7 @@ pub const dag = @import("./dag.zig");

pub const debug = @import("./debug.zig");

-pub const hash = @import("./hash.zig");
+pub const hashes = @import("./hashes.zig");

pub const heap = @import("./heap.zig");
@@ -14,6 +14,15 @@ pub const Error = error {
    UnavailableResource,
};

+pub fn FixedBuffer(comptime len: usize, comptime default_value: anytype) type {
+    const Value = @TypeOf(default_value);
+
+    return struct {
+        filled: usize = 0,
+        values: [len]Value = [_]Value{default_value} ** len,
+    };
+}
+
pub fn Functor(comptime Output: type, comptime input_types: []const type) type {
    const InputTuple = std.meta.Tuple(input_types);
@@ -1,6 +1,6 @@
const coral = @import("./coral.zig");

-const hash = @import("./hash.zig");
+const hashes = @import("./hashes.zig");

const io = @import("./io.zig");
@@ -257,11 +257,9 @@ pub fn enum_traits(comptime Enum: type) Traits(Enum) {
}

pub const string_traits = init: {
-    const djb2 = hash.djb2;
-
    const strings = struct {
        fn hash(value: []const u8) usize {
-            return djb2(@typeInfo(usize).Int, value);
+            return hashes.djb2(@typeInfo(usize).Int, value);
        }
    };
File diff suppressed because it is too large
@@ -0,0 +1,998 @@
const coral = @import("coral");

const file = @import("../file.zig");

const script = @import("../script.zig");

const std = @import("std");

const tokens = @import("./tokens.zig");

const tree = @import("./tree.zig");

name: *script.Object,
arity: u8,
opcodes: coral.Stack(Opcode),
lines: coral.Stack(tokens.Line),
cursor: usize,
constants: coral.Stack(*script.Object),
bindings: []?*script.Object,
externals: *script.Object,

const Compiler = struct {
    chunk: *Self,
    env: *script.Runtime,

    fn compile_argument(self: *const Compiler, environment: *const tree.Environment, initial_argument: ?*const tree.Expr) script.Error!u8 {
        // TODO: Exceeding 255 arguments will make the VM crash.
        var maybe_argument = initial_argument;
        var argument_count = @as(u8, 0);

        while (maybe_argument) |argument| {
            try self.compile_expression(environment, argument, null);

            maybe_argument = argument.next;
            argument_count += 1;
        }

        return argument_count;
    }

    fn compile_expression(self: *const Compiler, environment: *const tree.Environment, expression: *const tree.Expr, name: ?[]const u8) script.Error!void {
        const number_format = coral.utf8.DecimalFormat{
            .delimiter = "_",
            .positive_prefix = .none,
        };

        switch (expression.kind) {
            .nil_literal => try self.chunk.write(expression.line, .push_nil),
            .true_literal => try self.chunk.write(expression.line, .push_true),
            .false_literal => try self.chunk.write(expression.line, .push_false),

            .number_literal => |literal| {
                for (literal) |codepoint| {
                    if (codepoint == '.') {
                        return self.chunk.write(expression.line, .{
                            .push_const = try self.declare_float(number_format.parse(literal, script.Float) orelse unreachable),
                        });
                    }
                }

                try self.chunk.write(expression.line, .{
                    .push_const = try self.declare_fixed(number_format.parse(literal, script.Fixed) orelse unreachable),
                });
            },

            .string_literal => |literal| {
                try self.chunk.write(expression.line, .{.push_const = try self.declare_string(literal)});
            },

            .vector2 => |vector2| {
                try self.compile_expression(environment, vector2.x, null);
                try self.compile_expression(environment, vector2.y, null);
                try self.chunk.write(expression.line, .push_vector2);
            },

            .vector3 => |vector3| {
                try self.compile_expression(environment, vector3.x, null);
                try self.compile_expression(environment, vector3.y, null);
                try self.compile_expression(environment, vector3.z, null);
                try self.chunk.write(expression.line, .push_vector3);
            },

            .string_template => {
                var current_expression = expression.next orelse {
                    return self.chunk.write(expression.line, .{.push_const = try self.declare_string("")});
                };

                var component_count = @as(u8, 0);

                while (true) {
                    try self.compile_expression(environment, current_expression, null);

                    component_count += 1;

                    current_expression = current_expression.next orelse {
                        return self.chunk.write(expression.line, .{.push_concat = component_count});
                    };
                }
            },

            .symbol_literal => |literal| {
                try self.chunk.write(expression.line, .{.push_const = try self.declare_symbol(literal)});
            },

            .table => |table| {
                var entries = table.nodes();
                var num_entries = @as(u16, 0);

                while (entries.next()) |entry| {
                    try self.compile_expression(environment, entry.key, null);
                    try self.compile_expression(environment, entry.value, null);

                    num_entries = coral.scalars.add(num_entries, 1) orelse {
                        return self.env.raise(error.OutOfMemory, "too many initializer values", .{});
                    };
                }

                try self.chunk.write(expression.line, .{.push_table = num_entries});
            },

            .lambda_construct => |lambda_construct| {
                var chunk = try Self.init(self.env, name orelse "<lambda>", lambda_construct.environment, &.{});

                errdefer chunk.deinit(self.env);

                if (lambda_construct.environment.capture_count == 0) {
                    try self.chunk.write(expression.line, .{.push_const = try self.declare_chunk(chunk)});
                } else {
                    const lambda_captures = lambda_construct.environment.get_captures();
                    var index = lambda_captures.len;

                    while (index != 0) {
                        index -= 1;

                        try self.chunk.write(expression.line, switch (lambda_captures[index]) {
                            .declaration_index => |declaration_index| .{.push_local = declaration_index},
                            .capture_index => |capture_index| .{.push_binding = capture_index},
                        });
                    }

                    try self.chunk.write(expression.line, .{.push_const = try self.declare_chunk(chunk)});
                    try self.chunk.write(expression.line, .{.bind = lambda_construct.environment.capture_count});
                }
            },

            .binary_op => |binary_op| {
                try self.compile_expression(environment, binary_op.lhs_operand, null);
                try self.compile_expression(environment, binary_op.rhs_operand, null);

                try self.chunk.write(expression.line, switch (binary_op.operation) {
                    .addition => .add,
                    .subtraction => .sub,
                    .multiplication => .mul,
                    .divsion => .div,
                    .greater_equals_comparison => .cge,
                    .greater_than_comparison => .cgt,
                    .equals_comparison => .eql,
                    .less_than_comparison => .clt,
                    .less_equals_comparison => .cle,
                });
            },

            .unary_op => |unary_op| {
                try self.compile_expression(environment, unary_op.operand, null);

                try self.chunk.write(expression.line, switch (unary_op.operation) {
                    .boolean_negation => .not,
                    .numeric_negation => .neg,
                });
            },

            .invoke => |invoke| {
                const argument_count = try self.compile_argument(environment, invoke.argument);

                try self.compile_expression(environment, invoke.object, null);
                try self.chunk.write(expression.line, .{.call = argument_count});
            },

            .group => |group| try self.compile_expression(environment, group, null),

            .declaration_get => |declaration_get| {
                if (get_local_index(environment, declaration_get.declaration)) |index| {
                    if (is_declaration_boxed(declaration_get.declaration)) {
                        try self.chunk.write(expression.line, .{.push_local = index});
                        try self.chunk.write(expression.line, .get_box);
                    } else {
                        try self.chunk.write(expression.line, .{.push_local = index});
                    }

                    return;
                }

                if (try get_binding_index(environment, declaration_get.declaration)) |index| {
                    try self.chunk.write(expression.line, .{.push_binding = index});

                    if (is_declaration_boxed(declaration_get.declaration)) {
                        try self.chunk.write(expression.line, .get_box);
                    }

                    return;
                }

                return self.env.raise(error.IllegalState, "local out of scope", .{});
            },

            .declaration_set => |declaration_set| {
                if (get_local_index(environment, declaration_set.declaration)) |index| {
                    if (is_declaration_boxed(declaration_set.declaration)) {
                        try self.chunk.write(expression.line, .{.push_local = index});
                        try self.compile_expression(environment, declaration_set.assign, null);
                        try self.chunk.write(expression.line, .set_box);
                    } else {
                        try self.compile_expression(environment, declaration_set.assign, null);
                        try self.chunk.write(expression.line, .{.set_local = index});
                    }

                    return;
                }

                if (try get_binding_index(environment, declaration_set.declaration)) |index| {
                    try self.compile_expression(environment, declaration_set.assign, null);
                    try self.chunk.write(expression.line, .{.push_binding = index});

                    if (is_declaration_boxed(declaration_set.declaration)) {
                        try self.chunk.write(expression.line, .set_box);
                    }

                    return;
                }

                return self.env.raise(error.IllegalState, "local out of scope", .{});
            },

            .field_get => |field_get| {
                try self.chunk.write(expression.line, .{.push_const = try self.declare_symbol(field_get.identifier)});
                try self.compile_expression(environment, field_get.object, null);
                try self.chunk.write(expression.line, .get_dynamic);
            },

            .field_set => |field_set| {
                try self.chunk.write(expression.line, .{.push_const = try self.declare_symbol(field_set.identifier)});
                try self.compile_expression(environment, field_set.assign, null);
                try self.compile_expression(environment, field_set.object, null);
                try self.chunk.write(expression.line, .set_dynamic);
            },

            .subscript_get => |subscript_get| {
                try self.compile_expression(environment, subscript_get.index, null);
                try self.compile_expression(environment, subscript_get.object, null);
                try self.chunk.write(expression.line, .get_dynamic);
            },

            .subscript_set => |subscript_set| {
                try self.compile_expression(environment, subscript_set.index, null);
                try self.compile_expression(environment, subscript_set.assign, null);
                try self.compile_expression(environment, subscript_set.object, null);
                try self.chunk.write(expression.line, .set_dynamic);
            },

            .external_get => |external_get| {
                try self.chunk.write(expression.line, .{.push_const = try self.declare_symbol(external_get.name)});
                try self.chunk.write(expression.line, .get_external);
            },
        }
    }

    pub fn compile_environment(self: *const Compiler, environment: *const tree.Environment) script.Error!void {
        if (environment.statement) |statement| {
            const last_statement = try self.compile_statement(environment, statement);

            if (last_statement.kind != .@"return") {
                try self.chunk.write(last_statement.line, .push_nil);
            }
        }
    }

    fn compile_statement(self: *const Compiler, environment: *const tree.Environment, initial_statement: *const tree.Stmt) script.Error!*const tree.Stmt {
        var current_statement = initial_statement;

        while (true) {
            switch (current_statement.kind) {
                .@"return" => |@"return"| {
                    if (@"return".returned_expression) |expression| {
                        try self.compile_expression(environment, expression, null);
                    } else {
                        try self.chunk.write(current_statement.line, .push_nil);
                    }

                    // TODO: Omit ret calls at ends of chunk.
                    try self.chunk.write(current_statement.line, .ret);
                },

                .@"while" => |@"while"| {
                    try self.compile_expression(environment, @"while".loop_expression, null);
                    try self.chunk.write(current_statement.line, .{.jf = 0});

                    const origin_index = @as(u16, @intCast(self.chunk.opcodes.values.len - 1));

                    _ = try self.compile_statement(environment, @"while".loop);
                    self.chunk.opcodes.values[origin_index].jf = @intCast(self.chunk.opcodes.values.len - 1);

                    try self.compile_expression(environment, @"while".loop_expression, null);
                    try self.chunk.write(current_statement.line, .{.jt = origin_index});
                },

                .@"if" => |@"if"| {
                    try self.compile_expression(environment, @"if".then_expression, null);
                    try self.chunk.write(current_statement.line, .{.jf = 0});

                    const origin_index = @as(u16, @intCast(self.chunk.opcodes.values.len - 1));

                    _ = try self.compile_statement(environment, @"if".@"then");
                    self.chunk.opcodes.values[origin_index].jf = @intCast(self.chunk.opcodes.values.len - 1);

                    if (@"if".@"else") |@"else"| {
                        _ = try self.compile_statement(environment, @"else");
                    }
                },

                .declare => |declare| {
                    try self.compile_expression(environment, declare.initial_expression, declare.declaration.identifier);

                    if (is_declaration_boxed(declare.declaration)) {
                        try self.chunk.write(current_statement.line, .push_boxed);
                    }
                },

                .top_expression => |top_expression| {
                    try self.compile_expression(environment, top_expression, null);

                    if (top_expression.kind == .invoke) {
                        try self.chunk.write(current_statement.line, .pop);
                    }
                },
            }

            current_statement = current_statement.next orelse return current_statement;
        }
    }

    const constants_max = @as(usize, std.math.maxInt(u16));

    fn declare_chunk(self: *const Compiler, chunk: Self) script.Error!u16 {
        if (self.chunk.constants.values.len == std.math.maxInt(u16)) {
            return self.env.raise(error.BadSyntax, "chunks cannot contain more than {max} constants", .{
                .max = @as(usize, std.math.maxInt(u16)),
            });
        }

        const constant = (try self.env.new_dynamic(chunk)).pop().?;

        errdefer self.env.release(constant);

        try self.chunk.constants.push(constant);

        return @intCast(self.chunk.constants.values.len - 1);
    }

    fn declare_fixed(self: *const Compiler, fixed: script.Fixed) script.Error!u16 {
        if (self.chunk.constants.values.len == constants_max) {
            return self.env.raise(error.BadSyntax, "chunks cannot contain more than {max} constants", .{
                .max = constants_max,
            });
        }

        const constant = (try self.env.new_fixed(fixed)).pop().?;

        errdefer self.env.release(constant);

        try self.chunk.constants.push(constant);

        return @intCast(self.chunk.constants.values.len - 1);
    }

    fn declare_float(self: *const Compiler, float: script.Float) script.Error!u16 {
        if (self.chunk.constants.values.len == constants_max) {
            return self.env.raise(error.BadSyntax, "chunks cannot contain more than {max} constants", .{
                .max = constants_max,
            });
        }

        const constant = (try self.env.new_float(float)).pop().?;

        errdefer self.env.release(constant);

        try self.chunk.constants.push(constant);

        return @intCast(self.chunk.constants.values.len - 1);
    }

    fn declare_string(self: *const Compiler, string: []const u8) script.Error!u16 {
        if (self.chunk.constants.values.len == constants_max) {
            return self.env.raise(error.BadSyntax, "chunks cannot contain more than {max} constants", .{
                .max = constants_max,
            });
        }

        const constant = (try self.env.new_string(string)).pop().?;

        errdefer self.env.release(constant);

        try self.chunk.constants.push(constant);

        return @intCast(self.chunk.constants.values.len - 1);
    }

    fn declare_vector2(self: *const Compiler, vector: script.Vector2) script.Error!u16 {
        if (self.chunk.constants.values.len == constants_max) {
            return self.env.raise(error.BadSyntax, "chunks cannot contain more than {max} constants", .{
                .max = constants_max,
            });
        }

        const constant = (try self.env.new_vector2(vector)).pop().?;

        errdefer self.env.release(constant);

        try self.chunk.constants.push(constant);

        return @intCast(self.chunk.constants.values.len - 1);
    }

    fn declare_vector3(self: *const Compiler, vector: script.Vector3) script.Error!u16 {
        if (self.chunk.constants.values.len == constants_max) {
            return self.env.raise(error.BadSyntax, "chunks cannot contain more than {max} constants", .{
                .max = constants_max,
            });
        }

        const constant = (try self.env.new_vector3(vector)).pop().?;

        errdefer self.env.release(constant);

        try self.chunk.constants.push(constant);

        return @intCast(self.chunk.constants.values.len - 1);
    }

    fn declare_symbol(self: *const Compiler, symbol: []const u8) script.Error!u16 {
        if (self.chunk.constants.values.len == constants_max) {
            return self.env.raise(error.BadSyntax, "chunks cannot contain more than {max} constants", .{
                .max = constants_max,
            });
        }

        const constant = (try self.env.new_symbol(symbol)).pop().?;

        errdefer self.env.release(constant);

        try self.chunk.constants.push(constant);

        return @intCast(self.chunk.constants.values.len - 1);
    }

    fn get_binding_index(environment: *const tree.Environment, declaration: *const tree.Declaration) script.Error!?u8 {
        var binding_index = @as(u8, 0);

        while (binding_index < environment.capture_count) : (binding_index += 1) {
            var capture = &environment.captures[binding_index];
            var target_environment = environment.enclosing orelse return null;

            while (capture.* == .capture_index) {
                capture = &target_environment.captures[capture.capture_index];
                target_environment = target_environment.enclosing orelse return null;
            }

            std.debug.assert(capture.* == .declaration_index);

            if (&target_environment.declarations[capture.declaration_index] == declaration) {
                return binding_index;
            }
        }

        return null;
    }

    fn get_local_index(environment: *const tree.Environment, declaration: *const tree.Declaration) ?u8 {
        var remaining = environment.declaration_count;

        while (remaining != 0) {
            remaining -= 1;

            if (&environment.declarations[remaining] == declaration) {
                return remaining;
            }
        }

        return null;
    }

    fn is_declaration_boxed(declaration: *const tree.Declaration) bool {
        return declaration.is.captured and !declaration.is.readonly;
    }
};

pub const Opcode = union (enum) {
    ret,
    pop,
    push_nil,
    push_true,
    push_false,
    push_const: u16,
    push_local: u8,
    push_top,
    push_vector2,
    push_vector3,
    push_table: u16,
    push_binding: u8,
    push_concat: u8,
    push_boxed,
    set_local: u8,
    get_dynamic,
    set_dynamic,
    get_external,
    get_box,
    set_box,
    call: u8,
    bind: u8,

    not,
    neg,

    add,
    sub,
    mul,
    div,

    eql,
    cgt,
    clt,
    cge,
    cle,

    jt: u16,
    jf: u16,
};

pub const External = struct {[]const u8, *script.Object};

const Self = @This();

pub fn deinit(self: *Self, env: *script.Runtime) void {
    while (self.constants.pop()) |constant| {
        env.release(constant.*);
    }

    self.constants.deinit();
    self.opcodes.deinit();
    self.lines.deinit();
    env.release(self.name);
    env.release(self.externals);

    if (self.bindings.len != 0) {
        for (self.bindings) |binding| {
            if (binding) |value| {
                env.release(value);
            }
        }

        env.allocator.free(self.bindings);
    }

    self.bindings = &.{};
}

pub fn dump(chunk: Self, env: *script.Runtime) script.Error!*script.Object {
    var opcode_cursor = @as(u32, 0);
    var buffer = coral.list.ByteStack.init(env.allocator);

    defer buffer.deinit();

    const writer = coral.list.stack_as_writer(&buffer);

    _ = coral.utf8.print_string(writer, "\n");

    while (opcode_cursor < chunk.opcodes.values.len) : (opcode_cursor += 1) {
        _ = coral.utf8.print_formatted(writer, "[{instruction}]: ", .{.instruction = opcode_cursor});

        _ = switch (chunk.opcodes.values[opcode_cursor]) {
            .ret => coral.utf8.print_string(writer, "ret\n"),
            .pop => coral.utf8.print_string(writer, "pop\n"),
            .push_nil => coral.utf8.print_string(writer, "push nil\n"),
            .push_true => coral.utf8.print_string(writer, "push true\n"),
            .push_false => coral.utf8.print_string(writer, "push false\n"),

            .push_const => |push_const| print: {
                const string_ref = (try (try env.push(try chunk.get_constant(env, push_const))).to_string()).pop().?;

                defer env.release(string_ref);

                const string = string_ref.is_string();

                break: print coral.utf8.print_formatted(writer, "push const ({value})\n", .{.value = string.?});
            },

            .push_local => |push_local| coral.utf8.print_formatted(writer, "push local ({local})\n", .{
                .local = push_local,
            }),

            .push_top => coral.utf8.print_string(writer, "push top\n"),

            .push_table => |push_table| coral.utf8.print_formatted(writer, "push table ({count})\n", .{
                .count = push_table,
            }),

            .push_boxed => coral.utf8.print_string(writer, "push boxed\n"),

            .push_binding => |push_binding| coral.utf8.print_formatted(writer, "push binding ({binding})\n", .{
                .binding = push_binding,
            }),

            .push_concat => |push_concat| coral.utf8.print_formatted(writer, "push concat ({count})\n", .{
                .count = push_concat,
            }),

            .push_builtin => |push_builtin| coral.utf8.print_formatted(writer, "push builtin ({builtin})\n", .{
                .builtin = switch (push_builtin) {
                    .import => "import",
                    .print => "print",
                    .vec2 => "vec2",
                    .vec3 => "vec3",
                },
            }),

            .bind => |bind| coral.utf8.print_formatted(writer, "bind ({count})\n", .{
                .count = bind,
            }),

            .set_local => |local_set| coral.utf8.print_formatted(writer, "set local ({local})\n", .{
                .local = local_set,
            }),

            .get_box => coral.utf8.print_string(writer, "get box\n"),
            .set_box => coral.utf8.print_string(writer, "set box\n"),
            .get_dynamic => coral.utf8.print_string(writer, "get dynamic\n"),
            .set_dynamic => coral.utf8.print_string(writer, "set dynamic\n"),
            .call => |call| coral.utf8.print_formatted(writer, "call ({count})\n", .{.count = call}),
            .not => coral.utf8.print_string(writer, "not\n"),
            .neg => coral.utf8.print_string(writer, "neg\n"),
            .add => coral.utf8.print_string(writer, "add\n"),
            .sub => coral.utf8.print_string(writer, "sub\n"),
            .mul => coral.utf8.print_string(writer, "mul\n"),
            .div => coral.utf8.print_string(writer, "div\n"),
            .eql => coral.utf8.print_string(writer, "eql\n"),
            .cgt => coral.utf8.print_string(writer, "cgt\n"),
            .clt => coral.utf8.print_string(writer, "clt\n"),
            .cge => coral.utf8.print_string(writer, "cge\n"),
            .cle => coral.utf8.print_string(writer, "cle\n"),
            .jf => |jf| coral.utf8.print_formatted(writer, "jf ({instruction})\n", .{.instruction = jf}),
            .jt => |jt| coral.utf8.print_formatted(writer, "jt ({instruction})\n", .{.instruction = jt}),
        };
    }

    return (try env.new_string(buffer.values)).pop().?;
}

pub fn execute(self: *Self, env: *script.Runtime) script.Error!?*script.Object {
    self.cursor = 0;

    while (self.cursor < self.opcodes.values.len) : (self.cursor += 1) {
        switch (self.opcodes.values[self.cursor]) {
            .ret => break,
            .pop => env.discard(),
            .push_nil => _ = try env.push(null),
            .push_true => _ = try env.new_boolean(true),
            .push_false => _ = try env.new_boolean(false),
            .push_const => |push_const| _ = try env.push(try self.get_constant(env, push_const)),
            .push_local => |push_local| _ = try env.local_get(push_local),
            .push_top => _ = try env.local_top(),

            .push_vector2 => {
                const y = try env.expect_float(try env.expect_object(env.pop()));
                const x = try env.expect_float(try env.expect_object(env.pop()));

                _ = try env.new_vector2(.{@floatCast(x), @floatCast(y)});
            },

            .push_vector3 => {
                const z = try env.expect_float(try env.expect_object(env.pop()));
                const y = try env.expect_float(try env.expect_object(env.pop()));
                const x = try env.expect_float(try env.expect_object(env.pop()));

                _ = try env.new_vector3(.{@floatCast(x), @floatCast(y), @floatCast(z)});
            },

            .push_table => |push_table| {
                const table = (try env.new_table()).pop().?;

                defer env.release(table);

                var popped = @as(usize, 0);

                while (popped < push_table) : (popped += 1) {
                    if (env.pop()) |object| {
                        defer env.release(object);

                        try env.index_set(table, object);
                    } else {
                        env.release(try env.expect_object(env.pop()));
                    }
                }

                _ = try env.push(table);
            },

            .push_boxed => {
                const value = env.pop();

                defer {
                    if (value) |object| {
                        env.release(object);
                    }
                }

                _ = try env.new_boxed(value);
            },

            .push_binding => |push_binding| _ = try env.push(try self.get_binding(env, push_binding)),
            .push_concat => |push_concat| _ = try env.concat(push_concat),

            .bind => |bind| {
                const callable = try env.expect_object(env.pop());

                defer env.release(callable);

                const chunk = try env.expect_dynamic(callable, Self);

                if (chunk.bindings.len != 0) {
                    return env.raise(error.IllegalState, "cannot bind values to an already-bound chunk", .{});
                }

                chunk.bindings = try env.allocator.alloc(?*script.Object, bind);

                errdefer env.allocator.free(chunk.bindings);

                for (0 .. bind) |index| {
                    const value = env.pop();

                    errdefer {
                        if (value) |object| {
                            env.release(object);
                        }
                    }

                    chunk.bindings[index] = value;
                }

                _ = try env.push(callable);
            },

            .set_local => |local_set| _ = try env.local_set(local_set, env.pop()),
            .get_box => _ = try env.boxed_get(),

            .set_box => {
                const value = env.pop();

                defer {
                    if (value) |object| {
                        env.release(object);
                    }
                }

                try env.boxed_set(value);
            },

            .get_dynamic => {
                const indexable = try env.expect_object(env.pop());

                defer env.release(indexable);

                _ = try env.index_get(indexable);
            },

            .set_dynamic => {
                const indexable = try env.expect_object(env.pop());

                defer env.release(indexable);

                const value = env.pop();

                defer {
                    if (value) |object| {
                        env.release(object);
                    }
                }

                try env.index_set(indexable, value);
            },

            .get_external => _ = try env.index_get(self.externals),
            .call => |call| _ = try env.call(call),

            .not => {
                const object = try env.expect_object(env.pop());

                defer env.release(object);

                _ = try env.new_boolean(object.is_false());
            },

            .neg => _ = try env.neg(),

            .add => {
                const addable = try env.expect_object(env.pop());

                defer env.release(addable);

                _ = switch (try env.expect_numeric(addable)) {
                    .fixed => |fixed| try env.fixed_add(fixed),
                    .float => |float| try env.float_add(float),
                    .vector2 => |vector2| try env.vector2_add(vector2),
                    .vector3 => |vector3| try env.vector3_add(vector3),
                };
            },

            .sub => {
                const subtractable = try env.expect_object(env.pop());

                defer env.release(subtractable);

                _ = switch (try env.expect_numeric(subtractable)) {
                    .fixed => |fixed| try env.fixed_subtract(fixed),
                    .float => |float| try env.float_subtract(float),
                    .vector2 => |vector2| try env.vector2_subtract(vector2),
                    .vector3 => |vector3| try env.vector3_subtract(vector3),
                };
            },

            .mul => {
                const multiplicable = try env.expect_object(env.pop());

                defer env.release(multiplicable);

                _ = switch (try env.expect_numeric(multiplicable)) {
                    .fixed => |fixed| try env.fixed_multiply(fixed),
                    .float => |float| try env.float_multiply(float),
                    .vector2 => |vector2| try env.vector2_multiply(vector2),
                    .vector3 => |vector3| try env.vector3_multiply(vector3),
                };
            },

            .div => {
                const divisible = try env.expect_object(env.pop());

                defer env.release(divisible);

                _ = switch (try env.expect_numeric(divisible)) {
                    .fixed => |fixed| try env.fixed_divide(fixed),
                    .float => |float| try env.float_divide(float),
                    .vector2 => |vector2| try env.vector2_divide(vector2),
                    .vector3 => |vector3| try env.vector3_divide(vector3),
                };
            },

            .eql => {
                if (env.pop()) |equatable| {
                    defer env.release(equatable);

                    _ = try env.equals_object(equatable);
                } else {
                    _ = try env.equals_nil();
                }
            },

            .cgt => {
                const comparable = try env.expect_object(env.pop());

                defer env.release(comparable);

                _ = try env.compare_greater(comparable);
            },

            .clt => {
                const comparable = try env.expect_object(env.pop());

                defer env.release(comparable);

                _ = try env.compare_less(comparable);
            },

            .cge => {
                const comparable = try env.expect_object(env.pop());

                defer env.release(comparable);

                _ = try env.compare_greater_equals(comparable);
            },

            .cle => {
                const comparable = try env.expect_object(env.pop());

                defer env.release(comparable);

                _ = try env.compare_less_equals(comparable);
            },

            .jf => |jf| {
                if (env.pop()) |condition| {
                    defer env.release(condition);

                    if (condition.is_false()) {
                        self.cursor = jf;
                    }
                } else {
                    self.cursor = jf;
                }
            },

            .jt => |jt| {
                if (env.pop()) |condition| {
                    defer env.release(condition);

                    if (condition.is_true()) {
                        self.cursor = jt;
                    }
                }
            },
        }
    }

    return env.pop();
}

fn get_binding(self: *Self, env: *script.Runtime, index: usize) script.Error!?*script.Object {
    if (index >= self.bindings.len) {
        return env.raise(error.IllegalState, "invalid binding", .{});
    }

    return self.bindings[index];
}

fn get_constant(self: *const Self, env: *script.Runtime, index: usize) script.Error!*script.Object {
    if (index >= self.constants.values.len) {
        return env.raise(error.IllegalState, "invalid constant", .{});
    }

    return self.constants.values[index];
}

pub fn init(env: *script.Runtime, name: []const u8, environment: *const tree.Environment, externals: []const External) script.Error!Self {
    const name_symbol = (try env.new_symbol(name)).pop().?;

    errdefer env.release(name_symbol);

    const externals_table = (try env.new_table()).pop().?;

    errdefer env.release(externals_table);

    for (0 .. externals.len) |i| {
        const external_name, const external_object = externals[i];

        try (try env.new_symbol(external_name)).index_set(externals_table, external_object);
    }

    var chunk = Self{
        .externals = externals_table,
        .name = name_symbol,
        .opcodes = .{.allocator = env.allocator},
        .constants = .{.allocator = env.allocator},
        .lines = .{.allocator = env.allocator},
        .bindings = &.{},
        .arity = environment.argument_count,
        .cursor = 0,
    };

    var compiler = Compiler{
        .chunk = &chunk,
        .env = env,
    };

    try compiler.compile_environment(environment);

    return chunk;
}

pub const typeinfo = script.Typeinfo{
    .name = "lambda",
    .destruct = typeinfo_destruct,
    .call = typeinfo_call,
    .to_string = typeinfo_to_string,
};

fn typeinfo_call(context: script.Typeinfo.CallContext) script.Error!?*script.Object {
    return @as(*Self, @ptrCast(@alignCast(context.userdata))).execute(context.env);
}

fn typeinfo_destruct(context: script.Typeinfo.DestructContext) void {
    @as(*Self, @ptrCast(@alignCast(context.userdata))).deinit(context.env);
}

fn typeinfo_to_string(context: script.Typeinfo.ToStringContext) script.Error!*script.Object {
    return (try (try context.env.push(@as(*Self, @ptrCast(@alignCast(context.userdata))).name)).to_string()).pop().?;
}

pub fn write(self: *Self, line: tokens.Line, opcode: Opcode) std.mem.Allocator.Error!void {
    try self.opcodes.push(opcode);
    try self.lines.push(line);
}
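Editorial note, not part of either commit: a minimal sketch of how the chunk type above might be driven end to end, assuming this file compiles as a file-level struct imported as `Chunk`, that `env` is an already-initialized `script.Runtime`, and that `tokens` and `tree` are the modules shown later in this diff.

    // Hypothetical usage sketch; the names `Chunk` and `env` are assumptions.
    var root = try tree.Root.init(env.allocator);

    defer root.deinit();

    var stream = tokens.Stream{.source = "return 1 + 2\n"};

    try root.parse(&stream);

    // Compile the parsed root environment into a chunk, then run it on the runtime.
    var chunk = try Chunk.init(env, "example", &root.environment, &.{});

    defer chunk.deinit(env);

    _ = try chunk.execute(env);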
@@ -0,0 +1,135 @@
const coral = @import("coral");

const script = @import("../script.zig");

associative: coral.map.Table(*script.Object, *script.Object, struct {
    pub const hash = script.Object.hash;

    pub const equals = script.Object.equals;
}),

contiguous: coral.Stack(?*script.Object),

const Self = @This();

pub fn deinit(self: *Self, env: *script.Runtime) void {
    {
        var entries = self.associative.entries();

        while (entries.next()) |entry| {
            env.release(entry.key);
            env.release(entry.value);
        }
    }

    self.associative.deinit();

    while (self.contiguous.pop()) |value| {
        if (value.*) |ref| {
            env.release(ref);
        }
    }

    self.contiguous.deinit();
}

pub fn init(env: *script.Runtime) Self {
    return .{
        .associative = .{
            .allocator = env.allocator,
            .traits = .{},
        },

        .contiguous = .{.allocator = env.allocator},
    };
}

pub const typeinfo = script.Typeinfo{
    .name = "table",
    .destruct = typeinfo_destruct,
    .get = typeinfo_get,
    .set = typeinfo_set,
    .count = typeinfo_count,
};

fn typeinfo_count(context: script.Typeinfo.CountContext) script.Error!script.Fixed {
    const table = @as(*Self, @ptrCast(@alignCast(context.userdata)));

    return @intCast(table.associative.len + table.contiguous.values.len);
}

fn typeinfo_destruct(context: script.Typeinfo.DestructContext) void {
    @as(*Self, @ptrCast(@alignCast(context.userdata))).deinit(context.env);
}

fn typeinfo_get(context: script.Typeinfo.GetContext) script.Error!?*script.Object {
    const table = @as(*Self, @ptrCast(@alignCast(context.userdata)));
    const index = (try context.push_index()).pop().?;

    defer context.env.release(index);

    if (index.is_fixed()) |fixed| {
        if (fixed < 0) {
            // TODO: Negative indexing.
            unreachable;
        }

        if (fixed < table.contiguous.values.len) {
            return table.contiguous.values[@intCast(fixed)];
        }
    }

    if (table.associative.lookup(index)) |value| {
        return value;
    }

    return null;
}

fn typeinfo_set(context: script.Typeinfo.SetContext) script.Error!void {
    const table = @as(*Self, @ptrCast(@alignCast(context.userdata)));
    const index = (try context.push_index()).pop().?;

    errdefer context.env.release(index);

    if (index.is_fixed()) |fixed| {
        if (fixed < 0) {
            // TODO: Negative indexing.
            unreachable;
        }

        if (fixed < table.contiguous.values.len) {
            const maybe_replacing = &table.contiguous.values[@intCast(fixed)];

            if (maybe_replacing.*) |replacing| {
                context.env.release(replacing);
            }

            if ((try context.push_value()).pop()) |value| {
                errdefer context.env.release(value);

                maybe_replacing.* = value;
            } else {
                maybe_replacing.* = null;
            }

            return;
        }
    }

    const value = (try context.push_value()).pop() orelse {
        if (table.associative.remove(index)) |removed| {
            context.env.release(removed.key);
            context.env.release(removed.value);
        }

        return;
    };

    errdefer context.env.release(value);

    if (try table.associative.replace(index, value)) |replaced| {
        context.env.release(replaced.key);
        context.env.release(replaced.value);
    }
}
@@ -0,0 +1,535 @@
const coral = @import("coral");

const std = @import("std");

pub const Line = struct {
    number: u32,
};

pub const Token = union(enum) {
    end,
    unknown: u8,
    newline,
    identifier: []const u8,
    builtin: []const u8,

    symbol_plus,
    symbol_minus,
    symbol_asterisk,
    symbol_forward_slash,
    symbol_paren_left,
    symbol_paren_right,
    symbol_bang,
    symbol_comma,
    symbol_at,
    symbol_brace_left,
    symbol_brace_right,
    symbol_bracket_left,
    symbol_bracket_right,
    symbol_period,
    symbol_colon,
    symbol_less_than,
    symbol_less_equals,
    symbol_greater_than,
    symbol_greater_equals,
    symbol_equals,
    symbol_double_equals,

    number: []const u8,
    string: []const u8,
    template_string: []const u8,

    keyword_nil,
    keyword_false,
    keyword_true,
    keyword_return,
    keyword_self,
    keyword_const,
    keyword_if,
    keyword_do,
    keyword_end,
    keyword_while,
    keyword_else,
    keyword_elif,
    keyword_var,
    keyword_vec2,
    keyword_vec3,
    keyword_let,
    keyword_lambda,

    pub fn text(self: Token) []const u8 {
        return switch (self) {
            .end => "end",
            .unknown => |unknown| @as([*]const u8, @ptrCast(&unknown))[0 .. 1],
            .newline => "newline",

            .identifier => |identifier| identifier,
            .builtin => |identifier| identifier,

            .symbol_plus => "+",
            .symbol_minus => "-",
            .symbol_asterisk => "*",
            .symbol_forward_slash => "/",
            .symbol_paren_left => "(",
            .symbol_paren_right => ")",
            .symbol_bang => "!",
            .symbol_comma => ",",
            .symbol_at => "@",
            .symbol_brace_left => "{",
            .symbol_brace_right => "}",
            .symbol_bracket_left => "[",
            .symbol_bracket_right => "]",
            .symbol_period => ".",
            .symbol_colon => ":",
            .symbol_less_than => "<",
            .symbol_less_equals => "<=",
            .symbol_greater_than => ">",
            .symbol_greater_equals => ">=",
            .symbol_equals => "=",
            .symbol_double_equals => "==",

            .number => |literal| literal,
            .string => |literal| literal,
            .template_string => |literal| literal,

            .keyword_const => "const",
            .keyword_nil => "nil",
            .keyword_false => "false",
            .keyword_true => "true",
            .keyword_return => "return",
            .keyword_self => "self",
            .keyword_if => "if",
            .keyword_do => "do",
            .keyword_end => "end",
            .keyword_while => "while",
            .keyword_elif => "elif",
            .keyword_else => "else",
            .keyword_var => "var",
            .keyword_vec2 => "vec2",
            .keyword_vec3 => "vec3",
            .keyword_let => "let",
            .keyword_lambda => "lambda",
        };
    }
};

pub const Stream = struct {
    source: []const u8,
    line: Line = .{.number = 1},
    token: Token = .newline,

    pub fn skip_newlines(self: *Stream) void {
        self.step();

        while (self.token == .newline) {
            self.step();
        }
    }

    pub fn step(self: *Stream) void {
        var cursor = @as(usize, 0);

        defer self.source = self.source[cursor ..];

        while (cursor < self.source.len) {
            switch (self.source[cursor]) {
                '#' => {
                    cursor += 1;

                    while (cursor < self.source.len and self.source[cursor] != '\n') {
                        cursor += 1;
                    }
                },

                ' ', '\t' => cursor += 1,

                '\n' => {
                    cursor += 1;
                    self.token = .newline;
                    self.line.number += 1;

                    return;
                },

                '0' ... '9' => {
                    const begin = cursor;

                    cursor += 1;

                    while (cursor < self.source.len) switch (self.source[cursor]) {
                        '0' ... '9' => cursor += 1,

                        '.' => {
                            cursor += 1;

                            while (cursor < self.source.len) switch (self.source[cursor]) {
                                '0' ... '9' => cursor += 1,
                                else => break,
                            };

                            self.token = .{.number = self.source[begin .. cursor]};

                            return;
                        },

                        else => break,
                    };

                    self.token = .{.number = self.source[begin .. cursor]};

                    return;
                },

                'A' ... 'Z', 'a' ... 'z', '_' => {
                    const begin = cursor;

                    cursor += 1;

                    while (cursor < self.source.len) switch (self.source[cursor]) {
                        '0'...'9', 'A'...'Z', 'a'...'z', '_' => cursor += 1,
                        else => break,
                    };

                    const identifier = self.source[begin .. cursor];

                    std.debug.assert(identifier.len != 0);

                    switch (identifier[0]) {
                        'c' => {
                            if (coral.are_equal(identifier[1 ..], "onst")) {
                                self.token = .keyword_const;

                                return;
                            }
                        },

                        'd' => {
                            if (coral.are_equal(identifier[1 ..], "o")) {
                                self.token = .keyword_do;

                                return;
                            }
                        },

                        'e' => {
                            if (coral.are_equal(identifier[1 ..], "lse")) {
                                self.token = .keyword_else;

                                return;
                            }

                            if (coral.are_equal(identifier[1 ..], "lif")) {
                                self.token = .keyword_elif;

                                return;
                            }

                            if (coral.are_equal(identifier[1 ..], "nd")) {
                                self.token = .keyword_end;

                                return;
                            }
                        },

                        'f' => {
                            if (coral.are_equal(identifier[1 ..], "alse")) {
                                self.token = .keyword_false;

                                return;
                            }
                        },

                        'i' => {
                            if (coral.are_equal(identifier[1 ..], "f")) {
                                self.token = .keyword_if;

                                return;
                            }
                        },

                        'l' => {
                            if (coral.are_equal(identifier[1 ..], "ambda")) {
                                self.token = .keyword_lambda;

                                return;
                            }

                            if (coral.are_equal(identifier[1 ..], "et")) {
                                self.token = .keyword_let;

                                return;
                            }
                        },

                        'n' => {
                            if (coral.are_equal(identifier[1 ..], "il")) {
                                self.token = .keyword_nil;

                                return;
                            }
                        },

                        'r' => {
                            if (coral.are_equal(identifier[1 ..], "eturn")) {
                                self.token = .keyword_return;

                                return;
                            }
                        },

                        's' => {
                            if (coral.are_equal(identifier[1 ..], "elf")) {
                                self.token = .keyword_self;

                                return;
                            }
                        },

                        't' => {
                            if (coral.are_equal(identifier[1 ..], "rue")) {
                                self.token = .keyword_true;

                                return;
                            }
                        },

                        'v' => {
                            const rest = identifier[1 ..];

                            if (coral.are_equal(rest, "ar")) {
                                self.token = .keyword_var;

                                return;
                            }

                            if (coral.are_equal(rest, "vec2")) {
                                self.token = .keyword_vec2;

                                return;
                            }

                            if (coral.are_equal(rest, "vec3")) {
                                self.token = .keyword_vec3;

                                return;
                            }
                        },

                        'w' => {
                            if (coral.are_equal(identifier[1 ..], "hile")) {
                                self.token = .keyword_while;

                                return;
                            }
                        },

                        else => {},
                    }

                    self.token = .{.identifier = identifier};

                    return;
                },

                '`' => {
                    cursor += 1;

                    const begin = cursor;

                    while (cursor < self.source.len) switch (self.source[cursor]) {
                        '`' => break,
                        else => cursor += 1,
                    };

                    self.token = .{.template_string = self.source[begin .. cursor]};
                    cursor += 1;

                    return;
                },

                '"' => {
                    cursor += 1;

                    const begin = cursor;

                    while (cursor < self.source.len) switch (self.source[cursor]) {
                        '"' => break,
                        else => cursor += 1,
                    };

                    self.token = .{.string = self.source[begin .. cursor]};
                    cursor += 1;

                    return;
                },

                '{' => {
                    self.token = .symbol_brace_left;
                    cursor += 1;

                    return;
                },

                '}' => {
                    self.token = .symbol_brace_right;
                    cursor += 1;

                    return;
                },

                '[' => {
                    self.token = .symbol_bracket_left;
                    cursor += 1;

                    return;
                },

                ']' => {
                    self.token = .symbol_bracket_right;
                    cursor += 1;

                    return;
                },

                ',' => {
                    self.token = .symbol_comma;
                    cursor += 1;

                    return;
                },

                '!' => {
                    self.token = .symbol_bang;
                    cursor += 1;

                    return;
                },

                ')' => {
                    self.token = .symbol_paren_right;
                    cursor += 1;

                    return;
                },

                '(' => {
                    self.token = .symbol_paren_left;
                    cursor += 1;

                    return;
                },

                '/' => {
                    self.token = .symbol_forward_slash;
                    cursor += 1;

                    return;
                },

                '*' => {
                    self.token = .symbol_asterisk;
                    cursor += 1;

                    return;
                },

                '-' => {
                    self.token = .symbol_minus;
                    cursor += 1;

                    return;
                },

                '+' => {
                    self.token = .symbol_plus;
                    cursor += 1;

                    return;
                },

                ':' => {
                    self.token = .symbol_colon;
                    cursor += 1;

                    return;
                },

                '=' => {
                    cursor += 1;

                    if (cursor < self.source.len) {
                        switch (self.source[cursor]) {
                            '=' => {
                                cursor += 1;
                                self.token = .symbol_double_equals;

                                return;
                            },

                            else => {},
                        }
                    }

                    self.token = .symbol_equals;

                    return;
                },

                '<' => {
                    cursor += 1;

                    if (cursor < self.source.len and (self.source[cursor] == '=')) {
                        cursor += 1;
                        self.token = .symbol_less_equals;

                        return;
                    }

                    self.token = .symbol_less_than;

                    return;
                },

                '>' => {
                    cursor += 1;

                    if (cursor < self.source.len and (self.source[cursor] == '=')) {
                        cursor += 1;
                        self.token = .symbol_greater_equals;

                        return;
                    }

                    self.token = .symbol_greater_than;

                    return;
                },

                '.' => {
                    self.token = .symbol_period;
                    cursor += 1;

                    return;
                },

                '@' => {
                    self.token = .symbol_at;
                    cursor += 1;

                    return;
                },

                else => {
                    self.token = .{.unknown = self.source[cursor]};
                    cursor += 1;

                    return;
                },
            }
        }

        self.token = .end;

        return;
    }
};
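Editorial usage sketch for the tokenizer above (not part of either commit; assumes this file is imported as `tokens` and that `std` is in scope):

    // Step a Stream over a small source string and print each token's text.
    var stream = tokens.Stream{.source = "let x = 1 + 2\n"};

    stream.skip_newlines();

    while (stream.token != .end) : (stream.step()) {
        std.debug.print("{s}\n", .{stream.token.text()});
    }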
@ -0,0 +1,268 @@
|
|||
pub const Expr = @import("./tree/Expr.zig");
|
||||
|
||||
pub const Stmt = @import("./tree/Stmt.zig");
|
||||
|
||||
const coral = @import("coral");
|
||||
|
||||
const std = @import("std");
|
||||
|
||||
const script = @import("../script.zig");
|
||||
|
||||
const tokens = @import("./tokens.zig");
|
||||
|
||||
pub const Declaration = struct {
|
||||
identifier: []const coral.Byte,
|
||||
|
||||
is: packed struct {
|
||||
readonly: bool = false,
|
||||
captured: bool = false,
|
||||
} = .{},
|
||||
};
|
||||
|
||||
pub const Environment = struct {
|
||||
captures: [capture_max]Capture = [_]Capture{.{.declaration_index = 0}} ** capture_max,
|
||||
capture_count: u8 = 0,
|
||||
declarations: [declaration_max]Declaration = [_]Declaration{.{.identifier = ""}} ** declaration_max,
|
||||
declaration_count: u8 = 0,
|
||||
argument_count: u8 = 0,
|
||||
statement: ?*const Stmt = null,
|
||||
enclosing: ?*Environment = null,
|
||||
|
||||
pub const Capture = union (enum) {
|
||||
declaration_index: u8,
|
||||
capture_index: u8,
|
||||
};
|
||||
|
||||
pub const DeclareError = std.mem.Allocator.Error || error {
|
||||
DeclarationExists,
|
||||
};
|
||||
|
||||
const capture_max = std.math.maxInt(u8);
|
||||
|
||||
const declaration_max = std.math.maxInt(u8);
|
||||
|
||||
pub fn create_enclosed(self: *Environment, root: *Root) std.mem.Allocator.Error!*Environment {
|
||||
const environment = try root.arena.allocator().create(Environment);
|
||||
|
||||
environment.* = .{.enclosing = self};
|
||||
|
||||
return environment;
|
||||
}
|
||||
|
||||
fn declare(self: *Environment, declaration: Declaration) DeclareError!*const Declaration {
|
||||
if (self.declaration_count == self.declarations.len) {
|
||||
return error.OutOfMemory;
|
||||
}
|
||||
|
||||
{
|
||||
var environment = self;
|
||||
|
||||
while (true) {
|
||||
var remaining_count = environment.declaration_count;
|
||||
|
||||
while (remaining_count != 0) {
|
||||
remaining_count -= 1;
|
||||
|
||||
if (coral.are_equal(environment.declarations[remaining_count].identifier, declaration.identifier)) {
|
||||
return error.DeclarationExists;
|
||||
}
|
||||
}
|
||||
|
||||
environment = environment.enclosing orelse break;
|
||||
}
|
||||
}
|
||||
|
||||
const declaration_slot = &self.declarations[self.declaration_count];
|
||||
|
||||
declaration_slot.* = declaration;
|
||||
self.declaration_count += 1;
|
||||
|
||||
return declaration_slot;
|
||||
}
|
||||
|
||||
pub fn declare_argument(self: *Environment, identifier: []const u8) DeclareError!*const Declaration {
|
||||
std.debug.assert(self.declaration_count <= self.argument_count);
|
||||
|
||||
defer self.argument_count += 1;
|
||||
|
||||
return self.declare(.{
|
||||
.identifier = identifier,
|
||||
.is = .{.readonly = true},
|
||||
});
|
||||
}
|
||||
|
||||
pub fn declare_constant(self: *Environment, identifier: []const u8) DeclareError!*const Declaration {
|
||||
return self.declare(.{
|
||||
.identifier = identifier,
|
||||
.is = .{.readonly = true},
|
||||
});
|
||||
}
|
||||
|
||||
pub fn declare_variable(self: *Environment, identifier: []const u8) DeclareError!*const Declaration {
|
||||
return self.declare(.{.identifier = identifier});
|
||||
}
|
||||
|
||||
pub fn resolve_declaration(self: *Environment, identifier: []const u8) std.mem.Allocator.Error!?*const Declaration {
|
||||
var environment = self;
|
||||
var ancestry = @as(u32, 0);
|
||||
|
||||
while (true) : (ancestry += 1) {
|
||||
var remaining_count = environment.declaration_count;
|
||||
|
||||
while (remaining_count != 0) {
|
||||
remaining_count -= 1;
|
||||
|
||||
const declaration = &environment.declarations[remaining_count];
|
||||
|
||||
if (coral.are_equal(declaration.identifier, identifier)) {
|
||||
if (ancestry != 0) {
|
||||
declaration.is.captured = true;
|
||||
environment = self;
|
||||
ancestry -= 1;
|
||||
|
||||
while (ancestry != 0) : (ancestry -= 1) {
|
||||
if (environment.capture_count == environment.captures.len) {
|
||||
return error.OutOfMemory;
|
||||
}
|
||||
|
||||
environment.captures[environment.capture_count] = .{
|
||||
.capture_index = environment.enclosing.?.capture_count
|
||||
};
|
||||
|
||||
environment.capture_count += 1;
|
||||
environment = environment.enclosing.?;
|
||||
}
|
||||
|
||||
environment.captures[environment.capture_count] = .{.declaration_index = remaining_count};
|
||||
environment.capture_count += 1;
|
||||
}
|
||||
|
||||
return declaration;
|
||||
}
|
||||
}
|
||||
|
||||
environment = environment.enclosing orelse return null;
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_captures(self: *const Environment) []const Capture {
|
||||
return self.captures[0 .. self.capture_count];
|
||||
}
|
||||
|
||||
pub fn get_declarations(self: *const Environment) []const Declaration {
|
||||
return self.declarations[0 .. self.declaration_count];
|
||||
}
|
||||
};
|
||||
|
||||
pub fn NodeChain(comptime Value: type) type {
|
||||
return struct {
|
||||
head: ?*Value = null,
|
||||
tail: ?*Value = null,
|
||||
|
||||
pub const Nodes = struct {
|
||||
current: ?*const Value,
|
||||
|
||||
pub fn next(self: *Nodes) ?*const Value {
|
||||
const current = self.current orelse return null;
|
||||
|
||||
defer self.current = current.next;
|
||||
|
||||
return current;
|
||||
}
|
||||
};
|
||||
|
||||
const Self = @This();
|
||||
|
||||
pub fn append(self: *Self, value: *Value) void {
|
||||
if (self.tail) |node| {
|
||||
node.next = value;
|
||||
self.tail = value;
|
||||
} else {
|
||||
self.tail = value;
|
||||
self.head = value;
|
||||
}
|
||||
}
|
||||
|
||||
pub fn nodes(self: *const Self) Nodes {
|
||||
return .{.current = self.head};
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
pub const ParseError = std.mem.Allocator.Error || error {
|
||||
BadSyntax,
|
||||
};

pub const Root = struct {
    arena: std.heap.ArenaAllocator,
    environment: Environment,
    error_messages: MessageList,

    const MessageList = coral.Stack([]coral.Byte);

    pub fn report_error(self: *Root, line: tokens.Line, comptime format: []const u8, args: anytype) ParseError {
        const allocator = self.arena.allocator();
        const message = try coral.utf8.alloc_formatted(allocator, format, args);

        defer allocator.free(message);

        try self.error_messages.push(try coral.utf8.alloc_formatted(allocator, "{line_number}: {message}", .{
            .message = message,
            .line_number = line.number,
        }));

        return error.BadSyntax;
    }

    pub fn report_declare_error(self: *Root, line: tokens.Line, identifier: []const u8, @"error": Environment.DeclareError) ParseError {
        return switch (@"error") {
            error.OutOfMemory => error.OutOfMemory,

            error.DeclarationExists => self.report_error(line, "declaration `{identifier}` already exists", .{
                .identifier = identifier,
            }),
        };
    }

    pub fn create_node(self: *Root, node: anytype) std.mem.Allocator.Error!*@TypeOf(node) {
        const copy = try self.arena.allocator().create(@TypeOf(node));

        copy.* = node;

        return copy;
    }

    pub fn create_string(self: *Root, comptime format: []const u8, args: anytype) std.mem.Allocator.Error![]const u8 {
        return coral.utf8.alloc_formatted(self.arena.allocator(), format, args);
    }

    pub fn deinit(self: *Root) void {
        self.error_messages.deinit();
        self.arena.deinit();
    }

    pub fn init(allocator: std.mem.Allocator) std.mem.Allocator.Error!Root {
        return .{
            .arena = std.heap.ArenaAllocator.init(allocator),
            .error_messages = .{.allocator = allocator},
            .environment = .{},
        };
    }

    pub fn parse(self: *Root, stream: *tokens.Stream) ParseError!void {
        stream.skip_newlines();

        const first_statement = try Stmt.parse(self, stream, &self.environment);
        var current_statement = first_statement;

        while (stream.token != .end) {
            const next_statement = try Stmt.parse(self, stream, &self.environment);

            current_statement.next = next_statement;
            current_statement = next_statement;
        }

        self.environment.statement = first_statement;
    }
};
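
// Driver sketch for Root, assuming `tokens.Stream` only needs a `source` slice and a starting
// `line` before its first step(), as the template parser in Expr.zig sets one up; the
// allocator and source names here are placeholders:
//
//     var root = try Root.init(allocator);
//
//     defer root.deinit();
//
//     var stream = tokens.Stream{.source = chunk_source, .line = .{.number = 1}};
//
//     stream.step();
//
//     root.parse(&stream) catch {
//         // error_messages now holds one "line_number: message" entry per reported failure.
//     };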

@ -0,0 +1,906 @@
const Stmt = @import("./Stmt.zig");

const coral = @import("coral");

const tokens = @import("../tokens.zig");

const tree = @import("../tree.zig");

const std = @import("std");

next: ?*const Self = null,
line: tokens.Line,
kind: Kind,

pub const BinaryOp = struct {
    rhs_operand: *Self,
    lhs_operand: *Self,
    operation: Operation,

    pub const Operation = enum {
        addition,
        subtraction,
        multiplication,
        divsion,
        equals_comparison,
        greater_than_comparison,
        greater_equals_comparison,
        less_than_comparison,
        less_equals_comparison,
    };

    // Generates a parser for one precedence level: `parse_next` parses the operands of this
    // level and `operations` lists the operators it recognises.
    fn parser(comptime parse_next: Parser, comptime operations: []const BinaryOp.Operation) Parser {
        const BinaryOpParser = struct {
            fn parse(root: *tree.Root, stream: *tokens.Stream, environment: *tree.Environment) tree.ParseError!*Self {
                var expression = try parse_next(root, stream, environment);

                inline for (operations) |operation| {
                    const token = comptime @as(tokens.Token, switch (operation) {
                        .addition => .symbol_plus,
                        .subtraction => .symbol_minus,
                        .multiplication => .symbol_asterisk,
                        .divsion => .symbol_forward_slash,
                        .equals_comparison => .symbol_double_equals,
                        .greater_than_comparison => .symbol_greater_than,
                        .greater_equals_comparison => .symbol_greater_equals,
                        .less_than_comparison => .symbol_less_than,
                        .less_equals_comparison => .symbol_less_equals,
                    });

                    if (stream.token == std.meta.activeTag(token)) {
                        stream.step();

                        if (stream.token == .end) {
                            return root.report_error(stream.line, "expected other half of expression after `" ++ comptime token.text() ++ "`", .{});
                        }

                        // TODO: Remove once Zig has fixed struct self-reassignment.
                        const unnecessary_temp = expression;

                        expression = try root.create_node(Self{
                            .line = stream.line,

                            .kind = .{
                                .binary_op = .{
                                    .rhs_operand = try parse_next(root, stream, environment),
                                    .operation = operation,
                                    .lhs_operand = unnecessary_temp,
                                },
                            },
                        });
                    }
                }

                return expression;
            }
        };

        return BinaryOpParser.parse;
    }
};

pub const DeclarationGet = struct {
    declaration: *const tree.Declaration,
};

pub const DeclarationSet = struct {
    declaration: *const tree.Declaration,
    assign: *const Self,
};

pub const FieldGet = struct {
    identifier: []const coral.Byte,
    object: *const Self,
};

pub const FieldSet = struct {
    identifier: []const coral.Byte,
    object: *const Self,
    assign: *const Self,
};

pub const Invoke = struct {
    argument: ?*const Self,
    object: *const Self,
};

const Kind = union (enum) {
    nil_literal,
    true_literal,
    false_literal,
    number_literal: []const u8,
    string_literal: []const u8,
    string_template,
    symbol_literal: []const u8,
    vector2: Vector2,
    vector3: Vector3,
    table: tree.NodeChain(TableEntry),
    group: *Self,
    lambda_construct: LambdaConstruct,
    declaration_get: DeclarationGet,
    declaration_set: DeclarationSet,
    field_get: FieldGet,
    field_set: FieldSet,
    external_get: ExternalGet,
    subscript_get: SubscriptGet,
    subscript_set: SubscriptSet,
    binary_op: BinaryOp,
    unary_op: UnaryOp,
    invoke: Invoke,
};

pub const LambdaConstruct = struct {
    environment: *const tree.Environment,
};

const Parser = fn (root: *tree.Root, stream: *tokens.Stream, environment: *tree.Environment) tree.ParseError!*Self;

const ExternalGet = struct {
    name: []const u8,
};

const Self = @This();
pub const SubscriptGet = struct {
    index: *const Self,
    object: *const Self,
};

pub const SubscriptSet = struct {
    index: *const Self,
    object: *const Self,
    assign: *const Self,
};

pub const TableEntry = struct {
    next: ?*const TableEntry = null,
    key: *const Self,
    value: *const Self,
};

const TemplateToken = union (enum) {
    invalid: []const coral.Byte,
    literal: []const coral.Byte,
    expression: []const coral.Byte,

    fn extract(source: *[]const coral.Byte) ?TemplateToken {
        var cursor = @as(usize, 0);

        defer source.* = source.*[cursor ..];

        while (cursor < source.len) {
            switch (source.*[cursor]) {
                '{' => {
                    cursor += 1;

                    while (true) : (cursor += 1) {
                        if (cursor == source.len) {
                            return .{.invalid = source.*[0 .. cursor]};
                        }

                        if (source.*[cursor] == '}') {
                            const token = TemplateToken{.expression = source.*[1 .. cursor]};

                            cursor += 1;

                            return token;
                        }
                    }
                },

                else => {
                    cursor += 1;

                    while (true) : (cursor += 1) {
                        if (cursor == source.len) {
                            return .{.literal = source.*[0 .. cursor]};
                        }

                        if (source.*[cursor] == '{') {
                            const cursor_next = cursor + 1;

                            if (cursor_next == source.len) {
                                return .{.invalid = source.*[0 .. cursor]};
                            }

                            if (source.*[cursor_next] == '{') {
                                cursor = cursor_next;

                                return .{.literal = source.*[0 .. cursor]};
                            }

                            return .{.literal = source.*[0 .. cursor]};
                        }
                    }
                },
            }
        }

        return null;
    }
};
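
// Worked example of the extraction above: for the template source "hello {name}!", successive
// extract() calls yield a .literal of "hello ", an .expression of "name", a .literal of "!",
// and then null once the slice is exhausted; an unterminated "{name" comes back as .invalid.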

pub const UnaryOp = struct {
    operand: *Self,
    operation: Operation,

    pub const Operation = enum {
        numeric_negation,
        boolean_negation,
    };
};

pub const Vector2 = struct {
    x: *const Self,
    y: *const Self,
};

pub const Vector3 = struct {
    x: *const Self,
    y: *const Self,
    z: *const Self,
};

pub fn parse(root: *tree.Root, stream: *tokens.Stream, environment: *tree.Environment) tree.ParseError!*Self {
    const expression = try parse_additive(root, stream, environment);

    if (stream.token == .symbol_equals) {
        stream.skip_newlines();

        if (stream.token == .end) {
            return root.report_error(stream.line, "expected assignment after `=`", .{});
        }

        return root.create_node(Self{
            .line = stream.line,

            .kind = switch (expression.kind) {
                .declaration_get => |declaration_get| convert: {
                    if (declaration_get.declaration.is.readonly) {
                        return root.report_error(stream.line, "readonly declarations cannot be re-assigned", .{});
                    }

                    break: convert .{
                        .declaration_set = .{
                            .assign = try parse(root, stream, environment),
                            .declaration = declaration_get.declaration,
                        },
                    };
                },

                .field_get => |field_get| .{
                    .field_set = .{
                        .assign = try parse(root, stream, environment),
                        .object = field_get.object,
                        .identifier = field_get.identifier,
                    },
                },

                .subscript_get => |subscript_get| .{
                    .subscript_set = .{
                        .assign = try parse(root, stream, environment),
                        .object = subscript_get.object,
                        .index = subscript_get.index,
                    },
                },

                else => return root.report_error(stream.line, "expected local or field on left-hand side of expression", .{}),
            },
        });
    }

    return expression;
}

const parse_additive = BinaryOp.parser(parse_equality, &.{
    .addition,
    .subtraction,
});

const parse_comparison = BinaryOp.parser(parse_term, &.{
    .greater_than_comparison,
    .greater_equals_comparison,
    .less_than_comparison,
    .less_equals_comparison,
});

const parse_equality = BinaryOp.parser(parse_comparison, &.{
    .equals_comparison,
});

fn parse_factor(root: *tree.Root, stream: *tokens.Stream, environment: *tree.Environment) tree.ParseError!*Self {
    var expression = try parse_operand(root, stream, environment);

    while (true) {
        switch (stream.token) {
            .symbol_period => {
                stream.skip_newlines();

                // TODO: Remove when Zig fixes miscompilation with in-place struct re-assignment.
                const unnecessary_temp = expression;

                expression = try root.create_node(Self{
                    .line = stream.line,

                    .kind = .{
                        .field_get = .{
                            .identifier = switch (stream.token) {
                                .identifier => |field_identifier| field_identifier,
                                else => return root.report_error(stream.line, "expected identifier after `.`", .{}),
                            },

                            .object = unnecessary_temp,
                        },
                    },
                });

                stream.skip_newlines();
            },

            .symbol_bracket_left => {
                stream.skip_newlines();

                // TODO: Remove when Zig fixes miscompilation with in-place struct re-assignment.
                const unnecessary_temp = expression;

                expression = try root.create_node(Self{
                    .line = stream.line,

                    .kind = .{
                        .subscript_get = .{
                            .index = try parse(root, stream, environment),
                            .object = unnecessary_temp,
                        },
                    },
                });

                if (stream.token != .symbol_bracket_right) {
                    return root.report_error(stream.line, "expected closing `]` on subscript", .{});
                }

                stream.skip_newlines();
            },

            .symbol_paren_left => {
                const lines_stepped = stream.line;

                stream.skip_newlines();

                var first_argument = @as(?*Self, null);

                if (stream.token != .symbol_paren_right) {
                    var argument = try parse(root, stream, environment);

                    first_argument = argument;

                    while (true) {
                        switch (stream.token) {
                            .symbol_comma => stream.skip_newlines(),
                            .symbol_paren_right => break,
                            else => return root.report_error(stream.line, "expected `,` or `)` after lambda argument", .{}),
                        }

                        const next_argument = try parse(root, stream, environment);

                        argument.next = next_argument;
                        argument = next_argument;
                    }
                }

                stream.skip_newlines();

                // TODO: Remove when Zig fixes miscompilation with in-place struct re-assignment.
                const unnecessary_temp = expression;

                expression = try root.create_node(Self{
                    .line = lines_stepped,

                    .kind = .{
                        .invoke = .{
                            .argument = first_argument,
                            .object = unnecessary_temp,
                        },
                    },
                });
            },

            else => break,
        }
    }

    return expression;
}
fn parse_operand(root: *tree.Root, stream: *tokens.Stream, environment: *tree.Environment) tree.ParseError!*Self {
    switch (stream.token) {
        .symbol_paren_left => {
            stream.skip_newlines();

            const expression = try parse(root, stream, environment);

            if (stream.token != .symbol_paren_right) {
                return root.report_error(stream.line, "expected a closing `)` after expression", .{});
            }

            stream.skip_newlines();

            return root.create_node(Self{
                .line = stream.line,
                .kind = .{.group = expression},
            });
        },

        .keyword_nil => {
            stream.skip_newlines();

            return root.create_node(Self{
                .line = stream.line,
                .kind = .nil_literal,
            });
        },

        .keyword_true => {
            stream.skip_newlines();

            return root.create_node(Self{
                .line = stream.line,
                .kind = .true_literal,
            });
        },

        .keyword_false => {
            stream.skip_newlines();

            return root.create_node(Self{
                .line = stream.line,
                .kind = .false_literal,
            });
        },

        .keyword_vec2 => {
            stream.skip_newlines();

            if (stream.token != .symbol_paren_left) {
                return root.report_error(stream.line, "expected an opening `(` after `vec2`", .{});
            }

            stream.skip_newlines();

            const x_expression = try parse(root, stream, environment);

            switch (stream.token) {
                .symbol_paren_right => {
                    stream.skip_newlines();

                    return root.create_node(Self{
                        .line = stream.line,

                        .kind = .{
                            .vector2 = .{
                                .x = x_expression,
                                .y = x_expression,
                            },
                        },
                    });
                },

                .symbol_comma => {
                    stream.skip_newlines();

                    const y_expression = try parse(root, stream, environment);

                    stream.skip_newlines();

                    if (stream.token != .symbol_paren_right) {
                        return root.report_error(stream.line, "expected a closing `)` after `vec2`", .{});
                    }

                    return root.create_node(Self{
                        .line = stream.line,

                        .kind = .{
                            .vector2 = .{
                                .x = x_expression,
                                .y = y_expression,
                            },
                        },
                    });
                },

                else => return root.report_error(stream.line, "expected a closing `)` after `vec2`", .{}),
            }
        },

        .keyword_vec3 => {
            stream.skip_newlines();

            if (stream.token != .symbol_paren_left) {
                return root.report_error(stream.line, "expected an opening `(` after `vec3`", .{});
            }

            stream.skip_newlines();

            const x_expression = try parse(root, stream, environment);

            switch (stream.token) {
                .symbol_paren_right => {
                    stream.skip_newlines();

                    return root.create_node(Self{
                        .line = stream.line,

                        .kind = .{
                            .vector3 = .{
                                .x = x_expression,
                                .y = x_expression,
                                .z = x_expression,
                            },
                        },
                    });
                },

                .symbol_comma => {
                    stream.skip_newlines();

                    const y_expression = try parse(root, stream, environment);

                    stream.skip_newlines();

                    const z_expression = try parse(root, stream, environment);

                    stream.skip_newlines();

                    if (stream.token != .symbol_paren_right) {
                        return root.report_error(stream.line, "expected a closing `)` after `vec3`", .{});
                    }

                    return root.create_node(Self{
                        .line = stream.line,

                        .kind = .{
                            .vector3 = .{
                                .x = x_expression,
                                .y = y_expression,
                                .z = z_expression,
                            },
                        },
                    });
                },

                else => return root.report_error(stream.line, "expected a closing `)` after `vec3`", .{}),
            }
        },

        .number => |value| {
            stream.skip_newlines();

            return root.create_node(Self{
                .line = stream.line,
                .kind = .{.number_literal = value},
            });
        },

        .string => |value| {
            stream.skip_newlines();

            return root.create_node(Self{
                .line = stream.line,
                .kind = .{.string_literal = value},
            });
        },

        .template_string => |value| {
            const line = stream.line;

            stream.skip_newlines();

            return parse_template(root, value, line, environment);
        },

        .symbol_at => {
            stream.step();

            const identifier = switch (stream.token) {
                .identifier => |identifier| identifier,
                else => return root.report_error(stream.line, "expected identifier after `@`", .{}),
            };

            stream.skip_newlines();

            return root.create_node(Self{
                .line = stream.line,
                .kind = .{.external_get = .{.name = identifier}},
            });
        },

        .symbol_period => {
            stream.step();

            const identifier = switch (stream.token) {
                .identifier => |identifier| identifier,
                else => return root.report_error(stream.line, "expected identifier after `.`", .{}),
            };

            stream.skip_newlines();

            return root.create_node(Self{
                .line = stream.line,
                .kind = .{.symbol_literal = identifier},
            });
        },

        .identifier => |identifier| {
            stream.skip_newlines();

            return root.create_node(Self{
                .line = stream.line,

                .kind = .{
                    .declaration_get = .{
                        .declaration = (try environment.resolve_declaration(identifier)) orelse {
                            return root.report_error(stream.line, "undefined identifier `{identifier}`", .{
                                .identifier = identifier,
                            });
                        },
                    },
                },
            });
        },

        .keyword_lambda => {
            stream.skip_newlines();

            if (stream.token != .symbol_paren_left) {
                return root.report_error(stream.line, "expected `(` after opening lambda block", .{});
            }

            stream.skip_newlines();

            var lambda_environment = try environment.create_enclosed(root);

            while (stream.token != .symbol_paren_right) {
                const identifier = switch (stream.token) {
                    .identifier => |identifier| identifier,
                    else => return root.report_error(stream.line, "expected identifier", .{}),
                };

                _ = lambda_environment.declare_argument(identifier) catch |declare_error| {
                    return root.report_declare_error(stream.line, identifier, declare_error);
                };

                stream.skip_newlines();

                switch (stream.token) {
                    .symbol_comma => stream.skip_newlines(),
                    .symbol_paren_right => break,
                    else => return root.report_error(stream.line, "expected `,` or `)` after identifier", .{}),
                }
            }

            stream.skip_newlines();

            if (stream.token != .symbol_colon) {
                return root.report_error(stream.line, "expected `:` after closing `)` of lambda identifiers", .{});
            }

            stream.skip_newlines();

            if (stream.token != .keyword_end) {
                const first_statement = try Stmt.parse(root, stream, lambda_environment);
                var current_statement = first_statement;

                while (stream.token != .keyword_end) {
                    const next_statement = try Stmt.parse(root, stream, lambda_environment);

                    current_statement.next = next_statement;
                    current_statement = next_statement;
                }

                lambda_environment.statement = first_statement;
            }

            stream.skip_newlines();

            return root.create_node(Self{
                .line = stream.line,
                .kind = .{.lambda_construct = .{.environment = lambda_environment}},
            });
        },

        .symbol_brace_left => {
            stream.skip_newlines();

            return parse_table(root, stream, environment);
        },

        .symbol_minus => {
            stream.skip_newlines();

            return root.create_node(Self{
                .line = stream.line,

                .kind = .{
                    .unary_op = .{
                        .operand = try parse_factor(root, stream, environment),
                        .operation = .numeric_negation,
                    },
                },
            });
        },

        .symbol_bang => {
            stream.skip_newlines();

            return root.create_node(Self{
                .line = stream.line,

                .kind = .{
                    .unary_op = .{
                        .operand = try parse_factor(root, stream, environment),
                        .operation = .boolean_negation,
                    },
                },
            });
        },

        else => return root.report_error(stream.line, "unexpected token in expression", .{}),
    }
}

fn parse_table(root: *tree.Root, stream: *tokens.Stream, environment: *tree.Environment) tree.ParseError!*Self {
    var entries = tree.NodeChain(TableEntry){};
    var sequential_index = @as(usize, 0);

    while (true) {
        switch (stream.token) {
            .symbol_brace_right => {
                stream.skip_newlines();

                return root.create_node(Self{
                    .line = stream.line,
                    .kind = .{.table = entries},
                });
            },

            .symbol_bracket_left => {
                stream.skip_newlines();

                const key = try parse(root, stream, environment);

                if (stream.token != .symbol_bracket_right) {
                    return root.report_error(stream.line, "expected `]` after subscript index expression", .{});
                }

                stream.skip_newlines();

                if (stream.token != .symbol_equals) {
                    return root.report_error(stream.line, "expected `=` after table expression key", .{});
                }

                stream.skip_newlines();

                entries.append(try root.create_node(TableEntry{
                    .value = try parse(root, stream, environment),
                    .key = key,
                }));
            },

            .symbol_period => {
                stream.step();

                const field = switch (stream.token) {
                    .identifier => |identifier| identifier,
                    else => return root.report_error(stream.line, "invalid symbol literal", .{}),
                };

                stream.skip_newlines();

                switch (stream.token) {
                    .symbol_comma => {
                        stream.skip_newlines();

                        entries.append(try root.create_node(TableEntry{
                            .key = try root.create_node(Self{
                                .line = stream.line,
                                .kind = .{.number_literal = try root.create_string("{i}", .{.i = sequential_index})},
                            }),

                            .value = try root.create_node(Self{
                                .line = stream.line,
                                .kind = .{.symbol_literal = field},
                            }),
                        }));

                        sequential_index += 1;
                    },

                    .symbol_equals => {
                        stream.skip_newlines();

                        entries.append(try root.create_node(TableEntry{
                            .value = try parse(root, stream, environment),

                            .key = try root.create_node(Self{
                                .line = stream.line,
                                .kind = .{.symbol_literal = field},
                            }),
                        }));
                    },

                    else => return root.report_error(stream.line, "expected `,` or `=` after symbol", .{}),
                }
            },

            else => {
                entries.append(try root.create_node(TableEntry{
                    .value = try parse(root, stream, environment),

                    .key = try root.create_node(Self{
                        .line = stream.line,
                        .kind = .{.number_literal = try root.create_string("{i}", .{.i = sequential_index})},
                    }),
                }));

                sequential_index += 1;
            },
        }

        switch (stream.token) {
            .symbol_brace_right => {
                stream.skip_newlines();

                return root.create_node(Self{
                    .line = stream.line,
                    .kind = .{.table = entries},
                });
            },

            .symbol_comma => stream.skip_newlines(),
            else => return root.report_error(stream.line, "expected `,` or `}` after table key-value pair", .{}),
        }
    }
}

fn parse_template(root: *tree.Root, template: []const coral.Byte, line: tokens.Line, environment: *tree.Environment) tree.ParseError!*Self {
    const expression_head = try root.create_node(Self{
        .line = line,
        .kind = .string_template,
    });

    var expression_tail = expression_head;
    var source = template;

    while (TemplateToken.extract(&source)) |token| {
        const expression = try switch (token) {
            .invalid => |invalid| root.report_error(line, "invalid template format: `{invalid}`", .{
                .invalid = invalid,
            }),

            .literal => |literal| root.create_node(Self{
                .line = line,
                .kind = .{.string_literal = literal},
            }),

            .expression => |expression| create: {
                var stream = tokens.Stream{
                    .source = expression,
                    .line = line,
                };

                stream.step();

                break: create try parse(root, &stream, environment);
            },
        };

        expression_tail.next = expression;
        expression_tail = expression;
    }

    return expression_head;
}

const parse_term = BinaryOp.parser(parse_factor, &.{
    .multiplication,
    .divsion,
});

@ -0,0 +1,242 @@
const Expr = @import("./Expr.zig");

const coral = @import("coral");

const tokens = @import("../tokens.zig");

const tree = @import("../tree.zig");

next: ?*const Self = null,
line: tokens.Line,

kind: union (enum) {
    top_expression: *const Expr,
    @"return": Return,
    declare: Declare,
    @"if": If,
    @"while": While,
},

pub const Declare = struct {
    declaration: *const tree.Declaration,
    initial_expression: *const Expr,
};

pub const If = struct {
    then_expression: *const Expr,
    @"then": *const Self,
    @"else": ?*const Self,
};

pub const Return = struct {
    returned_expression: ?*const Expr,
};

const Self = @This();

pub const While = struct {
    loop_expression: *const Expr,
    loop: *const Self,
};

pub fn parse(root: *tree.Root, stream: *tokens.Stream, environment: *tree.Environment) tree.ParseError!*Self {
    switch (stream.token) {
        .keyword_return => {
            stream.step();

            if (stream.token != .end and stream.token != .newline) {
                const returned_expression = try Expr.parse(root, stream, environment);

                // Validate the terminator after the returned expression; previously this
                // check re-tested the pre-parse condition and could never fire.
                if (stream.token != .end and stream.token != .newline) {
                    return root.report_error(stream.line, "expected end or newline after return statement", .{});
                }

                return root.create_node(Self{
                    .line = stream.line,
                    .kind = .{.@"return" = .{.returned_expression = returned_expression}},
                });
            }

            return root.create_node(Self{
                .line = stream.line,
                .kind = .{.@"return" = .{.returned_expression = null}},
            });
        },

        .keyword_while => {
            defer stream.skip_newlines();

            stream.step();

            const condition_expression = try Expr.parse(root, stream, environment);

            if (stream.token != .symbol_colon) {
                return root.report_error(stream.line, "expected `:` after `while` statement", .{});
            }

            stream.skip_newlines();

            const first_statement = try parse(root, stream, environment);

            {
                var current_statement = first_statement;

                while (stream.token != .keyword_end) {
                    const next_statement = try parse(root, stream, environment);

                    current_statement.next = next_statement;
                    current_statement = next_statement;
                }
            }

            return root.create_node(Self{
                .line = stream.line,

                .kind = .{
                    .@"while" = .{
                        .loop = first_statement,
                        .loop_expression = condition_expression,
                    },
                },
            });
        },

        .keyword_var, .keyword_let => {
            const is_constant = stream.token == .keyword_let;

            stream.skip_newlines();

            const identifier = switch (stream.token) {
                .identifier => |identifier| identifier,
                else => return root.report_error(stream.line, "expected identifier after declaration", .{}),
            };

            stream.skip_newlines();

            if (stream.token != .symbol_equals) {
                return root.report_error(stream.line, "expected `=` after declaration `{identifier}`", .{
                    .identifier = identifier,
                });
            }

            stream.skip_newlines();

            return root.create_node(Self{
                .line = stream.line,

                .kind = .{
                    .declare = .{
                        .initial_expression = try Expr.parse(root, stream, environment),

                        .declaration = declare: {
                            if (is_constant) {
                                break: declare environment.declare_constant(identifier) catch |declaration_error| {
                                    return root.report_declare_error(stream.line, identifier, declaration_error);
                                };
                            }

                            break: declare environment.declare_variable(identifier) catch |declaration_error| {
                                return root.report_declare_error(stream.line, identifier, declaration_error);
                            };
                        },
                    },
                },
            });
        },

        .keyword_if => return parse_branch(root, stream, environment),

        else => return root.create_node(Self{
            .line = stream.line,
            .kind = .{.top_expression = try Expr.parse(root, stream, environment)},
        }),
    }
}

fn parse_branch(root: *tree.Root, stream: *tokens.Stream, environment: *tree.Environment) tree.ParseError!*Self {
    stream.step();

    const expression = try Expr.parse(root, stream, environment);

    if (stream.token != .symbol_colon) {
        return root.report_error(stream.line, "expected `:` after `{token}`", .{.token = stream.token.text()});
    }

    stream.skip_newlines();

    const first_then_statement = try parse(root, stream, environment);
    var current_then_statement = first_then_statement;

    while (true) {
        switch (stream.token) {
            .keyword_end => {
                stream.skip_newlines();

                return root.create_node(Self{
                    .line = stream.line,

                    .kind = .{
                        .@"if" = .{
                            .then_expression = expression,
                            .@"then" = first_then_statement,
                            .@"else" = null,
                        },
                    },
                });
            },

            .keyword_else => {
                stream.step();

                if (stream.token != .symbol_colon) {
                    return root.report_error(stream.line, "expected `:` after `else`", .{});
                }

                stream.skip_newlines();

                const first_else_statement = try parse(root, stream, environment);
                var current_else_statement = first_else_statement;

                while (stream.token != .keyword_end) {
                    const next_statement = try parse(root, stream, environment);

                    current_else_statement.next = next_statement;
                    current_else_statement = next_statement;
                }

                stream.skip_newlines();

                return root.create_node(Self{
                    .line = stream.line,

                    .kind = .{
                        .@"if" = .{
                            .@"else" = first_else_statement,
                            .@"then" = first_then_statement,
                            .then_expression = expression,
                        },
                    },
                });
            },

            .keyword_elif => {
                return root.create_node(Self{
                    .line = stream.line,

                    .kind = .{
                        .@"if" = .{
                            .@"else" = try parse_branch(root, stream, environment),
                            .@"then" = first_then_statement,
                            .then_expression = expression,
                        },
                    },
                });
            },

            else => {
                const next_statement = try parse(root, stream, environment);

                current_then_statement.next = next_statement;
                current_then_statement = next_statement;
            },
        }
    }
}