Tidy up Kym implementation

kayomn 2023-04-23 15:53:50 +01:00
parent d1110d8683
commit 0974cb016b
3 changed files with 766 additions and 563 deletions

source/kym/bytecode.zig · 705 changed lines · Normal file → Executable file

@@ -6,25 +6,46 @@ pub const Chunk = struct {
 	constant_buffer: Buffer,
 	bytecode_buffer: Buffer,
 	constants: Constants,
+	locals: SmallStack(Local, .{.name = "", .depth = 0}) = .{},

 	const Buffer = coral.stack.Dense(u8);
 	const Constants = coral.stack.Dense(Constant);

-	pub fn compile(self: *Chunk, script: []const u8) !void {
+	const Local = struct {
+		name: []const u8,
+		depth: u16,
+
+		const empty = Local{ .name = "", .depth = 0 };
+	};
+
+	pub fn compile(self: *Chunk, script: []const u8) ParseError!void {
 		self.reset();

 		var tokenizer = tokens.Tokenizer{.source = script};

+		errdefer self.reset();
+
 		var parser = Parser{
 			.chunk = self,
 			.tokenizer = &tokenizer,
 		};

-		errdefer self.reset();
-
-		try parser.parse_statement();
+		while (true) {
+			parser.step() catch |step_error| switch (step_error) {
+				error.UnexpectedEnd => return,
+			};
+
+			try parser.parse_statement();
+		}
 	}

+	fn declare_local(self: *Chunk, name: []const u8) !void {
+		return self.locals.push(.{
+			.name = name,
+			.depth = 0,
+		});
+	}
+
 	pub fn deinit(self: *Chunk) void {
 		self.bytecode_buffer.deinit();
@@ -32,48 +53,18 @@ pub const Chunk = struct {
 		self.constants.deinit();
 	}

-	pub fn emit_byte(self: *Chunk, byte: u8) !void {
+	fn emit_byte(self: *Chunk, byte: u8) !void {
 		return self.bytecode_buffer.push_one(byte);
 	}

-	pub fn emit_opcode(self: *Chunk, opcode: Opcode) !void {
+	fn emit_opcode(self: *Chunk, opcode: Opcode) !void {
 		return self.bytecode_buffer.push_one(@enumToInt(opcode));
 	}

-	pub fn emit_operand(self: *Chunk, operand: Operand) !void {
+	fn emit_operand(self: *Chunk, operand: Operand) !void {
 		return self.bytecode_buffer.push_all(coral.io.bytes_of(&operand));
 	}

-	pub fn intern_string(self: *Chunk, string: []const u8) !u64 {
-		var constant_slot = @as(u64, 0);
-
-		for (self.constants.values) |interned_constant| {
-			switch (interned_constant) {
-				.string => |interned_string| if (coral.io.equals(interned_string, string)) return constant_slot,
-			}
-
-			constant_slot += 1;
-		}
-
-		const constant_allocator = coral.stack.as_dense_allocator(&self.constant_buffer);
-		const allocation = constant_allocator.allocate_many(u8, string.len + 1) orelse return error.OutOfMemory;
-
-		errdefer constant_allocator.deallocate(allocation);
-
-		// Zero-terminate string.
-		allocation[string.len] = 0;
-
-		// Write string contents.
-		{
-			const allocated_string = allocation[0 .. string.len];
-
-			coral.io.copy(allocated_string, string);
-			try self.constants.push_one(.{.string = @ptrCast([:0]u8, allocated_string)});
-		}
-
-		return constant_slot;
-	}
-
 	pub fn fetch_byte(self: Chunk, cursor: *usize) ?u8 {
 		if (cursor.* >= self.bytecode_buffer.values.len) return null;
@@ -128,10 +119,54 @@ pub const Chunk = struct {
 		};
 	}

+	fn intern_string(self: *Chunk, string: []const u8) !u64 {
+		var constant_slot = @as(u64, 0);
+
+		for (self.constants.values) |interned_constant| {
+			switch (interned_constant) {
+				.string => |interned_string| if (coral.io.equals(interned_string, string)) return constant_slot,
+			}
+
+			constant_slot += 1;
+		}
+
+		const constant_allocator = coral.stack.as_dense_allocator(&self.constant_buffer);
+		const allocation = constant_allocator.allocate_many(u8, string.len + 1) orelse return error.OutOfMemory;
+
+		errdefer constant_allocator.deallocate(allocation);
+
+		// Zero-terminate string.
+		allocation[string.len] = 0;
+
+		// Write string contents.
+		{
+			const allocated_string = allocation[0 .. string.len];
+
+			coral.io.copy(allocated_string, string);
+			try self.constants.push_one(.{.string = @ptrCast([:0]u8, allocated_string)});
+		}
+
+		return constant_slot;
+	}
+
 	pub fn reset(self: *Chunk) void {
 		self.bytecode_buffer.clear();
 		self.constant_buffer.clear();
 	}
+
+	pub fn resolve_local(self: *Chunk, name: []const u8) ?u8 {
+		var count = @as(u8, self.locals.buffer.len);
+
+		while (count != 0) {
+			const index = count - 1;
+
+			if (coral.io.equals(name, self.locals.buffer[index].name)) return index;
+
+			count = index;
+		}
+
+		return null;
+	}
 };

 pub const Constant = union (enum) {
@@ -139,6 +174,7 @@ pub const Constant = union (enum) {
 };

 pub const Opcode = enum(u8) {
+	pop,
 	push_nil,
 	push_true,
 	push_false,
@@ -157,8 +193,8 @@ pub const Opcode = enum(u8) {
 	mul,
 	call,

-	get_field,
-	set_field,
+	get_index,
+	set_index,
 	get_x,
 	set_x,
 	get_y,
@@ -166,28 +202,22 @@ pub const Opcode = enum(u8) {
 	get_global,
 	set_global,
 	get_local,
+	set_local,
 };

 pub const Operand = u64;

-const ParseError = SyntaxError || error{
+pub const ParseError = Parser.StepError || tokens.Token.ExpectError || error {
 	OutOfMemory,
+	IntOverflow,
+	UndefinedLocal,
 };

 const Parser = struct {
-	tokenizer: *tokens.Tokenizer,
-	scope_depth: u16 = 0,
 	chunk: *Chunk,
-	locals: SmallStack(Local, Local.empty) = .{},
-
-	const Local = struct {
-		name: []const u8,
-		depth: u16,
-
-		const empty = Local{ .name = "", .depth = 0 };
-	};
-
-	const Operations = SmallStack(Operator, .not);
+	tokenizer: *tokens.Tokenizer,
+	current_token: tokens.Token = .newline,
+	previous_token: tokens.Token = .newline,

 	const Operator = enum {
 		not,
@@ -197,7 +227,9 @@ const Parser = struct {
 		divide,
 		multiply,

-		fn opcode(self: Operator) Opcode {
+		const Self = @This();
+
+		fn opcode(self: Self) Opcode {
 			return switch (self) {
 				.not => .not,
 				.negate => .neg,
@@ -208,7 +240,7 @@ const Parser = struct {
 			};
 		}

-		fn precedence(self: Operator) isize {
+		fn precedence(self: Self) isize {
 			return switch (self) {
 				.not => 13,
 				.negate => 13,
@@ -220,101 +252,54 @@ const Parser = struct {
 		}
 	};

-	fn declare_local(self: *Parser, name: []const u8) !void {
-		return self.locals.push(.{
-			.name = name,
-			.depth = self.scope_depth,
-		});
-	}
-
-	fn error_unexpected_end(self: *Parser) SyntaxError {
-		_ = self;
-
-		return error.BadSyntax;
-	}
-
-	fn error_unexpected_token(self: *Parser, token: tokens.Token) SyntaxError {
-		_ = self;
-		_ = token;
-
-		// _ = self.error_writer.write("unexpected token `") catch {};
-		// _ = self.error_writer.write(token.text()) catch {};
-		// _ = self.error_writer.write("`") catch {};
-
-		return error.BadSyntax;
-	}
-
-	fn error_integer_overflow(self: *Parser, integer_literal: []const u8) SyntaxError {
-		// TODO: Implement.
-		_ = self;
-		_ = integer_literal;
-
-		return error.BadSyntax;
-	}
-
-	pub fn parse_expression(self: *Parser, initial_token: tokens.Token) ParseError!void {
-		var operations = Operations{};
-		var previous_token = initial_token;
-
-		while (self.tokenizer.next()) |current_token| {
-			switch (current_token) {
-				.newline => {
-					previous_token = current_token;
-
-					break;
-				},
-
-				else => previous_token = try self.parse_operation(&operations, previous_token, current_token),
-			}
-		}
-
-		while (operations.pop()) |operator| try self.chunk.emit_opcode(operator.opcode());
-	}
-
-	fn parse_arguments(self: *Parser) ParseError!tokens.Token {
-		var operations = Operations{};
-		var previous_token = @as(tokens.Token, .symbol_paren_left);
-		var argument_count = @as(Operand, 0);
-
-		while (self.tokenizer.next()) |current_token| {
-			switch (current_token) {
-				.symbol_paren_right => {
-					while (operations.pop()) |operator| try self.chunk.emit_opcode(operator.opcode());
-
-					try self.chunk.emit_opcode(.call);
-					try self.chunk.emit_operand(argument_count);
-
-					return .symbol_paren_right;
-				},
-
-				.symbol_comma => {
-					while (operations.pop()) |operator| try self.chunk.emit_opcode(operator.opcode());
-
-					previous_token = current_token;
-					argument_count += 1;
-				},
-
-				else => previous_token = try self.parse_operation(&operations, previous_token, current_token),
-			}
-		}
-
-		return previous_token;
-	}
-
-	fn parse_group(_: *Parser) ParseError!tokens.Token {
-		return error.BadSyntax;
-	}
-
-	pub fn parse_operation(self: *Parser, operations: *Operations,
-		previous_token: tokens.Token, current_token: tokens.Token) ParseError!tokens.Token {
-
-		switch (current_token) {
+	const OperatorStack = SmallStack(Operator, .not);
+
+	const StepError = error {
+		UnexpectedEnd,
+	};
+
+	const operator_tokens = &.{.symbol_assign, .symbol_plus, .symbol_dash, .symbol_asterisk, .symbol_forward_slash};
+
+	fn parse_expression(self: *Parser) ParseError!void {
+		var operators = OperatorStack{};
+		var local_depth = @as(usize, 0);
+
+		while (true) {
+			switch (self.current_token) {
+				.keyword_nil => {
+					try self.previous_token.expect_any(operator_tokens);
+					try self.chunk.emit_opcode(.push_nil);
+
+					self.step() catch |step_error| switch (step_error) {
+						error.UnexpectedEnd => return,
+					};
+				},
+
+				.keyword_true => {
+					try self.previous_token.expect_any(operator_tokens);
+					try self.chunk.emit_opcode(.push_true);
+
+					self.step() catch |step_error| switch (step_error) {
+						error.UnexpectedEnd => return,
+					};
+				},
+
+				.keyword_false => {
+					try self.previous_token.expect_any(operator_tokens);
+					try self.chunk.emit_opcode(.push_false);
+
+					self.step() catch |step_error| switch (step_error) {
+						error.UnexpectedEnd => return,
+					};
+				},
+
 				.integer_literal => |literal| {
-					const value = coral.utf8.parse_signed(@bitSizeOf(i64), literal) catch |err| switch (err) {
+					try self.previous_token.expect_any(operator_tokens);
+
+					const value = coral.utf8.parse_signed(@bitSizeOf(i64), literal)
+						catch |parse_error| switch (parse_error) {
 						error.BadSyntax => unreachable,
-						error.IntOverflow => return self.error_integer_overflow(literal),
+						error.IntOverflow => return error.IntOverflow,
 					};

 					if (value == 0) {
@@ -323,21 +308,106 @@ const Parser = struct {
 						try self.chunk.emit_opcode(.push_integer);
 						try self.chunk.emit_operand(@bitCast(u64, value));
 					}
+
+					try self.step();
 				},

 				.real_literal => |literal| {
-					try self.chunk.emit_operand(@bitCast(u64, coral.utf8.parse_float(@bitSizeOf(f64), literal) catch |err| {
-						switch (err) {
+					try self.previous_token.expect_any(operator_tokens);
+
+					try self.chunk.emit_operand(@bitCast(u64, coral.utf8.parse_float(@bitSizeOf(f64), literal)
+						catch |parse_error| switch (parse_error) {
 							// Already validated to be a real by the tokenizer so this cannot fail, as real syntax is a
 							// subset of float syntax.
 							error.BadSyntax => unreachable,
-						}
-					}));
+						}));
+
+					try self.step();
 				},

 				.string_literal => |literal| {
+					try self.previous_token.expect_any(operator_tokens);
 					try self.chunk.emit_opcode(.push_string);
 					try self.chunk.emit_operand(try self.chunk.intern_string(literal));
+
+					try self.step();
+				},
+
+				.global_identifier, .local_identifier => {
+					try self.previous_token.expect_any(&.{.symbol_assign, .symbol_plus,
+						.symbol_dash, .symbol_asterisk, .symbol_forward_slash, .symbol_period});
+
+					try self.step();
+				},
+
+				.symbol_bang => {
+					try self.previous_token.expect_any(operator_tokens);
+					try operators.push(.not);
+					try self.step();
+
+					local_depth = 0;
+				},
+
+				.symbol_plus => {
+					try self.parse_operator(&operators, .add);
+
+					local_depth = 0;
+				},
+
+				.symbol_dash => {
+					try self.parse_operator(&operators, .subtract);
+
+					local_depth = 0;
+				},
+
+				.symbol_asterisk => {
+					try self.parse_operator(&operators, .multiply);
+
+					local_depth = 0;
+				},
+
+				.symbol_forward_slash => {
+					try self.parse_operator(&operators, .divide);
+
+					local_depth = 0;
+				},
+
+				.symbol_period => {
+					switch (self.previous_token) {
+						.global_identifier => |identifier| {
+							try self.chunk.emit_opcode(.get_global);
+							try self.chunk.emit_operand(try self.chunk.intern_string(identifier));
+						},
+
+						.local_identifier => |identifier| {
+							if (local_depth == 0) {
+								try self.chunk.emit_byte(self.chunk.resolve_local(identifier) orelse {
+									return error.UndefinedLocal;
+								});
+							} else {
+								try self.chunk.emit_opcode(.get_index);
+								try self.chunk.emit_operand(try self.chunk.intern_string(identifier));
+							}
+						},
+
+						else => return error.UnexpectedToken,
+					}
+
+					try self.step();
+
+					local_depth += 1;
+				},
+
+				.symbol_paren_left => {
+					switch (self.previous_token) {
+						.local_identifier => |identifier| {
+							if (local_depth == 0) {
+								try self.chunk.emit_byte(self.chunk.resolve_local(identifier) orelse {
+									return error.UndefinedLocal;
+								});
+							} else {
+								try self.chunk.emit_opcode(.get_index);
+								try self.chunk.emit_operand(try self.chunk.intern_string(identifier));
+							}
 						},

 						.global_identifier => |identifier| {
@@ -345,171 +415,290 @@ const Parser = struct {
 							try self.chunk.emit_operand(try self.chunk.intern_string(identifier));
 						},

-			.local_identifier => |identifier| {
-				if (self.resolve_local(identifier)) |local| {
-					try self.chunk.emit_opcode(.get_local);
-					try self.chunk.emit_byte(local);
-				} else {
-					try self.chunk.emit_opcode(.push_nil);
-				}
-			},
-
-			.symbol_bang => try operations.push(.not),
-
-			.symbol_plus => while (operations.pop()) |operator| {
-				if (Operator.add.precedence() < operator.precedence()) break try operations.push(operator);
-
-				try self.chunk.emit_opcode(operator.opcode());
-			},
-
-			.symbol_dash => while (operations.pop()) |operator| {
-				if (Operator.subtract.precedence() < operator.precedence()) break try operations.push(operator);
-
-				try self.chunk.emit_opcode(operator.opcode());
-			},
-
-			.symbol_asterisk => while (operations.pop()) |operator| {
-				if (Operator.multiply.precedence() < operator.precedence()) break try operations.push(operator);
-
-				try self.chunk.emit_opcode(operator.opcode());
-			},
-
-			.symbol_forward_slash => while (operations.pop()) |operator| {
-				if (Operator.divide.precedence() < operator.precedence()) break try operations.push(operator);
-
-				try self.chunk.emit_opcode(operator.opcode());
-			},
-
-			.symbol_period => {
-				const field_token = self.tokenizer.next() orelse return self.error_unexpected_end();
-
-				switch (field_token) {
-					.local_identifier => |identifier| {
-						try self.chunk.emit_opcode(.get_field);
-						try self.chunk.emit_operand(try self.chunk.intern_string(identifier));
-
-						return field_token;
-					},
-
-					else => return self.error_unexpected_token(field_token),
-				}
-			},
-
-			.symbol_paren_left => return try switch (previous_token) {
-				.local_identifier, .global_identifier => self.parse_arguments(),
-				else => self.parse_group(),
-			},
-
-			.symbol_brace_left => {
-				try self.parse_table();
-
-				switch (previous_token) {
-					.local_identifier, .global_identifier => {
-						// Created as function argument.
-						try self.chunk.emit_opcode(.call);
-						try self.chunk.emit_operand(1);
-					},
-
-					else => {},
-				}
-
-				return .symbol_brace_right;
-			},
-
-			else => return self.error_unexpected_token(current_token),
-		}
-
-		return current_token;
-	}
-
-	pub fn parse_statement(self: *Parser) ParseError!void {
-		// TODO: Implement.
-		return self.error_unexpected_end();
-	}
-
-	fn parse_table(self: *Parser) ParseError!void {
-		var field_count = @as(Operand, 0);
-
-		while (self.tokenizer.next()) |field_token| {
-			switch (field_token) {
-				.newline => {},
-
-				.local_identifier => |field_identifier| {
-					const operation_token = self.tokenizer.next() orelse return self.error_unexpected_end();
-					const interned_identifier = try self.chunk.intern_string(field_identifier);
-
-					field_count += 1;
-
-					switch (operation_token) {
-						.symbol_assign => {
-							var operations = Operations{};
-							var previous_token = @as(tokens.Token, .symbol_assign);
-
-							while (self.tokenizer.next()) |token| : (previous_token = token) switch (token) {
-								.newline => {},
-								.symbol_comma => break,
-
-								.symbol_brace_right => {
-									try self.chunk.emit_opcode(.push_string);
-									try self.chunk.emit_operand(interned_identifier);
-									try self.chunk.emit_opcode(.push_table);
-									try self.chunk.emit_operand(field_count);
-
-									return;
-								},
-
-								else => previous_token = try self.parse_operation(&operations, previous_token, token),
-							};
-
-							while (operations.pop()) |operator| try self.chunk.emit_opcode(operator.opcode());
-
-							try self.chunk.emit_opcode(.push_string);
-							try self.chunk.emit_operand(interned_identifier);
-						},
-
-						.symbol_comma => {
-							try self.chunk.emit_opcode(.push_string);
-							try self.chunk.emit_operand(interned_identifier);
-						},
-
-						.symbol_brace_right => {
-							try self.chunk.emit_opcode(.push_string);
-							try self.chunk.emit_operand(interned_identifier);
-							try self.chunk.emit_opcode(.push_table);
-							try self.chunk.emit_operand(field_count);
-
-							return;
-						},
-
-						else => return self.error_unexpected_token(operation_token),
-					}
-				},
-
-				.symbol_brace_right => {
-					try self.chunk.emit_opcode(.push_table);
-					try self.chunk.emit_operand(field_count);
-
-					return;
-				},
-
-				else => return self.error_unexpected_token(field_token),
-			}
-		}
-
-		return self.error_unexpected_end();
-	}
-
-	fn resolve_local(self: *Parser, name: []const u8) ?u8 {
-		var count = @as(u8, self.locals.buffer.len);
-
-		while (count != 0) {
-			const index = count - 1;
-
-			if (coral.io.equals(name, self.locals.buffer[index].name)) return index;
-
-			count = index;
-		}
-
-		return null;
-	}
+						else => {
+							try self.parse_expression();
+							try self.previous_token.expect(.symbol_paren_right);
+							try self.step();
+
+							local_depth = 0;
+
+							continue;
+						},
+					}
+
+					local_depth += 1;
+
+					var argument_count = @as(Operand, 0);
+
+					while (true) {
+						try self.step();
+
+						try switch (self.current_token) {
+							.symbol_paren_right => break,
+							else => self.parse_expression(),
+						};
+
+						switch (self.previous_token) {
+							.symbol_paren_right => break,
+							.symbol_comma => {},
+							else => return error.UnexpectedToken,
+						}
+
+						argument_count += 1;
+					}
+
+					try self.chunk.emit_opcode(.call);
+					try self.chunk.emit_operand(argument_count);
+					try self.step();
+
+					local_depth = 0;
+				},
+
+				.symbol_brace_left => {
+					const is_call_argument = switch (self.previous_token) {
+						.local_identifier, .global_identifier => true,
+						else => false,
+					};
+
+					var field_count = @as(Operand, 0);
+
+					while (true) {
+						try self.step();
+
+						switch (self.current_token) {
+							.newline => {},
+
+							.local_identifier => {
+								// Create local copy of identifier because step() will overwrite captures.
+								const interned_identifier =
+									try self.chunk.intern_string(self.current_token.local_identifier);
+
+								try self.chunk.emit_opcode(.push_string);
+								try self.chunk.emit_operand(interned_identifier);
+								try self.step();
+
+								switch (self.current_token) {
+									.symbol_assign => {
+										try self.parse_expression();
+
+										field_count += 1;
+									},
+
+									.symbol_brace_right => {
+										try self.chunk.emit_opcode(.push_string);
+										try self.chunk.emit_operand(interned_identifier);
+
+										field_count += 1;
+
+										break;
+									},
+
+									.symbol_comma => {
+										try self.chunk.emit_opcode(.push_string);
+										try self.chunk.emit_operand(interned_identifier);
+
+										field_count += 1;
+									},
+
+									else => return error.UnexpectedToken,
+								}
+							},
+
+							.symbol_brace_right => break,
+							else => return error.UnexpectedToken,
+						}
+					}
+
+					if (is_call_argument) {
+						try self.chunk.emit_opcode(.call);
+						try self.chunk.emit_operand(1);
+					}
+				},
+
+				else => {
+					try self.previous_token.expect_any(&.{.keyword_nil, .keyword_true, .keyword_false, .integer_literal,
+						.real_literal, .string_literal, .global_identifier, .local_identifier, .symbol_brace_right,
+						.symbol_paren_right});
+
+					while (operators.pop()) |operator| {
+						try self.chunk.emit_opcode(operator.opcode());
+					}
+
+					return;
+				},
+			}
+		}
+	}
+
+	fn parse_operator(self: *Parser, operators: *OperatorStack, rhs_operator: Operator) ParseError!void {
+		try self.previous_token.expect_any(operator_tokens);
+
+		while (operators.pop()) |lhs_operator| {
+			if (rhs_operator.precedence() < lhs_operator.precedence()) break try operators.push(lhs_operator);
+
+			try self.chunk.emit_opcode(lhs_operator.opcode());
+		}
+
+		try operators.push(rhs_operator);
+		try self.step();
+	}
+
+	fn parse_statement(self: *Parser) ParseError!void {
+		var local_depth = @as(usize, 0);
+
+		while (true) {
+			switch (self.current_token) {
+				.newline => self.step() catch |step_error| switch (step_error) {
+					error.UnexpectedEnd => return,
+				},
+
+				.keyword_return => {
+					try self.previous_token.expect(.newline);
+
+					self.step() catch |step_error| switch (step_error) {
+						error.UnexpectedEnd => return,
+					};
+
+					try self.parse_expression();
+
+					while (true) {
+						self.step() catch |step_error| switch (step_error) {
+							error.UnexpectedEnd => return,
+						};
+
+						try self.current_token.expect(.newline);
+					}
+				},
+
+				.local_identifier => {
+					try self.previous_token.expect_any(&.{.newline, .symbol_period});
+					try self.step();
+				},
+
+				.global_identifier => {
+					try self.previous_token.expect(.newline);
+					try self.step();
+				},
+
+				.symbol_period => switch (self.previous_token) {
+					.global_identifier => {
+						// Create local copy of identifier because step() will overwrite captures.
+						const identifier = self.previous_token.local_identifier;
+
+						try self.step();
+						try self.current_token.expect(.local_identifier);
+						try self.chunk.emit_opcode(.get_global);
+						try self.chunk.emit_operand(try self.chunk.intern_string(identifier));
+
+						local_depth += 1;
+					},
+
+					.local_identifier => {
+						// Create local copy of identifier because step() will overwrite captures.
+						const identifier = self.previous_token.global_identifier;
+
+						try self.step();
+						try self.current_token.expect(.local_identifier);
+
+						if (local_depth == 0) {
+							try self.chunk.emit_byte(self.chunk.resolve_local(identifier) orelse {
+								return error.UndefinedLocal;
+							});
+						} else {
+							try self.chunk.emit_opcode(.get_index);
+							try self.chunk.emit_operand(try self.chunk.intern_string(identifier));
+						}
+
+						local_depth += 1;
+					},
+
+					else => return error.UnexpectedToken,
+				},
+
+				.symbol_assign => {
+					try self.previous_token.expect(.local_identifier);
+
+					const identifier = self.previous_token.local_identifier;
+
+					if (local_depth == 0) {
+						if (self.chunk.resolve_local(identifier)) |local_slot| {
+							try self.chunk.emit_opcode(.set_local);
+							try self.chunk.emit_byte(local_slot);
+						} else {
+							try self.chunk.declare_local(identifier);
+						}
+					} else {
+						try self.chunk.emit_opcode(.set_index);
+						try self.chunk.emit_operand(try self.chunk.intern_string(identifier));
+					}
+
+					try self.step();
+					try self.parse_expression();
+
+					local_depth = 0;
+				},
+
+				.symbol_paren_left => {
+					switch (self.previous_token) {
+						.local_identifier => |identifier| {
+							if (local_depth == 0) {
+								try self.chunk.emit_byte(self.chunk.resolve_local(identifier) orelse {
+									return error.UndefinedLocal;
+								});
+							} else {
+								try self.chunk.emit_opcode(.get_index);
+								try self.chunk.emit_operand(try self.chunk.intern_string(identifier));
+							}
+						},

+						.global_identifier => |identifier| {
+							try self.chunk.emit_opcode(.get_global);
+							try self.chunk.emit_operand(try self.chunk.intern_string(identifier));
+						},
+
+						else => return error.UnexpectedToken,
+					}
+
+					var argument_count = @as(Operand, 0);
+
+					while (true) {
+						try self.step();
+
+						try switch (self.current_token) {
+							.symbol_paren_right => break,
+							else => self.parse_expression(),
+						};
+
+						argument_count += 1;
+
+						switch (self.current_token) {
+							.symbol_paren_right => break,
+							.symbol_comma => {},
+							else => return error.UnexpectedToken,
+						}
+					}
+
+					try self.chunk.emit_opcode(.call);
+					try self.chunk.emit_operand(argument_count);
+					try self.chunk.emit_opcode(.pop);
+
+					self.step() catch |step_error| switch (step_error) {
+						error.UnexpectedEnd => return,
+					};
+
+					local_depth = 0;
+				},
+
+				else => return error.UnexpectedToken,
+			}
+		}
+	}
+
+	fn step(self: *Parser) StepError!void {
+		self.previous_token = self.current_token;
+		self.current_token = self.tokenizer.next() orelse return error.UnexpectedEnd;
+
+		@import("std").debug.print("{s}\n", .{self.current_token.text()});
+	}
 };
@@ -536,17 +725,13 @@ fn SmallStack(comptime Element: type, comptime default: Element) type {
 			return self.buffer[self.count];
 		}

-		fn push(self: *Self, local: Element) !void {
+		fn push(self: *Self, element: Element) !void {
 			if (self.count == maximum) return error.OutOfMemory;

-			self.buffer[self.count] = local;
+			self.buffer[self.count] = element;
 			self.count += 1;
 		}
 	};
 }

 const SymbolTable = coral.table.Hashed(coral.table.string_key, usize);

-const SyntaxError = error{
-	BadSyntax,
-};


@@ -144,10 +144,7 @@ pub const Vm = struct {
 		}
 	},

-	pub const CompileError = error {
-		BadSyntax,
-		OutOfMemory,
-	};
+	pub const CompileError = bytecode.ParseError;

 	const HeapAllocation = union(enum) {
 		next_free: u32,


@@ -33,6 +33,22 @@ pub const Token = union(enum) {
 	keyword_return,
 	keyword_self,

+	pub const ExpectError = error {
+		UnexpectedToken,
+	};
+
+	pub fn expect(self: Token, tag: coral.io.Tag(Token)) ExpectError!void {
+		if (self != tag) return error.UnexpectedToken;
+	}
+
+	pub fn expect_any(self: Token, tags: []const coral.io.Tag(Token)) ExpectError!void {
+		for (tags) |tag| {
+			if (self == tag) return;
+		}
+
+		return error.UnexpectedToken;
+	}
+
 	pub fn text(self: Token) []const u8 {
 		return switch (self) {
 			.unknown => |unknown| @ptrCast([*]const u8, &unknown)[0 .. 1],
@@ -64,6 +80,7 @@ pub const Token = union(enum) {
 			.keyword_false => "false",
 			.keyword_true => "true",
 			.keyword_return => "return",
+			.keyword_self => "self",
 		};
 	}
 };
@@ -72,8 +89,12 @@ pub const Tokenizer = struct {
 	source: []const u8,
 	cursor: usize = 0,

+	pub fn has_next(self: Tokenizer) bool {
+		return self.cursor < self.source.len;
+	}
+
 	pub fn next(self: *Tokenizer) ?Token {
-		while (self.cursor < self.source.len) switch (self.source[self.cursor]) {
+		while (self.has_next()) switch (self.source[self.cursor]) {
 			' ', '\t' => self.cursor += 1,

 			'\n' => {
@@ -87,13 +108,13 @@ pub const Tokenizer = struct {
 				self.cursor += 1;

-				while (self.cursor < self.source.len) switch (self.source[self.cursor]) {
+				while (self.has_next()) switch (self.source[self.cursor]) {
 					'0' ... '9' => self.cursor += 1,

 					'.' => {
 						self.cursor += 1;

-						while (self.cursor < self.source.len) switch (self.source[self.cursor]) {
+						while (self.has_next()) switch (self.source[self.cursor]) {
 							'0' ... '9' => self.cursor += 1,
 							else => break,
 						};
@@ -136,13 +157,13 @@ pub const Tokenizer = struct {
 			'@' => {
 				self.cursor += 1;

-				if (self.cursor < self.source.len) switch (self.source[self.cursor]) {
+				if (self.has_next()) switch (self.source[self.cursor]) {
 					'A'...'Z', 'a'...'z', '_' => {
 						const begin = self.cursor;

 						self.cursor += 1;

-						while (self.cursor < self.source.len) switch (self.source[self.cursor]) {
+						while (self.has_next()) switch (self.source[self.cursor]) {
 							'0'...'9', 'A'...'Z', 'a'...'z', '_' => self.cursor += 1,
 							else => break,
 						};
@@ -157,7 +178,7 @@ pub const Tokenizer = struct {
 						self.cursor += 1;

-						while (self.cursor < self.source.len) switch (self.source[self.cursor]) {
+						while (self.has_next()) switch (self.source[self.cursor]) {
 							'"' => break,
 							else => self.cursor += 1,
 						};
@@ -180,7 +201,7 @@ pub const Tokenizer = struct {
 				self.cursor += 1;

-				while (self.cursor < self.source.len) switch (self.source[self.cursor]) {
+				while (self.has_next()) switch (self.source[self.cursor]) {
 					'"' => break,
 					else => self.cursor += 1,
 				};
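For orientation only, a minimal sketch of how the tokenizer and the new expect/expect_any helpers shown above are meant to be driven; this is not part of the commit, and the `tokens` import path, the test name, and the use of `std.testing` are assumptions for illustration:

const std = @import("std");
const tokens = @import("tokens.zig"); // assumed path to the tokenizer module

test "expect helpers accept listed token tags" {
	var tokenizer = tokens.Tokenizer{.source = "x = 10 + 2\n"};

	// Walk the token stream the way the parser's step() loop does.
	while (tokenizer.next()) |token| {
		// Each token is either a newline or one of the tags an expression may contain.
		token.expect(.newline) catch {
			try token.expect_any(&.{
				.local_identifier, .integer_literal,
				.symbol_assign, .symbol_plus,
			});
		};
	}

	// has_next() reports false once the cursor has consumed the whole source.
	try std.testing.expect(!tokenizer.has_next());
}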