Implement Control Flow Statements in Kym #37
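For context, the hunks below rename the `Ast` module to `ast` and move the parser state behind `ast.Tree`; `parse_statements` accepts `if <expression> do ... end` blocks. A minimal host-side sketch of the renamed API follows (not part of this change; the script contents, the `print` identifier, `example_source`, `try_parse`, and the "example.kym" name are illustrative placeholders, and the import path is taken from the runtime hunk):

const ast = @import("./kym/ast.zig");
const coral = @import("coral");

// Kym source using the control-flow shape accepted by parse_statements:
// `if <expression> do <statements> end`, plus a trailing `return`.
// The script body and the `print` identifier are placeholders.
const example_source =
    \\if 1 < 2 do
    \\    print("branch taken")
    \\end
    \\
    \\return nil
;

// Minimal sketch of the renamed ast.Tree API; "example.kym" is a placeholder name.
fn try_parse(allocator: coral.io.Allocator) ast.ParseError!void {
    var tree = ast.Tree.make(allocator, "example.kym");

    defer tree.free();

    // On error.BadSyntax, tree.error_message() holds the "<name>@<line>: <message>"
    // text written by Tree.report(); Chunk.compile consumes the returned statement list.
    _ = try tree.parse(example_source);
}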
@@ -1,4 +1,4 @@
-const Ast = @import("./kym/Ast.zig");
+const ast = @import("./kym/ast.zig");
 
 const coral = @import("coral");
 
@@ -72,7 +72,7 @@ pub const RuntimeEnv = struct {
 local_identifiers_buffer: [255][]const coral.io.Byte = [_][]const coral.io.Byte{""} ** 255,
 local_identifiers_count: u8 = 0,
 
-fn compile_expression(self: *CompilationUnit, chunk: *Chunk, expression: Ast.Expression) RuntimeError!void {
+fn compile_expression(self: *CompilationUnit, chunk: *Chunk, expression: ast.Expression) RuntimeError!void {
 const number_format = coral.utf8.DecimalFormat{
 .delimiter = "_",
 .positive_prefix = .none,
@@ -258,7 +258,7 @@ pub const RuntimeEnv = struct {
 }
 }
 
-fn compile_statement(self: *CompilationUnit, chunk: *Chunk, statement: Ast.Statement) RuntimeError!void {
+fn compile_statement(self: *CompilationUnit, chunk: *Chunk, statement: ast.Statement) RuntimeError!void {
 switch (statement) {
 .@"return" => |@"return"| {
 if (@"return".expression) |expression| {
@@ -307,7 +307,7 @@ pub const RuntimeEnv = struct {
 }
 };
 
-fn compile(self: *Chunk, statements: *const Ast.Statement.List) RuntimeError!void {
+fn compile(self: *Chunk, statements: *const ast.StatementList) RuntimeError!void {
 var unit = CompilationUnit{};
 
 for (statements.values) |statement| {
@@ -1092,12 +1092,12 @@ pub const RuntimeEnv = struct {
 
 defer chunk.free();
 
-var ast = Ast.make(self.allocator, file_name);
+var ast_tree = ast.Tree.make(self.allocator, file_name);
 
-defer ast.free();
+defer ast_tree.free();
 
-try chunk.compile(ast.parse(file_data) catch |parse_error| return switch (parse_error) {
-error.BadSyntax => self.raise(error.BadSyntax, ast.error_message()),
+try chunk.compile(ast_tree.parse(file_data) catch |parse_error| return switch (parse_error) {
+error.BadSyntax => self.raise(error.BadSyntax, ast_tree.error_message()),
 error.OutOfMemory => error.OutOfMemory,
 });
 
@@ -1,598 +0,0 @@
|
|||
const coral = @import("coral");
|
||||
|
||||
const tokens = @import("./tokens.zig");
|
||||
|
||||
name: []const coral.io.Byte,
|
||||
allocator: coral.io.Allocator,
|
||||
arena: coral.arena.Stacking,
|
||||
error_buffer: coral.list.ByteStack,
|
||||
tokenizer: tokens.Tokenizer,
|
||||
parsed_statements: Statement.List,
|
||||
has_returned: bool,
|
||||
|
||||
pub const Expression = union (enum) {
|
||||
nil_literal,
|
||||
true_literal,
|
||||
false_literal,
|
||||
builtin: []const coral.io.Byte,
|
||||
number_literal: []const coral.io.Byte,
|
||||
string_literal: []const coral.io.Byte,
|
||||
symbol_literal: []const coral.io.Byte,
|
||||
table_literal: TableLiteral,
|
||||
grouped_expression: *Expression,
|
||||
local_get: []const coral.io.Byte,
|
||||
local_set: []const coral.io.Byte,
|
||||
|
||||
field_get: struct {
|
||||
object_expression: *Expression,
|
||||
identifier: []const coral.io.Byte,
|
||||
},
|
||||
|
||||
field_set: struct {
|
||||
object_expression: *Expression,
|
||||
identifier: []const coral.io.Byte,
|
||||
value_expression: *Expression,
|
||||
},
|
||||
|
||||
subscript_get: struct {
|
||||
object_expression: *Expression,
|
||||
subscript_expression: *Expression,
|
||||
},
|
||||
|
||||
subscript_set: struct {
|
||||
object_expression: *Expression,
|
||||
subscript_expression: *Expression,
|
||||
value_expression: *Expression,
|
||||
},
|
||||
|
||||
binary_operation: struct {
|
||||
operator: BinaryOperator,
|
||||
lhs_expression: *Expression,
|
||||
rhs_expression: *Expression,
|
||||
},
|
||||
|
||||
unary_operation: struct {
|
||||
operator: UnaryOperator,
|
||||
expression: *Expression,
|
||||
},
|
||||
|
||||
invoke: struct {
|
||||
object_expression: *Expression,
|
||||
argument_expressions: List,
|
||||
},
|
||||
|
||||
pub const BinaryOperator = enum {
|
||||
addition,
|
||||
subtraction,
|
||||
multiplication,
|
||||
divsion,
|
||||
equals_comparison,
|
||||
greater_than_comparison,
|
||||
greater_equals_comparison,
|
||||
less_than_comparison,
|
||||
less_equals_comparison,
|
||||
|
||||
fn token(self: BinaryOperator) tokens.Token {
|
||||
return switch (self) {
|
||||
.addition => .symbol_plus,
|
||||
.subtraction => .symbol_minus,
|
||||
.multiplication => .symbol_asterisk,
|
||||
.divsion => .symbol_forward_slash,
|
||||
.equals_comparison => .symbol_double_equals,
|
||||
.greater_than_comparison => .symbol_greater_than,
|
||||
.greater_equals_comparison => .symbol_greater_equals,
|
||||
.less_than_comparison => .symbol_less_than,
|
||||
.less_equals_comparison => .symbol_less_equals,
|
||||
};
|
||||
}
|
||||
};
|
||||
|
||||
pub const TableLiteral = coral.list.Stack(struct {
|
||||
key_expression: Expression,
|
||||
value_expression: Expression,
|
||||
});
|
||||
|
||||
pub const List = coral.list.Stack(Expression);
|
||||
};
|
||||
|
||||
const ExpressionParser = fn (self: *Self) ParseError!Expression;
|
||||
|
||||
pub const ParseError = error {
|
||||
OutOfMemory,
|
||||
BadSyntax,
|
||||
};
|
||||
|
||||
const Self = @This();
|
||||
|
||||
pub const Statement = union (enum) {
|
||||
@"return": struct {
|
||||
expression: ?Expression,
|
||||
},
|
||||
|
||||
@"if": struct {
|
||||
condition_expression: Expression,
|
||||
block_statements: List,
|
||||
},
|
||||
|
||||
expression: Expression,
|
||||
|
||||
pub const List = coral.list.Stack(Statement);
|
||||
};
|
||||
|
||||
const UnaryOperator = enum {
|
||||
boolean_negation,
|
||||
numeric_negation,
|
||||
};
|
||||
|
||||
fn binary_operation_parser(
|
||||
comptime parse_next: ExpressionParser,
|
||||
comptime operators: []const Expression.BinaryOperator) ExpressionParser {
|
||||
|
||||
const BinaryOperationParser = struct {
|
||||
fn parse(self: *Self) ParseError!Expression {
|
||||
const allocator = self.arena.as_allocator();
|
||||
var expression = try parse_next(self);
|
||||
|
||||
inline for (operators) |operator| {
|
||||
const token = comptime operator.token();
|
||||
|
||||
if (self.tokenizer.token == coral.io.tag_of(token)) {
|
||||
self.tokenizer.step();
|
||||
|
||||
if (self.tokenizer.token == .end) {
|
||||
return self.report("expected other half of expression after `" ++ comptime token.text() ++ "`");
|
||||
}
|
||||
|
||||
expression = .{
|
||||
.binary_operation = .{
|
||||
.operator = operator,
|
||||
.lhs_expression = try coral.io.allocate_one(allocator, expression),
|
||||
.rhs_expression = try coral.io.allocate_one(allocator, try parse_next(self)),
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
return expression;
|
||||
}
|
||||
};
|
||||
|
||||
return BinaryOperationParser.parse;
|
||||
}
|
||||
|
||||
pub fn error_message(self: Self) []const coral.io.Byte {
|
||||
return self.error_buffer.values;
|
||||
}
|
||||
|
||||
pub fn free(self: *Self) void {
|
||||
self.parsed_statements.free();
|
||||
self.error_buffer.free();
|
||||
self.arena.free();
|
||||
}
|
||||
|
||||
pub fn make(allocator: coral.io.Allocator, ast_name: []const coral.io.Byte) Self {
|
||||
return Self{
|
||||
.arena = coral.arena.Stacking.make(allocator, 4096),
|
||||
.error_buffer = coral.list.ByteStack.make(allocator),
|
||||
.parsed_statements = Statement.List.make(allocator),
|
||||
.tokenizer = .{.source = ""},
|
||||
.allocator = allocator,
|
||||
.name = ast_name,
|
||||
.has_returned = false,
|
||||
};
|
||||
}
|
||||
|
||||
fn report(self: *Self, message: []const coral.io.Byte) ParseError {
|
||||
coral.utf8.print_formatted(coral.list.stack_as_writer(&self.error_buffer), "{name}@{line}: {message}", .{
|
||||
.name = self.name,
|
||||
.line = self.tokenizer.lines_stepped,
|
||||
.message = message,
|
||||
}) catch return error.OutOfMemory;
|
||||
|
||||
return error.BadSyntax;
|
||||
}
|
||||
|
||||
pub fn parse(self: *Self, data: []const coral.io.Byte) ParseError!*const Statement.List {
|
||||
self.tokenizer = .{.source = data};
|
||||
self.has_returned = false;
|
||||
|
||||
self.parsed_statements.free();
|
||||
|
||||
self.parsed_statements = try self.parse_statements();
|
||||
|
||||
if (self.tokenizer.token == .keyword_end) {
|
||||
return self.report("unexpected `end` without matching `do` block");
|
||||
}
|
||||
|
||||
return &self.parsed_statements;
|
||||
}
|
||||
|
||||
const parse_additive = binary_operation_parser(parse_equality, &.{
|
||||
.addition,
|
||||
.subtraction,
|
||||
});
|
||||
|
||||
const parse_comparison = binary_operation_parser(parse_term, &.{
|
||||
.greater_than_comparison,
|
||||
.greater_equals_comparison,
|
||||
.less_than_comparison,
|
||||
.less_equals_comparison
|
||||
});
|
||||
|
||||
const parse_equality = binary_operation_parser(parse_comparison, &.{
|
||||
.equals_comparison,
|
||||
});
|
||||
|
||||
pub fn parse_expression(self: *Self) ParseError!Expression {
|
||||
const allocator = self.arena.as_allocator();
|
||||
const expression = try parse_additive(self);
|
||||
|
||||
if (self.tokenizer.token == .symbol_equals) {
|
||||
self.tokenizer.skip_newlines();
|
||||
|
||||
if (self.tokenizer.token == .end) {
|
||||
return self.report("expected assignment after `=`");
|
||||
}
|
||||
|
||||
return switch (expression) {
|
||||
.local_get => |local_get| .{.local_set = local_get},
|
||||
|
||||
.field_get => |field_get| .{
|
||||
.field_set = .{
|
||||
.object_expression = field_get.object_expression,
|
||||
.identifier = field_get.identifier,
|
||||
.value_expression = try coral.io.allocate_one(allocator, try self.parse_expression()),
|
||||
},
|
||||
},
|
||||
|
||||
.subscript_get => |subscript_get| .{
|
||||
.subscript_set = .{
|
||||
.object_expression = subscript_get.object_expression,
|
||||
.subscript_expression = subscript_get.subscript_expression,
|
||||
.value_expression = try coral.io.allocate_one(allocator, try self.parse_expression()),
|
||||
},
|
||||
},
|
||||
|
||||
else => self.report("expected local or field on left-hand side of expression"),
|
||||
};
|
||||
}
|
||||
|
||||
return expression;
|
||||
}
|
||||
|
||||
fn parse_factor(self: *Self) ParseError!Expression {
|
||||
const allocator = self.arena.as_allocator();
|
||||
|
||||
var expression = @as(Expression, parse: {
|
||||
switch (self.tokenizer.token) {
|
||||
.symbol_paren_left => {
|
||||
self.tokenizer.skip_newlines();
|
||||
|
||||
if (self.tokenizer.token == .end) {
|
||||
return self.report("expected an expression after `(`");
|
||||
}
|
||||
|
||||
const expression = try self.parse_expression();
|
||||
|
||||
if (self.tokenizer.token != .symbol_paren_right) {
|
||||
return self.report("expected a closing `)` after expression");
|
||||
}
|
||||
|
||||
self.tokenizer.skip_newlines();
|
||||
|
||||
break: parse .{.grouped_expression = try coral.io.allocate_one(allocator, expression)};
|
||||
},
|
||||
|
||||
.keyword_nil => {
|
||||
self.tokenizer.skip_newlines();
|
||||
|
||||
break: parse .nil_literal;
|
||||
},
|
||||
|
||||
.keyword_true => {
|
||||
self.tokenizer.skip_newlines();
|
||||
|
||||
break: parse .true_literal;
|
||||
},
|
||||
|
||||
.keyword_false => {
|
||||
self.tokenizer.skip_newlines();
|
||||
|
||||
break: parse .false_literal;
|
||||
},
|
||||
|
||||
.number => |value| {
|
||||
self.tokenizer.skip_newlines();
|
||||
|
||||
break: parse .{.number_literal = value};
|
||||
},
|
||||
|
||||
.string => |value| {
|
||||
self.tokenizer.skip_newlines();
|
||||
|
||||
break: parse .{.string_literal = value};
|
||||
},
|
||||
|
||||
.identifier => |identifier| {
|
||||
self.tokenizer.skip_newlines();
|
||||
|
||||
break: parse .{.local_get = identifier};
|
||||
},
|
||||
|
||||
.builtin => |builtin| {
|
||||
self.tokenizer.skip_newlines();
|
||||
|
||||
break: parse .{.builtin = builtin};
|
||||
},
|
||||
|
||||
.symbol_brace_left => {
|
||||
var table_literal = Expression.TableLiteral.make(allocator);
|
||||
|
||||
self.tokenizer.skip_newlines();
|
||||
|
||||
while (true) {
|
||||
switch (self.tokenizer.token) {
|
||||
.symbol_brace_right => {
|
||||
self.tokenizer.skip_newlines();
|
||||
|
||||
break: parse .{.table_literal = table_literal};
|
||||
},
|
||||
|
||||
.symbol_period => {
|
||||
self.tokenizer.step();
|
||||
|
||||
const identifier = switch (self.tokenizer.token) {
|
||||
.identifier => |identifier| identifier,
|
||||
else => return self.report("expected identifier after `.`"),
|
||||
};
|
||||
|
||||
self.tokenizer.skip_newlines();
|
||||
|
||||
if (self.tokenizer.token != .symbol_equals) {
|
||||
return self.report("expected `=` after symbol");
|
||||
}
|
||||
|
||||
self.tokenizer.skip_newlines();
|
||||
|
||||
try table_literal.push_one(.{
|
||||
.value_expression = try self.parse_expression(),
|
||||
.key_expression = .{.symbol_literal = identifier},
|
||||
});
|
||||
|
||||
switch (self.tokenizer.token) {
|
||||
.symbol_comma => self.tokenizer.skip_newlines(),
|
||||
|
||||
.symbol_brace_right => {
|
||||
self.tokenizer.skip_newlines();
|
||||
|
||||
break: parse .{.table_literal = table_literal};
|
||||
},
|
||||
|
||||
else => return self.report("expected `,` or `}` after expression"),
|
||||
}
|
||||
},
|
||||
|
||||
.symbol_bracket_left => {
|
||||
self.tokenizer.step();
|
||||
|
||||
const subscript_expression = try self.parse_expression();
|
||||
|
||||
if (self.tokenizer.token != .symbol_bracket_right) {
|
||||
return self.report("expected `]` after subscript expression");
|
||||
}
|
||||
|
||||
self.tokenizer.skip_newlines();
|
||||
|
||||
if (self.tokenizer.token != .symbol_equals) {
|
||||
return self.report("expected `=` after `]`");
|
||||
}
|
||||
|
||||
self.tokenizer.skip_newlines();
|
||||
|
||||
try table_literal.push_one(.{
|
||||
.value_expression = try self.parse_expression(),
|
||||
.key_expression = subscript_expression,
|
||||
});
|
||||
|
||||
switch (self.tokenizer.token) {
|
||||
.symbol_comma => self.tokenizer.skip_newlines(),
|
||||
|
||||
.symbol_brace_right => {
|
||||
self.tokenizer.skip_newlines();
|
||||
|
||||
break: parse .{.table_literal = table_literal};
|
||||
},
|
||||
|
||||
else => return self.report("expected `,` or `}` after expression"),
|
||||
}
|
||||
},
|
||||
|
||||
else => return self.report("expected `}` or fields in table literal"),
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
.symbol_minus => {
|
||||
self.tokenizer.skip_newlines();
|
||||
|
||||
if (self.tokenizer.token == .end) {
|
||||
return self.report("expected expression after numeric negation (`-`)");
|
||||
}
|
||||
|
||||
break: parse .{
|
||||
.unary_operation = .{
|
||||
.expression = try coral.io.allocate_one(allocator, try self.parse_factor()),
|
||||
.operator = .numeric_negation,
|
||||
},
|
||||
};
|
||||
},
|
||||
|
||||
.symbol_bang => {
|
||||
self.tokenizer.skip_newlines();
|
||||
|
||||
if (self.tokenizer.token == .end) {
|
||||
return self.report("expected expression after boolean negation (`!`)");
|
||||
}
|
||||
|
||||
break: parse .{
|
||||
.unary_operation = .{
|
||||
.expression = try coral.io.allocate_one(allocator, try self.parse_factor()),
|
||||
.operator = .boolean_negation,
|
||||
},
|
||||
};
|
||||
},
|
||||
|
||||
else => return self.report("unexpected token in expression"),
|
||||
}
|
||||
});
|
||||
|
||||
while (true) {
|
||||
switch (self.tokenizer.token) {
|
||||
.symbol_period => {
|
||||
self.tokenizer.skip_newlines();
|
||||
|
||||
// TODO: Remove when Zig fixes miscompilation with in-place struct re-assignment.
|
||||
const unnecessary_temp = try coral.io.allocate_one(allocator, expression);
|
||||
|
||||
expression = .{
|
||||
.field_get = .{
|
||||
.identifier = switch (self.tokenizer.token) {
|
||||
.identifier => |field_identifier| field_identifier,
|
||||
else => return self.report("expected identifier after `.`"),
|
||||
},
|
||||
|
||||
.object_expression = unnecessary_temp,
|
||||
},
|
||||
};
|
||||
|
||||
self.tokenizer.skip_newlines();
|
||||
},
|
||||
|
||||
.symbol_bracket_left => {
|
||||
self.tokenizer.skip_newlines();
|
||||
|
||||
// TODO: Remove when Zig fixes miscompilation with in-place struct re-assignment.
|
||||
const unnecessary_temp = try coral.io.allocate_one(allocator, expression);
|
||||
|
||||
expression = .{
|
||||
.subscript_get = .{
|
||||
.subscript_expression = try coral.io.allocate_one(allocator, try self.parse_expression()),
|
||||
.object_expression = unnecessary_temp,
|
||||
},
|
||||
};
|
||||
|
||||
if (self.tokenizer.token != .symbol_bracket_right) {
|
||||
return self.report("expected `]` after subscript expression");
|
||||
}
|
||||
|
||||
self.tokenizer.skip_newlines();
|
||||
},
|
||||
|
||||
.symbol_paren_left => {
|
||||
var argument_expressions = Expression.List.make(allocator);
|
||||
|
||||
while (true) {
|
||||
self.tokenizer.skip_newlines();
|
||||
|
||||
switch (self.tokenizer.token) {
|
||||
.symbol_paren_right => break,
|
||||
|
||||
else => {
|
||||
try argument_expressions.push_one(try self.parse_expression());
|
||||
|
||||
switch (self.tokenizer.token) {
|
||||
.symbol_comma => continue,
|
||||
.symbol_paren_right => break,
|
||||
else => return self.report("expected `,` or `)` after function argument expression"),
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
self.tokenizer.skip_newlines();
|
||||
|
||||
// TODO: Remove when Zig fixes miscompilation with in-place struct re-assignment.
|
||||
const unnecessary_temp = try coral.io.allocate_one(allocator, expression);
|
||||
|
||||
expression = .{
|
||||
.invoke = .{
|
||||
.argument_expressions = argument_expressions,
|
||||
.object_expression = unnecessary_temp,
|
||||
},
|
||||
};
|
||||
},
|
||||
|
||||
else => break,
|
||||
}
|
||||
}
|
||||
|
||||
return expression;
|
||||
}
|
||||
|
||||
fn parse_statements(self: *Self) ParseError!Statement.List {
|
||||
var statements = Statement.List.make(self.arena.as_allocator());
|
||||
|
||||
self.tokenizer.skip_newlines();
|
||||
|
||||
while (true) {
|
||||
try statements.push_one(parse_statement: {
|
||||
switch (self.tokenizer.token) {
|
||||
.end, .keyword_end => return statements,
|
||||
|
||||
.keyword_return => {
|
||||
if (self.has_returned) {
|
||||
return self.report("multiple returns in function scope but expected only one");
|
||||
}
|
||||
|
||||
self.tokenizer.step();
|
||||
|
||||
if (self.tokenizer.token != .end and self.tokenizer.token != .newline) {
|
||||
break: parse_statement .{.@"return" = .{.expression = try self.parse_expression()}};
|
||||
}
|
||||
|
||||
if (self.tokenizer.token != .end and self.tokenizer.token != .newline) {
|
||||
return self.report("expected end or newline after return statement");
|
||||
}
|
||||
|
||||
self.has_returned = true;
|
||||
|
||||
break: parse_statement .{.@"return" = .{.expression = null}};
|
||||
},
|
||||
|
||||
.keyword_if => {
|
||||
self.tokenizer.step();
|
||||
|
||||
const condition_expression = try self.parse_expression();
|
||||
|
||||
if (self.tokenizer.token != .keyword_do) {
|
||||
return self.report("expected `do` block after if statement");
|
||||
}
|
||||
|
||||
self.tokenizer.step();
|
||||
|
||||
const if_statement = Statement{
|
||||
.@"if" = .{
|
||||
.block_statements = try self.parse_statements(),
|
||||
.condition_expression = condition_expression,
|
||||
},
|
||||
};
|
||||
|
||||
if (self.tokenizer.token != .keyword_end) {
|
||||
return self.report("expected `end` after block");
|
||||
}
|
||||
|
||||
self.tokenizer.skip_newlines();
|
||||
|
||||
break: parse_statement if_statement;
|
||||
},
|
||||
|
||||
else => break: parse_statement .{.expression = try self.parse_expression()},
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const parse_term = binary_operation_parser(parse_factor, &.{
|
||||
.multiplication,
|
||||
.divsion,
|
||||
});
|
@@ -0,0 +1,594 @@
|
|||
const coral = @import("coral");
|
||||
|
||||
const tokens = @import("./tokens.zig");
|
||||
|
||||
pub const BinaryOperator = enum {
|
||||
addition,
|
||||
subtraction,
|
||||
multiplication,
|
||||
divsion,
|
||||
equals_comparison,
|
||||
greater_than_comparison,
|
||||
greater_equals_comparison,
|
||||
less_than_comparison,
|
||||
less_equals_comparison,
|
||||
|
||||
fn builder(comptime build_next: ExpressionBuilder, comptime operators: []const BinaryOperator) ExpressionBuilder {
|
||||
const Builder = struct {
|
||||
fn build(self: *Tree) ParseError!Expression {
|
||||
const allocator = self.arena.as_allocator();
|
||||
var expression = try build_next(self);
|
||||
|
||||
inline for (operators) |operator| {
|
||||
const token = @as(tokens.Token, switch (operator) {
|
||||
.addition => .symbol_plus,
|
||||
.subtraction => .symbol_minus,
|
||||
.multiplication => .symbol_asterisk,
|
||||
.divsion => .symbol_forward_slash,
|
||||
.equals_comparison => .symbol_double_equals,
|
||||
.greater_than_comparison => .symbol_greater_than,
|
||||
.greater_equals_comparison => .symbol_greater_equals,
|
||||
.less_than_comparison => .symbol_less_than,
|
||||
.less_equals_comparison => .symbol_less_equals,
|
||||
});
|
||||
|
||||
if (self.tokenizer.token == coral.io.tag_of(token)) {
|
||||
self.tokenizer.step();
|
||||
|
||||
if (self.tokenizer.token == .end) {
|
||||
return self.report(
|
||||
"expected other half of expression after `" ++
|
||||
comptime token.text() ++
|
||||
"`");
|
||||
}
|
||||
|
||||
expression = .{
|
||||
.binary_operation = .{
|
||||
.operator = operator,
|
||||
.lhs_expression = try coral.io.allocate_one(allocator, expression),
|
||||
.rhs_expression = try coral.io.allocate_one(allocator, try build_next(self)),
|
||||
},
|
||||
};
|
||||
}
|
||||
}
|
||||
|
||||
return expression;
|
||||
}
|
||||
};
|
||||
|
||||
return Builder.build;
|
||||
}
|
||||
};
|
||||
|
||||
pub const Expression = union (enum) {
|
||||
nil_literal,
|
||||
true_literal,
|
||||
false_literal,
|
||||
builtin: []const coral.io.Byte,
|
||||
number_literal: []const coral.io.Byte,
|
||||
string_literal: []const coral.io.Byte,
|
||||
symbol_literal: []const coral.io.Byte,
|
||||
table_literal: TableLiteral,
|
||||
grouped_expression: *Expression,
|
||||
local_get: []const coral.io.Byte,
|
||||
local_set: []const coral.io.Byte,
|
||||
|
||||
field_get: struct {
|
||||
object_expression: *Expression,
|
||||
identifier: []const coral.io.Byte,
|
||||
},
|
||||
|
||||
field_set: struct {
|
||||
object_expression: *Expression,
|
||||
identifier: []const coral.io.Byte,
|
||||
value_expression: *Expression,
|
||||
},
|
||||
|
||||
subscript_get: struct {
|
||||
object_expression: *Expression,
|
||||
subscript_expression: *Expression,
|
||||
},
|
||||
|
||||
subscript_set: struct {
|
||||
object_expression: *Expression,
|
||||
subscript_expression: *Expression,
|
||||
value_expression: *Expression,
|
||||
},
|
||||
|
||||
binary_operation: struct {
|
||||
operator: BinaryOperator,
|
||||
lhs_expression: *Expression,
|
||||
rhs_expression: *Expression,
|
||||
},
|
||||
|
||||
unary_operation: struct {
|
||||
operator: UnaryOperator,
|
||||
expression: *Expression,
|
||||
},
|
||||
|
||||
invoke: struct {
|
||||
object_expression: *Expression,
|
||||
argument_expressions: ExpressionList,
|
||||
},
|
||||
|
||||
pub const TableLiteral = coral.list.Stack(struct {
|
||||
key_expression: Expression,
|
||||
value_expression: Expression,
|
||||
});
|
||||
};
|
||||
|
||||
const ExpressionBuilder = fn (self: *Tree) ParseError!Expression;
|
||||
|
||||
pub const ExpressionList = coral.list.Stack(Expression);
|
||||
|
||||
pub const ParseError = error {
|
||||
OutOfMemory,
|
||||
BadSyntax,
|
||||
};
|
||||
|
||||
pub const Statement = union (enum) {
|
||||
@"return": struct {
|
||||
expression: ?Expression,
|
||||
},
|
||||
|
||||
@"if": struct {
|
||||
condition_expression: Expression,
|
||||
block_statements: StatementList,
|
||||
},
|
||||
|
||||
expression: Expression,
|
||||
};
|
||||
|
||||
pub const StatementList = coral.list.Stack(Statement);
|
||||
|
||||
pub const Tree = struct {
|
||||
name: []const coral.io.Byte,
|
||||
allocator: coral.io.Allocator,
|
||||
arena: coral.arena.Stacking,
|
||||
error_buffer: coral.list.ByteStack,
|
||||
tokenizer: tokens.Tokenizer,
|
||||
parsed_statements: StatementList,
|
||||
has_returned: bool,
|
||||
|
||||
pub fn error_message(self: Tree) []const coral.io.Byte {
|
||||
return self.error_buffer.values;
|
||||
}
|
||||
|
||||
pub fn free(self: *Tree) void {
|
||||
self.parsed_statements.free();
|
||||
self.error_buffer.free();
|
||||
self.arena.free();
|
||||
}
|
||||
|
||||
pub fn make(allocator: coral.io.Allocator, ast_name: []const coral.io.Byte) Tree {
|
||||
return .{
|
||||
.arena = coral.arena.Stacking.make(allocator, 4096),
|
||||
.error_buffer = coral.list.ByteStack.make(allocator),
|
||||
.parsed_statements = StatementList.make(allocator),
|
||||
.tokenizer = .{.source = ""},
|
||||
.allocator = allocator,
|
||||
.name = ast_name,
|
||||
.has_returned = false,
|
||||
};
|
||||
}
|
||||
|
||||
pub fn parse(self: *Tree, data: []const coral.io.Byte) ParseError!*const StatementList {
|
||||
self.tokenizer = .{.source = data};
|
||||
self.has_returned = false;
|
||||
|
||||
self.parsed_statements.free();
|
||||
|
||||
self.parsed_statements = try self.parse_statements();
|
||||
|
||||
if (self.tokenizer.token == .keyword_end) {
|
||||
return self.report("unexpected `end` without matching `do` block");
|
||||
}
|
||||
|
||||
return &self.parsed_statements;
|
||||
}
|
||||
|
||||
const parse_additive = BinaryOperator.builder(parse_equality, &.{
|
||||
.addition,
|
||||
.subtraction,
|
||||
});
|
||||
|
||||
const parse_comparison = BinaryOperator.builder(parse_term, &.{
|
||||
.greater_than_comparison,
|
||||
.greater_equals_comparison,
|
||||
.less_than_comparison,
|
||||
.less_equals_comparison
|
||||
});
|
||||
|
||||
const parse_equality = BinaryOperator.builder(parse_comparison, &.{
|
||||
.equals_comparison,
|
||||
});
|
||||
|
||||
pub fn parse_expression(self: *Tree) ParseError!Expression {
|
||||
const allocator = self.arena.as_allocator();
|
||||
const expression = try self.parse_additive();
|
||||
|
||||
if (self.tokenizer.token == .symbol_equals) {
|
||||
self.tokenizer.skip_newlines();
|
||||
|
||||
if (self.tokenizer.token == .end) {
|
||||
return self.report("expected assignment after `=`");
|
||||
}
|
||||
|
||||
return switch (expression) {
|
||||
.local_get => |local_get| .{.local_set = local_get},
|
||||
|
||||
.field_get => |field_get| .{
|
||||
.field_set = .{
|
||||
.object_expression = field_get.object_expression,
|
||||
.identifier = field_get.identifier,
|
||||
.value_expression = try coral.io.allocate_one(allocator, try self.parse_expression()),
|
||||
},
|
||||
},
|
||||
|
||||
.subscript_get => |subscript_get| .{
|
||||
.subscript_set = .{
|
||||
.object_expression = subscript_get.object_expression,
|
||||
.subscript_expression = subscript_get.subscript_expression,
|
||||
.value_expression = try coral.io.allocate_one(allocator, try self.parse_expression()),
|
||||
},
|
||||
},
|
||||
|
||||
else => self.report("expected local or field on left-hand side of expression"),
|
||||
};
|
||||
}
|
||||
|
||||
return expression;
|
||||
}
|
||||
|
||||
fn parse_factor(self: *Tree) ParseError!Expression {
|
||||
const allocator = self.arena.as_allocator();
|
||||
|
||||
var expression = @as(Expression, parse: {
|
||||
switch (self.tokenizer.token) {
|
||||
.symbol_paren_left => {
|
||||
self.tokenizer.skip_newlines();
|
||||
|
||||
if (self.tokenizer.token == .end) {
|
||||
return self.report("expected an expression after `(`");
|
||||
}
|
||||
|
||||
const expression = try self.parse_expression();
|
||||
|
||||
if (self.tokenizer.token != .symbol_paren_right) {
|
||||
return self.report("expected a closing `)` after expression");
|
||||
}
|
||||
|
||||
self.tokenizer.skip_newlines();
|
||||
|
||||
break: parse .{.grouped_expression = try coral.io.allocate_one(allocator, expression)};
|
||||
},
|
||||
|
||||
.keyword_nil => {
|
||||
self.tokenizer.skip_newlines();
|
||||
|
||||
break: parse .nil_literal;
|
||||
},
|
||||
|
||||
.keyword_true => {
|
||||
self.tokenizer.skip_newlines();
|
||||
|
||||
break: parse .true_literal;
|
||||
},
|
||||
|
||||
.keyword_false => {
|
||||
self.tokenizer.skip_newlines();
|
||||
|
||||
break: parse .false_literal;
|
||||
},
|
||||
|
||||
.number => |value| {
|
||||
self.tokenizer.skip_newlines();
|
||||
|
||||
break: parse .{.number_literal = value};
|
||||
},
|
||||
|
||||
.string => |value| {
|
||||
self.tokenizer.skip_newlines();
|
||||
|
||||
break: parse .{.string_literal = value};
|
||||
},
|
||||
|
||||
.identifier => |identifier| {
|
||||
self.tokenizer.skip_newlines();
|
||||
|
||||
break: parse .{.local_get = identifier};
|
||||
},
|
||||
|
||||
.builtin => |builtin| {
|
||||
self.tokenizer.skip_newlines();
|
||||
|
||||
break: parse .{.builtin = builtin};
|
||||
},
|
||||
|
||||
.symbol_brace_left => {
|
||||
var table_literal = Expression.TableLiteral.make(allocator);
|
||||
|
||||
self.tokenizer.skip_newlines();
|
||||
|
||||
while (true) {
|
||||
switch (self.tokenizer.token) {
|
||||
.symbol_brace_right => {
|
||||
self.tokenizer.skip_newlines();
|
||||
|
||||
break: parse .{.table_literal = table_literal};
|
||||
},
|
||||
|
||||
.symbol_period => {
|
||||
self.tokenizer.step();
|
||||
|
||||
const identifier = switch (self.tokenizer.token) {
|
||||
.identifier => |identifier| identifier,
|
||||
else => return self.report("expected identifier after `.`"),
|
||||
};
|
||||
|
||||
self.tokenizer.skip_newlines();
|
||||
|
||||
if (self.tokenizer.token != .symbol_equals) {
|
||||
return self.report("expected `=` after symbol");
|
||||
}
|
||||
|
||||
self.tokenizer.skip_newlines();
|
||||
|
||||
try table_literal.push_one(.{
|
||||
.value_expression = try self.parse_expression(),
|
||||
.key_expression = .{.symbol_literal = identifier},
|
||||
});
|
||||
|
||||
switch (self.tokenizer.token) {
|
||||
.symbol_comma => self.tokenizer.skip_newlines(),
|
||||
|
||||
.symbol_brace_right => {
|
||||
self.tokenizer.skip_newlines();
|
||||
|
||||
break: parse .{.table_literal = table_literal};
|
||||
},
|
||||
|
||||
else => return self.report("expected `,` or `}` after expression"),
|
||||
}
|
||||
},
|
||||
|
||||
.symbol_bracket_left => {
|
||||
self.tokenizer.step();
|
||||
|
||||
const subscript_expression = try self.parse_expression();
|
||||
|
||||
if (self.tokenizer.token != .symbol_bracket_right) {
|
||||
return self.report("expected `]` after subscript expression");
|
||||
}
|
||||
|
||||
self.tokenizer.skip_newlines();
|
||||
|
||||
if (self.tokenizer.token != .symbol_equals) {
|
||||
return self.report("expected `=` after `]`");
|
||||
}
|
||||
|
||||
self.tokenizer.skip_newlines();
|
||||
|
||||
try table_literal.push_one(.{
|
||||
.value_expression = try self.parse_expression(),
|
||||
.key_expression = subscript_expression,
|
||||
});
|
||||
|
||||
switch (self.tokenizer.token) {
|
||||
.symbol_comma => self.tokenizer.skip_newlines(),
|
||||
|
||||
.symbol_brace_right => {
|
||||
self.tokenizer.skip_newlines();
|
||||
|
||||
break: parse .{.table_literal = table_literal};
|
||||
},
|
||||
|
||||
else => return self.report("expected `,` or `}` after expression"),
|
||||
}
|
||||
},
|
||||
|
||||
else => return self.report("expected `}` or fields in table literal"),
|
||||
}
|
||||
}
|
||||
},
|
||||
|
||||
.symbol_minus => {
|
||||
self.tokenizer.skip_newlines();
|
||||
|
||||
if (self.tokenizer.token == .end) {
|
||||
return self.report("expected expression after numeric negation (`-`)");
|
||||
}
|
||||
|
||||
break: parse .{
|
||||
.unary_operation = .{
|
||||
.expression = try coral.io.allocate_one(allocator, try self.parse_factor()),
|
||||
.operator = .numeric_negation,
|
||||
},
|
||||
};
|
||||
},
|
||||
|
||||
.symbol_bang => {
|
||||
self.tokenizer.skip_newlines();
|
||||
|
||||
if (self.tokenizer.token == .end) {
|
||||
return self.report("expected expression after boolean negation (`!`)");
|
||||
}
|
||||
|
||||
break: parse .{
|
||||
.unary_operation = .{
|
||||
.expression = try coral.io.allocate_one(allocator, try self.parse_factor()),
|
||||
.operator = .boolean_negation,
|
||||
},
|
||||
};
|
||||
},
|
||||
|
||||
else => return self.report("unexpected token in expression"),
|
||||
}
|
||||
});
|
||||
|
||||
while (true) {
|
||||
switch (self.tokenizer.token) {
|
||||
.symbol_period => {
|
||||
self.tokenizer.skip_newlines();
|
||||
|
||||
// TODO: Remove when Zig fixes miscompilation with in-place struct re-assignment.
|
||||
const unnecessary_temp = try coral.io.allocate_one(allocator, expression);
|
||||
|
||||
expression = .{
|
||||
.field_get = .{
|
||||
.identifier = switch (self.tokenizer.token) {
|
||||
.identifier => |field_identifier| field_identifier,
|
||||
else => return self.report("expected identifier after `.`"),
|
||||
},
|
||||
|
||||
.object_expression = unnecessary_temp,
|
||||
},
|
||||
};
|
||||
|
||||
self.tokenizer.skip_newlines();
|
||||
},
|
||||
|
||||
.symbol_bracket_left => {
|
||||
self.tokenizer.skip_newlines();
|
||||
|
||||
// TODO: Remove when Zig fixes miscompilation with in-place struct re-assignment.
|
||||
const unnecessary_temp = try coral.io.allocate_one(allocator, expression);
|
||||
|
||||
expression = .{
|
||||
.subscript_get = .{
|
||||
.subscript_expression = try coral.io.allocate_one(allocator, try self.parse_expression()),
|
||||
.object_expression = unnecessary_temp,
|
||||
},
|
||||
};
|
||||
|
||||
if (self.tokenizer.token != .symbol_bracket_right) {
|
||||
return self.report("expected `]` after subscript expression");
|
||||
}
|
||||
|
||||
self.tokenizer.skip_newlines();
|
||||
},
|
||||
|
||||
.symbol_paren_left => {
|
||||
var argument_expressions = ExpressionList.make(allocator);
|
||||
|
||||
while (true) {
|
||||
self.tokenizer.skip_newlines();
|
||||
|
||||
switch (self.tokenizer.token) {
|
||||
.symbol_paren_right => break,
|
||||
|
||||
else => {
|
||||
try argument_expressions.push_one(try self.parse_expression());
|
||||
|
||||
switch (self.tokenizer.token) {
|
||||
.symbol_comma => continue,
|
||||
.symbol_paren_right => break,
|
||||
else => return self.report("expected `,` or `)` after function argument expression"),
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
self.tokenizer.skip_newlines();
|
||||
|
||||
// TODO: Remove when Zig fixes miscompilation with in-place struct re-assignment.
|
||||
const unnecessary_temp = try coral.io.allocate_one(allocator, expression);
|
||||
|
||||
expression = .{
|
||||
.invoke = .{
|
||||
.argument_expressions = argument_expressions,
|
||||
.object_expression = unnecessary_temp,
|
||||
},
|
||||
};
|
||||
},
|
||||
|
||||
else => break,
|
||||
}
|
||||
}
|
||||
|
||||
return expression;
|
||||
}
|
||||
|
||||
fn parse_statements(self: *Tree) ParseError!StatementList {
|
||||
var statements = StatementList.make(self.arena.as_allocator());
|
||||
|
||||
self.tokenizer.skip_newlines();
|
||||
|
||||
while (true) {
|
||||
try statements.push_one(parse_statement: {
|
||||
switch (self.tokenizer.token) {
|
||||
.end, .keyword_end => return statements,
|
||||
|
||||
.keyword_return => {
|
||||
if (self.has_returned) {
|
||||
return self.report("multiple returns in function scope but expected only one");
|
||||
}
|
||||
|
||||
self.tokenizer.step();
|
||||
|
||||
if (self.tokenizer.token != .end and self.tokenizer.token != .newline) {
|
||||
break: parse_statement .{.@"return" = .{.expression = try self.parse_expression()}};
|
||||
}
|
||||
|
||||
if (self.tokenizer.token != .end and self.tokenizer.token != .newline) {
|
||||
return self.report("expected end or newline after return statement");
|
||||
}
|
||||
|
||||
self.has_returned = true;
|
||||
|
||||
break: parse_statement .{.@"return" = .{.expression = null}};
|
||||
},
|
||||
|
||||
.keyword_if => {
|
||||
self.tokenizer.step();
|
||||
|
||||
const condition_expression = try self.parse_expression();
|
||||
|
||||
if (self.tokenizer.token != .keyword_do) {
|
||||
return self.report("expected `do` block after if statement");
|
||||
}
|
||||
|
||||
self.tokenizer.step();
|
||||
|
||||
const if_statement = Statement{
|
||||
.@"if" = .{
|
||||
.block_statements = try self.parse_statements(),
|
||||
.condition_expression = condition_expression,
|
||||
},
|
||||
};
|
||||
|
||||
if (self.tokenizer.token != .keyword_end) {
|
||||
return self.report("expected `end` after block");
|
||||
}
|
||||
|
||||
self.tokenizer.skip_newlines();
|
||||
|
||||
break: parse_statement if_statement;
|
||||
},
|
||||
|
||||
else => break: parse_statement .{.expression = try self.parse_expression()},
|
||||
}
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
const parse_term = BinaryOperator.builder(parse_factor, &.{
|
||||
.multiplication,
|
||||
.divsion,
|
||||
});
|
||||
|
||||
fn report(self: *Tree, message: []const coral.io.Byte) ParseError {
|
||||
coral.utf8.print_formatted(coral.list.stack_as_writer(&self.error_buffer), "{name}@{line}: {message}", .{
|
||||
.name = self.name,
|
||||
.line = self.tokenizer.lines_stepped,
|
||||
.message = message,
|
||||
}) catch return error.OutOfMemory;
|
||||
|
||||
return error.BadSyntax;
|
||||
}
|
||||
};
|
||||
|
||||
pub const UnaryOperator = enum {
|
||||
boolean_negation,
|
||||
numeric_negation,
|
||||
};
|