Clean up statement AST generation logic

kayomn 2023-08-25 22:45:48 +01:00
parent 08a63489b0
commit c4916a4195
3 changed files with 149 additions and 167 deletions

View File

@@ -294,32 +294,19 @@ pub const RuntimeEnv = struct {
 		},

 		.@"if" => |@"if"| {
-			try self.compile_expression(chunk, @"if".then_block.condition_expression);
+			try self.compile_expression(chunk, @"if".condition_expression);
 			try chunk.opcodes.push_one(.{.jf = 0});

 			const then_origin_index = @as(u32, @intCast(chunk.opcodes.values.len - 1));

-			for (@"if".then_block.block_statements.values) |block_statement| {
+			for (@"if".block_statements.values) |block_statement| {
 				try self.compile_statement(chunk, block_statement);
 			}

 			chunk.opcodes.values[then_origin_index].jf = @intCast(chunk.opcodes.values.len - 1);

-			if (@"if".else_block) |else_block| {
-				if (else_block.condition_expression) |condition_expression| {
-					try self.compile_expression(chunk, condition_expression);
-					try chunk.opcodes.push_one(.{.jf = 0});
-				}
-
-				const else_origin_index = @as(u32, @intCast(chunk.opcodes.values.len - 1));
-
-				for (else_block.block_statements.values) |block_statement| {
-					try self.compile_statement(chunk, block_statement);
-				}
-
-				if (else_block.condition_expression != null) {
-					chunk.opcodes.values[else_origin_index].jf = @intCast(chunk.opcodes.values.len - 1);
-				}
+			if (@"if".else_statement) |else_statement| {
+				try self.compile_statement(chunk, else_statement.*);
 			}
 		},
@@ -352,10 +339,10 @@ pub const RuntimeEnv = struct {
 			}
 		};

-		fn compile(self: *Chunk, statements: *const ast.StatementList) RuntimeError!void {
+		fn compile(self: *Chunk, statements: []const ast.Statement) RuntimeError!void {
 			var unit = CompilationUnit{};

-			for (statements.values) |statement| {
+			for (statements) |statement| {
 				try unit.compile_statement(self, statement);
 			}
 		}
@@ -887,12 +874,6 @@ pub const RuntimeEnv = struct {
 				}
 			},
 		}
-
-		// for (self.env.locals.values) |local| {
-		// 	self.env.print(if (local) |ref| ref.typename() else "nil");
-		// }
-
-		// self.env.print("------------");
 	}

 	return self.pop_local();
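
The compiler side of this change is simpler because an `elif` chain is now just another statement reachable through `else_statement`, so lowering only ever recurses through `compile_statement`. A minimal, self-contained sketch of that shape (simplified, hypothetical types for illustration, not the repo's actual `ast` or `RuntimeEnv` code):

// Hypothetical, simplified model of the new AST shape -- not the repo's actual types.
const std = @import("std");

const Statement = union(enum) {
    @"if": struct {
        condition: bool, // stand-in for the condition expression
        else_statement: ?*const Statement, // the `elif`/`else` chain hangs here
    },
    block: u32, // stand-in for a list of block statements

    // Walk the chain the same way compile_statement now does: handle this
    // node, then recurse into the optional else statement.
    fn chain_length(self: Statement) u32 {
        return switch (self) {
            .block => 1,
            .@"if" => |branch| 1 + if (branch.else_statement) |next| next.chain_length() else 0,
        };
    }
};

pub fn main() void {
    // `if ... elif ... else ... end` parses into three chained nodes.
    const final_else = Statement{ .block = 2 };
    const elif = Statement{ .@"if" = .{ .condition = false, .else_statement = &final_else } };
    const root = Statement{ .@"if" = .{ .condition = true, .else_statement = &elif } };

    std.debug.print("branch chain length: {}\n", .{root.chain_length()});
}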

View File

@@ -118,9 +118,11 @@ pub const Expression = union (enum) {
 	invoke: struct {
 		object_expression: *Expression,
-		argument_expressions: ExpressionList,
+		argument_expressions: List,
 	},

+	const List = coral.list.Stack(Expression);
+
 	const TableLiteral = coral.list.Stack(struct {
 		key_expression: Expression,
 		value_expression: Expression,
@@ -129,8 +131,6 @@ pub const Expression = union (enum) {
 const ExpressionBuilder = fn (self: *Tree) ParseError!Expression;

-pub const ExpressionList = coral.list.Stack(Expression);
-
 pub const ParseError = error {
 	OutOfMemory,
 	BadSyntax,
@@ -140,33 +140,29 @@ pub const Statement = union (enum) {
 	@"return": ?Expression,

 	@"if": struct {
-		then_block: ConditionalBlock,
-
-		else_block: ?struct {
-			condition_expression: ?Expression,
-			block_statements: StatementList,
-		},
+		condition_expression: Expression,
+		block_statements: List,
+		else_statement: ?*Statement,
 	},

-	@"while": ConditionalBlock,
-	block: StatementList,
+	@"while": struct {
+		condition_expression: Expression,
+		block_statements: List,
+	},
+
+	block: List,
 	expression: Expression,

-	const ConditionalBlock = struct {
-		condition_expression: Expression,
-		block_statements: StatementList,
-	};
+	const List = coral.list.Stack(Statement);
 };

-pub const StatementList = coral.list.Stack(Statement);
-
 pub const Tree = struct {
 	name: []const coral.io.Byte,
 	allocator: coral.io.Allocator,
 	arena: coral.arena.Stacking,
 	error_buffer: coral.list.ByteStack,
 	tokenizer: tokens.Tokenizer,
-	parsed_statements: StatementList,
+	parsed_statements: Statement.List,
 	has_returned: bool,

 	pub fn error_message(self: Tree) []const coral.io.Byte {
@@ -183,7 +179,7 @@ pub const Tree = struct {
 		return .{
 			.arena = coral.arena.Stacking.make(allocator, 4096),
 			.error_buffer = coral.list.ByteStack.make(allocator),
-			.parsed_statements = StatementList.make(allocator),
+			.parsed_statements = Statement.List.make(allocator),
 			.tokenizer = .{.source = ""},
 			.allocator = allocator,
 			.name = ast_name,
@@ -191,19 +187,19 @@ pub const Tree = struct {
 		};
 	}

-	pub fn parse(self: *Tree, data: []const coral.io.Byte) ParseError!*const StatementList {
+	pub fn parse(self: *Tree, data: []const coral.io.Byte) ParseError![]const Statement {
+		self.free();
+
 		self.tokenizer = .{.source = data};
 		self.has_returned = false;
-
-		self.parsed_statements.free();
-
-		self.parsed_statements = try self.parse_statements();
-
-		if (self.tokenizer.token == .keyword_end) {
-			return self.report("unexpected `end` without matching `do` block");
-		}
+		self.tokenizer.skip_newlines();

-		return &self.parsed_statements;
+		while (self.tokenizer.token != .end) {
+			try self.parsed_statements.push_one(try self.parse_statement());
+		}
+
+		return self.parsed_statements.values;
 	}

 	const parse_additive = BinaryOperator.builder(parse_equality, &.{
@@ -211,6 +207,74 @@ pub const Tree = struct {
 		.subtraction,
 	});

+	fn parse_branch(self: *Tree) ParseError!Statement {
+		const allocator = self.arena.as_allocator();
+
+		defer self.tokenizer.skip_newlines();
+
+		self.tokenizer.step();
+
+		const condition_expression = try self.parse_expression();
+
+		if (self.tokenizer.token != .symbol_colon) {
+			return self.report("expected `:` after `if` statement condition");
+		}
+
+		var statements = Statement.List.make(allocator);
+
+		self.tokenizer.skip_newlines();
+
+		while (true) {
+			switch (self.tokenizer.token) {
+				.keyword_end => {
+					return .{
+						.@"if" = .{
+							.condition_expression = condition_expression,
+							.block_statements = statements,
+							.else_statement = null,
+						},
+					};
+				},
+
+				.keyword_else => {
+					self.tokenizer.step();
+
+					if (self.tokenizer.token != .symbol_colon) {
+						return self.report("expected `:` after `if` statement condition");
+					}
+
+					var else_statements = Statement.List.make(allocator);
+
+					self.tokenizer.skip_newlines();
+
+					while (self.tokenizer.token != .keyword_end) {
+						try else_statements.push_one(try self.parse_statement());
+					}
+
+					return .{
+						.@"if" = .{
+							.else_statement = try coral.io.allocate_one(allocator, Statement{.block = else_statements}),
+							.condition_expression = condition_expression,
+							.block_statements = statements,
+						},
+					};
+				},
+
+				.keyword_elif => {
+					return .{
+						.@"if" = .{
+							.else_statement = try coral.io.allocate_one(allocator, try self.parse_branch()),
+							.condition_expression = condition_expression,
+							.block_statements = statements,
+						},
+					};
+				},
+
+				else => try statements.push_one(try self.parse_statement()),
+			}
+		}
+	}
+
 	const parse_comparison = BinaryOperator.builder(parse_term, &.{
 		.greater_than_comparison,
 		.greater_equals_comparison,
@@ -493,7 +557,7 @@ pub const Tree = struct {
 			},

 			.symbol_paren_left => {
-				var argument_expressions = ExpressionList.make(allocator);
+				var argument_expressions = Expression.List.make(allocator);

 				while (true) {
 					self.tokenizer.skip_newlines();
@@ -533,26 +597,21 @@ pub const Tree = struct {
 		return expression;
 	}

-	fn parse_statements(self: *Tree) ParseError!StatementList {
+	fn parse_statement(self: *Tree) ParseError!Statement {
 		const allocator = self.arena.as_allocator();

-		var statements = StatementList.make(allocator);
-
-		self.tokenizer.skip_newlines();
-
-		while (true) {
-			try statements.push_one(parse_statement: {
 		switch (self.tokenizer.token) {
-			.end, .keyword_end, .keyword_else, .keyword_elif => return statements,
-
 			.keyword_return => {
+				defer self.tokenizer.skip_newlines();
+
 				if (self.has_returned) {
-					return self.report("multiple returns in function scope but expected only one");
+					return self.report("multiple returns in lambda scope but expected only one");
 				}

 				self.tokenizer.step();

 				if (self.tokenizer.token != .end and self.tokenizer.token != .newline) {
-					break: parse_statement .{.@"return" = try self.parse_expression()};
+					return .{.@"return" = try self.parse_expression()};
 				}

 				if (self.tokenizer.token != .end and self.tokenizer.token != .newline) {
@@ -561,10 +620,12 @@ pub const Tree = struct {
 				self.has_returned = true;

-				break: parse_statement .{.@"return" = null};
+				return .{.@"return" = null};
 			},

 			.keyword_while => {
+				defer self.tokenizer.skip_newlines();
+
 				self.tokenizer.step();

 				const condition_expression = try self.parse_expression();
@@ -573,84 +634,24 @@ pub const Tree = struct {
 					return self.report("expected `:` after `while` statement");
 				}

-				const while_statement = Statement{
-					.@"while" = .{
-						.block_statements = try self.parse_statements(),
-						.condition_expression = condition_expression,
-					},
-				};
-
-				if (self.tokenizer.token != .keyword_end) {
-					return self.report("expected `end` after block");
-				}
-
-				self.tokenizer.skip_newlines();
-
-				break: parse_statement while_statement;
-			},
-
-			.keyword_if => {
-				self.tokenizer.step();
-
-				const then_condition_expression = try self.parse_expression();
-
-				if (self.tokenizer.token != .symbol_colon) {
-					return self.report("expected `:` after `if` statement condition");
-				}
-
-				var if_statement = Statement{
-					.@"if" = .{
-						.then_block = .{
-							.block_statements = try self.parse_statements(),
-							.condition_expression = then_condition_expression,
-						},
-
-						.else_block = null,
-					},
-				};
-
-				switch (self.tokenizer.token) {
-					.keyword_end => {},
-
-					.keyword_else => {
-						self.tokenizer.step();
-
-						if (self.tokenizer.token != .symbol_colon) {
-							return self.report("expected newline after `else` statement");
-						}
-
-						if_statement.@"if".else_block = .{
-							.condition_expression = null,
-							.block_statements = try self.parse_statements(),
-						};
-					},
-
-					.keyword_elif => {
-						self.tokenizer.step();
-
-						const else_condition_expression = try self.parse_expression();
-
-						if (self.tokenizer.token != .symbol_colon) {
-							return self.report("expected newline after `elif` statement condition");
-						}
-
-						if_statement.@"if".else_block = .{
-							.condition_expression = else_condition_expression,
-							.block_statements = try self.parse_statements(),
-						};
-					},
-
-					else => return self.report("expected closing `end`, `elif`, or `else` statement on if block"),
-				}
-
-				self.tokenizer.skip_newlines();
-
-				break: parse_statement if_statement;
-			},
-
-			else => break: parse_statement .{.expression = try self.parse_expression()},
-			}
-		});
+				var statements = Statement.List.make(allocator);
+
+				self.tokenizer.skip_newlines();
+
+				while (self.tokenizer.token != .keyword_end) {
+					try statements.push_one(try self.parse_statement());
+				}
+
+				return .{
+					.@"while" = .{
+						.block_statements = statements,
+						.condition_expression = condition_expression,
+					},
+				};
+			},
+
+			.keyword_if => return self.parse_branch(),
+
+			else => return .{.expression = try self.parse_expression()},
 		}
 	}
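
On the parser side, a chain like `if x: ... elif y: ... else: ... end` (surface syntax inferred from the keywords and `:` checks above) is now consumed by `parse_branch` recursing on `elif`, with a plain `else:` body wrapped in a `.block` statement, instead of the old approach of parsing the whole statement list and patching an `else_block` afterwards. A rough, self-contained sketch of that recursive pattern, using invented token and node types rather than the repo's `Tree`/`Tokenizer` API:

// Hypothetical sketch of recursive branch parsing -- invented types, not the repo's API.
const std = @import("std");

const Token = enum { kw_if, kw_elif, kw_else, kw_end, statement };

const Branch = struct {
    then_statements: u32 = 0, // stand-in for the parsed then-block
    else_branch: ?*Branch = null, // `elif` recursion lands here
};

// Parses `if ... (elif ...)* (else ...)? end` from a flat token slice.
fn parse_branch(arena: std.mem.Allocator, tokens: []const Token, index: *usize) !*Branch {
    const branch = try arena.create(Branch);
    branch.* = .{};
    index.* += 1; // consume the leading `if`/`elif`

    while (index.* < tokens.len) : (index.* += 1) {
        switch (tokens[index.*]) {
            .statement => branch.then_statements += 1,
            // `elif`: the rest of the chain becomes this branch's else arm.
            .kw_elif => {
                branch.else_branch = try parse_branch(arena, tokens, index);
                return branch;
            },
            // The `else` block itself is elided in this sketch.
            .kw_else, .kw_end => return branch,
            .kw_if => return error.UnexpectedIf,
        }
    }

    return branch;
}

test "elif chains nest through else_branch" {
    var arena_state = std.heap.ArenaAllocator.init(std.testing.allocator);
    defer arena_state.deinit();

    const tokens = [_]Token{ .kw_if, .statement, .kw_elif, .statement, .kw_end };
    var index: usize = 0;
    const root = try parse_branch(arena_state.allocator(), &tokens, &index);

    try std.testing.expect(root.else_branch != null);
}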

View File

@@ -98,7 +98,7 @@ pub const Token = union(enum) {
 pub const Tokenizer = struct {
 	source: []const coral.io.Byte,
 	lines_stepped: usize = 1,
-	token: Token = .end,
+	token: Token = .newline,

 	pub fn skip_newlines(self: *Tokenizer) void {
 		self.step();