Improve Syntax Error Messages and Fix Comments #29

Merged
kayomn merged 4 commits from kym-script-comments into main 2023-07-22 16:08:04 +02:00
7 changed files with 135 additions and 111 deletions

View File

@@ -1,4 +1,5 @@
# Test comment.
@log_info("game is loading")
return {

View File

@@ -118,5 +118,7 @@ pub fn stack_as_writer(self: *ByteStack) io.Writer {
}
fn write_stack(stack: *ByteStack, bytes: []const io.Byte) ?usize {
return stack.push_all(bytes) catch null;
stack.push_all(bytes) catch return null;
return bytes.len;
}
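
The fix above matters because a writer callback is expected to report how many bytes it consumed and to return null only on failure; the old version returned null even on success. A minimal standalone sketch of that contract, using std.ArrayList in place of the coral ByteStack (the names here are illustrative, not the coral API):

const std = @import("std");

// Stand-in for write_stack: append the bytes and report how many were written,
// or null if the underlying buffer could not grow.
fn write_buffer(buffer: *std.ArrayList(u8), bytes: []const u8) ?usize {
    buffer.appendSlice(bytes) catch return null;
    return bytes.len;
}

test "write_buffer reports the number of bytes written" {
    var buffer = std.ArrayList(u8).init(std.testing.allocator);
    defer buffer.deinit();

    try std.testing.expectEqual(@as(?usize, 5), write_buffer(&buffer, "hello"));
    try std.testing.expectEqualStrings("hello", buffer.items);
}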

View File

@@ -30,7 +30,9 @@ pub fn max_int(comptime int: std.builtin.Type.Int) comptime_int {
}
pub fn min_int(comptime int: std.builtin.Type.Int) comptime_int {
if (int.signedness == .unsigned) return 0;
if (int.signedness == .unsigned) {
return 0;
}
const bit_count = int.bits;
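
For reference, the elided remainder of min_int presumably mirrors max_int: the minimum of a signed two's-complement integer with N bits is -(2^(N-1)). A hedged sketch of that computation (the real coral implementation may differ):

const std = @import("std");

// Stand-in for the full min_int: zero for unsigned types, -(2^(bits - 1)) for signed ones.
fn min_int_of(comptime int: std.builtin.Type.Int) comptime_int {
    if (int.signedness == .unsigned) {
        return 0;
    }

    return -(1 << (int.bits - 1));
}

test "min_int_of matches the standard library limits" {
    try std.testing.expectEqual(std.math.minInt(i8), min_int_of(@typeInfo(i8).Int));
    try std.testing.expectEqual(std.math.minInt(i32), min_int_of(@typeInfo(i32).Int));
    try std.testing.expectEqual(std.math.minInt(u16), min_int_of(@typeInfo(u16).Int));
}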

View File

@@ -8,8 +8,6 @@ const coral = @import("coral");
const file = @import("./file.zig");
const tokens = @import("./kym/tokens.zig");
pub const Any = union (enum) {
nil,
boolean: bool,
@@ -200,18 +198,14 @@ pub const RuntimeEnv = struct {
name: []const coral.io.Byte,
data: []const coral.io.Byte,
) RuntimeError!?*RuntimeRef {
var ast = Ast.make(self.allocator);
var ast = Ast.make(self.allocator, name);
defer ast.free();
{
var tokenizer = tokens.Tokenizer{.source = data};
ast.parse(&tokenizer) catch |parse_error| switch (parse_error) {
error.BadSyntax => return self.raise(error.BadSyntax, ast.error_message),
ast.parse(data) catch |parse_error| switch (parse_error) {
error.BadSyntax => return self.raise(error.BadSyntax, ast.error_message()),
error.OutOfMemory => return error.OutOfMemory,
};
kayomn marked this conversation as resolved (outdated)

There's no benefit to handling the tokenizer as external state being passed to the AST. It may as well just be part of the AST state internally.

}
var chunk = Chunk.make(self);
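
To illustrate the call-site simplification described in the comment above, where the caller now hands raw source to the Ast instead of constructing a Tokenizer itself, here is a toy sketch of the ownership shape (stand-in types, not the real coral/kym API):

const std = @import("std");

// Toy stand-ins for the real Ast and Tokenizer; only the ownership shape matters here.
const Tokenizer = struct {
    source: []const u8,
    cursor: usize = 0,
};

const Ast = struct {
    name: []const u8,
    tokenizer: Tokenizer = .{.source = ""},

    // Callers hand over raw source bytes; the Ast drives its own tokenizer internally.
    fn parse(self: *Ast, source: []const u8) void {
        self.tokenizer = .{.source = source};
        // ... statement parsing would walk self.tokenizer from here ...
    }
};

test "the tokenizer lives inside the Ast" {
    var ast = Ast{.name = "test script"};

    ast.parse("return {}");

    try std.testing.expectEqualStrings("return {}", ast.tokenizer.source);
}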

View File

@@ -2,10 +2,12 @@ const coral = @import("coral");
const tokens = @import("./tokens.zig");
name: []const coral.io.Byte,
allocator: coral.io.Allocator,
arena: coral.arena.Stacking,
statements: Statement.List,
error_message: []const coral.io.Byte,
error_buffer: coral.list.ByteStack,
tokenizer: tokens.Tokenizer,
pub const Expression = union (enum) {
nil_literal,
@@ -67,7 +69,7 @@ pub const Expression = union (enum) {
pub const List = coral.list.Stack(Expression);
};
const ExpressionParser = fn (self: *Self, tokenizer: *tokens.Tokenizer) ParseError!Expression;
const ExpressionParser = fn (self: *Self) ParseError!Expression;
pub const ParseError = error {
OutOfMemory,
@@ -103,25 +105,25 @@ fn binary_operation_parser(
comptime operators: []const Expression.BinaryOperator) ExpressionParser {
const BinaryOperationParser = struct {
fn parse(self: *Self, tokenizer: *tokens.Tokenizer) ParseError!Expression {
fn parse(self: *Self) ParseError!Expression {
const allocator = self.arena.as_allocator();
var expression = try parse_next(self, tokenizer);
var expression = try parse_next(self);
inline for (operators) |operator| {
const token = comptime operator.token();
if (tokenizer.is_token(coral.io.tag_of(token))) {
tokenizer.step();
if (self.tokenizer.is_token(coral.io.tag_of(token))) {
self.tokenizer.step();
if (tokenizer.token == null) {
return self.raise("expected other half of expression after `" ++ comptime token.text() ++ "`");
if (self.tokenizer.token == null) {
return self.report("expected other half of expression after `" ++ comptime token.text() ++ "`");
}
expression = .{
.binary_operation = .{
.operator = operator,
.lhs_expression = try coral.io.allocate_one(allocator, expression),
.rhs_expression = try coral.io.allocate_one(allocator, try parse_next(self, tokenizer)),
.rhs_expression = try coral.io.allocate_one(allocator, try parse_next(self)),
},
};
}
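binary_operation_parser builds one parser per precedence level at comptime, each wrapping the next parser down and checking its own operator tokens. The same generate-a-function-at-comptime pattern in isolation, reduced to plain integers so it stands alone (illustrative only, not the kym parser):

const std = @import("std");

// Comptime-generate a function that wraps `inner`, the way each precedence level
// in binary_operation_parser wraps the next parser down.
fn make_wrapper(comptime inner: fn (i64) i64, comptime offset: i64) fn (i64) i64 {
    const Wrapper = struct {
        fn call(value: i64) i64 {
            return inner(value) + offset;
        }
    };

    return Wrapper.call;
}

fn identity(value: i64) i64 {
    return value;
}

test "the generated function calls through to the one it wraps" {
    const add_ten = comptime make_wrapper(identity, 10);

    try std.testing.expectEqual(@as(i64, 15), add_ten(5));
}
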
@@ -134,22 +136,33 @@ fn binary_operation_parser(
return BinaryOperationParser.parse;
}
pub fn error_message(self: Self) []const coral.io.Byte {
return self.error_buffer.values;
}
pub fn free(self: *Self) void {
self.arena.free();
self.statements.free();
self.error_buffer.free();
}
pub fn make(allocator: coral.io.Allocator) Self {
pub fn make(allocator: coral.io.Allocator, ast_name: []const coral.io.Byte) Self {
return Self{
.arena = coral.arena.Stacking.make(allocator, 4096),
.allocator = allocator,
.error_buffer = coral.list.ByteStack.make(allocator),
.statements = Statement.List.make(allocator),
.error_message = "",
.tokenizer = .{.source = ""},
.allocator = allocator,
.name = ast_name,
};
}
fn raise(self: *Self, message: []const u8) ParseError {
self.error_message = message;
fn report(self: *Self, message: []const coral.io.Byte) ParseError {
coral.utf8.print_formatted(coral.list.stack_as_writer(&self.error_buffer), "{name}@{line}: {message}", .{
.name = self.name,
.line = self.tokenizer.lines_stepped,
.message = message,
}) catch return error.OutOfMemory;
return error.BadSyntax;
}
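
With report() in place, a syntax error should come out in the shape `<name>@<line>: <message>` rather than a bare description. A standalone sketch of the same formatting against std.fmt instead of coral.utf8 (the script name below is made up for the example):

const std = @import("std");

// Same "<name>@<line>: <message>" shape as report(), written against std.fmt.
fn format_report(
    allocator: std.mem.Allocator,
    name: []const u8,
    line: u64,
    message: []const u8,
) ![]u8 {
    return std.fmt.allocPrint(allocator, "{s}@{d}: {s}", .{name, line, message});
}

test "reported errors carry the script name and line number" {
    const formatted = try format_report(std.testing.allocator, "scripts/main", 3, "unexpected token after return");
    defer std.testing.allocator.free(formatted);

    try std.testing.expectEqualStrings("scripts/main@3: unexpected token after return", formatted);
}
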
@@ -158,32 +171,30 @@ pub fn list_statements(self: Self) []const Statement {
return self.statements.values;
}
pub fn parse(self: *Self, tokenizer: *tokens.Tokenizer) ParseError!void {
self.free();
pub fn parse(self: *Self, data: []const coral.io.Byte) ParseError!void {
self.tokenizer = .{.source = data};
const allocator = self.arena.as_allocator();
var has_returned = false;
while (true) {
const no_effect_message = "statement has no effect";
self.tokenizer.skip(.newline);
tokenizer.skip(.newline);
switch (tokenizer.token orelse return) {
switch (self.tokenizer.token orelse return) {
.keyword_return => {
if (has_returned) {
return self.raise("multiple returns in function scope but expected only one");
return self.report("multiple returns in function scope but expected only one");
}
try self.statements.push_one(get_statement: {
tokenizer.step();
self.tokenizer.step();
if (!tokenizer.is_token_null_or(.newline)) {
break: get_statement .{.return_expression = try self.parse_expression(tokenizer)};
if (!self.tokenizer.is_token_null_or(.newline)) {
break: get_statement .{.return_expression = try self.parse_expression()};
}
if (!tokenizer.is_token_null_or(.newline)) {
return self.raise("unexpected token after return");
if (!self.tokenizer.is_token_null_or(.newline)) {
return self.report("unexpected token after return");
}
break: get_statement .return_nothing;
@@ -193,60 +204,64 @@ pub fn parse(self: *Self, tokenizer: *tokens.Tokenizer) ParseError!void {
},
.identifier => |identifier| {
tokenizer.step();
self.tokenizer.step();
switch (tokenizer.token orelse return self.raise(no_effect_message)) {
.newline => return self.raise(no_effect_message),
const no_effect_message = "statement has no effect";
switch (self.tokenizer.token orelse return self.report(no_effect_message)) {
.newline => return self.report(no_effect_message),
.symbol_equals => {
tokenizer.step();
self.tokenizer.step();
if (tokenizer.token == null) {
return self.raise("expected expression after `=`");
if (self.tokenizer.token == null) {
return self.report("expected expression after `=`");
}
try self.statements.push_one(.{
.set_local = .{
.expression = try self.parse_expression(tokenizer),
.expression = try self.parse_expression(),
.identifier = identifier,
},
});
if (!tokenizer.is_token_null_or(.newline)) {
return self.raise("unexpected token after assignment");
if (!self.tokenizer.is_token_null_or(.newline)) {
return self.report("unexpected token after assignment");
}
},
else => return self.raise("expected `=` after local"),
else => return self.report("expected `=` after local"),
}
},
.special_identifier => |identifier| {
tokenizer.step();
self.tokenizer.step();
switch (tokenizer.token orelse return self.raise(no_effect_message)) {
.newline => return self.raise(no_effect_message),
const missing_arguments_message = "system call is missing arguments";
switch (self.tokenizer.token orelse return self.report(missing_arguments_message)) {
.newline => return self.report(missing_arguments_message),
.symbol_paren_left => {
tokenizer.step();
self.tokenizer.step();
var expressions_list = Expression.List.make(allocator);
while (true) {
if (tokenizer.is_token(.symbol_paren_right)) {
if (self.tokenizer.is_token(.symbol_paren_right)) {
break;
}
try expressions_list.push_one(try self.parse_expression(tokenizer));
try expressions_list.push_one(try self.parse_expression());
switch (tokenizer.token orelse return self.raise("unexpected end after `(`")) {
switch (self.tokenizer.token orelse return self.report("unexpected end after `(`")) {
.symbol_comma => continue,
.symbol_paren_right => break,
else => return self.raise("expected `)` or argument after `(`"),
else => return self.report("expected `)` or argument after `(`"),
}
}
tokenizer.step();
self.tokenizer.step();
try self.statements.push_one(.{
.call_system = .{
@@ -256,11 +271,11 @@ pub fn parse(self: *Self, tokenizer: *tokens.Tokenizer) ParseError!void {
});
},
else => return self.raise("expected `=` after local"),
else => return self.report("expected `=` after local"),
}
},
else => return self.raise("invalid statement"),
else => return self.report("invalid statement"),
}
}
}
@@ -281,67 +296,67 @@ const parse_expression = binary_operation_parser(parse_equality, &.{
.subtraction,
});
fn parse_factor(self: *Self, tokenizer: *tokens.Tokenizer) ParseError!Expression {
fn parse_factor(self: *Self) ParseError!Expression {
const allocator = self.arena.as_allocator();
switch (tokenizer.token orelse return self.raise("expected operand after operator")) {
switch (self.tokenizer.token orelse return self.report("expected operand after operator")) {
.symbol_paren_left => {
tokenizer.skip(.newline);
self.tokenizer.skip(.newline);
if (tokenizer.token == null) {
return self.raise("expected an expression after `(`");
if (self.tokenizer.token == null) {
return self.report("expected an expression after `(`");
}
const expression = try self.parse_expression(tokenizer);
const expression = try self.parse_expression();
if (!tokenizer.is_token(.symbol_paren_right)) {
return self.raise("expected a closing `)` after expression");
if (!self.tokenizer.is_token(.symbol_paren_right)) {
return self.report("expected a closing `)` after expression");
}
tokenizer.step();
self.tokenizer.step();
return Expression{.grouped_expression = try coral.io.allocate_one(allocator, expression)};
},
.keyword_nil => {
tokenizer.step();
self.tokenizer.step();
return .nil_literal;
},
.keyword_true => {
tokenizer.step();
self.tokenizer.step();
return .true_literal;
},
.keyword_false => {
tokenizer.step();
self.tokenizer.step();
return .false_literal;
},
.number => |value| {
tokenizer.step();
self.tokenizer.step();
return Expression{.number_literal = value};
},
.string => |value| {
tokenizer.step();
self.tokenizer.step();
return Expression{.string_literal = value};
},
.special_identifier => |identifier| {
tokenizer.skip(.newline);
self.tokenizer.skip(.newline);
var expression_list = Expression.List.make(allocator);
while (true) {
switch (tokenizer.token orelse return self.raise("expected expression or `)` after `(`")) {
switch (self.tokenizer.token orelse return self.report("expected expression or `)` after `(`")) {
.symbol_paren_right => {
tokenizer.step();
self.tokenizer.step();
return Expression{
.call_system = .{
@@ -352,13 +367,13 @@ fn parse_factor(self: *Self, tokenizer: *tokens.Tokenizer) ParseError!Expression
},
else => {
try expression_list.push_one(try self.parse_expression(tokenizer));
try expression_list.push_one(try self.parse_expression());
switch (tokenizer.token orelse return self.raise("expected `,` or `)` after argument")) {
switch (self.tokenizer.token orelse return self.report("expected `,` or `)` after argument")) {
.symbol_comma => continue,
.symbol_paren_right => {
tokenizer.step();
self.tokenizer.step();
return Expression{
.call_system = .{
@@ -368,7 +383,7 @@ fn parse_factor(self: *Self, tokenizer: *tokens.Tokenizer) ParseError!Expression
};
},
else => return self.raise("expected `,` or `)` after argument"),
else => return self.report("expected `,` or `)` after argument"),
}
},
}
@@ -376,7 +391,7 @@ fn parse_factor(self: *Self, tokenizer: *tokens.Tokenizer) ParseError!Expression
},
.identifier => |identifier| {
tokenizer.step();
self.tokenizer.step();
return Expression{.get_local = identifier};
},
@@ -384,83 +399,83 @@ fn parse_factor(self: *Self, tokenizer: *tokens.Tokenizer) ParseError!Expression
.symbol_brace_left => {
var table_fields = Expression.NamedList.make(allocator);
tokenizer.skip(.newline);
self.tokenizer.skip(.newline);
while (true) {
switch (tokenizer.token orelse return self.raise("unexpected end of table literal")) {
switch (self.tokenizer.token orelse return self.report("unexpected end of table literal")) {
.symbol_brace_right => {
tokenizer.step();
self.tokenizer.step();
return Expression{.table_literal = table_fields};
},
.identifier => |identifier| {
tokenizer.skip(.newline);
self.tokenizer.skip(.newline);
if (!tokenizer.is_token(.symbol_equals)) {
return self.raise("expected `=` after identifier");
if (!self.tokenizer.is_token(.symbol_equals)) {
return self.report("expected `=` after identifier");
}
tokenizer.skip(.newline);
self.tokenizer.skip(.newline);
if (tokenizer.token == null) {
return self.raise("unexpected end after `=`");
if (self.tokenizer.token == null) {
return self.report("unexpected end after `=`");
}
try table_fields.push_one(.{
.expression = try self.parse_expression(tokenizer),
.expression = try self.parse_expression(),
.identifier = identifier,
});
switch (tokenizer.token orelse return self.raise("unexpected end of table")) {
.symbol_comma => tokenizer.skip(.newline),
switch (self.tokenizer.token orelse return self.report("unexpected end of table")) {
.symbol_comma => self.tokenizer.skip(.newline),
.symbol_brace_right => {
tokenizer.step();
self.tokenizer.step();
return Expression{.table_literal = table_fields};
},
else => return self.raise("expected `,` or `}` after expression"),
else => return self.report("expected `,` or `}` after expression"),
}
},
else => return self.raise("expected `}` or fields in table literal"),
else => return self.report("expected `}` or fields in table literal"),
}
}
},
.symbol_minus => {
tokenizer.skip(.newline);
self.tokenizer.skip(.newline);
if (tokenizer.token == null) {
return self.raise("expected expression after numeric negation (`-`)");
if (self.tokenizer.token == null) {
return self.report("expected expression after numeric negation (`-`)");
}
return Expression{
.unary_operation = .{
.expression = try coral.io.allocate_one(allocator, try self.parse_factor(tokenizer)),
.expression = try coral.io.allocate_one(allocator, try self.parse_factor()),
.operator = .numeric_negation,
},
};
},
.symbol_bang => {
tokenizer.skip(.newline);
self.tokenizer.skip(.newline);
if (tokenizer.token == null) {
return self.raise("expected expression after boolean negation (`!`)");
if (self.tokenizer.token == null) {
return self.report("expected expression after boolean negation (`!`)");
}
return Expression{
.unary_operation = .{
.expression = try coral.io.allocate_one(allocator, try self.parse_factor(tokenizer)),
.expression = try coral.io.allocate_one(allocator, try self.parse_factor()),
.operator = .boolean_negation,
},
};
},
else => return self.raise("unexpected token in expression"),
else => return self.report("unexpected token in expression"),
}
}

View File

@@ -119,7 +119,7 @@ pub const Tokenizer = struct {
'#' => {
cursor += 1;
while (cursor < self.source.len and self.source[cursor] == '\n') {
while (cursor < self.source.len and self.source[cursor] != '\n') {
cursor += 1;
}
},
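
This is the comment fix from the title: with `==` the loop body almost never ran, because the character following `#` is rarely a newline, so comment text leaked into normal tokenization. The corrected condition consumes everything up to the end of the line. Roughly, in isolation (a simplified stand-in, not the real Tokenizer):

const std = @import("std");

// Simplified stand-in for the tokenizer's comment handling: skip from a `#`
// to the end of the current line (or the end of the source).
fn skip_comment(source: []const u8, start: usize) usize {
    var cursor = start + 1; // step past the `#` itself

    while (cursor < source.len and source[cursor] != '\n') {
        cursor += 1;
    }

    return cursor;
}

test "comments run to the end of the line" {
    const source = "# Test comment.\n@log_info(\"game is loading\")";
    const end = skip_comment(source, 0);

    try std.testing.expectEqual(@as(usize, 15), end);
    try std.testing.expectEqual(@as(u8, '\n'), source[end]);
}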

View File

@@ -11,13 +11,19 @@ const heap = @import("./heap.zig");
const kym = @import("./kym.zig");
fn kym_handle_errors(info: kym.ErrorInfo) void {
app.log_fail(info.message);
var remaining_frames = info.frames.len;
if (remaining_frames != 0) {
app.log_fail("stack trace:");
while (remaining_frames != 0) {
remaining_frames -= 1;
app.log_fail(info.frames[remaining_frames].name);
}
}
}
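
The countdown loop above walks the frames from most recent to oldest while avoiding unsigned underflow: decrement before indexing and stop at zero. The same pattern on a plain slice, with made-up frame names:

const std = @import("std");

test "walking a slice from the most recent entry down to the oldest" {
    // Made-up frame names standing in for kym call frames.
    const frames = [_][]const u8{"main", "load_scene", "log_info"};
    var visited = std.ArrayList([]const u8).init(std.testing.allocator);
    defer visited.deinit();

    // Decrement before indexing so the usize counter can never underflow.
    var remaining = frames.len;

    while (remaining != 0) {
        remaining -= 1;
        try visited.append(frames[remaining]);
    }

    try std.testing.expectEqualStrings("log_info", visited.items[0]);
    try std.testing.expectEqualStrings("main", visited.items[2]);
}
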
fn kym_log_info(env: *kym.RuntimeEnv) kym.RuntimeError!?*kym.RuntimeRef {
@@ -62,6 +68,10 @@ pub fn run_app(file_access: file.Access) void {
.name = "log_info",
.caller = kym.Caller.from(kym_log_info),
},
.{
.name = "log_warn",
.caller = kym.Caller.from(kym_log_warn),
},
.{
.name = "log_fail",
.caller = kym.Caller.from(kym_log_fail),