Improve Syntax Error Messages and Fix Comments #29

Merged
kayomn merged 4 commits from kym-script-comments into main 2023-07-22 16:08:04 +02:00
6 changed files with 66 additions and 42 deletions
Showing only changes of commit b311c73c43

View File

@@ -1,4 +1,5 @@
// Test comment.
@log_info("game is loading")
return {

View File

@@ -118,5 +118,7 @@ pub fn stack_as_writer(self: *ByteStack) io.Writer {
}
fn write_stack(stack: *ByteStack, bytes: []const io.Byte) ?usize {
return stack.push_all(bytes) catch null;
stack.push_all(bytes) catch return null;
return bytes.len;
}
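
For context, here is a minimal standalone sketch of the writer contract the replacement lines follow: on success the callback reports how many bytes it consumed, and it signals failure with null. FixedStack is a hypothetical stand-in for coral.list.ByteStack, assuming push_all returns an error union of void.

const std = @import("std");

// Hypothetical stand-in for coral.list.ByteStack, just large enough to
// demonstrate the writer contract.
const FixedStack = struct {
    buffer: [16]u8 = undefined,
    len: usize = 0,

    fn push_all(self: *FixedStack, bytes: []const u8) error{OutOfMemory}!void {
        if (self.len + bytes.len > self.buffer.len) return error.OutOfMemory;
        var i: usize = 0;
        while (i < bytes.len) : (i += 1) self.buffer[self.len + i] = bytes[i];
        self.len += bytes.len;
    }
};

fn write_stack(stack: *FixedStack, bytes: []const u8) ?usize {
    // Report the number of bytes consumed on success, null on failure.
    stack.push_all(bytes) catch return null;
    return bytes.len;
}

test "write_stack reports the consumed byte count" {
    var stack = FixedStack{};
    try std.testing.expectEqual(@as(?usize, 5), write_stack(&stack, "hello"));
    try std.testing.expectEqual(@as(?usize, null), write_stack(&stack, "this will not fit in the buffer"));
}

Running zig test on the sketch exercises both the success and the failure path.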

View File

@@ -30,13 +30,15 @@ pub fn max_int(comptime int: std.builtin.Type.Int) comptime_int {
}
pub fn min_int(comptime int: std.builtin.Type.Int) comptime_int {
if (int.signedness == .unsigned) return 0;
if (int.signedness == .unsigned) {
return 0;
}
const bit_count = int.bits;
if (bit_count == 0) return 0;
return -(1 << (bit_count - 1));
return -(1 << (bit_count - 1));
}
pub fn wrap(value: anytype, lower: anytype, upper: anytype) @TypeOf(value, lower, upper) {

View File

@@ -200,7 +200,7 @@ pub const RuntimeEnv = struct {
name: []const coral.io.Byte,
data: []const coral.io.Byte,
) RuntimeError!?*RuntimeRef {
var ast = Ast.make(self.allocator);
var ast = Ast.make(self.allocator, name);
defer ast.free();
@@ -208,7 +208,7 @@ pub const RuntimeEnv = struct {
var tokenizer = tokens.Tokenizer{.source = data};
ast.parse(&tokenizer) catch |parse_error| switch (parse_error) {
error.BadSyntax => return self.raise(error.BadSyntax, ast.error_message),
error.BadSyntax => return self.raise(error.BadSyntax, ast.error_message()),
error.OutOfMemory => return error.OutOfMemory,
};
}

View File

@@ -2,10 +2,11 @@ const coral = @import("coral");
const tokens = @import("./tokens.zig");
name: []const coral.io.Byte,
allocator: coral.io.Allocator,
arena: coral.arena.Stacking,
statements: Statement.List,
error_message: []const coral.io.Byte,
error_buffer: coral.list.ByteStack,
pub const Expression = union (enum) {
nil_literal,
@@ -114,7 +115,7 @@ fn binary_operation_parser(
tokenizer.step();
if (tokenizer.token == null) {
return self.raise("expected other half of expression after `" ++ comptime token.text() ++ "`");
return self.report(tokenizer, "expected other half of expression after `" ++ comptime token.text() ++ "`");
}
expression = .{
@@ -134,22 +135,32 @@ fn binary_operation_parser(
return BinaryOperationParser.parse;
}
pub fn error_message(self: Self) []const coral.io.Byte {
return self.error_buffer.values;
}
pub fn free(self: *Self) void {
self.arena.free();
self.statements.free();
self.error_buffer.free();
}
pub fn make(allocator: coral.io.Allocator) Self {
pub fn make(allocator: coral.io.Allocator, ast_name: []const coral.io.Byte) Self {
return Self{
.arena = coral.arena.Stacking.make(allocator, 4096),
.allocator = allocator,
.error_buffer = coral.list.ByteStack.make(allocator),
.statements = Statement.List.make(allocator),
.error_message = "",
.allocator = allocator,
.name = ast_name,
};
}
fn raise(self: *Self, message: []const u8) ParseError {
self.error_message = message;
fn report(self: *Self, tokenizer: *tokens.Tokenizer, message: []const coral.io.Byte) ParseError {
coral.utf8.print_formatted(coral.list.stack_as_writer(&self.error_buffer), "{name}@{line}: {message}", .{
.name = self.name,
.line = tokenizer.lines_stepped,
.message = message,
}) catch return error.OutOfMemory;
return error.BadSyntax;
}
@@ -165,14 +176,12 @@ pub fn parse(self: *Self, tokenizer: *tokens.Tokenizer) ParseError!void {
var has_returned = false;
while (true) {
const no_effect_message = "statement has no effect";
tokenizer.skip(.newline);
switch (tokenizer.token orelse return) {
.keyword_return => {
if (has_returned) {
return self.raise("multiple returns in function scope but expected only one");
return self.report(tokenizer, "multiple returns in function scope but expected only one");
}
try self.statements.push_one(get_statement: {
@@ -183,7 +192,7 @@ pub fn parse(self: *Self, tokenizer: *tokens.Tokenizer) ParseError!void {
}
if (!tokenizer.is_token_null_or(.newline)) {
return self.raise("unexpected token after return");
return self.report(tokenizer, "unexpected token after return");
}
break: get_statement .return_nothing;
@@ -195,14 +204,16 @@ pub fn parse(self: *Self, tokenizer: *tokens.Tokenizer) ParseError!void {
.identifier => |identifier| {
tokenizer.step();
switch (tokenizer.token orelse return self.raise(no_effect_message)) {
.newline => return self.raise(no_effect_message),
const no_effect_message = "statement has no effect";
switch (tokenizer.token orelse return self.report(tokenizer, no_effect_message)) {
.newline => return self.report(tokenizer, no_effect_message),
.symbol_equals => {
tokenizer.step();
if (tokenizer.token == null) {
return self.raise("expected expression after `=`");
return self.report(tokenizer, "expected expression after `=`");
}
try self.statements.push_one(.{
@@ -213,19 +224,21 @@ pub fn parse(self: *Self, tokenizer: *tokens.Tokenizer) ParseError!void {
});
if (!tokenizer.is_token_null_or(.newline)) {
return self.raise("unexpected token after assignment");
return self.report(tokenizer, "unexpected token after assignment");
}
},
else => return self.raise("expected `=` after local"),
else => return self.report(tokenizer, "expected `=` after local"),
}
},
.special_identifier => |identifier| {
tokenizer.step();
switch (tokenizer.token orelse return self.raise(no_effect_message)) {
.newline => return self.raise(no_effect_message),
const missing_arguments_message = "system call is missing arguments";
switch (tokenizer.token orelse return self.report(tokenizer, missing_arguments_message)) {
.newline => return self.report(tokenizer, missing_arguments_message),
.symbol_paren_left => {
tokenizer.step();
@@ -239,10 +252,10 @@ pub fn parse(self: *Self, tokenizer: *tokens.Tokenizer) ParseError!void {
try expressions_list.push_one(try self.parse_expression(tokenizer));
switch (tokenizer.token orelse return self.raise("unexpected end after after `(`")) {
switch (tokenizer.token orelse return self.report(tokenizer, "unexpected end after after `(`")) {
.symbol_comma => continue,
.symbol_paren_right => break,
else => return self.raise("expected `)` or argument after `(`"),
else => return self.report(tokenizer, "expected `)` or argument after `(`"),
}
}
@@ -256,11 +269,11 @@ pub fn parse(self: *Self, tokenizer: *tokens.Tokenizer) ParseError!void {
});
},
else => return self.raise("expected `=` after local"),
else => return self.report(tokenizer, "expected `=` after local"),
}
},
else => return self.raise("invalid statement"),
else => return self.report(tokenizer, "invalid statement"),
}
}
}
@@ -284,18 +297,18 @@ const parse_expression = binary_operation_parser(parse_equality, &.{
fn parse_factor(self: *Self, tokenizer: *tokens.Tokenizer) ParseError!Expression {
const allocator = self.arena.as_allocator();
switch (tokenizer.token orelse return self.raise("expected operand after operator")) {
switch (tokenizer.token orelse return self.report(tokenizer, "expected operand after operator")) {
.symbol_paren_left => {
tokenizer.skip(.newline);
if (tokenizer.token == null) {
return self.raise("expected an expression after `(`");
return self.report(tokenizer, "expected an expression after `(`");
}
const expression = try self.parse_expression(tokenizer);
if (!tokenizer.is_token(.symbol_paren_right)) {
return self.raise("expected a closing `)` after expression");
return self.report(tokenizer, "expected a closing `)` after expression");
}
tokenizer.step();
@@ -339,7 +352,7 @@ fn parse_factor(self: *Self, tokenizer: *tokens.Tokenizer) ParseError!Expression
var expression_list = Expression.List.make(allocator);
while (true) {
switch (tokenizer.token orelse return self.raise("expected expression or `)` after `(`")) {
switch (tokenizer.token orelse return self.report(tokenizer, "expected expression or `)` after `(`")) {
.symbol_paren_right => {
tokenizer.step();
@@ -354,7 +367,7 @@ fn parse_factor(self: *Self, tokenizer: *tokens.Tokenizer) ParseError!Expression
else => {
try expression_list.push_one(try self.parse_expression(tokenizer));
switch (tokenizer.token orelse return self.raise("expected `,` or `)` after argument")) {
switch (tokenizer.token orelse return self.report(tokenizer, "expected `,` or `)` after argument")) {
.symbol_comma => continue,
.symbol_paren_right => {
@@ -368,7 +381,7 @@ fn parse_factor(self: *Self, tokenizer: *tokens.Tokenizer) ParseError!Expression
};
},
else => return self.raise("expected `,` or `)` after argument"),
else => return self.report(tokenizer, "expected `,` or `)` after argument"),
}
},
}
@@ -387,7 +400,7 @@ fn parse_factor(self: *Self, tokenizer: *tokens.Tokenizer) ParseError!Expression
tokenizer.skip(.newline);
while (true) {
switch (tokenizer.token orelse return self.raise("unexpected end of table literal")) {
switch (tokenizer.token orelse return self.report(tokenizer, "unexpected end of table literal")) {
.symbol_brace_right => {
tokenizer.step();
@@ -398,13 +411,13 @@ fn parse_factor(self: *Self, tokenizer: *tokens.Tokenizer) ParseError!Expression
tokenizer.skip(.newline);
if (!tokenizer.is_token(.symbol_equals)) {
return self.raise("expected `=` after identifier");
return self.report(tokenizer, "expected `=` after identifier");
}
tokenizer.skip(.newline);
if (tokenizer.token == null) {
return self.raise("unexpected end after `=`");
return self.report(tokenizer, "unexpected end after `=`");
}
try table_fields.push_one(.{
@@ -412,7 +425,7 @@ fn parse_factor(self: *Self, tokenizer: *tokens.Tokenizer) ParseError!Expression
.identifier = identifier,
});
switch (tokenizer.token orelse return self.raise("unexpected end of table")) {
switch (tokenizer.token orelse return self.report(tokenizer, "unexpected end of table")) {
.symbol_comma => tokenizer.skip(.newline),
.symbol_brace_right => {
@@ -421,11 +434,11 @@ fn parse_factor(self: *Self, tokenizer: *tokens.Tokenizer) ParseError!Expression
return Expression{.table_literal = table_fields};
},
else => return self.raise("expected `,` or `}` after expression"),
else => return self.report(tokenizer, "expected `,` or `}` after expression"),
}
},
else => return self.raise("expected `}` or fields in table literal"),
else => return self.report(tokenizer, "expected `}` or fields in table literal"),
}
}
},
@@ -434,7 +447,7 @@ fn parse_factor(self: *Self, tokenizer: *tokens.Tokenizer) ParseError!Expression
tokenizer.skip(.newline);
if (tokenizer.token == null) {
return self.raise("expected expression after numeric negation (`-`)");
return self.report(tokenizer, "expected expression after numeric negation (`-`)");
}
return Expression{
@@ -449,7 +462,7 @@ fn parse_factor(self: *Self, tokenizer: *tokens.Tokenizer) ParseError!Expression
tokenizer.skip(.newline);
if (tokenizer.token == null) {
return self.raise("expected expression after boolean negation (`!`)");
return self.report(tokenizer, "expected expression after boolean negation (`!`)");
}
return Expression{
@@ -460,7 +473,7 @@ fn parse_factor(self: *Self, tokenizer: *tokens.Tokenizer) ParseError!Expression
};
},
else => return self.raise("unexpected token in expression"),
else => return self.report(tokenizer, "unexpected token in expression"),
}
}
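
For illustration, here is a sketch of the message shape that report produces through the `{name}@{line}: {message}` format above. The chunk name `game.kym`, the line number, and the message text are invented for the example, and std.fmt stands in for coral.utf8.print_formatted, which is assumed to fill named placeholders the same way.

const std = @import("std");

test "syntax errors now carry the chunk name and line number" {
    var buffer: [128]u8 = undefined;

    // Hypothetical values: in the parser the name would come from the Ast's
    // `name` field and the line number from `tokenizer.lines_stepped`.
    const formatted = try std.fmt.bufPrint(
        &buffer,
        "{s}@{d}: {s}",
        .{ "game.kym", 3, "expected expression after `=`" },
    );

    try std.testing.expectEqualStrings("game.kym@3: expected expression after `=`", formatted);
}

This formatted text is what RuntimeEnv now passes to raise via ast.error_message() instead of a bare string constant.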

View File

@@ -11,6 +11,8 @@ const heap = @import("./heap.zig");
const kym = @import("./kym.zig");
fn kym_handle_errors(info: kym.ErrorInfo) void {
app.log_fail(info.message);
var remaining_frames = info.frames.len;
while (remaining_frames != 0) {
@@ -62,6 +64,10 @@ pub fn run_app(file_access: file.Access) void {
.name = "log_info",
.caller = kym.Caller.from(kym_log_info),
},
.{
.name = "log_warn",
.caller = kym.Caller.from(kym_log_warn),
},
.{
.name = "log_fail",
.caller = kym.Caller.from(kym_log_fail),