Merge pull request 'Improve Syntax Error Messages and Fix Comments' (#29) from kym-script-comments into main
continuous-integration/drone/push Build is passing
Reviewed-on: #29
commit e71048e83d
@@ -1,4 +1,5 @@
+# Test comment.
 @log_info("game is loading")
 
 return {
@@ -118,5 +118,7 @@ pub fn stack_as_writer(self: *ByteStack) io.Writer {
 }
 
 fn write_stack(stack: *ByteStack, bytes: []const io.Byte) ?usize {
-	return stack.push_all(bytes) catch null;
+	stack.push_all(bytes) catch return null;
+
+	return bytes.len;
 }
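Note on the hunk above: write_stack is the writer callback for a ByteStack, and its caller needs the count of bytes accepted, not just success or failure. The old body forwarded push_all's result, which never carried that count; the fix pushes first and then reports bytes.len, with null reserved for failure. A minimal stand-alone sketch of the same contract, assuming nothing from coral (write_fixed and its fixed buffer are illustrative only):

const std = @import("std");

// Sketch of the contract write_stack now follows: signal failure with null,
// otherwise report how many bytes were actually written.
fn write_fixed(buffer: []u8, used: *usize, bytes: []const u8) ?usize {
	if (used.* + bytes.len > buffer.len) return null; // not enough room left

	@memcpy(buffer[used.*..][0..bytes.len], bytes);

	used.* += bytes.len;

	return bytes.len;
}

test "writer reports the written length" {
	var storage: [8]u8 = undefined;
	var used: usize = 0;

	try std.testing.expectEqual(@as(?usize, 5), write_fixed(&storage, &used, "hello"));
	try std.testing.expectEqual(@as(?usize, null), write_fixed(&storage, &used, "overflow"));
}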
@@ -30,13 +30,15 @@ pub fn max_int(comptime int: std.builtin.Type.Int) comptime_int {
 }
 
 pub fn min_int(comptime int: std.builtin.Type.Int) comptime_int {
-	if (int.signedness == .unsigned) return 0;
+	if (int.signedness == .unsigned) {
+		return 0;
+	}
 
 	const bit_count = int.bits;
 
 	if (bit_count == 0) return 0;
 
 	return -(1 << (bit_count - 1));
 }
 
 pub fn wrap(value: anytype, lower: anytype, upper: anytype) @TypeOf(value, lower, upper) {
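For context on min_int above: the smallest value of a signed n-bit integer is -(2^(n-1)), so i8 bottoms out at -128 and i16 at -32768, while unsigned and zero-bit types return 0. A hedged stand-alone check of that arithmetic (signed_min is an illustrative helper, not part of this repository):

const std = @import("std");

// -(1 << (bits - 1)) evaluated at comptime: i8 -> -128, i16 -> -32768.
fn signed_min(comptime bits: u16) comptime_int {
	if (bits == 0) return 0;

	return -(1 << (bits - 1));
}

test "signed minimum values" {
	try std.testing.expectEqual(-128, signed_min(8));
	try std.testing.expectEqual(-32768, signed_min(16));
	try std.testing.expectEqual(std.math.minInt(i8), signed_min(8));
}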
@@ -8,8 +8,6 @@ const coral = @import("coral");
 
 const file = @import("./file.zig");
 
-const tokens = @import("./kym/tokens.zig");
-
 pub const Any = union (enum) {
 	nil,
 	boolean: bool,
@@ -200,18 +198,14 @@ pub const RuntimeEnv = struct {
 		name: []const coral.io.Byte,
 		data: []const coral.io.Byte,
 	) RuntimeError!?*RuntimeRef {
-		var ast = Ast.make(self.allocator);
+		var ast = Ast.make(self.allocator, name);
 
 		defer ast.free();
 
-		{
-			var tokenizer = tokens.Tokenizer{.source = data};
-
-			ast.parse(&tokenizer) catch |parse_error| switch (parse_error) {
-				error.BadSyntax => return self.raise(error.BadSyntax, ast.error_message),
-				error.OutOfMemory => return error.OutOfMemory,
-			};
-		}
+		ast.parse(data) catch |parse_error| switch (parse_error) {
+			error.BadSyntax => return self.raise(error.BadSyntax, ast.error_message()),
+			error.OutOfMemory => return error.OutOfMemory,
+		};
 
 		var chunk = Chunk.make(self);
 
@@ -2,10 +2,12 @@ const coral = @import("coral");
 
 const tokens = @import("./tokens.zig");
 
+name: []const coral.io.Byte,
 allocator: coral.io.Allocator,
 arena: coral.arena.Stacking,
 statements: Statement.List,
-error_message: []const coral.io.Byte,
+error_buffer: coral.list.ByteStack,
+tokenizer: tokens.Tokenizer,
 
 pub const Expression = union (enum) {
 	nil_literal,
@@ -67,7 +69,7 @@ pub const Expression = union (enum) {
 	pub const List = coral.list.Stack(Expression);
 };
 
-const ExpressionParser = fn (self: *Self, tokenizer: *tokens.Tokenizer) ParseError!Expression;
+const ExpressionParser = fn (self: *Self) ParseError!Expression;
 
 pub const ParseError = error {
 	OutOfMemory,
@@ -103,25 +105,25 @@ fn binary_operation_parser(
 	comptime operators: []const Expression.BinaryOperator) ExpressionParser {
 
 	const BinaryOperationParser = struct {
-		fn parse(self: *Self, tokenizer: *tokens.Tokenizer) ParseError!Expression {
+		fn parse(self: *Self) ParseError!Expression {
 			const allocator = self.arena.as_allocator();
-			var expression = try parse_next(self, tokenizer);
+			var expression = try parse_next(self);
 
 			inline for (operators) |operator| {
 				const token = comptime operator.token();
 
-				if (tokenizer.is_token(coral.io.tag_of(token))) {
-					tokenizer.step();
+				if (self.tokenizer.is_token(coral.io.tag_of(token))) {
+					self.tokenizer.step();
 
-					if (tokenizer.token == null) {
-						return self.raise("expected other half of expression after `" ++ comptime token.text() ++ "`");
+					if (self.tokenizer.token == null) {
+						return self.report("expected other half of expression after `" ++ comptime token.text() ++ "`");
 					}
 
 					expression = .{
 						.binary_operation = .{
 							.operator = operator,
 							.lhs_expression = try coral.io.allocate_one(allocator, expression),
-							.rhs_expression = try coral.io.allocate_one(allocator, try parse_next(self, tokenizer)),
+							.rhs_expression = try coral.io.allocate_one(allocator, try parse_next(self)),
 						},
 					};
 				}
@@ -134,22 +136,33 @@ fn binary_operation_parser(
 	return BinaryOperationParser.parse;
 }
 
+pub fn error_message(self: Self) []const coral.io.Byte {
+	return self.error_buffer.values;
+}
+
 pub fn free(self: *Self) void {
 	self.arena.free();
 	self.statements.free();
+	self.error_buffer.free();
 }
 
-pub fn make(allocator: coral.io.Allocator) Self {
+pub fn make(allocator: coral.io.Allocator, ast_name: []const coral.io.Byte) Self {
 	return Self{
 		.arena = coral.arena.Stacking.make(allocator, 4096),
-		.allocator = allocator,
+		.error_buffer = coral.list.ByteStack.make(allocator),
 		.statements = Statement.List.make(allocator),
-		.error_message = "",
+		.tokenizer = .{.source = ""},
+		.allocator = allocator,
+		.name = ast_name,
 	};
 }
 
-fn raise(self: *Self, message: []const u8) ParseError {
-	self.error_message = message;
+fn report(self: *Self, message: []const coral.io.Byte) ParseError {
+	coral.utf8.print_formatted(coral.list.stack_as_writer(&self.error_buffer), "{name}@{line}: {message}", .{
+		.name = self.name,
+		.line = self.tokenizer.lines_stepped,
+		.message = message,
+	}) catch return error.OutOfMemory;
 
 	return error.BadSyntax;
 }
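This hunk is the core of the error-message improvement: report() now formats "{name}@{line}: {message}" into the new error_buffer, using the AST's name and the tokenizer's line counter, instead of stashing a bare string as raise() did. A hedged illustration of the resulting message shape, using std.fmt in place of coral.utf8.print_formatted (the file name and line number are invented for the example):

const std = @import("std");

test "syntax error message shape" {
	var buffer: [64]u8 = undefined;

	// e.g. an AST made with the name "example.kym" whose tokenizer had
	// stepped over two newlines before hitting the bad token.
	const message = try std.fmt.bufPrint(&buffer, "{s}@{d}: {s}", .{
		"example.kym", 3, "expected expression after `=`",
	});

	try std.testing.expectEqualStrings("example.kym@3: expected expression after `=`", message);
}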
@@ -158,32 +171,30 @@ pub fn list_statements(self: Self) []const Statement {
 	return self.statements.values;
 }
 
-pub fn parse(self: *Self, tokenizer: *tokens.Tokenizer) ParseError!void {
-	self.free();
+pub fn parse(self: *Self, data: []const coral.io.Byte) ParseError!void {
+	self.tokenizer = .{.source = data};
 
 	const allocator = self.arena.as_allocator();
 	var has_returned = false;
 
 	while (true) {
-		const no_effect_message = "statement has no effect";
-
-		tokenizer.skip(.newline);
+		self.tokenizer.skip(.newline);
 
-		switch (tokenizer.token orelse return) {
+		switch (self.tokenizer.token orelse return) {
 			.keyword_return => {
 				if (has_returned) {
-					return self.raise("multiple returns in function scope but expected only one");
+					return self.report("multiple returns in function scope but expected only one");
 				}
 
 				try self.statements.push_one(get_statement: {
-					tokenizer.step();
+					self.tokenizer.step();
 
-					if (!tokenizer.is_token_null_or(.newline)) {
-						break: get_statement .{.return_expression = try self.parse_expression(tokenizer)};
+					if (!self.tokenizer.is_token_null_or(.newline)) {
+						break: get_statement .{.return_expression = try self.parse_expression()};
 					}
 
-					if (!tokenizer.is_token_null_or(.newline)) {
-						return self.raise("unexpected token after return");
+					if (!self.tokenizer.is_token_null_or(.newline)) {
+						return self.report("unexpected token after return");
 					}
 
 					break: get_statement .return_nothing;
@@ -193,60 +204,64 @@ pub fn parse(self: *Self, tokenizer: *tokens.Tokenizer) ParseError!void {
 			},
 
 			.identifier => |identifier| {
-				tokenizer.step();
+				self.tokenizer.step();
 
-				switch (tokenizer.token orelse return self.raise(no_effect_message)) {
-					.newline => return self.raise(no_effect_message),
+				const no_effect_message = "statement has no effect";
+
+				switch (self.tokenizer.token orelse return self.report(no_effect_message)) {
+					.newline => return self.report(no_effect_message),
 
 					.symbol_equals => {
-						tokenizer.step();
+						self.tokenizer.step();
 
-						if (tokenizer.token == null) {
-							return self.raise("expected expression after `=`");
+						if (self.tokenizer.token == null) {
+							return self.report("expected expression after `=`");
 						}
 
 						try self.statements.push_one(.{
 							.set_local = .{
-								.expression = try self.parse_expression(tokenizer),
+								.expression = try self.parse_expression(),
 								.identifier = identifier,
 							},
 						});
 
-						if (!tokenizer.is_token_null_or(.newline)) {
-							return self.raise("unexpected token after assignment");
+						if (!self.tokenizer.is_token_null_or(.newline)) {
+							return self.report("unexpected token after assignment");
 						}
 					},
 
-					else => return self.raise("expected `=` after local"),
+					else => return self.report("expected `=` after local"),
 				}
 			},
 
 			.special_identifier => |identifier| {
-				tokenizer.step();
+				self.tokenizer.step();
 
-				switch (tokenizer.token orelse return self.raise(no_effect_message)) {
-					.newline => return self.raise(no_effect_message),
+				const missing_arguments_message = "system call is missing arguments";
+
+				switch (self.tokenizer.token orelse return self.report(missing_arguments_message)) {
+					.newline => return self.report(missing_arguments_message),
 
 					.symbol_paren_left => {
-						tokenizer.step();
+						self.tokenizer.step();
 
 						var expressions_list = Expression.List.make(allocator);
 
 						while (true) {
-							if (tokenizer.is_token(.symbol_paren_right)) {
+							if (self.tokenizer.is_token(.symbol_paren_right)) {
 								break;
 							}
 
-							try expressions_list.push_one(try self.parse_expression(tokenizer));
+							try expressions_list.push_one(try self.parse_expression());
 
-							switch (tokenizer.token orelse return self.raise("unexpected end after after `(`")) {
+							switch (self.tokenizer.token orelse return self.report("unexpected end after after `(`")) {
 								.symbol_comma => continue,
 								.symbol_paren_right => break,
-								else => return self.raise("expected `)` or argument after `(`"),
+								else => return self.report("expected `)` or argument after `(`"),
 							}
 						}
 
-						tokenizer.step();
+						self.tokenizer.step();
 
 						try self.statements.push_one(.{
 							.call_system = .{
@@ -256,11 +271,11 @@ pub fn parse(self: *Self, tokenizer: *tokens.Tokenizer) ParseError!void {
 						});
 					},
 
-					else => return self.raise("expected `=` after local"),
+					else => return self.report("expected `=` after local"),
 				}
 			},
 
-			else => return self.raise("invalid statement"),
+			else => return self.report("invalid statement"),
 		}
 	}
 }
@@ -281,67 +296,67 @@ const parse_expression = binary_operation_parser(parse_equality, &.{
 	.subtraction,
 });
 
-fn parse_factor(self: *Self, tokenizer: *tokens.Tokenizer) ParseError!Expression {
+fn parse_factor(self: *Self) ParseError!Expression {
 	const allocator = self.arena.as_allocator();
 
-	switch (tokenizer.token orelse return self.raise("expected operand after operator")) {
+	switch (self.tokenizer.token orelse return self.report("expected operand after operator")) {
 		.symbol_paren_left => {
-			tokenizer.skip(.newline);
+			self.tokenizer.skip(.newline);
 
-			if (tokenizer.token == null) {
-				return self.raise("expected an expression after `(`");
+			if (self.tokenizer.token == null) {
+				return self.report("expected an expression after `(`");
 			}
 
-			const expression = try self.parse_expression(tokenizer);
+			const expression = try self.parse_expression();
 
-			if (!tokenizer.is_token(.symbol_paren_right)) {
-				return self.raise("expected a closing `)` after expression");
+			if (!self.tokenizer.is_token(.symbol_paren_right)) {
+				return self.report("expected a closing `)` after expression");
 			}
 
-			tokenizer.step();
+			self.tokenizer.step();
 
 			return Expression{.grouped_expression = try coral.io.allocate_one(allocator, expression)};
 		},
 
 		.keyword_nil => {
-			tokenizer.step();
+			self.tokenizer.step();
 
 			return .nil_literal;
 		},
 
 		.keyword_true => {
-			tokenizer.step();
+			self.tokenizer.step();
 
 			return .true_literal;
 		},
 
 		.keyword_false => {
-			tokenizer.step();
+			self.tokenizer.step();
 
 			return .false_literal;
 		},
 
 		.number => |value| {
-			tokenizer.step();
+			self.tokenizer.step();
 
 			return Expression{.number_literal = value};
 		},
 
 		.string => |value| {
-			tokenizer.step();
+			self.tokenizer.step();
 
 			return Expression{.string_literal = value};
 		},
 
 		.special_identifier => |identifier| {
-			tokenizer.skip(.newline);
+			self.tokenizer.skip(.newline);
 
 			var expression_list = Expression.List.make(allocator);
 
 			while (true) {
-				switch (tokenizer.token orelse return self.raise("expected expression or `)` after `(`")) {
+				switch (self.tokenizer.token orelse return self.report("expected expression or `)` after `(`")) {
 					.symbol_paren_right => {
-						tokenizer.step();
+						self.tokenizer.step();
 
 						return Expression{
 							.call_system = .{
@@ -352,13 +367,13 @@ fn parse_factor(self: *Self, tokenizer: *tokens.Tokenizer) ParseError!Expression
 					},
 
 					else => {
-						try expression_list.push_one(try self.parse_expression(tokenizer));
+						try expression_list.push_one(try self.parse_expression());
 
-						switch (tokenizer.token orelse return self.raise("expected `,` or `)` after argument")) {
+						switch (self.tokenizer.token orelse return self.report("expected `,` or `)` after argument")) {
 							.symbol_comma => continue,
 
 							.symbol_paren_right => {
-								tokenizer.step();
+								self.tokenizer.step();
 
 								return Expression{
 									.call_system = .{
@@ -368,7 +383,7 @@ fn parse_factor(self: *Self, tokenizer: *tokens.Tokenizer) ParseError!Expression
 								};
 							},
 
-							else => return self.raise("expected `,` or `)` after argument"),
+							else => return self.report("expected `,` or `)` after argument"),
 						}
 					},
 				}
@@ -376,7 +391,7 @@ fn parse_factor(self: *Self, tokenizer: *tokens.Tokenizer) ParseError!Expression
 		},
 
 		.identifier => |identifier| {
-			tokenizer.step();
+			self.tokenizer.step();
 
 			return Expression{.get_local = identifier};
 		},
@@ -384,83 +399,83 @@ fn parse_factor(self: *Self, tokenizer: *tokens.Tokenizer) ParseError!Expression
 		.symbol_brace_left => {
 			var table_fields = Expression.NamedList.make(allocator);
 
-			tokenizer.skip(.newline);
+			self.tokenizer.skip(.newline);
 
 			while (true) {
-				switch (tokenizer.token orelse return self.raise("unexpected end of table literal")) {
+				switch (self.tokenizer.token orelse return self.report("unexpected end of table literal")) {
 					.symbol_brace_right => {
-						tokenizer.step();
+						self.tokenizer.step();
 
 						return Expression{.table_literal = table_fields};
 					},
 
 					.identifier => |identifier| {
-						tokenizer.skip(.newline);
+						self.tokenizer.skip(.newline);
 
-						if (!tokenizer.is_token(.symbol_equals)) {
-							return self.raise("expected `=` after identifier");
+						if (!self.tokenizer.is_token(.symbol_equals)) {
+							return self.report("expected `=` after identifier");
 						}
 
-						tokenizer.skip(.newline);
+						self.tokenizer.skip(.newline);
 
-						if (tokenizer.token == null) {
-							return self.raise("unexpected end after `=`");
+						if (self.tokenizer.token == null) {
+							return self.report("unexpected end after `=`");
 						}
 
 						try table_fields.push_one(.{
-							.expression = try self.parse_expression(tokenizer),
+							.expression = try self.parse_expression(),
 							.identifier = identifier,
 						});
 
-						switch (tokenizer.token orelse return self.raise("unexpected end of table")) {
-							.symbol_comma => tokenizer.skip(.newline),
+						switch (self.tokenizer.token orelse return self.report("unexpected end of table")) {
+							.symbol_comma => self.tokenizer.skip(.newline),
 
 							.symbol_brace_right => {
-								tokenizer.step();
+								self.tokenizer.step();
 
 								return Expression{.table_literal = table_fields};
 							},
 
-							else => return self.raise("expected `,` or `}` after expression"),
+							else => return self.report("expected `,` or `}` after expression"),
 						}
 					},
 
-					else => return self.raise("expected `}` or fields in table literal"),
+					else => return self.report("expected `}` or fields in table literal"),
 				}
 			}
 		},
 
 		.symbol_minus => {
-			tokenizer.skip(.newline);
+			self.tokenizer.skip(.newline);
 
-			if (tokenizer.token == null) {
-				return self.raise("expected expression after numeric negation (`-`)");
+			if (self.tokenizer.token == null) {
+				return self.report("expected expression after numeric negation (`-`)");
 			}
 
 			return Expression{
 				.unary_operation = .{
-					.expression = try coral.io.allocate_one(allocator, try self.parse_factor(tokenizer)),
+					.expression = try coral.io.allocate_one(allocator, try self.parse_factor()),
 					.operator = .numeric_negation,
 				},
 			};
 		},
 
 		.symbol_bang => {
-			tokenizer.skip(.newline);
+			self.tokenizer.skip(.newline);
 
-			if (tokenizer.token == null) {
-				return self.raise("expected expression after boolean negation (`!`)");
+			if (self.tokenizer.token == null) {
+				return self.report("expected expression after boolean negation (`!`)");
 			}
 
 			return Expression{
 				.unary_operation = .{
-					.expression = try coral.io.allocate_one(allocator, try self.parse_factor(tokenizer)),
+					.expression = try coral.io.allocate_one(allocator, try self.parse_factor()),
 					.operator = .boolean_negation,
 				},
 			};
 		},
 
-		else => return self.raise("unexpected token in expression"),
+		else => return self.report("unexpected token in expression"),
 	}
 }
 
@@ -119,7 +119,7 @@ pub const Tokenizer = struct {
 			'#' => {
 				cursor += 1;
 
-				while (cursor < self.source.len and self.source[cursor] == '\n') {
+				while (cursor < self.source.len and self.source[cursor] != '\n') {
 					cursor += 1;
 				}
 			},
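This one-character change is the "Fix Comments" half of the pull request: with `==` the skip loop exited immediately (a comment body is never a newline), so everything after `#` leaked back into the token stream; with `!=` the cursor advances to the end of the line. A hedged stand-alone sketch of the corrected behaviour (skip_line_comment is an illustrative helper, not the project's tokenizer):

const std = @import("std");

// Advance from just past the `#` until the terminating newline (or end of
// source), returning the new cursor position.
fn skip_line_comment(source: []const u8, start: usize) usize {
	var cursor = start;

	while (cursor < source.len and source[cursor] != '\n') {
		cursor += 1;
	}

	return cursor;
}

test "comments are skipped to end of line" {
	const source = "# Test comment.\n@log_info(\"game is loading\")";

	try std.testing.expectEqual(std.mem.indexOfScalar(u8, source, '\n').?, skip_line_comment(source, 1));
}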
@@ -11,12 +11,18 @@ const heap = @import("./heap.zig");
 const kym = @import("./kym.zig");
 
 fn kym_handle_errors(info: kym.ErrorInfo) void {
+	app.log_fail(info.message);
+
 	var remaining_frames = info.frames.len;
 
-	while (remaining_frames != 0) {
-		remaining_frames -= 1;
+	if (remaining_frames != 0) {
+		app.log_fail("stack trace:");
 
-		app.log_fail(info.frames[remaining_frames].name);
+		while (remaining_frames != 0) {
+			remaining_frames -= 1;
+
+			app.log_fail(info.frames[remaining_frames].name);
+		}
 	}
 }
 
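With the rework above, the script's own error message is always logged first, and the "stack trace:" heading is printed only when there are frames, which are then walked from the innermost call outward. A hedged sketch of that output shape against a generic writer (print_errors and the frame names are illustrative, not the project's app/kym API):

const std = @import("std");

// Log the failure message, then the frames innermost-last under a heading
// that only appears when at least one frame exists.
fn print_errors(message: []const u8, frame_names: []const []const u8, writer: anytype) !void {
	try writer.print("fail: {s}\n", .{message});

	var remaining = frame_names.len;

	if (remaining != 0) {
		try writer.print("stack trace:\n", .{});

		while (remaining != 0) {
			remaining -= 1;

			try writer.print("  {s}\n", .{frame_names[remaining]});
		}
	}
}

test "frames print after the heading" {
	var buffer: [128]u8 = undefined;
	var stream = std.io.fixedBufferStream(&buffer);

	try print_errors("bad syntax", &.{ "main", "load" }, stream.writer());
	try std.testing.expect(std.mem.indexOf(u8, stream.getWritten(), "stack trace:") != null);
}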
@@ -62,6 +68,10 @@ pub fn run_app(file_access: file.Access) void {
 			.name = "log_info",
 			.caller = kym.Caller.from(kym_log_info),
 		},
+		.{
+			.name = "log_warn",
+			.caller = kym.Caller.from(kym_log_warn),
+		},
 		.{
 			.name = "log_fail",
 			.caller = kym.Caller.from(kym_log_fail),