@@ -18,7 +18,8 @@ pub const Expression = union (enum) {
symbol_literal: []const coral.io.Byte,
table_literal: TableLiteral,
grouped_expression: *Expression,
resolve_local: []const coral.io.Byte,
get_local: []const coral.io.Byte,
set_local: []const coral.io.Byte,

get_field: struct {
object_expression: *Expression,
@@ -31,11 +32,6 @@ pub const Expression = union (enum) {
value_expression: *Expression,
},

call_system: struct {
identifier: []const coral.io.Byte,
argument_expressions: List,
},

binary_operation: struct {
operator: BinaryOperator,
lhs_expression: *Expression,
@@ -91,18 +87,11 @@ pub const ParseError = error {
const Self = @This();

pub const Statement = union (enum) {
return_nothing,
return_expression: Expression,

assign_local: struct {
identifier: []const coral.io.Byte,
expression: Expression,
@"return": struct {
expression: ?Expression,
},

call_system: struct {
identifier: []const coral.io.Byte,
argument_expressions: Expression.List,
},
expression: Expression,

const List = coral.list.Stack(Statement);
};
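// The `Statement` union touched above is a plain Zig tagged union, so consumers dispatch on it
// with `switch` and pull each variant's payload out through a capture. A minimal, self-contained
// sketch of that pattern follows; `Statement` and `Expression` here are simplified stand-ins for
// illustration only, not the types declared in this file.
const std = @import("std");

const Expression = union (enum) {
    nil_literal,
    number_literal: f64,
};

const Statement = union (enum) {
    @"return": struct {
        expression: ?Expression,
    },

    expression: Expression,
};

fn describe(statement: Statement) []const u8 {
    return switch (statement) {
        .@"return" => |ret| if (ret.expression == null) "return without a value" else "return with a value",
        .expression => "expression statement",
    };
}

test "dispatching on statement variants" {
    try std.testing.expectEqualStrings("return without a value", describe(.{.@"return" = .{.expression = null}}));
    try std.testing.expectEqualStrings("expression statement", describe(.{.expression = .nil_literal}));
}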
@@ -186,106 +175,58 @@ pub fn list_statements(self: Self) []const Statement {
pub fn parse(self: *Self, data: []const coral.io.Byte) ParseError!void {
self.tokenizer = .{.source = data};

const allocator = self.arena.as_allocator();
var has_returned = false;

self.tokenizer.skip_newlines();

while (true) {
switch (self.tokenizer.token) {
.end => return,
try self.statements.push_one(parse_statement: {
switch (self.tokenizer.token) {
.end => return,

.keyword_return => {
if (has_returned) {
return self.report("multiple returns in function scope but expected only one");
}
.keyword_return => {
if (has_returned) {
return self.report("multiple returns in function scope but expected only one");
}

try self.statements.push_one(get_statement: {
self.tokenizer.step();

if (self.tokenizer.token != .end and self.tokenizer.token != .newline) {
break: get_statement .{.return_expression = try self.parse_expression()};
break: parse_statement .{
.@"return" = .{
.expression = try self.parse_expression(),
},
};
}

if (self.tokenizer.token != .end and self.tokenizer.token != .newline) {
return self.report("expected end or newline after return statement");
}

break: get_statement .return_nothing;
});
has_returned = true;

has_returned = true;
},
break: parse_statement .{
.@"return" = .{
.expression = null,
},
};
},

.identifier => |identifier| {
self.tokenizer.step();

switch (self.tokenizer.token) {
.end, .newline => return self.report("statement has no effect"),

.symbol_equals => {
self.tokenizer.step();

if (self.tokenizer.token == .end) {
return self.report("expected expression after `=`");
}

try self.statements.push_one(.{
.assign_local = .{
.expression = try self.parse_expression(),
.identifier = identifier,
},
});
},

else => return self.report("expected `=` after local"),
}
},

.special_identifier => |identifier| {
self.tokenizer.step();

switch (self.tokenizer.token) {
.end, .newline => return self.report("system call is missing arguments"),

.symbol_paren_left => {
self.tokenizer.step();

var expressions_list = Expression.List.make(allocator);

while (true) {
if (self.tokenizer.token == .symbol_paren_right) {
break;
}

try expressions_list.push_one(try self.parse_expression());

switch (self.tokenizer.token) {
.symbol_comma => continue,
.symbol_paren_right => break,
else => return self.report("expected `)` or argument after `(`"),
}
}

self.tokenizer.skip_newlines();

try self.statements.push_one(.{
.call_system = .{
.argument_expressions = expressions_list,
.identifier = identifier,
},
});
},

else => return self.report("expected `=` after local"),
}
},

else => return self.report("invalid statement"),
}
else => {
break: parse_statement .{
.expression = try self.parse_expression()
};
},
}
});
}
}
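// The rewritten loop above relies on Zig's labelled-block idiom: `parse_statement: { ... }` is an
// expression, and each `break: parse_statement <value>` hands a `Statement` straight to
// `self.statements.push_one(...)`. The self-contained snippet below shows the idiom in isolation;
// the names are illustrative and do not come from this module.
const std = @import("std");

test "labelled block yields a value via break" {
    const token: enum { keyword_return, number } = .number;

    const statement_kind: []const u8 = parse_statement: {
        switch (token) {
            .keyword_return => break :parse_statement "return statement",
            .number => break :parse_statement "expression statement",
        }
    };

    try std.testing.expectEqualStrings("expression statement", statement_kind);
}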
const parse_additive = binary_operation_parser(parse_equality, &.{
.addition,
.subtraction,
});

const parse_comparison = binary_operation_parser(parse_term, &.{
.greater_than_comparison,
.greater_equals_comparison,
@@ -297,244 +238,210 @@ const parse_equality = binary_operation_parser(parse_comparison, &.{
.equals_comparison,
});

const parse_expression = binary_operation_parser(parse_equality, &.{
.addition,
.subtraction,
});
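// `binary_operation_parser` itself is not part of this diff, so the constants above only show how
// the precedence tiers feed into one another. The sketch below is one plausible shape for such a
// comptime combinator: take the next-higher-precedence parse function plus this tier's operators,
// and return a new parse function that folds matching operators left to right. Every name in it
// (ToyParser, ToyOperator, parse_digit) is a hypothetical stand-in, not this module's API.
const std = @import("std");

const ToyOperator = enum { addition, multiplication };

const ToyParser = struct {
    source: []const u8,
    index: usize = 0,

    fn peek(self: *ToyParser) ?u8 {
        return if (self.index < self.source.len) self.source[self.index] else null;
    }
};

fn operator_symbol(operator: ToyOperator) u8 {
    return switch (operator) {
        .addition => '+',
        .multiplication => '*',
    };
}

fn parse_digit(self: *ToyParser) anyerror!i64 {
    const byte = self.peek() orelse return error.UnexpectedEnd;

    self.index += 1;

    return byte - '0';
}

fn binary_operation_parser(
    comptime parse_next: fn (*ToyParser) anyerror!i64,
    comptime operators: []const ToyOperator,
) fn (*ToyParser) anyerror!i64 {
    return struct {
        fn parse(self: *ToyParser) anyerror!i64 {
            var result = try parse_next(self);

            // Keep folding while the current byte is one of this tier's operators.
            fold: while (self.peek()) |byte| {
                for (operators) |operator| {
                    if (byte == operator_symbol(operator)) {
                        self.index += 1;

                        const rhs = try parse_next(self);

                        result = switch (operator) {
                            .addition => result + rhs,
                            .multiplication => result * rhs,
                        };

                        continue :fold;
                    }
                }

                break;
            }

            return result;
        }
    }.parse;
}

const parse_product = binary_operation_parser(parse_digit, &.{.multiplication});
const parse_sum = binary_operation_parser(parse_product, &.{.addition});

test "precedence tiers compose" {
    var parser = ToyParser{.source = "1+2*3"};

    try std.testing.expectEqual(@as(i64, 7), try parse_sum(&parser));
}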
fn parse_factor(self: *Self) ParseError!Expression {
pub fn parse_expression(self: *Self) ParseError!Expression {
const allocator = self.arena.as_allocator();
const expression = try parse_additive(self);

var expression = @as(Expression, get: {
switch (self.tokenizer.token) {
.symbol_paren_left => {
self.tokenizer.skip_newlines();
if (self.tokenizer.token == .symbol_equals) {
self.tokenizer.skip_newlines();

if (self.tokenizer.token == .end) {
return self.report("expected an expression after `(`");
}

const expression = try self.parse_expression();

if (self.tokenizer.token != .symbol_paren_right) {
return self.report("expected a closing `)` after expression");
}

self.tokenizer.skip_newlines();

break: get Expression{.grouped_expression = try coral.io.allocate_one(allocator, expression)};
},

.keyword_nil => {
self.tokenizer.skip_newlines();

break: get .nil_literal;
},

.keyword_true => {
self.tokenizer.skip_newlines();

break: get .true_literal;
},

.keyword_false => {
self.tokenizer.skip_newlines();

break: get .false_literal;
},

.number => |value| {
self.tokenizer.skip_newlines();

break: get .{.number_literal = value};
},

.string => |value| {
self.tokenizer.skip_newlines();

break: get .{.string_literal = value};
},

.special_identifier => |identifier| {
self.tokenizer.skip_newlines();

var expression_list = Expression.List.make(allocator);

while (true) {
switch (self.tokenizer.token) {
.end => return self.report("expected expression or `)` after `(`"),

.symbol_paren_right => {
self.tokenizer.skip_newlines();

break: get .{
.call_system = .{
.identifier = identifier,
.argument_expressions = expression_list,
},
};
},

else => {
try expression_list.push_one(try self.parse_expression());

switch (self.tokenizer.token) {
.end => return self.report("expected `,` or `)` after argument"),
.symbol_comma => continue,

.symbol_paren_right => {
self.tokenizer.skip_newlines();

break: get .{
.call_system = .{
.identifier = identifier,
.argument_expressions = expression_list,
},
};
},

else => return self.report("expected `,` or `)` after argument"),
}
},
}
}
},

.identifier => |identifier| {
self.tokenizer.skip_newlines();

break: get .{.resolve_local = identifier};
},

.symbol_brace_left => {
var table_literal = Expression.TableLiteral.make(allocator);

self.tokenizer.skip_newlines();

while (true) {
switch (self.tokenizer.token) {
.symbol_brace_right => {
self.tokenizer.skip_newlines();

break: get .{.table_literal = table_literal};
},

.symbol_bracket_left => {
self.tokenizer.skip_newlines();

if (self.tokenizer.token != .symbol_equals) {
return self.report("expected expression after identifier");
}
},

.symbol_period => {
self.tokenizer.step();

const identifier = switch (self.tokenizer.token) {
.identifier => |identifier| identifier,
else => return self.report("expected identifier after `.`"),
};

self.tokenizer.skip_newlines();

if (self.tokenizer.token != .symbol_equals) {
return self.report("expected `=` after key");
}

self.tokenizer.skip_newlines();

if (self.tokenizer.token == .end) {
return self.report("unexpected end after `=`");
}

try table_literal.push_one(.{
.value_expression = try self.parse_expression(),
.key_expression = .{.symbol_literal = identifier},
});

switch (self.tokenizer.token) {
.symbol_comma => self.tokenizer.skip_newlines(),

.symbol_brace_right => {
self.tokenizer.skip_newlines();

break: get .{.table_literal = table_literal};
},

else => return self.report("expected `,` or `}` after expression"),
}
},

else => return self.report("expected `}` or fields in table literal"),
}
}
},

.symbol_minus => {
self.tokenizer.skip_newlines();

if (self.tokenizer.token == .end) {
return self.report("expected expression after numeric negation (`-`)");
}

break: get .{
.unary_operation = .{
.expression = try coral.io.allocate_one(allocator, try self.parse_factor()),
.operator = .numeric_negation,
},
};
},

.symbol_bang => {
self.tokenizer.skip_newlines();

if (self.tokenizer.token == .end) {
return self.report("expected expression after boolean negation (`!`)");
}

break: get .{
.unary_operation = .{
.expression = try coral.io.allocate_one(allocator, try self.parse_factor()),
.operator = .boolean_negation,
},
};
},

else => return self.report("unexpected token in expression"),
if (self.tokenizer.token == .end) {
return self.report("expected assignment after `=`");
}
});

while (self.tokenizer.token == .symbol_period) {
self.tokenizer.skip_newlines();
return switch (expression) {
.get_local => |get_local| .{.set_local = get_local},

const identifier = switch (self.tokenizer.token) {
.identifier => |identifier| identifier,
else => return self.report("expected identifier after `.`"),
};

self.tokenizer.skip_newlines();

expression = switch (self.tokenizer.token) {
.symbol_equals => .{
.get_field => |get_field| .{
.set_field = .{
.object_expression = get_field.object_expression,
.identifier = get_field.identifier,
.value_expression = try coral.io.allocate_one(allocator, try self.parse_expression()),
.object_expression = try coral.io.allocate_one(allocator, expression),
.identifier = identifier,
},
},

else => .{
.get_field = .{
.object_expression = try coral.io.allocate_one(allocator, expression),
.identifier = identifier,
},
},
else => self.report("expected local or field on left-hand side of expression"),
};
}

return expression;
}
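// As far as these hunks show, the new `parse_expression` above handles assignment as well: when
// `=` follows a local or a `.field` access, the expression is rewritten into the new
// `set_local`/`set_field` forms instead of `get_local`/`get_field`. `parse_factor` below stays
// limited to primary and prefix forms (literals, locals and their `.field` chains, table
// literals, and the `-`/`!` unary operations), which the `binary_operation_parser` tiers combine.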
fn parse_factor(self: *Self) ParseError!Expression {
const allocator = self.arena.as_allocator();

switch (self.tokenizer.token) {
.symbol_paren_left => {
self.tokenizer.skip_newlines();

if (self.tokenizer.token == .end) {
return self.report("expected an expression after `(`");
}

const expression = try self.parse_expression();

if (self.tokenizer.token != .symbol_paren_right) {
return self.report("expected a closing `)` after expression");
}

self.tokenizer.skip_newlines();

return Expression{.grouped_expression = try coral.io.allocate_one(allocator, expression)};
},

.keyword_nil => {
self.tokenizer.skip_newlines();

return .nil_literal;
},

.keyword_true => {
self.tokenizer.skip_newlines();

return .true_literal;
},

.keyword_false => {
self.tokenizer.skip_newlines();

return .false_literal;
},

.number => |value| {
self.tokenizer.skip_newlines();

return .{.number_literal = value};
},

.string => |value| {
self.tokenizer.skip_newlines();

return .{.string_literal = value};
},

.identifier => |local_identifier| {
var expression = Expression{.get_local = local_identifier};

self.tokenizer.skip_newlines();

while (self.tokenizer.token == .symbol_period) {
self.tokenizer.skip_newlines();

expression = .{
.get_field = .{
.identifier = switch (self.tokenizer.token) {
.identifier => |field_identifier| field_identifier,
else => return self.report("expected identifier after `.`"),
},

.object_expression = try coral.io.allocate_one(allocator, expression),
},
};

self.tokenizer.skip_newlines();
}

return expression;
},

.symbol_brace_left => {
var table_literal = Expression.TableLiteral.make(allocator);

self.tokenizer.skip_newlines();

while (true) {
switch (self.tokenizer.token) {
.symbol_brace_right => {
self.tokenizer.skip_newlines();

return .{.table_literal = table_literal};
},

.symbol_bracket_left => {
self.tokenizer.skip_newlines();

if (self.tokenizer.token != .symbol_equals) {
return self.report("expected expression after identifier");
}
},

.symbol_period => {
self.tokenizer.step();

const identifier = switch (self.tokenizer.token) {
.identifier => |identifier| identifier,
else => return self.report("expected identifier after `.`"),
};

self.tokenizer.skip_newlines();

if (self.tokenizer.token != .symbol_equals) {
return self.report("expected `=` after key");
}

self.tokenizer.skip_newlines();

if (self.tokenizer.token == .end) {
return self.report("unexpected end after `=`");
}

try table_literal.push_one(.{
.value_expression = try self.parse_expression(),
.key_expression = .{.symbol_literal = identifier},
});

switch (self.tokenizer.token) {
.symbol_comma => self.tokenizer.skip_newlines(),

.symbol_brace_right => {
self.tokenizer.skip_newlines();

return .{.table_literal = table_literal};
},

else => return self.report("expected `,` or `}` after expression"),
}
},

else => return self.report("expected `}` or fields in table literal"),
}
}
},

.symbol_minus => {
self.tokenizer.skip_newlines();

if (self.tokenizer.token == .end) {
return self.report("expected expression after numeric negation (`-`)");
}

return .{
.unary_operation = .{
.expression = try coral.io.allocate_one(allocator, try self.parse_factor()),
.operator = .numeric_negation,
},
};
},

.symbol_bang => {
self.tokenizer.skip_newlines();

if (self.tokenizer.token == .end) {
return self.report("expected expression after boolean negation (`!`)");
}

return .{
.unary_operation = .{
.expression = try coral.io.allocate_one(allocator, try self.parse_factor()),
.operator = .boolean_negation,
},
};
},

else => return self.report("unexpected token in expression"),
}
}

const parse_term = binary_operation_parser(parse_factor, &.{
.multiplication,
.divsion,