Compare commits


No commits in common. "27b809f2c5167ab680756d648526466c15569478" and "a8104308a6d314f90ea13cfc37fa24b537a1c626" have entirely different histories.

6 changed files with 99 additions and 241 deletions

View File

@@ -143,6 +143,10 @@ pub fn allocate_many(comptime Type: type, amount: usize, allocator: Allocator) A
 		@compileError("Cannot allocate memory for 0-byte type " ++ @typeName(Type));
 	}

+	if (amount == 0) {
+		return &.{};
+	}
+
 	return @ptrCast([*]Type, @alignCast(@alignOf(Type), allocator.invoke(.{.size = @sizeOf(Type) * amount}) orelse {
 		return error.OutOfMemory;
 	}))[0 .. amount];
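
The added guard means a zero-element request now short-circuits before the allocator is ever invoked. A minimal standalone sketch of the same idea, using std.mem.Allocator and a hypothetical allocateMany stand-in rather than the repo's coral allocator:

const std = @import("std");

fn allocateMany(comptime T: type, amount: usize, allocator: std.mem.Allocator) ![]const T {
	if (amount == 0) {
		// Zero-element request: hand back an empty slice without touching the allocator.
		return &[_]T{};
	}

	return allocator.alloc(T, amount);
}

test "zero-amount allocation never reaches the allocator" {
	// failing_allocator errors on any allocation attempt, so this only passes
	// because the zero-amount path short-circuits.
	const items = try allocateMany(u32, 0, std.testing.failing_allocator);

	try std.testing.expectEqual(@as(usize, 0), items.len);
}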

View File

@@ -144,7 +144,7 @@ pub fn Stack(comptime Value: type) type {
 			const new_length = self.values.len + values.len;

 			if (new_length >= self.capacity) {
-				try self.grow(allocator, values.len + values.len);
+				try self.grow(allocator, math.min(new_length, self.capacity));
 			}

 			const offset_index = self.values.len;

@@ -170,7 +170,7 @@ pub fn Stack(comptime Value: type) type {
 			const new_length = self.values.len + amount;

 			if (new_length >= self.capacity) {
-				try self.grow(allocator, amount + amount);
+				try self.grow(allocator, math.max(usize, new_length, self.capacity));
 			}

 			const offset_index = self.values.len;

View File

@@ -7,23 +7,15 @@ const Context = struct {
 	const Self = @This();

-	const empty_allocation = [0]u8{};
-
 	fn reallocate(self: *Self, options: coral.io.AllocationOptions) ?[]u8 {
 		if (options.size == 0) {
 			if (options.allocation) |allocation| {
-				if (allocation.ptr != &empty_allocation) {
-					ext.SDL_free(allocation.ptr);
-				}
+				ext.SDL_free(allocation.ptr);

 				self.live_allocations -= 1;
-
-				return null;
 			}

-			self.live_allocations += 1;
-
-			return &empty_allocation;
+			return null;
 		}

 		if (options.allocation) |allocation| {
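
With the empty_allocation sentinel gone, a zero-size request simply frees whatever allocation was passed in and reports null. A rough sketch of that contract on top of a std allocator; CountingContext and its fields are hypothetical stand-ins, not the project's coral/SDL types:

const std = @import("std");

const CountingContext = struct {
	live_allocations: usize = 0,
	gpa: std.mem.Allocator,

	// size == 0 frees any existing allocation and returns null; there is no
	// sentinel "empty allocation" pointer to special-case anymore.
	fn reallocate(self: *CountingContext, allocation: ?[]u8, size: usize) ?[]u8 {
		if (size == 0) {
			if (allocation) |bytes| {
				self.gpa.free(bytes);
				self.live_allocations -= 1;
			}

			return null;
		}

		if (allocation) |bytes| {
			return self.gpa.realloc(bytes, size) catch null;
		}

		const bytes = self.gpa.alloc(u8, size) catch return null;

		self.live_allocations += 1;

		return bytes;
	}
};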

View File

@@ -25,7 +25,6 @@ const Opcode = enum (u8) {
 	push_integer,
 	push_float,
 	push_object,
-	push_array,
 	push_table,
 	not,
@@ -127,32 +126,19 @@ pub fn compile_expression(self: *Self, expression: ast.Expression) types.Runtime
 			try self.emit_object(try self.intern(literal));
 		},

-		.array_literal => |elements| {
-			if (elements.values.len > coral.math.max_int(@typeInfo(types.Integer).Int)) {
-				return error.OutOfMemory;
-			}
-
-			for (elements.values) |element_expression| {
-				try self.compile_expression(element_expression);
-			}
-
-			try self.emit_opcode(.push_array);
-			try self.emit_integer(@intCast(types.Integer, elements.values.len));
-		},
-
-		.table_literal => |fields| {
-			if (fields.values.len > coral.math.max_int(@typeInfo(types.Integer).Int)) {
+		.table_literal => |literal| {
+			if (literal.values.len > coral.math.max_int(@typeInfo(types.Integer).Int)) {
 				return error.OutOfMemory;
 			}

-			for (fields.values) |field| {
-				try self.compile_expression(field.expression);
+			for (literal.values) |field| {
+				try self.compile_expression(field.expression.*);
 				try self.emit_opcode(.push_object);
 				try self.emit_object(try self.intern(field.identifier));
 			}

 			try self.emit_opcode(.push_table);
-			try self.emit_integer(@intCast(types.Integer, fields.values.len));
+			try self.emit_integer(@intCast(types.Integer, literal.values.len));
 		},

 		.binary_operation => |operation| {

View File

@@ -4,8 +4,6 @@ const tokens = @import("./tokens.zig");
 const types = @import("./types.zig");

-const ArrayElements = coral.list.Stack(Expression);
-
 pub const BinaryOperation = enum {
 	addition,
 	subtraction,
@@ -35,7 +33,7 @@ pub const ParsedExpression = union (enum) {
 			};

 			if (tokenizer.current_token == .symbol_plus) {
-				if (!tokenizer.step(.ignore_newlines)) {
+				if (!tokenizer.step()) {
 					return ParsedExpression{.invalid = "expected right-hand side of expression after `+`"};
 				}

@@ -57,7 +55,7 @@ pub const ParsedExpression = union (enum) {
 			}

 			if (tokenizer.current_token == .symbol_minus) {
-				if (!tokenizer.step(.ignore_newlines)) {
+				if (!tokenizer.step()) {
 					return ParsedExpression{.invalid = "expected right-hand side of expression after `-`"};
 				}

@@ -103,7 +101,7 @@ pub const ParsedExpression = union (enum) {
 			};

 			if (tokenizer.current_token == .symbol_greater_than) {
-				if (!tokenizer.step(.ignore_newlines)) {
+				if (!tokenizer.step()) {
 					return ParsedExpression{.invalid = "expected right-hand side of expression after `>`"};
 				}

@@ -125,7 +123,7 @@ pub const ParsedExpression = union (enum) {
 			}

 			if (tokenizer.current_token == .symbol_greater_equals) {
-				if (!tokenizer.step(.ignore_newlines)) {
+				if (!tokenizer.step()) {
 					return ParsedExpression{.invalid = "expected right-hand side of expression after `>=`"};
 				}

@@ -147,7 +145,7 @@ pub const ParsedExpression = union (enum) {
 			}

 			if (tokenizer.current_token == .symbol_less_than) {
-				if (!tokenizer.step(.ignore_newlines)) {
+				if (!tokenizer.step()) {
 					return ParsedExpression{.invalid = "expected right-hand side of expression after `<`"};
 				}

@@ -169,7 +167,7 @@ pub const ParsedExpression = union (enum) {
 			}

 			if (tokenizer.current_token == .symbol_less_equals) {
-				if (!tokenizer.step(.ignore_newlines)) {
+				if (!tokenizer.step()) {
 					return ParsedExpression{.invalid = "expected right-hand side of expression after `<=`"};
 				}

@@ -215,7 +213,7 @@ pub const ParsedExpression = union (enum) {
 			};

 			if (tokenizer.current_token == .symbol_double_equals) {
-				if (!tokenizer.step(.ignore_newlines)) {
+				if (!tokenizer.step()) {
 					return ParsedExpression{.invalid = "expected right-hand side of expression after `==`"};
 				}

@@ -251,7 +249,7 @@ pub const ParsedExpression = union (enum) {
 		switch (tokenizer.current_token) {
 			.symbol_paren_left => {
-				if (!tokenizer.step(.ignore_newlines)) {
+				if (!tokenizer.step()) {
 					return ParsedExpression{.invalid = "expected an expression after `(`"};
 				}

@@ -265,7 +263,7 @@ pub const ParsedExpression = union (enum) {
 					expression.deinit(allocator);
 				};

-				if ((!tokenizer.step(.ignore_newlines)) or (tokenizer.current_token != .symbol_paren_right)) {
+				if ((!tokenizer.step()) or (tokenizer.current_token != .symbol_paren_right)) {
 					return ParsedExpression{.invalid = "expected a closing `)` after expression"};
 				}
@@ -280,161 +278,57 @@ pub const ParsedExpression = union (enum) {
 				}
 			},

-			.integer => |value| return ParsedExpression{
-				.valid = .{
-					.integer_literal = coral.utf8.parse_int(
-						@typeInfo(types.Integer).Int,
-						value, .{}) catch |parse_error| {
-							return ParsedExpression{
-								.invalid = switch (parse_error) {
-									error.BadSyntax => "invalid integer literal",
-									error.IntOverflow => "integer literal is too big",
-								}
-							};
-						},
-				},
-			},
+			.integer => |value| {
+				defer _ = tokenizer.step();
+
+				return ParsedExpression{
+					.valid = .{
+						.integer_literal = coral.utf8.parse_int(
+							@typeInfo(types.Integer).Int,
+							value, .{}) catch |parse_error| {
+								return ParsedExpression{
+									.invalid = switch (parse_error) {
+										error.BadSyntax => "invalid integer literal",
+										error.IntOverflow => "integer literal is too big",
+									}
+								};
+							},
+					},
+				};
+			},

-			.real => |value| return ParsedExpression{
-				.valid = .{
-					.float_literal = coral.utf8.parse_float(
-						@typeInfo(types.Float).Float,
-						value) catch |parse_error| {
-							return ParsedExpression{
-								.invalid = switch (parse_error) {
-									error.BadSyntax => "invalid float literal",
-								},
-							};
-						},
-				},
-			},
+			.real => |value| {
+				defer _ = tokenizer.step();
+
+				return ParsedExpression{
+					.valid = .{
+						.float_literal = coral.utf8.parse_float(
+							@typeInfo(types.Float).Float,
+							value) catch |parse_error| {
+								return ParsedExpression{
+									.invalid = switch (parse_error) {
+										error.BadSyntax => "invalid float literal",
+									},
+								};
+							},
+					},
+				};
+			},

-			.string => |value| return ParsedExpression{
-				.valid = .{.string_literal = value},
-			},
+			.string => |value| {
+				defer _ = tokenizer.step();
+
+				return ParsedExpression{
+					.valid = .{
+						.string_literal = value,
+					},
+				};
+			},

-			.symbol_bracket_left => {
-				if (tokenizer.step(.ignore_newlines)) {
-					return ParsedExpression{.invalid = "unexpected end of array literal"};
-				}
-
-				var is_invalid = true;
-				var array_elements = try ArrayElements.init(allocator, 0);
-
-				defer if (is_invalid) {
-					array_elements.deinit(allocator);
-				};
-
-				while (true) {
-					switch (tokenizer.current_token) {
-						.symbol_bracket_right => {
-							_ = tokenizer.step(.ignore_newlines);
-
-							is_invalid = false;
-
-							return ParsedExpression{
-								.valid = .{.array_literal = array_elements},
-							};
-						},
-
-						else => {
-							if (!tokenizer.step(.ignore_newlines)) {
-								return ParsedExpression{.invalid = "expected `]` or expression after `[`"};
-							}
-
-							var parsed_expression = try ParsedExpression.init(allocator, tokenizer);
-
-							switch (parsed_expression) {
-								.valid => |*expression| {
-									errdefer expression.deinit(allocator);
-
-									try array_elements.push_one(allocator, expression.*);
-								},
-
-								.invalid => |detail| return ParsedExpression{.invalid = detail},
-							}
-						},
-					}
-				}
-			},
-
-			.symbol_brace_left => {
-				if (!tokenizer.step(.ignore_newlines)) {
-					return ParsedExpression{.invalid = "unexpected end of table literal"};
-				}
-
-				var is_invalid = true;
-				var table_fields = try TableFields.init(allocator, 0);
-
-				defer if (is_invalid) {
-					table_fields.deinit(allocator);
-				};
-
-				while (true) {
-					switch (tokenizer.current_token) {
-						.symbol_brace_right => {
-							_ = tokenizer.step(.ignore_newlines);
-
-							is_invalid = false;
-
-							return ParsedExpression{
-								.valid = .{.table_literal = table_fields},
-							};
-						},
-
-						.local => |identifier| {
-							const key = identifier;
-
-							if (!tokenizer.step(.ignore_newlines) or tokenizer.current_token != .symbol_equals) {
-								return ParsedExpression{.invalid = "expected `=` after identifier"};
-							}
-
-							if (!tokenizer.step(.ignore_newlines)) {
-								return ParsedExpression{.invalid = "unexpected end after `=`"};
-							}
-
-							var parsed_expression = try init(allocator, tokenizer);
-
-							switch (parsed_expression) {
-								.valid => |*expression| {
-									errdefer expression.deinit(allocator);
-
-									try table_fields.push_one(allocator, .{
-										.identifier = key,
-										.expression = expression.*,
-									});
-								},
-
-								.invalid => |details| return ParsedExpression{.invalid = details},
-							}
-
-							if (!tokenizer.step(.ignore_newlines)) {
-								return ParsedExpression{.invalid = "unexpected end after expression"};
-							}
-
-							switch (tokenizer.current_token) {
-								.symbol_comma => _ = tokenizer.step(.ignore_newlines),
-
-								.symbol_brace_right => {
-									_ = tokenizer.step(.ignore_newlines);
-
-									is_invalid = false;
-
-									return ParsedExpression{
-										.valid = .{.table_literal = table_fields},
-									};
-								},
-
-								else => return ParsedExpression{.invalid = "expected `,` or `}` after expression"},
-							}
-						},
-
-						else => return ParsedExpression{.invalid = "expected `}` or fields in table literal"},
-					}
-				}
-			},

 			.symbol_minus => {
-				if (!tokenizer.step(.ignore_newlines)) {
+				if (!tokenizer.step()) {
 					return ParsedExpression{.invalid = "expected expression after numeric negation (`-`)"};
 				}
@@ -457,7 +351,7 @@ pub const ParsedExpression = union (enum) {
 			},

 			.symbol_bang => {
-				if (!tokenizer.step(.ignore_newlines)) {
+				if (!tokenizer.step()) {
 					return ParsedExpression{.invalid = "expected expression after boolean negation (`!`)"};
 				}

@@ -496,7 +390,7 @@ pub const ParsedExpression = union (enum) {
 			};

 			if (tokenizer.current_token == .symbol_asterisk) {
-				if (!tokenizer.step(.ignore_newlines)) {
+				if (!tokenizer.step()) {
 					return ParsedExpression{.invalid = "expected right-hand side of expression after `*`"};
 				}

@@ -518,7 +412,7 @@ pub const ParsedExpression = union (enum) {
 			}

 			if (tokenizer.current_token == .symbol_forward_slash) {
-				if (!tokenizer.step(.ignore_newlines)) {
+				if (!tokenizer.step()) {
 					return ParsedExpression{.invalid = "expected right-hand side of expression after `/`"};
 				}
@@ -566,14 +460,16 @@ pub const ParsedStatements = union (enum) {
 			statements_list.deinit(allocator);
 		};

-		while (tokenizer.step(.ignore_newlines)) {
+		while (tokenizer.step()) {
 			switch (tokenizer.current_token) {
+				.newline => {},
+
 				.keyword_return => {
 					if (has_returned) {
 						return ParsedStatements{.invalid = "cannot return more than once per function scope"};
 					}

-					if (tokenizer.step(.include_newlines) and (tokenizer.current_token != .newline)) {
+					if (tokenizer.step() and (tokenizer.current_token != .newline)) {
 						var parsed_expression = try ParsedExpression.init(allocator, tokenizer);

 						switch (parsed_expression) {

@@ -593,14 +489,16 @@ pub const ParsedStatements = union (enum) {
 						try statements_list.push_one(allocator, .return_nothing);
 					}

-					if (tokenizer.step(.ignore_newlines) and tokenizer.current_token != .newline) {
-						return ParsedStatements{.invalid = "expected newline"};
+					if (tokenizer.step() and tokenizer.current_token != .newline) {
+						return ParsedStatements{.invalid = "expected newline after expression"};
 					}

 					has_returned = true;
 				},

-				else => return ParsedStatements{.invalid = "invalid statement"},
+				else => {
+					return ParsedStatements{.invalid = "invalid statement"};
+				},
 			}
 		}
@@ -621,8 +519,7 @@ pub const Expression = union (enum) {
 	integer_literal: types.Integer,
 	float_literal: types.Float,
 	string_literal: []const u8,
-	array_literal: ArrayElements,
-	table_literal: TableFields,
+	table_literal: TableLiteral,
 	grouped_expression: *Expression,
 	binary_operation: struct {
@@ -636,24 +533,22 @@ pub const Expression = union (enum) {
 		expression: *Expression,
 	},

+	const TableLiteral = coral.list.Stack(struct {
+		identifier: []const u8,
+		expression: *Expression,
+	});
+
 	fn deinit(self: *Expression, allocator: coral.io.Allocator) void {
 		switch (self.*) {
 			.nil_literal, .true_literal, .false_literal, .integer_literal, .float_literal, .string_literal => {},

-			.array_literal => |*elements| {
-				for (elements.values) |*element_expression| {
-					element_expression.deinit(allocator);
-				}
-
-				elements.deinit(allocator);
-			},
-
-			.table_literal => |*fields| {
-				for (fields.values) |*field| {
+			.table_literal => |*literal| {
+				for (literal.values) |field| {
 					field.expression.deinit(allocator);
+					coral.io.deallocate(allocator, field.expression);
 				}

-				fields.deinit(allocator);
+				literal.deinit(allocator);
 			},

 			.grouped_expression => |expression| {
@@ -744,11 +639,6 @@ pub const Statements = struct {
 	}
 };

-const TableFields = coral.list.Stack(struct {
-	identifier: []const u8,
-	expression: Expression,
-});
-
 pub const UnaryOperation = enum {
 	boolean_negation,
 	numeric_negation,

View File

@@ -43,6 +43,7 @@ pub const Token = union(enum) {
 	pub fn text(self: Token) []const u8 {
 		return switch (self) {
 			.unknown => |unknown| @ptrCast([*]const u8, &unknown)[0 .. 1],
+			.newline => "newline",
 			.identifier_global => |identifier| identifier,
 			.identifier_local => |identifier| identifier,
@@ -82,18 +83,22 @@ pub const Tokenizer = struct {
 	previous_token: Token = .newline,
 	current_token: Token = .newline,

-	pub fn step(self: *Tokenizer, newline_rules: enum { ignore_newlines, include_newlines }) bool {
+	pub fn has_next(self: Tokenizer) bool {
+		return self.source.len != 0;
+	}
+
+	pub fn step(self: *Tokenizer) bool {
 		self.previous_token = self.current_token;

 		var cursor = @as(usize, 0);

 		defer self.source = self.source[cursor ..];

-		while (cursor < self.source.len) switch (self.source[cursor]) {
+		while (self.has_next()) switch (self.source[cursor]) {
 			'#' => {
 				cursor += 1;

-				while (cursor < self.source.len and self.source[cursor] == '\n') {
+				while (self.has_next() and (self.source[cursor] == '\n')) {
 					cursor += 1;
 				}
 			},
@@ -102,17 +107,10 @@ pub const Tokenizer = struct {
 			'\n' => {
 				cursor += 1;
+				self.lines_stepped += 1;
+				self.current_token = .newline;

-				switch (newline_rules) {
-					.include_newlines => {
-						self.lines_stepped += 1;
-						self.current_token = .newline;
-
-						return true;
-					},
-
-					else => {},
-				}
+				return true;
 			},

 			'0' ... '9' => {
@@ -120,13 +118,13 @@ pub const Tokenizer = struct {
 				cursor += 1;

-				while (cursor < self.source.len) switch (self.source[cursor]) {
+				while (self.has_next()) switch (self.source[cursor]) {
 					'0' ... '9' => cursor += 1,

 					'.' => {
 						cursor += 1;

-						while (cursor < self.source.len) switch (self.source[cursor]) {
+						while (self.has_next()) switch (self.source[cursor]) {
 							'0' ... '9' => cursor += 1,
 							else => break,
 						};
@@ -161,52 +159,40 @@ pub const Tokenizer = struct {
 				switch (identifier[0]) {
 					'n' => if (coral.io.ends_with(identifier, "il")) {
 						self.current_token = .keyword_nil;
-
-						return true;
 					},

 					'f' => if (coral.io.ends_with(identifier, "alse")) {
 						self.current_token = .keyword_false;
-
-						return true;
 					},

 					't' => if (coral.io.ends_with(identifier, "rue")) {
 						self.current_token = .keyword_true;
-
-						return true;
 					},

 					'r' => if (coral.io.ends_with(identifier, "eturn")) {
 						self.current_token = .keyword_return;
-
-						return true;
 					},

 					's' => if (coral.io.ends_with(identifier, "elf")) {
 						self.current_token = .keyword_self;
-
-						return true;
 					},

-					else => {},
+					else => self.current_token = .{.local = identifier},
 				}

-				self.current_token = .{.local = identifier};
-
 				return true;
 			},

 			'@' => {
 				cursor += 1;

-				if (cursor < self.source.len) switch (self.source[cursor]) {
+				if (self.has_next()) switch (self.source[cursor]) {
 					'A'...'Z', 'a'...'z', '_' => {
 						const begin = cursor;

 						cursor += 1;

-						while (cursor < self.source.len) switch (self.source[cursor]) {
+						while (self.has_next()) switch (self.source[cursor]) {
 							'0'...'9', 'A'...'Z', 'a'...'z', '_' => cursor += 1,
 							else => break,
 						};
@@ -223,7 +209,7 @@ pub const Tokenizer = struct {
 				cursor += 1;

-				while (cursor < self.source.len) switch (self.source[cursor]) {
+				while (self.has_next()) switch (self.source[cursor]) {
 					'"' => break,
 					else => cursor += 1,
 				};

@@ -249,7 +235,7 @@ pub const Tokenizer = struct {
 				cursor += 1;

-				while (cursor < self.source.len) switch (self.source[cursor]) {
+				while (self.has_next()) switch (self.source[cursor]) {
 					'"' => break,
 					else => cursor += 1,
 				};
@@ -333,7 +319,7 @@ pub const Tokenizer = struct {
 			'=' => {
 				cursor += 1;

-				if (cursor < self.source.len) {
+				if (self.has_next()) {
 					switch (self.source[cursor]) {
 						'=' => {
 							cursor += 1;
@@ -361,7 +347,7 @@ pub const Tokenizer = struct {
 			'<' => {
 				cursor += 1;

-				if (cursor < self.source.len and (self.source[cursor] == '=')) {
+				if (self.has_next() and (self.source[cursor] == '=')) {
 					cursor += 1;

 					self.current_token = .symbol_less_equals;

@@ -376,7 +362,7 @@ pub const Tokenizer = struct {
 			'>' => {
 				cursor += 1;

-				if (cursor < self.source.len and (self.source[cursor] == '=')) {
+				if (self.has_next() and (self.source[cursor] == '=')) {
 					cursor += 1;

 					self.current_token = .symbol_greater_equals;