Tidy up Kym implementation

kayomn 2023-04-23 15:53:50 +01:00
parent d1110d8683
commit 0974cb016b
3 changed files with 766 additions and 563 deletions

source/kym/bytecode.zig (1289 lines changed; Normal file → Executable file)

File diff suppressed because it is too large.


@ -144,10 +144,7 @@ pub const Vm = struct {
}
},
- pub const CompileError = error {
- BadSyntax,
- OutOfMemory,
- };
+ pub const CompileError = bytecode.ParseError;
const HeapAllocation = union(enum) {
next_free: u32,
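Not part of this commit, but a hedged sketch of what the change above implies: with CompileError now aliased to bytecode.ParseError, parse failures can flow out of the VM's compile path with try instead of being mapped into a separate error set. The compile_source wrapper and the vm.compile method name below are assumptions made purely for illustration.

// Hypothetical sketch only; `compile` is an assumed Vm method name.
// Errors from the bytecode parser surface directly as Vm.CompileError.
fn compile_source(vm: *Vm, source: []const u8) Vm.CompileError!void {
    try vm.compile(source);
}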


@ -33,6 +33,22 @@ pub const Token = union(enum) {
keyword_return,
keyword_self,
+ pub const ExpectError = error {
+ UnexpectedToken,
+ };
+ pub fn expect(self: Token, tag: coral.io.Tag(Token)) ExpectError!void {
+ if (self != tag) return error.UnexpectedToken;
+ }
+ pub fn expect_any(self: Token, tags: []const coral.io.Tag(Token)) ExpectError!void {
+ for (tags) |tag| {
+ if (self == tag) return;
+ }
+ return error.UnexpectedToken;
+ }
pub fn text(self: Token) []const u8 {
return switch (self) {
.unknown => |unknown| @ptrCast([*]const u8, &unknown)[0 .. 1],
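Not part of this commit: a hypothetical sketch of how the new expect and expect_any helpers might be used by a parser. The parse_return name and the exact token sequence are illustrative assumptions; the tags used (keyword_return, keyword_self, keyword_true, keyword_false) all appear in the Token union above.

// Hypothetical sketch: consume a `return` keyword, then require either
// `self` or a boolean literal, surfacing error.UnexpectedToken otherwise.
fn parse_return(tokenizer: *Tokenizer) Token.ExpectError!void {
    var token = tokenizer.next() orelse return error.UnexpectedToken;

    try token.expect(.keyword_return);

    token = tokenizer.next() orelse return error.UnexpectedToken;

    try token.expect_any(&.{ .keyword_self, .keyword_true, .keyword_false });
}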
@ -64,6 +80,7 @@ pub const Token = union(enum) {
.keyword_false => "false",
.keyword_true => "true",
.keyword_return => "return",
+ .keyword_self => "self",
};
}
};
@ -72,8 +89,12 @@ pub const Tokenizer = struct {
source: []const u8,
cursor: usize = 0,
+ pub fn has_next(self: Tokenizer) bool {
+ return self.cursor < self.source.len;
+ }
pub fn next(self: *Tokenizer) ?Token {
- while (self.cursor < self.source.len) switch (self.source[self.cursor]) {
+ while (self.has_next()) switch (self.source[self.cursor]) {
' ', '\t' => self.cursor += 1,
'\n' => {
@ -87,13 +108,13 @@ pub const Tokenizer = struct {
self.cursor += 1;
- while (self.cursor < self.source.len) switch (self.source[self.cursor]) {
+ while (self.has_next()) switch (self.source[self.cursor]) {
'0' ... '9' => self.cursor += 1,
'.' => {
self.cursor += 1;
- while (self.cursor < self.source.len) switch (self.source[self.cursor]) {
+ while (self.has_next()) switch (self.source[self.cursor]) {
'0' ... '9' => self.cursor += 1,
else => break,
};
@ -136,13 +157,13 @@ pub const Tokenizer = struct {
'@' => {
self.cursor += 1;
- if (self.cursor < self.source.len) switch (self.source[self.cursor]) {
+ if (self.has_next()) switch (self.source[self.cursor]) {
'A'...'Z', 'a'...'z', '_' => {
const begin = self.cursor;
self.cursor += 1;
- while (self.cursor < self.source.len) switch (self.source[self.cursor]) {
+ while (self.has_next()) switch (self.source[self.cursor]) {
'0'...'9', 'A'...'Z', 'a'...'z', '_' => self.cursor += 1,
else => break,
};
@ -157,7 +178,7 @@ pub const Tokenizer = struct {
self.cursor += 1;
- while (self.cursor < self.source.len) switch (self.source[self.cursor]) {
+ while (self.has_next()) switch (self.source[self.cursor]) {
'"' => break,
else => self.cursor += 1,
};
@ -180,7 +201,7 @@ pub const Tokenizer = struct {
self.cursor += 1;
- while (self.cursor < self.source.len) switch (self.source[self.cursor]) {
+ while (self.has_next()) switch (self.source[self.cursor]) {
'"' => break,
else => self.cursor += 1,
};